<filename>pyxdsm/tests/test_xdsm.py
import unittest
import os
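# Importing __file__ from pyxdsm.XDSM below shadows this module's __file__, so
# paths computed from it (see test_examples) resolve relative to the installed
# pyxdsm package rather than to this test file.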
from pyxdsm.XDSM import XDSM, __file__
# numpy.distutils has been deprecated and removed in recent numpy/Python
# releases; shutil.which is the standard-library equivalent of find_executable.
from shutil import which as find_executable
def filter_lines(lns):
    # Empty lines are excluded, leading and trailing whitespace is stripped,
    # and LaTeX comment lines (those starting with '%') are removed.
return [ln.strip() for ln in lns if ln.strip() and not ln.strip().startswith('%')]
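# Illustrative example (not part of the original file):
#   filter_lines(['  a ', '', '% comment', 'b']) -> ['a', 'b']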
class TestXDSM(unittest.TestCase):
def setUp(self):
        import tempfile
self.startdir = os.getcwd()
self.tempdir = tempfile.mkdtemp(prefix='testdir-')
os.chdir(self.tempdir)
def tearDown(self):
        import shutil
os.chdir(self.startdir)
try:
shutil.rmtree(self.tempdir)
except OSError:
pass
def test_examples(self):
'''
        This test builds the three examples and asserts that the output files exist.
        Unlike the other tests, this one requires LaTeX to be available.
'''
os.chdir(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../examples'))
filenames = ['kitchen_sink', 'mdf']
for f in filenames:
os.system('python {}.py'.format(f))
self.assertTrue(os.path.isfile(f + '.tikz'))
self.assertTrue(os.path.isfile(f + '.tex'))
# look for the pdflatex executable
pdflatex = find_executable('pdflatex') is not None
# if no pdflatex, then do not assert that the pdf was compiled
self.assertTrue(not pdflatex or os.path.isfile(f + '.pdf'))
os.system('python mat_eqn.py')
self.assertTrue(os.path.isfile('mat_eqn_example.pdf'))
# change back to previous directory
os.chdir(self.tempdir)
def test_connect(self):
x = XDSM(use_sfmath=False)
x.add_system('D1', 'Function', 'D_1', label_width=2)
x.add_system('D2', 'Function', 'D_2', stack=False)
try:
x.connect('D1', 'D2', r'\mathcal{R}(y_1)', 'foobar')
except ValueError as err:
            self.assertEqual(str(err), 'label_width argument must be an integer')
else:
self.fail('Expected ValueError')
def test_options(self):
filename = 'xdsm_test_options'
spec_dir = filename + '_specs'
# Change `use_sfmath` to False to use computer modern
x = XDSM(use_sfmath=False)
x.add_system('opt', 'Optimization', r'\text{Optimizer}')
x.add_system('solver', 'MDA', r'\text{Newton}')
x.add_system('D1', 'Function', 'D_1', label_width=2)
x.add_system('D2', 'Function', 'D_2', stack=False)
x.add_system('F', 'Function', 'F', faded=True)
x.add_system('G', 'Function', 'G', spec_name="G_spec")
x.connect('opt', 'D1', 'x, z')
x.connect('opt', 'D2', 'z')
x.connect('opt', 'F', 'x, z')
x.connect('solver', 'D1', 'y_2')
x.connect('solver', 'D2', 'y_1')
x.connect('D1', 'solver', r'\mathcal{R}(y_1)')
x.connect('solver', 'F', 'y_1, y_2')
x.connect('D2', 'solver', r'\mathcal{R}(y_2)')
x.connect('solver', 'G', 'y_1, y_2')
x.connect('F', 'opt', 'f')
x.connect('G', 'opt', 'g')
x.add_output('opt', 'x^*, z^*', side='right')
x.add_output('D1', 'y_1^*', side='left', stack=True)
x.add_output('D2', 'y_2^*', side='left')
x.add_output('F', 'f^*', side='left')
x.add_output('G', 'g^*')
x.write(filename)
x.write_sys_specs(spec_dir)
        # Test whether the files were created
self.assertTrue(os.path.isfile(filename + '.tikz'))
self.assertTrue(os.path.isfile(filename + '.tex'))
self.assertTrue(os.path.isdir(spec_dir))
self.assertTrue(os.path.isfile(os.path.join(spec_dir, 'F.json')))
self.assertTrue(os.path.isfile(os.path.join(spec_dir, 'G_spec.json')))
def test_stacked_system(self):
x = XDSM()
x.add_system('test', 'Optimization', r'\text{test}', stack=True)
file_name = "stacked_test"
x.write(file_name)
tikz_file = file_name + '.tikz'
with open(tikz_file, "r") as f:
tikz = f.read()
self.assertIn(r"\node [Optimization,stack]", tikz)
def test_tikz_content(self):
        # Check that the TikZ file was created.
        # Compare the content of the sample below with the newly created TikZ file.
sample_txt = r"""
%%% Preamble Requirements %%%
% \usepackage{geometry}
% \usepackage{amsfonts}
% \usepackage{amsmath}
% \usepackage{amssymb}
% \usepackage{tikz}
% Optional packages such as sfmath set through python interface
% \usepackage{sfmath}
% \usetikzlibrary{arrows,chains,positioning,scopes,shapes.geometric,shapes.misc,shadows}
%%% End Preamble Requirements %%%
\input{"path/to/diagram_styles"}
\begin{tikzpicture}
\matrix[MatrixSetup]{
%Row 0
\node [DataIO] (left_output_opt) {$x^*, z^*$};&
\node [Optimization] (opt) {$\text{Optimizer}$};&
&
\node [DataInter] (opt-D1) {$x, z$};&
\node [DataInter] (opt-D2) {$z$};&
\node [DataInter] (opt-F) {$x, z$};&
\\
%Row 1
&
&
\node [MDA] (solver) {$\text{Newton}$};&
\node [DataInter] (solver-D1) {$y_2$};&
\node [DataInter] (solver-D2) {$y_1$};&
\node [DataInter] (solver-F) {$y_1, y_2$};&
\node [DataInter] (solver-G) {$y_1, y_2$};\\
%Row 2
\node [DataIO] (left_output_D1) {$y_1^*$};&
&
\node [DataInter] (D1-solver) {$\mathcal{R}(y_1)$};&
\node [Function] (D1) {$D_1$};&
&
&
\\
%Row 3
\node [DataIO] (left_output_D2) {$y_2^*$};&
&
\node [DataInter] (D2-solver) {$\mathcal{R}(y_2)$};&
&
\node [Function] (D2) {$D_2$};&
&
\\
%Row 4
\node [DataIO] (left_output_F) {$f^*$};&
\node [DataInter] (F-opt) {$f$};&
&
&
&
\node [Function] (F) {$F$};&
\\
%Row 5
\node [DataIO] (left_output_G) {$g^*$};&
\node [DataInter] (G-opt) {$g$};&
&
&
&
&
\node [Function] (G) {$G$};\\
%Row 6
&
&
&
&
&
&
\\
};
% XDSM process chains
\begin{pgfonlayer}{data}
\path
% Horizontal edges
(opt) edge [DataLine] (opt-D1)
(opt) edge [DataLine] (opt-D2)
(opt) edge [DataLine] (opt-F)
(solver) edge [DataLine] (solver-D1)
(solver) edge [DataLine] (solver-D2)
(D1) edge [DataLine] (D1-solver)
(solver) edge [DataLine] (solver-F)
(D2) edge [DataLine] (D2-solver)
(solver) edge [DataLine] (solver-G)
(F) edge [DataLine] (F-opt)
(G) edge [DataLine] (G-opt)
(opt) edge [DataLine] (left_output_opt)
(D1) edge [DataLine] (left_output_D1)
(D2) edge [DataLine] (left_output_D2)
(F) edge [DataLine] (left_output_F)
(G) edge [DataLine] (left_output_G)
% Vertical edges
(opt-D1) edge [DataLine] (D1)
(opt-D2) edge [DataLine] (D2)
(opt-F) edge [DataLine] (F)
(solver-D1) edge [DataLine] (D1)
(solver-D2) edge [DataLine] (D2)
(D1-solver) edge [DataLine] (solver)
(solver-F) edge [DataLine] (F)
(D2-solver) edge [DataLine] (solver)
(solver-G) edge [DataLine] (G)
(F-opt) edge [DataLine] (opt)
(G-opt) edge [DataLine] (opt);
\end{pgfonlayer}
\end{tikzpicture}"""
filename = 'xdsm_test_tikz'
x = XDSM(use_sfmath=True)
x.add_system('opt', 'Optimization', r'\text{Optimizer}')
x.add_system('solver', 'MDA', r'\text{Newton}')
x.add_system('D1', 'Function', 'D_1')
x.add_system('D2', 'Function', 'D_2')
x.add_system('F', 'Function', 'F')
x.add_system('G', 'Function', 'G')
x.connect('opt', 'D1', 'x, z')
x.connect('opt', 'D2', 'z')
x.connect('opt', 'F', 'x, z')
x.connect('solver', 'D1', 'y_2')
x.connect('solver', 'D2', 'y_1')
x.connect('D1', 'solver', r'\mathcal{R}(y_1)')
x.connect('solver', 'F', 'y_1, y_2')
x.connect('D2', 'solver', r'\mathcal{R}(y_2)')
x.connect('solver', 'G', 'y_1, y_2')
x.connect('F', 'opt', 'f')
x.connect('G', 'opt', 'g')
x.add_output('opt', 'x^*, z^*', side='left')
x.add_output('D1', 'y_1^*', side='left')
x.add_output('D2', 'y_2^*', side='left')
x.add_output('F', 'f^*', side='left')
x.add_output('G', 'g^*', side='left')
x.write(filename)
# Check if file was created
tikz_file = filename + '.tikz'
self.assertTrue(os.path.isfile(tikz_file))
sample_lines = sample_txt.split('\n')
sample_lines = filter_lines(sample_lines)
with open(tikz_file, "r") as f:
new_lines = filter_lines(f.readlines())
sample_no_match = [] # Sample text
new_no_match = [] # New text
for new_line, sample_line in zip(new_lines, sample_lines):
if new_line.startswith(r'\input{'):
continue
if new_line != sample_line: # else everything is okay
# This can be because of the different ordering of lines or because of an error.
                sample_no_match.append(sample_line)
                new_no_match.append(new_line)
# Sort both sets of suspicious lines
sample_no_match.sort()
new_no_match.sort()
for sample_line, new_line in zip(sample_no_match, new_no_match):
# Now the lines should match, if only the ordering was different
self.assertEqual(new_line, sample_line)
        # To be sure, also check the lengths; otherwise a missing last line could go unnoticed because of zip()
self.assertEqual(len(new_lines), len(sample_lines))
if __name__ == "__main__":
unittest.main()

<filename>wecom_sdk/base/callback.py
# -*- coding: utf-8 -*-
from wecom_sdk.base.crypt import encrypt_msg, decrypt_msg
class WeChatWorkCallbackSDK(object):
    """
    Base class of the WeChat Work (WeCom) callback SDK, used for two-way
    communication between an internal system and WeChat Work clients.
    Details: https://work.weixin.qq.com/api/doc/90000/90135/90930
    """
def __init__(self, token, encoding_aes_key):
self.token = token
self.encoding_aes_key = encoding_aes_key
    def encrypt(self, data: dict) -> str:
        """
        Encrypt data on the server side.
        :param data: payload dict to encrypt
        :return: the encrypted message string
        """
return encrypt_msg(data, token=self.token, encoding_aes_key=self.encoding_aes_key)
    def decrypt(self, xml, sign, timestamp, nonce) -> dict:
        """
        Verify and decrypt data coming from the client.
        :return: the decrypted payload as a dict
        """
return decrypt_msg(xml_text=xml, encrypt_sign=sign, timestamp=timestamp, nonce=nonce,
                           token=self.token, encoding_aes_key=self.encoding_aes_key)
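# --- Hypothetical usage sketch (not part of the original module). The token and
# EncodingAESKey below are placeholders for values configured in the WeChat Work
# admin console; the call signatures follow the methods above.
if __name__ == '__main__':
    sdk = WeChatWorkCallbackSDK(token='my-callback-token',
                                encoding_aes_key='<43-char-EncodingAESKey>')
    # For an incoming callback request:
    #   payload = sdk.decrypt(xml=body, sign=msg_signature, timestamp=ts, nonce=nonce)
    # To build an encrypted reply:
    #   reply = sdk.encrypt({'ToUserName': 'user', 'Content': 'ok'})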

<filename>scripts/get-table-schemas.py
import psycopg2
from decouple import config
import pandas as pd
import dbconnect
cursor, connection = dbconnect.connect_to_db()
sql = """
SELECT "table_name","column_name", "data_type", "table_schema"
FROM INFORMATION_SCHEMA.COLUMNS
WHERE "table_schema" = 'public'
ORDER BY table_name
"""
df = pd.read_sql(sql, con=connection)
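# Illustrative output shape (hypothetical table and column names):
#      table_name column_name data_type table_schema
# 0         users          id   integer       public
# 1         users       email      text       public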
print(df.to_string())

<filename>tests/test_table/test_pivot.py
#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
try:
from cdecimal import Decimal
except ImportError: # pragma: no cover
from decimal import Decimal
from agate import Table
from agate.aggregations import Sum
from agate.computations import Percent
from agate.data_types import Number, Text
from agate.testcase import AgateTestCase
class TestPivot(AgateTestCase):
def setUp(self):
self.rows = (
('joe', 'white', 'male', 20, 'blue'),
('jane', 'white', 'female', 20, 'blue'),
('josh', 'black', 'male', 20, 'blue'),
('jim', 'latino', 'male', 25, 'blue'),
('julia', 'white', 'female', 25, 'green'),
('joan', 'asian', 'female', 25, 'green')
)
self.number_type = Number()
self.text_type = Text()
self.column_names = ['name', 'race', 'gender', 'age', 'color']
self.column_types = [self.text_type, self.text_type, self.text_type, self.number_type, self.text_type]
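    # The fixture above is a small six-person roster; the tests below pivot it
    # along different key/pivot combinations (row counts by default, or an
    # explicit aggregation such as Sum).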
def test_pivot(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('race', 'gender')
pivot_rows = (
('white', 1, 2),
('black', 1, 0),
('latino', 1, 0),
('asian', 0, 1)
)
self.assertColumnNames(pivot_table, ['race', 'male', 'female'])
self.assertRowNames(pivot_table, ['white', 'black', 'latino', 'asian'])
self.assertColumnTypes(pivot_table, [Text, Number, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_by_lambda(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot(lambda r: r['gender'])
pivot_rows = (
('male', 3),
('female', 3)
)
self.assertColumnNames(pivot_table, ['group', 'Count'])
self.assertRowNames(pivot_table, ['male', 'female'])
self.assertColumnTypes(pivot_table, [Text, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_by_lambda_group_name(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot(lambda r: r['gender'], key_name='gender')
pivot_rows = (
('male', 3),
('female', 3)
)
self.assertColumnNames(pivot_table, ['gender', 'Count'])
self.assertRowNames(pivot_table, ['male', 'female'])
self.assertColumnTypes(pivot_table, [Text, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_by_lambda_group_name_sequence_invalid(self):
table = Table(self.rows, self.column_names, self.column_types)
with self.assertRaises(ValueError):
table.pivot(['race', 'gender'], key_name='foo')
def test_pivot_no_key(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot(pivot='gender')
pivot_rows = (
(3, 3),
)
self.assertColumnNames(pivot_table, ['male', 'female'])
self.assertColumnTypes(pivot_table, [Number, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_no_pivot(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('race')
pivot_rows = (
('white', 3),
('black', 1),
('latino', 1),
('asian', 1)
)
self.assertColumnNames(pivot_table, ['race', 'Count'])
self.assertColumnTypes(pivot_table, [Text, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_sum(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('race', 'gender', Sum('age'))
pivot_rows = (
('white', 20, 45),
('black', 20, 0),
('latino', 25, 0),
('asian', 0, 25)
)
self.assertColumnNames(pivot_table, ['race', 'male', 'female'])
self.assertColumnTypes(pivot_table, [Text, Number, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_multiple_keys(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot(['race', 'gender'], 'age')
pivot_rows = (
('white', 'male', 1, 0),
('white', 'female', 1, 1),
('black', 'male', 1, 0),
('latino', 'male', 0, 1),
('asian', 'female', 0, 1),
)
self.assertRows(pivot_table, pivot_rows)
self.assertColumnNames(pivot_table, ['race', 'gender', '20', '25'])
self.assertRowNames(pivot_table, [
('white', 'male'),
('white', 'female'),
('black', 'male'),
('latino', 'male'),
('asian', 'female'),
])
self.assertColumnTypes(pivot_table, [Text, Text, Number, Number])
def test_pivot_multiple_keys_no_pivot(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot(['race', 'gender'])
pivot_rows = (
('white', 'male', 1),
('white', 'female', 2),
('black', 'male', 1),
('latino', 'male', 1),
('asian', 'female', 1),
)
self.assertRows(pivot_table, pivot_rows)
self.assertColumnNames(pivot_table, ['race', 'gender', 'Count'])
self.assertColumnTypes(pivot_table, [Text, Text, Number])
def test_pivot_default_value(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('race', 'gender', default_value=None)
pivot_rows = (
('white', 1, 2),
('black', 1, None),
('latino', 1, None),
('asian', None, 1)
)
self.assertColumnNames(pivot_table, ['race', 'male', 'female'])
self.assertColumnTypes(pivot_table, [Text, Number, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_compute(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('gender', computation=Percent('Count'))
pivot_table.print_table(output=sys.stdout)
pivot_rows = (
('male', Decimal(50)),
('female', Decimal(50)),
)
self.assertColumnNames(pivot_table, ['gender', 'Percent'])
self.assertColumnTypes(pivot_table, [Text, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_compute_pivots(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('gender', 'color', computation=Percent('Count'))
pivot_table.print_table(output=sys.stdout)
pivot_rows = (
('male', Decimal(50), 0),
('female', Decimal(1) / Decimal(6) * Decimal(100), Decimal(1) / Decimal(3) * Decimal(100)),
)
self.assertColumnNames(pivot_table, ['gender', 'blue', 'green'])
self.assertColumnTypes(pivot_table, [Text, Number, Number])
self.assertRows(pivot_table, pivot_rows)
def test_pivot_compute_kwargs(self):
table = Table(self.rows, self.column_names, self.column_types)
pivot_table = table.pivot('gender', 'color', computation=Percent('Count', total=8))
pivot_table.print_table(output=sys.stdout)
pivot_rows = (
('male', Decimal(3) / Decimal(8) * Decimal(100), 0),
('female', Decimal(1) / Decimal(8) * Decimal(100), Decimal(2) / Decimal(8) * Decimal(100)),
)
self.assertColumnNames(pivot_table, ['gender', 'blue', 'green'])
self.assertColumnTypes(pivot_table, [Text, Number, Number])
self.assertRows(pivot_table, pivot_rows)

<filename>external/pyTorchChamferDistance/chamfer_distance/__init__.py
import os
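# Pre-create the package-level "build" directory; presumably the C++/CUDA
# extension behind chamfer_distance is JIT-compiled into it on first import
# (an assumption based on typical torch.utils.cpp_extension usage).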
os.makedirs(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'build')), exist_ok=True)
from .chamfer_distance import ChamferDistance

<filename>test_impartial.py
from functools import partial
from impartial import impartial
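# impartial behaves like functools.partial but additionally offers with_<kwarg>()
# and configure() helpers and flattens nested wrappers, as the tests below exercise.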
def f(x: int, y: int, z: int = 0) -> int:
return x + 2*y + z
def test_simple_call_args():
assert impartial(f, 1)(2) == f(1, 2)
def test_simple_call_kwargs():
assert impartial(f, y=2)(x=1) == f(1, 2)
def test_simple_call_empty():
assert impartial(f, 1, y=2)() == f(1, 2)
def test_decorator():
@impartial
def f(x, y):
return x + 2*y
assert f.with_y(2)(1) == 5
def test_func():
assert impartial(f, 1).func is f
def test_with_kwargs():
assert impartial(f, 1).with_z(3)(2) == f(1, 2, 3)
def test_multiple_with_kwargs():
assert impartial(f, 1).with_z(3).with_y(2)() == f(1, 2, 3)
def test_with_kwargs_override():
assert impartial(f, 1, 2).with_z(3).with_z(4)() == f(1, 2, 4)
def test_nested_impartial():
imp = impartial(f, x=1, y=2)
imp = impartial(imp, x=2)
imp = impartial(imp, x=3)
assert imp() == f(3, 2)
assert not isinstance(imp.func, impartial)
assert imp.func is f
def test_nested_partial():
imp = partial(f, x=1, y=2)
imp = partial(imp, x=2)
imp = impartial(imp, x=3)
assert imp() == f(3, 2)
assert not isinstance(imp.func, partial)
assert imp.func is f
def test_configure():
assert impartial(f, 1, z=2).configure(2, z=3)() == f(1, 2, 3)

<filename>manager.py
'''
Python script for uploading/downloading scripts for use with the game Screeps.
http://support.screeps.com/hc/en-us/articles/203022612-Commiting-scripts-using-direct-API-access
Usage:
#
# general help/usage
#
python3 manager.py --help
#
# retrieve all scripts from the game and store them
# in the folder "some_folder"
#
python3 manager.py from_game some_folder
#
# send all *.js files to the game
#
python3 manager.py to_game some_folder
WARNING: Use at your own risk! Make backups of all your game content!
'''
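# Credentials are read from the environment (see get_user_from_env below), e.g.:
#   export SCREEPS_USER=you@example.com
#   export SCREEPS_PASSWORD=<your password>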
import sys
import os
import argparse
import json
import requests
from requests.auth import HTTPBasicAuth
SCREEPS_ENDPOINT = 'https://screeps.com/api/user/code'
USER_ENV = 'SCREEPS_USER'
PASSWORD_ENV = 'SCREEPS_PASSWORD'
TO_SCREEPS = 'to_game'
FROM_SCREEPS = 'from_game'
def get_user_from_env():
    user = os.environ.get(USER_ENV)
if not user:
print('You must provide a username, i.e. export '
'{}=<your email address>'.
format(USER_ENV))
sys.exit()
return user
def get_password_from_env():
    password = os.environ.get(PASSWORD_ENV)
if not password:
print('You must provide a password, i.e. export {}=<your password>'.
format(PASSWORD_ENV))
sys.exit()
return password
def get_data(user, password):
print('Retrieving data...')
response = requests.get(SCREEPS_ENDPOINT,
auth=HTTPBasicAuth(user, password))
response.raise_for_status()
data = response.json()
if data['ok'] != 1:
raise Exception()
return data
def send_data(user, password, modules):
auth = HTTPBasicAuth(user, password)
headers = {'Content-Type': 'application/json; charset=utf-8'}
data = {'modules': modules}
resp = requests.post(SCREEPS_ENDPOINT,
data=json.dumps(data),
headers=headers,
auth=auth)
resp.raise_for_status()
def check_for_collisions(target_folder, modules):
for module in modules:
target = os.path.join(target_folder, '{}.js'.format(module))
if os.path.exists(target):
print('File {} exists.'.format(target))
print('Specify --force to overwrite. Aborting...')
sys.exit()
def main():
    parser = argparse.ArgumentParser(
        description='Upload/download script modules for the game Screeps.')
    parser.add_argument('operation',
                        choices=(TO_SCREEPS, FROM_SCREEPS),
                        help='direction of transfer')
    parser.add_argument('destination', help='local folder to read from or write to')
    parser.add_argument('--user', help='Screeps username (email address)')
    parser.add_argument('--password', help='Screeps password')
parser.add_argument('--force', action='store_const', const=True,
help='force overwrite of files in an existing folder')
parser.add_argument('--merge', action='store_const', const=True,
help='merge scripts into a single main.js module')
args = parser.parse_args()
user = args.user if args.user else get_user_from_env()
password = args.password if args.password else get_password_from_env()
target_folder = os.path.abspath(args.destination)
if args.operation == FROM_SCREEPS:
data = get_data(user, password)
# does the folder exist?
if not os.path.isdir(target_folder):
# no - create it
print('Creating new folder "{}"...'.format(target_folder))
os.makedirs(target_folder)
else:
# yes - check for collisions (unless --force was specified)
if not args.force:
print('Checking for collisions...')
check_for_collisions(target_folder, data['modules'])
print('Ok, no collisions.')
# for each module, create a corresponding filename and put it in
# the target folder
for module in data['modules']:
target = os.path.join(target_folder, '{}.js'.format(module))
with open(target, 'w') as fout:
fout.write(data['modules'][module])
else:
modules = {}
for root, folders, files in os.walk(target_folder):
folders[:] = []
for target_file in files:
name, ext = os.path.splitext(target_file)
if ext != '.js':
continue
with open(os.path.join(root, target_file), 'r') as fin:
modules[name] = fin.read()
if args.merge:
merge_modules(modules)
# upload modules
send_data(user, password, modules)
def generate_header(filename):
return '''
// {border}
// {name}
// {border}
'''.format(border='-' * 25, name=filename)
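# Illustrative: generate_header('harvester') yields a banner of the form
#   // -------------------------
#   // harvester
#   // -------------------------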
def merge_modules(modules):
    merged = ''
    for key in sorted(modules.keys()):
        merged += generate_header(key) + modules[key]
        del modules[key]
    # The other keys in this dict carry no extension (os.path.splitext strips it
    # in main()), and the Screeps API expects bare module names, so the merged
    # module is registered as 'main' rather than 'main.js'.
    modules['main'] = merged
if __name__ == '__main__':
main()

<filename>contact.py
class Contact:
def __init__(self, first_name, last_name, nickname, address, mobile, email):
self.first_name = first_name
self.last_name = last_name
self.nickname = nickname
self.address = address
self.mobile = mobile
self.email = email

<filename>integreat_cms/cms/views/dashboard/admin_dashboard_view.py
import logging
from django.views.generic import TemplateView
from ...models import Feedback
from ..chat.chat_context_mixin import ChatContextMixin
logger = logging.getLogger(__name__)
class AdminDashboardView(TemplateView, ChatContextMixin):
"""
View for the admin dashboard
"""
#: The template to render (see :class:`~django.views.generic.base.TemplateResponseMixin`)
template_name = "dashboard/admin_dashboard.html"
#: The context dict passed to the template (see :class:`~django.views.generic.base.ContextMixin`)
extra_context = {"current_menu_item": "admin_dashboard"}
def get_context_data(self, **kwargs):
r"""
Returns a dictionary representing the template context
(see :meth:`~django.views.generic.base.ContextMixin.get_context_data`).
:param \**kwargs: The given keyword arguments
:type \**kwargs: dict
:return: The template context
:rtype: dict
"""
context = super().get_context_data(**kwargs)
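        # Show up to five unread items of technical feedback on the dashboard.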
context["admin_feedback"] = Feedback.objects.filter(
is_technical=True, read_by=None
)[:5]
return context

<filename>src/tangled_up_in_unicode/tangled_up_in_unicode_14_0_0.py
from typing import Optional
import bisect
from tangled_up_in_unicode.u14_0_0_data.prop_list_to_property import prop_list_to_property
from tangled_up_in_unicode.u14_0_0_data.blocks_to_block_start import blocks_to_block_start
from tangled_up_in_unicode.u14_0_0_data.blocks_to_block_end import blocks_to_block_end
from tangled_up_in_unicode.u14_0_0_data.property_value_alias_age_short_to_long import property_value_alias_age_short_to_long
from tangled_up_in_unicode.u14_0_0_data.property_value_alias_bc_short_to_long import property_value_alias_bc_short_to_long
from tangled_up_in_unicode.u14_0_0_data.property_value_alias_blk_long_to_short import property_value_alias_blk_long_to_short
from tangled_up_in_unicode.u14_0_0_data.property_value_alias_ccc_short_to_long import property_value_alias_ccc_short_to_long
from tangled_up_in_unicode.u14_0_0_data.property_value_alias_ea_short_to_long import property_value_alias_ea_short_to_long
from tangled_up_in_unicode.u14_0_0_data.property_value_alias_gc_short_to_long import property_value_alias_gc_short_to_long
from tangled_up_in_unicode.u14_0_0_data.property_value_alias_sc_long_to_short import property_value_alias_sc_long_to_short
from tangled_up_in_unicode.u14_0_0_data.scripts_to_script_start import scripts_to_script_start
from tangled_up_in_unicode.u14_0_0_data.scripts_to_script_end import scripts_to_script_end
from tangled_up_in_unicode.u14_0_0_data.east_asian_width_to_east_asian_width_start import east_asian_width_to_east_asian_width_start
from tangled_up_in_unicode.u14_0_0_data.east_asian_width_to_east_asian_width_end import east_asian_width_to_east_asian_width_end
from tangled_up_in_unicode.u14_0_0_data.derived_age_to_age_start import derived_age_to_age_start
from tangled_up_in_unicode.u14_0_0_data.derived_age_to_age_end import derived_age_to_age_end
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_name_start import unicode_data_to_name_start
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_category_start import unicode_data_to_category_start
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_category_end import unicode_data_to_category_end
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_bidirectional_start import unicode_data_to_bidirectional_start
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_bidirectional_end import unicode_data_to_bidirectional_end
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_decimal_start import unicode_data_to_decimal_start
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_digit_start import unicode_data_to_digit_start
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_numeric_start import unicode_data_to_numeric_start
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_combining_start import unicode_data_to_combining_start
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_mirrored_start import unicode_data_to_mirrored_start
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_mirrored_end import unicode_data_to_mirrored_end
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_decomposition_start import unicode_data_to_decomposition_start
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_uppercase_start import unicode_data_to_uppercase_start
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_lowercase_start import unicode_data_to_lowercase_start
from tangled_up_in_unicode.u14_0_0_data.unicode_data_to_titlecase_start import unicode_data_to_titlecase_start
unidata_version = "14.0.0"
def name(chr: str, default=None) -> str:
"""Returns the name assigned to the character chr as a string.
If no name is defined, default is returned, or, if not given, ValueError is raised."""
idx = ord(chr)
try:
return unicode_data_to_name_start[idx]
except KeyError:
if default is None:
raise ValueError("no such name")
else:
return default
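# Illustrative: name("/") == "SOLIDUS".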
def category(chr: str) -> str:
"""Returns the general category assigned to the character chr as string."""
idx = ord(chr)
start_keys = sorted(unicode_data_to_category_start.keys())
insertion_point = bisect.bisect_left(start_keys, idx)
if insertion_point == len(start_keys) or start_keys[insertion_point] != idx:
insertion_point -= 1
key_start = start_keys[insertion_point]
result_start = unicode_data_to_category_start[key_start]
end_keys = sorted(unicode_data_to_category_end.keys())
insertion_point = bisect.bisect_left(end_keys, idx)
try:
key_end = end_keys[insertion_point]
result_end = unicode_data_to_category_end[key_end]
if result_end != key_start:
result_end = result_start
key_end = key_start
else:
result_end = unicode_data_to_category_start[result_end]
if key_start <= idx <= key_end and result_start == result_end:
return result_start
else:
return "Zzzz"
except IndexError:
return "Zzzz"
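# Illustrative: category("A") == "Lu" (letter, uppercase).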
def bidirectional(chr: str) -> str:
"""Returns the bidirectional class assigned to the character chr as string.
If no such value is defined, an empty string is returned."""
idx = ord(chr)
start_keys = sorted(unicode_data_to_bidirectional_start.keys())
insertion_point = bisect.bisect_left(start_keys, idx)
if insertion_point == len(start_keys) or start_keys[insertion_point] != idx:
insertion_point -= 1
key_start = start_keys[insertion_point]
result_start = unicode_data_to_bidirectional_start[key_start]
end_keys = sorted(unicode_data_to_bidirectional_end.keys())
insertion_point = bisect.bisect_left(end_keys, idx)
try:
key_end = end_keys[insertion_point]
result_end = unicode_data_to_bidirectional_end[key_end]
if result_end != key_start:
result_end = result_start
key_end = key_start
else:
result_end = unicode_data_to_bidirectional_start[result_end]
if key_start <= idx <= key_end and result_start == result_end:
return result_start
else:
return ""
except IndexError:
return ""
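# Illustrative: bidirectional("A") == "L" (left-to-right).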
def decimal(chr: str, default=None) -> int:
"""Returns the decimal value assigned to the character chr as integer.
If no such value is defined, default is returned, or, if not given, ValueError is raised."""
idx = ord(chr)
try:
return unicode_data_to_decimal_start[idx]
except KeyError:
if default is None:
raise ValueError("not a decimal")
else:
return default
def digit(chr: str, default=None) -> int:
"""Returns the digit value assigned to the character chr as integer.
If no such value is defined, default is returned, or, if not given, ValueError is raised."""
idx = ord(chr)
try:
return unicode_data_to_digit_start[idx]
except KeyError:
if default is None:
raise ValueError("not a digit")
else:
return default
def numeric(chr: str, default=None) -> float:
"""Returns the numeric value assigned to the character chr as float.
If no such value is defined, default is returned, or, if not given, ValueError is raised."""
idx = ord(chr)
try:
return unicode_data_to_numeric_start[idx]
except KeyError:
if default is None:
raise ValueError("not a numeric character")
else:
return default
def combining(chr: str) -> int:
"""Returns the canonical combining class assigned to the character chr as integer.
Returns 0 if no combining class is defined."""
idx = ord(chr)
try:
return unicode_data_to_combining_start[idx]
except KeyError:
return 0
def mirrored(chr: str) -> int:
"""Returns the mirrored property assigned to the character chr as integer.
Returns 1 if the character has been identified as a "mirrored" character in bidirectional text, 0 otherwise."""
idx = ord(chr)
start_keys = sorted(unicode_data_to_mirrored_start.keys())
insertion_point = bisect.bisect_left(start_keys, idx)
if insertion_point == len(start_keys) or start_keys[insertion_point] != idx:
insertion_point -= 1
key_start = start_keys[insertion_point]
result_start = unicode_data_to_mirrored_start[key_start]
end_keys = sorted(unicode_data_to_mirrored_end.keys())
insertion_point = bisect.bisect_left(end_keys, idx)
try:
key_end = end_keys[insertion_point]
result_end = unicode_data_to_mirrored_end[key_end]
if result_end != key_start:
result_end = result_start
key_end = key_start
else:
result_end = unicode_data_to_mirrored_start[result_end]
if key_start <= idx <= key_end and result_start == result_end:
return result_start
else:
return 0
except IndexError:
return 0
def decomposition(chr: str) -> str:
"""Returns the character decomposition mapping assigned to the character chr as string.
An empty string is returned in case no such mapping is defined."""
idx = ord(chr)
try:
return unicode_data_to_decomposition_start[idx]
except KeyError:
return ""
def uppercase(chr: str) -> str:
""""""
idx = ord(chr)
try:
return unicode_data_to_uppercase_start[idx]
except KeyError:
return ""
def lowercase(chr: str) -> str:
""""""
idx = ord(chr)
try:
return unicode_data_to_lowercase_start[idx]
except KeyError:
return ""
def titlecase(chr: str) -> str:
""""""
idx = ord(chr)
try:
return unicode_data_to_titlecase_start[idx]
except KeyError:
return ""
def east_asian_width(chr: str, default=None) -> str:
"""Returns the east asian width assigned to the character chr as string."""
idx = ord(chr)
start_keys = sorted(east_asian_width_to_east_asian_width_start.keys())
insertion_point = bisect.bisect_left(start_keys, idx)
if insertion_point == len(start_keys) or start_keys[insertion_point] != idx:
insertion_point -= 1
key_start = start_keys[insertion_point]
result_start = east_asian_width_to_east_asian_width_start[key_start]
end_keys = sorted(east_asian_width_to_east_asian_width_end.keys())
    insertion_point = bisect.bisect_left(end_keys, idx)
    try:
        key_end = end_keys[insertion_point]
        result_end = east_asian_width_to_east_asian_width_end[key_end]
        if result_end != key_start:
            result_end = result_start
            key_end = key_start
        else:
            result_end = east_asian_width_to_east_asian_width_start[result_end]
        if key_start <= idx <= key_end and result_start == result_end:
            return result_start
    except IndexError:
        # Mirror the other range lookups: fall through to the default handling
        # when idx lies past the last recorded range end.
        pass
    if default is None:
        raise ValueError("no east asian width")
    else:
        return default
def age(chr: str) -> str:
""""""
idx = ord(chr)
start_keys = sorted(derived_age_to_age_start.keys())
insertion_point = bisect.bisect_left(start_keys, idx)
if insertion_point == len(start_keys) or start_keys[insertion_point] != idx:
insertion_point -= 1
key_start = start_keys[insertion_point]
result_start = derived_age_to_age_start[key_start]
end_keys = sorted(derived_age_to_age_end.keys())
insertion_point = bisect.bisect_left(end_keys, idx)
try:
key_end = end_keys[insertion_point]
result_end = derived_age_to_age_end[key_end]
if result_end != key_start:
result_end = result_start
key_end = key_start
else:
result_end = derived_age_to_age_start[result_end]
if key_start <= idx <= key_end and result_start == result_end:
return result_start
else:
return "1.0"
except IndexError:
return "1.0"
def block(chr: str) -> str:
""""""
idx = ord(chr)
start_keys = sorted(blocks_to_block_start.keys())
insertion_point = bisect.bisect_left(start_keys, idx)
if insertion_point == len(start_keys) or start_keys[insertion_point] != idx:
insertion_point -= 1
key_start = start_keys[insertion_point]
result_start = blocks_to_block_start[key_start]
end_keys = sorted(blocks_to_block_end.keys())
insertion_point = bisect.bisect_left(end_keys, idx)
try:
key_end = end_keys[insertion_point]
result_end = blocks_to_block_end[key_end]
if result_end != key_start:
result_end = result_start
key_end = key_start
else:
result_end = blocks_to_block_start[result_end]
if key_start <= idx <= key_end and result_start == result_end:
return result_start
else:
return "Unknown"
except IndexError:
return "Unknown"
def script(chr: str) -> str:
""""""
idx = ord(chr)
start_keys = sorted(scripts_to_script_start.keys())
insertion_point = bisect.bisect_left(start_keys, idx)
if insertion_point == len(start_keys) or start_keys[insertion_point] != idx:
insertion_point -= 1
key_start = start_keys[insertion_point]
result_start = scripts_to_script_start[key_start]
end_keys = sorted(scripts_to_script_end.keys())
insertion_point = bisect.bisect_left(end_keys, idx)
try:
key_end = end_keys[insertion_point]
result_end = scripts_to_script_end[key_end]
if result_end != key_start:
result_end = result_start
key_end = key_start
else:
result_end = scripts_to_script_start[result_end]
if key_start <= idx <= key_end and result_start == result_end:
return result_start
else:
return "Unknown"
except IndexError:
return "Unknown"
def prop_list(chr: str) -> set:
    """Returns the set of additional properties (PropList) assigned to the character chr.
    An empty set is returned if no properties are assigned."""
idx = ord(chr)
try:
return prop_list_to_property[idx]
except KeyError:
return set()
def age_long(value: str) -> Optional[str]:
""""""
try:
return property_value_alias_age_short_to_long[value]
except KeyError:
return None
def category_long(value: str) -> Optional[str]:
""""""
try:
return property_value_alias_gc_short_to_long[value]
except KeyError:
return None
def east_asian_width_long(value: str) -> Optional[str]:
""""""
try:
return property_value_alias_ea_short_to_long[value]
except KeyError:
return None
def bidirectional_long(value: str) -> Optional[str]:
""""""
try:
return property_value_alias_bc_short_to_long[value]
except KeyError:
return None
def combining_long(value: str) -> Optional[str]:
""""""
try:
return property_value_alias_ccc_short_to_long[value]
except KeyError:
return None
def block_abbr(value: str) -> Optional[str]:
""""""
try:
return property_value_alias_blk_long_to_short[value]
except KeyError:
return None
def script_abbr(value: str) -> Optional[str]:
""""""
try:
return property_value_alias_sc_long_to_short[value]
except KeyError:
return None
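# --- Usage sketch (added; not part of the original module) ---
# A minimal illustration of the lookup API above; the sample characters are
# arbitrary. Single-codepoint properties (name, decimal, uppercase, ...) are
# plain dict lookups, while range properties (category, block, script, age)
# bisect sorted range-start keys, as the function bodies above show.
if __name__ == "__main__":
    for ch in ("A", "5", "("):
        print(ch, name(ch, "<unnamed>"), category(ch), block(ch), script(ch))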
to_display.py | namib-project/weatherstation-image | 0 | 7510 | from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
import sys
import ST7735
# Create ST7735 LCD display class object and set pin numbers and display hardware information.
disp = ST7735.ST7735(
dc=24,
cs=ST7735.BG_SPI_CS_BACK,
rst=25,
port=0,
width=122,
height=160,
rotation=270
)
# Initialize display.
disp.begin()
WIDTH = disp.width
HEIGHT = disp.height
img = Image.new('RGB', (WIDTH, HEIGHT), color=(0, 0, 0))
draw = ImageDraw.Draw(img)
font = ImageFont.truetype("/usr/share/fonts/truetype/liberation/LiberationSans-Regular.ttf", 12)
# Initialize a secondary text with the empty string
text2 = ""
# Print test output on the display if no argument is given
if len(sys.argv) == 1:
text = "Temperature:\nHumidity:\nUV:\nRain:\nLight:"
text2 = "20°C\n50 %\n42\nyes\nOn"
# Print the argument if only one is given
elif len(sys.argv) == 2:
text = sys.argv[1]
# If 2 arguments are given use the second as the secondary text
elif len(sys.argv) == 3:
text = sys.argv[1]
text2 = sys.argv[2]
# For any other number of arguments draw them in one line each
else:
text = ''.join(i + "\n" for i in sys.argv[1:])
# Print both texts, with the secondary one drawn at a 110 px horizontal offset
draw.text((10, 10), text, font=font, fill=(255, 255, 255))
draw.text((110, 10), text2, font=font, fill=(255, 255, 255))
disp.display(img)
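# --- Usage examples (added; not part of the original script) ---
# Assuming the script is saved as to_display.py:
#   python to_display.py                    -> built-in demo readings
#   python to_display.py "Hello"            -> one primary text block
#   python to_display.py "Temp:" "21 C"     -> label column plus value column
#   python to_display.py a b c              -> each argument on its own line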
Smart User Targeted Advertising/MinorPro/FINALPROJECT/Resources/testInsert.py | saransh808/Projects | 0 | 7511 | import sqlite3
conn = sqlite3.connect('Survey.db')
fo = open('insertcommand.txt')
# Each line of the input file is expected to hold one parenthesized row of values.
line = fo.readline()
while line:
    command = "INSERT INTO data VALUES" + line
    conn.execute(command)
    #print(command)
    line = fo.readline()
conn.commit()
conn.close()
fo.close()
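# --- Alternative sketch (added; not part of the original script) ---
# The loop above splices file contents straight into SQL, so it executes
# whatever the input file contains. With comma-separated input, a
# parameterized insert is the usual safer pattern; the CSV file name and the
# three-column table width below are illustrative assumptions.
def insert_rows_parameterized(db_path='Survey.db', csv_path='insertcommand.csv'):
    import csv
    with sqlite3.connect(db_path) as conn, open(csv_path, newline='') as f:
        # executemany binds each row as parameters instead of splicing text.
        conn.executemany("INSERT INTO data VALUES (?, ?, ?)", csv.reader(f))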
cdp/headless_experimental.py | HyperionGray/python-chrome-devtools-protocol | 42 | 7512 | # DO NOT EDIT THIS FILE!
#
# This file is generated from the CDP specification. If you need to make
# changes, edit the generator and regenerate all of the modules.
#
# CDP domain: HeadlessExperimental (experimental)
from __future__ import annotations
from cdp.util import event_class, T_JSON_DICT
from dataclasses import dataclass
import enum
import typing
@dataclass
class ScreenshotParams:
'''
Encoding options for a screenshot.
'''
#: Image compression format (defaults to png).
format_: typing.Optional[str] = None
#: Compression quality from range [0..100] (jpeg only).
quality: typing.Optional[int] = None
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
if self.format_ is not None:
json['format'] = self.format_
if self.quality is not None:
json['quality'] = self.quality
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> ScreenshotParams:
return cls(
format_=str(json['format']) if 'format' in json else None,
quality=int(json['quality']) if 'quality' in json else None,
)
def begin_frame(
frame_time_ticks: typing.Optional[float] = None,
interval: typing.Optional[float] = None,
no_display_updates: typing.Optional[bool] = None,
screenshot: typing.Optional[ScreenshotParams] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[bool, typing.Optional[str]]]:
'''
Sends a BeginFrame to the target and returns when the frame was completed. Optionally captures a
screenshot from the resulting frame. Requires that the target was created with enabled
BeginFrameControl. Designed for use with --run-all-compositor-stages-before-draw, see also
https://goo.gl/3zHXhB for more background.
:param frame_time_ticks: *(Optional)* Timestamp of this BeginFrame in Renderer TimeTicks (milliseconds of uptime). If not set, the current time will be used.
:param interval: *(Optional)* The interval between BeginFrames that is reported to the compositor, in milliseconds. Defaults to a 60 frames/second interval, i.e. about 16.666 milliseconds.
:param no_display_updates: *(Optional)* Whether updates should not be committed and drawn onto the display. False by default. If true, only side effects of the BeginFrame will be run, such as layout and animations, but any visual updates may not be visible on the display or in screenshots.
:param screenshot: *(Optional)* If set, a screenshot of the frame will be captured and returned in the response. Otherwise, no screenshot will be captured. Note that capturing a screenshot can fail, for example, during renderer initialization. In such a case, no screenshot data will be returned.
:returns: A tuple with the following items:
0. **hasDamage** - Whether the BeginFrame resulted in damage and, thus, a new frame was committed to the display. Reported for diagnostic uses, may be removed in the future.
1. **screenshotData** - *(Optional)* Base64-encoded image data of the screenshot, if one was requested and successfully taken.
'''
params: T_JSON_DICT = dict()
if frame_time_ticks is not None:
params['frameTimeTicks'] = frame_time_ticks
if interval is not None:
params['interval'] = interval
if no_display_updates is not None:
params['noDisplayUpdates'] = no_display_updates
if screenshot is not None:
params['screenshot'] = screenshot.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'HeadlessExperimental.beginFrame',
'params': params,
}
json = yield cmd_dict
return (
bool(json['hasDamage']),
str(json['screenshotData']) if 'screenshotData' in json else None
)
def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Disables headless events for the target.
'''
cmd_dict: T_JSON_DICT = {
'method': 'HeadlessExperimental.disable',
}
json = yield cmd_dict
def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Enables headless events for the target.
'''
cmd_dict: T_JSON_DICT = {
'method': 'HeadlessExperimental.enable',
}
json = yield cmd_dict
@event_class('HeadlessExperimental.needsBeginFramesChanged')
@dataclass
class NeedsBeginFramesChanged:
'''
Issued when the target starts or stops needing BeginFrames.
'''
#: True if BeginFrames are needed, false otherwise.
needs_begin_frames: bool
@classmethod
def from_json(cls, json: T_JSON_DICT) -> NeedsBeginFramesChanged:
return cls(
needs_begin_frames=bool(json['needsBeginFrames'])
)
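# --- Usage sketch (added; not part of the generated module) ---
# Every command in this module is a generator: it yields one command dict and
# receives the CDP response JSON back through send(). The ``transport``
# callable below is hypothetical -- it stands in for whatever performs the
# actual round trip to the browser (e.g. a WebSocket client).
def run_command(gen, transport):
    try:
        cmd = next(gen)                     # obtain the command dict
        while True:
            cmd = gen.send(transport(cmd))  # send the response back in
    except StopIteration as exc:
        return exc.value                    # the command's parsed return value
# Illustrative call (assumes a working transport):
#   has_damage, shot = run_command(
#       begin_frame(screenshot=ScreenshotParams(format_='png')), transport)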
tests/test_geometry.py | resurtm/wvflib | 1 | 7513 | <filename>tests/test_geometry.py
import unittest
from wvflib.geometry import Face
class TestGeometry(unittest.TestCase):
def test_constructor(self):
f = Face()
self.assertTrue(len(f.vertices) == 0)
if __name__ == '__main__':
unittest.main()
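# --- Usage note (added; not part of the original test file) ---
# The guard above allows running the file directly (python tests/test_geometry.py)
# or through discovery: python -m unittest discover tests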
tests/test_core.py | d066y/detectem | 0 | 7514 | import pytest
from detectem.core import Detector, Result, ResultCollection
from detectem.plugin import Plugin, PluginCollection
from detectem.settings import INDICATOR_TYPE, HINT_TYPE, MAIN_ENTRY, GENERIC_TYPE
from detectem.plugins.helpers import meta_generator
class TestDetector():
HAR_ENTRY_1 = {
'request': {
'url': 'http://domain.tld/libA-1.4.2.js'
},
'response': {
'url': 'http://domain.tld/libA-1.4.2.js'
},
}
HAR_NO_URL_REDIRECT = [
{
'request': {'url': 'http://domain.tld/'},
'response': {},
},
{
'request': {'url': 'http://domain.tld/js/script.js'},
'response': {},
}
]
HAR_URL_REDIRECT_PATH = [
{
'request': {'url': 'http://domain.tld/'},
'response': {'headers': [
{'name': 'Location', 'value': '/new/default.html'}
]},
},
{
'request': {'url': 'http://domain.tld/new/default.html'},
'response': {},
}
]
HAR_URL_REDIRECT_ABS = [
{
'request': {'url': 'http://domain.tld/'},
'response': {'headers': [
{'name': 'Location', 'value': 'http://other-domain.tld/'}
]},
},
{
'request': {'url': 'http://other-domain.tld/'},
'response': {},
}
]
URL = 'http://domain.tld/'
FOO_PLUGIN = {
'name': 'foo',
'homepage': 'foo',
'matchers': {
'url': 'foo.*-(?P<version>[0-9\.]+)\.js',
'header': ('FooHeader', 'Foo.* v(?P<version>[0-9\.]+)'),
'body': 'Foo.* v(?P<version>[0-9\.]+)',
'xpath': (meta_generator('foo-min'), '(?P<version>[0-9\.]+)'),
},
'indicators': {
'url': 'foo.*\.js',
'header': ('FooHeader', 'Foo'),
'body': 'Foo',
'xpath': "//meta[@name='generator']",
},
'modular_matchers': {
'url': 'foo-(?P<name>\w+)-.*\.js',
'header': ('FooHeader', 'Foo-(?P<name>\w+)'),
'body': 'Foo-(?P<name>\w+)',
'xpath': (meta_generator('foo-min'), 'foo-(?P<name>\w+)'),
},
}
FOO_RESULTS = [
[{'name': 'foo', 'version': '1.1'}],
[{'name': 'foo'}],
[{'name': 'foo-min', 'version': '1.1'}],
]
MATCHER_SOURCES = [
['matchers'],
['indicators'],
['matchers', 'modular_matchers'],
]
def test_detector_starts_with_empty_results(self):
d = Detector({'har': None, 'softwares': None}, [], None)
assert not d._results.get_results()
@pytest.mark.parametrize("har,index", [
(HAR_NO_URL_REDIRECT, 0),
(HAR_URL_REDIRECT_PATH, 1),
(HAR_URL_REDIRECT_ABS, 1),
])
def test_mark_main_entry(self, har, index):
d = self._create_detector(har, [])
assert d.har[index]['detectem']['type'] == MAIN_ENTRY
def test_convert_inline_script_to_har_entry(self):
script = 'Inline script'
d = Detector({'har': [], 'softwares': [], 'scripts': [script]}, None, self.URL)
e = d.har[0]
assert e['request']['url'] == self.URL
assert e['response']['content']['text'] == script
@pytest.mark.parametrize("scripts,n_entries", [
([], 0),
(['script1', 'script2'], 2),
])
def test_add_inline_scripts_to_har(self, scripts, n_entries):
d = Detector({'har': [], 'softwares': [], 'scripts': scripts}, None, self.URL)
assert len(d.har) == n_entries
def _create_plugin(self, template, sources, matchers):
class TestPlugin(Plugin):
name = template['name']
homepage = template['homepage']
p = TestPlugin()
for s in sources:
g = [{m: template[s][m]} for m in matchers]
setattr(p, s, g)
return p
def _create_detector(self, har, plugins):
pc = PluginCollection()
for p in plugins:
pc.add(p)
return Detector({'har': har, 'softwares': []}, pc, self.URL)
@pytest.mark.parametrize('sources,result', zip(MATCHER_SOURCES, FOO_RESULTS))
def test_match_from_headers(self, sources, result):
har = [
{
'request': {'url': self.URL},
'response': {
'url': self.URL,
'headers': [
{'name': 'FooHeader', 'value': 'Foo-min v1.1'}
]
},
},
]
p = self._create_plugin(self.FOO_PLUGIN, sources, ['header'])
d = self._create_detector(har, [p])
assert d.get_results() == result
@pytest.mark.parametrize('sources', MATCHER_SOURCES)
def test_match_from_headers_ignores_resource_entries(self, sources):
har = [
{
'request': {'url': self.URL},
'response': {
'url': self.URL,
'headers': [],
},
},
{
'request': {'url': 'http://foo.org/lib/foo.js'},
'response': {
'url': 'http://foo.org/lib/foo.js',
'headers': [
{'name': 'FooHeader', 'value': 'Foo-min v1.1'}
]
},
},
]
p = self._create_plugin(self.FOO_PLUGIN, sources, ['header'])
d = self._create_detector(har, [p])
assert not d.get_results()
@pytest.mark.parametrize('sources,result', zip(MATCHER_SOURCES, FOO_RESULTS))
def test_match_from_body(self, sources, result):
har = [
{
'request': {'url': self.URL},
'response': {
'url': self.URL,
'content': {'text': 'Main content'},
},
},
{
'request': {'url': 'http://foo.org/lib/foo.js'},
'response': {
'url': 'http://foo.org/lib/foo.js',
'content': {'text': 'Plugin Foo-min v1.1'},
},
},
]
p = self._create_plugin(self.FOO_PLUGIN, sources, ['body'])
d = self._create_detector(har, [p])
assert d.get_results() == result
@pytest.mark.parametrize('sources', MATCHER_SOURCES)
def test_match_from_body_excludes_main_entry(self, sources):
har = [
{
'request': {'url': self.URL},
'response': {
'url': self.URL,
'content': {'text': 'About Foo-min v1.1'},
},
},
]
p = self._create_plugin(self.FOO_PLUGIN, sources, ['body'])
d = self._create_detector(har, [p])
assert not d.get_results()
@pytest.mark.parametrize('sources,result', zip(MATCHER_SOURCES, FOO_RESULTS))
def test_match_from_url(self, sources, result):
har = [
{
'request': {'url': self.URL},
'response': {'url': self.URL},
},
{
'request': {'url': 'http://foo.org/lib/foo-min-1.1.js'},
'response': {
'url': 'http://foo.org/lib/foo-min-1.1.js',
},
},
]
p = self._create_plugin(self.FOO_PLUGIN, sources, ['url'])
d = self._create_detector(har, [p])
assert d.get_results() == result
@pytest.mark.parametrize('sources,result', zip(MATCHER_SOURCES, FOO_RESULTS))
def test_match_from_xpath(self, sources, result):
har = [
{
'request': {'url': self.URL},
'response': {
'url': self.URL,
'content': {
'text': '<meta name="generator" content="foo-min 1.1">'
},
},
},
]
p = self._create_plugin(self.FOO_PLUGIN, sources, ['xpath'])
d = self._create_detector(har, [p])
assert d.get_results() == result
def test_get_hints_with_valid_hint(self):
class TestPlugin(Plugin):
name = 'test'
homepage = 'test'
class BlaPlugin(Plugin):
name = 'bla'
hints = ['test']
detector = self._create_detector(None, [TestPlugin()])
hints = detector.get_hints(BlaPlugin())
assert hints
def test_get_hints_with_invalid_hint(self):
class BlaPlugin(Plugin):
name = 'bla'
hints = ['test']
detector = self._create_detector(None, [])
hints = detector.get_hints(BlaPlugin())
assert not hints
class TestResultCollection():
@staticmethod
def _assert_results(detected, results):
c = ResultCollection()
for d in detected:
c.add_result(d)
assert set(c.get_results()) == set(results)
@pytest.mark.parametrize('detected,results', [
(
[Result('pluginA', '1.1'), Result('pluginB', '3.8.7'), Result('pluginC', '4.0')],
[Result('pluginA', '1.1'), Result('pluginB', '3.8.7'), Result('pluginC', '4.0')]
),
(
[Result('pluginA', '1.3'), Result('pluginA', '1.2'), Result('pluginA', '1.1')],
[Result('pluginA', '1.1'), Result('pluginA', '1.2'), Result('pluginA', '1.3')],
),
(
[
Result('pluginA', '1.1'),
Result('pluginC', type=HINT_TYPE),
Result('pluginB', type=INDICATOR_TYPE),
Result('pluginD', type=GENERIC_TYPE),
],
[
Result('pluginA', '1.1'),
Result('pluginB', type=INDICATOR_TYPE),
Result('pluginC', type=HINT_TYPE),
Result('pluginD', type=GENERIC_TYPE),
]
),
])
def test_get_all_detected_plugins(self, detected, results):
self._assert_results(detected, results)
@pytest.mark.parametrize('detected,results', [
(
[Result('pluginA', '1.1'), Result('pluginA', '1.2'), Result('pluginA', '1.1')],
[Result('pluginA', '1.1'), Result('pluginA', '1.2')]
),
(
[
Result('pluginA', '1.1'),
Result('pluginA', type=INDICATOR_TYPE),
Result('pluginA', type=HINT_TYPE),
],
[Result('pluginA', '1.1')]
),
(
[Result('pluginB', type=HINT_TYPE), Result('pluginB', type=HINT_TYPE)],
[Result('pluginB', type=HINT_TYPE)]
),
(
[Result('pluginB', type=INDICATOR_TYPE), Result('pluginB', type=INDICATOR_TYPE)],
[Result('pluginB', type=INDICATOR_TYPE)]
),
(
[Result('pluginB', type=INDICATOR_TYPE), Result('pluginB', type=HINT_TYPE)],
[Result('pluginB', type=INDICATOR_TYPE)]
),
(
[Result('pluginB', type=INDICATOR_TYPE), Result('pluginB', type=GENERIC_TYPE)],
[Result('pluginB', type=INDICATOR_TYPE)]
),
])
def test_remove_duplicated_results(self, detected, results):
self._assert_results(detected, results)
twitter-clone/twitter/views.py | Mlitwin98/twitter-clone | 0 | 7515 | <filename>twitter-clone/twitter/views.py
from django.dispatch.dispatcher import receiver
from django.shortcuts import get_object_or_404, redirect, render
from django.contrib.auth.decorators import login_required
from django.http.response import HttpResponse
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, logout as auth_logout, login as auth_login
from django.contrib import messages
from django.db.models import Count
from django.template.loader import render_to_string
from django.http import HttpResponseRedirect, JsonResponse
from twitter.models import Tweet, Follow, Notification, Comment
from twitter.myDecor import check_if_user_logged
from twitter.forms import SignUpForm
# Create your views here.
@check_if_user_logged
def index(request):
return render(request, 'index.html')
@check_if_user_logged
def login(request):
if request.method == 'POST':
if 'login' in request.POST:
mail = request.POST['email']
pwd = request.POST['password']
            user = authenticate(request, username=mail, password=pwd)
if user is not None:
auth_login(request, user)
return redirect('home')
else:
messages.error(request, 'Invalid credentials')
return render(request, 'login.html')
elif 'cancel' in request.POST:
return redirect('index')
else:
return render(request, 'login.html')
def logout(request):
    auth_logout(request)
    return redirect('index')
@check_if_user_logged
def register(request):
if request.method == 'POST':
if 'cancel' in request.POST:
return redirect('index')
elif 'register' in request.POST:
form = SignUpForm(request.POST)
if form.is_valid():
user = form.save()
email = form.cleaned_data.get('email')
raw_password = form.cleaned_data.get('password')
                user.set_password(raw_password)
                user.save()
                user = authenticate(request, username=email, password=raw_password)
auth_login(request, user)
return redirect('home')
else:
form = SignUpForm()
messages.error(request, 'Invalid form fill')
return render(request, 'register.html', {'form':form})
else:
form = SignUpForm()
return render(request, 'register.html', {'form':form})
@login_required(redirect_field_name=None)
def home(request):
if request.method == 'POST':
author = request.user
content = request.POST['tweet']
tweet = Tweet(author=author, content=content)
tweet.save()
for follower in request.user.following.all().values_list('following_user_id', flat=True):
Notification.objects.create(sender = request.user, receiver = User.objects.get(id=follower), target = tweet, type = 'L')
return redirect('home')
else:
followedUsers = [request.user]
for followed in request.user.followers.all():
followedUsers.append(User.objects.get(id=followed.user_id_id))
tweets = Tweet.objects.filter(author__in=followedUsers).order_by('-timeStamp')
rec_profiles = User.objects.annotate(count=Count('followers')).order_by('followers').exclude(username=request.user.username).exclude(id__in=request.user.followers.all().values_list('user_id', flat=True))[:5]
return render(request, 'home.html', {'tweets':tweets, 'rec_profiles':rec_profiles})
def profile(request, username):
if request.method == 'POST':
user = User.objects.get(username=username)
user.profile.bio = request.POST['bio']
user.profile.profilePic = request.FILES['pic'] if 'pic' in request.FILES else user.profile.profilePic
user.profile.backgroundPic = request.FILES['banner'] if 'banner' in request.FILES else user.profile.backgroundPic
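        # Note: persisting these Profile fields via user.save() assumes a post_save signal saves the related profile; otherwise user.profile.save() would be needed.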
user.save()
return redirect('profile', username=username)
else:
try:
userProfile = User.objects.get(username=username)
except User.DoesNotExist:
return HttpResponse('User Not Found')
tweets = Tweet.objects.filter(author__exact=userProfile).order_by('-timeStamp')
is_following = False
for follow in request.user.followers.all():
if userProfile.id == follow.user_id_id:
is_following=True
rec_profiles = User.objects.annotate(count=Count('followers')).order_by('followers').exclude(username=request.user.username).exclude(username=username).exclude(id__in=request.user.followers.all().values_list('user_id', flat=True))[:5]
return render(request, 'profile.html', {'userProfile':userProfile, 'tweets':tweets, 'is_following':is_following, 'rec_profiles':rec_profiles})
@login_required(redirect_field_name=None)
def delete_post(request, tweetID):
if request.method == 'POST':
tweet = Tweet.objects.get(id=tweetID)
        if tweet.author == request.user:
            tweet.delete()
            return redirect('profile', username=request.user.username)
    # Non-authors and non-POST requests alike fall back to the home feed.
    return redirect('home')
@login_required(redirect_field_name=None)
def like_post(request):
tweet = get_object_or_404(Tweet, id=request.POST.get('id'))
if tweet.likes.filter(id=request.user.id).exists():
tweet.likes.remove(request.user)
is_liked = False
else:
tweet.likes.add(request.user)
is_liked = True
if(request.user != tweet.author):
Notification.objects.create(sender = request.user, receiver = User.objects.get(username = tweet.author), target = tweet, type = 'L')
context = {
'tweet': tweet,
'is_liked': is_liked,
}
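    # Only AJAX requests get a rendered response here; request.is_ajax() is deprecated as of Django 3.1.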
if request.is_ajax():
html = render_to_string('tweet.html', context, request=request)
return JsonResponse({'form':html})
@login_required(redirect_field_name=None)
def change_mode(request):
if request.method == 'POST':
usr = User.objects.get(id=request.user.id)
usr.profile.mode = request.POST['mode']
usr.save()
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
else:
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
@login_required(redirect_field_name=None)
def follow_profile(request):
followed_user = get_object_or_404(User, id=request.POST.get('id'))
if Follow.objects.filter(user_id=followed_user.id, following_user_id = request.user.id).exists():
Follow.objects.filter(user_id=followed_user.id, following_user_id = request.user.id).delete()
is_following = False
else:
Follow.objects.create(user_id=followed_user, following_user_id = request.user)
Notification.objects.create(sender = request.user, receiver = followed_user, target = None, type = 'F')
is_following = True
context = {
'profile':followed_user,
'userProfile':followed_user,
'is_following':is_following
}
if request.is_ajax():
html = render_to_string('follow_button.html', context, request=request)
return JsonResponse({'form':html})
def notifications(request):
notifics = request.user.your_notifications.all()
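    # Mark everything as seen, then render only the ten newest notifications.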
for notific in notifics:
notific.seen = True
notific.save()
notifics = request.user.your_notifications.all().order_by('-id')[:10]
return render(request, 'notifications.html', {'notifics':notifics})
def tweet_details(request, tweetID):
tweet = Tweet.objects.get(id=tweetID)
comments = tweet.main_tweet.all().order_by('-timeStamp')
return render(request, 'tweet_details.html', {'tweet':tweet, 'comments':comments})
def comment(request, tweetID):
if request.method == 'POST':
author = request.user
content = request.POST['comment']
tweet = Tweet.objects.get(id=tweetID)
Comment.objects.create(author=author, main_tweet=tweet, content=content)
if(request.user != tweet.author):
Notification.objects.create(sender = request.user, receiver = tweet.author, target = tweet, type = 'C')
return redirect(tweet_details, tweetID=tweetID)
else:
return redirect(home)
#Notification on post comment | en | 0.87851 | # Create your views here. #Notification on post comment | 2.306471 | 2
custom_app/custom_app/doctype/depart/test_depart.py | Amruthaohm/custom_app | 0 | 7516 | <gh_stars>0
# Copyright (c) 2022, momscode and Contributors
# See license.txt
# import frappe
import unittest
class Testdepart(unittest.TestCase):
pass
| en | 0.550405 | # Copyright (c) 2022, momscode and Contributors # See license.txt # import frappe | 1.02374 | 1
stix2/__init__.py | khdesai/cti-python-stix2 | 0 | 7517 | <filename>stix2/__init__.py
"""Python APIs for STIX 2.
.. autosummary::
:toctree: api
confidence
datastore
environment
equivalence
exceptions
markings
parsing
pattern_visitor
patterns
properties
serialization
utils
v20
v21
versioning
workbench
"""
# flake8: noqa
DEFAULT_VERSION = '2.1' # Default version will always be the latest STIX 2.X version
from .confidence import scales
from .datastore import CompositeDataSource
from .datastore.filesystem import (
FileSystemSink, FileSystemSource, FileSystemStore,
)
from .datastore.filters import Filter
from .datastore.memory import MemorySink, MemorySource, MemoryStore
from .datastore.taxii import (
TAXIICollectionSink, TAXIICollectionSource, TAXIICollectionStore,
)
from .environment import Environment, ObjectFactory
from .markings import (
add_markings, clear_markings, get_markings, is_marked, remove_markings,
set_markings,
)
from .parsing import _collect_stix2_mappings, parse, parse_observable
from .patterns import (
AndBooleanExpression, AndObservationExpression, BasicObjectPathComponent,
BinaryConstant, BooleanConstant, EqualityComparisonExpression,
FloatConstant, FollowedByObservationExpression,
GreaterThanComparisonExpression, GreaterThanEqualComparisonExpression,
HashConstant, HexConstant, InComparisonExpression, IntegerConstant,
IsSubsetComparisonExpression, IsSupersetComparisonExpression,
LessThanComparisonExpression, LessThanEqualComparisonExpression,
LikeComparisonExpression, ListConstant, ListObjectPathComponent,
MatchesComparisonExpression, ObjectPath, ObservationExpression,
OrBooleanExpression, OrObservationExpression, ParentheticalExpression,
QualifiedObservationExpression, ReferenceObjectPathComponent,
RepeatQualifier, StartStopQualifier, StringConstant, TimestampConstant,
WithinQualifier,
)
from .v21 import * # This import will always be the latest STIX 2.X version
from .version import __version__
from .versioning import new_version, revoke
_collect_stix2_mappings()
| en | 0.517711 | Python APIs for STIX 2. .. autosummary:: :toctree: api confidence datastore environment equivalence exceptions markings parsing pattern_visitor patterns properties serialization utils v20 v21 versioning workbench # flake8: noqa # Default version will always be the latest STIX 2.X version # This import will always be the latest STIX 2.X version | 1.917326 | 2 |
0/1/1436/1436.py | chr0m3/boj-codes | 3 | 7518 | count = int(input())
title = 0
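# Brute force: count upward until N numbers containing the digits '666' have been seen; the Nth one is the answer.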
while count > 0:
title += 1
if '666' in str(title):
count -= 1
print(title)
| none | 1 | 3.483974 | 3 |
|
functions/source/GreengrassLambda/idna/uts46data.py | jieatelement/quickstart-aws-industrial-machine-connectivity | 40 | 7519 | <filename>functions/source/GreengrassLambda/idna/uts46data.py
# This file is automatically generated by tools/idna-data
# vim: set fileencoding=utf-8 :
"""IDNA Mapping Table from UTS46."""
__version__ = "11.0.0"
def _seg_0():
return [
(0x0, '3'),
(0x1, '3'),
(0x2, '3'),
(0x3, '3'),
(0x4, '3'),
(0x5, '3'),
(0x6, '3'),
(0x7, '3'),
(0x8, '3'),
(0x9, '3'),
(0xA, '3'),
(0xB, '3'),
(0xC, '3'),
(0xD, '3'),
(0xE, '3'),
(0xF, '3'),
(0x10, '3'),
(0x11, '3'),
(0x12, '3'),
(0x13, '3'),
(0x14, '3'),
(0x15, '3'),
(0x16, '3'),
(0x17, '3'),
(0x18, '3'),
(0x19, '3'),
(0x1A, '3'),
(0x1B, '3'),
(0x1C, '3'),
(0x1D, '3'),
(0x1E, '3'),
(0x1F, '3'),
(0x20, '3'),
(0x21, '3'),
(0x22, '3'),
(0x23, '3'),
(0x24, '3'),
(0x25, '3'),
(0x26, '3'),
(0x27, '3'),
(0x28, '3'),
(0x29, '3'),
(0x2A, '3'),
(0x2B, '3'),
(0x2C, '3'),
(0x2D, 'V'),
(0x2E, 'V'),
(0x2F, '3'),
(0x30, 'V'),
(0x31, 'V'),
(0x32, 'V'),
(0x33, 'V'),
(0x34, 'V'),
(0x35, 'V'),
(0x36, 'V'),
(0x37, 'V'),
(0x38, 'V'),
(0x39, 'V'),
(0x3A, '3'),
(0x3B, '3'),
(0x3C, '3'),
(0x3D, '3'),
(0x3E, '3'),
(0x3F, '3'),
(0x40, '3'),
(0x41, 'M', u'a'),
(0x42, 'M', u'b'),
(0x43, 'M', u'c'),
(0x44, 'M', u'd'),
(0x45, 'M', u'e'),
(0x46, 'M', u'f'),
(0x47, 'M', u'g'),
(0x48, 'M', u'h'),
(0x49, 'M', u'i'),
(0x4A, 'M', u'j'),
(0x4B, 'M', u'k'),
(0x4C, 'M', u'l'),
(0x4D, 'M', u'm'),
(0x4E, 'M', u'n'),
(0x4F, 'M', u'o'),
(0x50, 'M', u'p'),
(0x51, 'M', u'q'),
(0x52, 'M', u'r'),
(0x53, 'M', u's'),
(0x54, 'M', u't'),
(0x55, 'M', u'u'),
(0x56, 'M', u'v'),
(0x57, 'M', u'w'),
(0x58, 'M', u'x'),
(0x59, 'M', u'y'),
(0x5A, 'M', u'z'),
(0x5B, '3'),
(0x5C, '3'),
(0x5D, '3'),
(0x5E, '3'),
(0x5F, '3'),
(0x60, '3'),
(0x61, 'V'),
(0x62, 'V'),
(0x63, 'V'),
]
def _seg_1():
return [
(0x64, 'V'),
(0x65, 'V'),
(0x66, 'V'),
(0x67, 'V'),
(0x68, 'V'),
(0x69, 'V'),
(0x6A, 'V'),
(0x6B, 'V'),
(0x6C, 'V'),
(0x6D, 'V'),
(0x6E, 'V'),
(0x6F, 'V'),
(0x70, 'V'),
(0x71, 'V'),
(0x72, 'V'),
(0x73, 'V'),
(0x74, 'V'),
(0x75, 'V'),
(0x76, 'V'),
(0x77, 'V'),
(0x78, 'V'),
(0x79, 'V'),
(0x7A, 'V'),
(0x7B, '3'),
(0x7C, '3'),
(0x7D, '3'),
(0x7E, '3'),
(0x7F, '3'),
(0x80, 'X'),
(0x81, 'X'),
(0x82, 'X'),
(0x83, 'X'),
(0x84, 'X'),
(0x85, 'X'),
(0x86, 'X'),
(0x87, 'X'),
(0x88, 'X'),
(0x89, 'X'),
(0x8A, 'X'),
(0x8B, 'X'),
(0x8C, 'X'),
(0x8D, 'X'),
(0x8E, 'X'),
(0x8F, 'X'),
(0x90, 'X'),
(0x91, 'X'),
(0x92, 'X'),
(0x93, 'X'),
(0x94, 'X'),
(0x95, 'X'),
(0x96, 'X'),
(0x97, 'X'),
(0x98, 'X'),
(0x99, 'X'),
(0x9A, 'X'),
(0x9B, 'X'),
(0x9C, 'X'),
(0x9D, 'X'),
(0x9E, 'X'),
(0x9F, 'X'),
(0xA0, '3', u' '),
(0xA1, 'V'),
(0xA2, 'V'),
(0xA3, 'V'),
(0xA4, 'V'),
(0xA5, 'V'),
(0xA6, 'V'),
(0xA7, 'V'),
(0xA8, '3', u' ̈'),
(0xA9, 'V'),
(0xAA, 'M', u'a'),
(0xAB, 'V'),
(0xAC, 'V'),
(0xAD, 'I'),
(0xAE, 'V'),
(0xAF, '3', u' ̄'),
(0xB0, 'V'),
(0xB1, 'V'),
(0xB2, 'M', u'2'),
(0xB3, 'M', u'3'),
(0xB4, '3', u' ́'),
(0xB5, 'M', u'μ'),
(0xB6, 'V'),
(0xB7, 'V'),
(0xB8, '3', u' ̧'),
(0xB9, 'M', u'1'),
(0xBA, 'M', u'o'),
(0xBB, 'V'),
(0xBC, 'M', u'1⁄4'),
(0xBD, 'M', u'1⁄2'),
(0xBE, 'M', u'3⁄4'),
(0xBF, 'V'),
(0xC0, 'M', u'à'),
(0xC1, 'M', u'á'),
(0xC2, 'M', u'â'),
(0xC3, 'M', u'ã'),
(0xC4, 'M', u'ä'),
(0xC5, 'M', u'å'),
(0xC6, 'M', u'æ'),
(0xC7, 'M', u'ç'),
]
def _seg_2():
return [
(0xC8, 'M', u'è'),
(0xC9, 'M', u'é'),
(0xCA, 'M', u'ê'),
(0xCB, 'M', u'ë'),
(0xCC, 'M', u'ì'),
(0xCD, 'M', u'í'),
(0xCE, 'M', u'î'),
(0xCF, 'M', u'ï'),
(0xD0, 'M', u'ð'),
(0xD1, 'M', u'ñ'),
(0xD2, 'M', u'ò'),
(0xD3, 'M', u'ó'),
(0xD4, 'M', u'ô'),
(0xD5, 'M', u'õ'),
(0xD6, 'M', u'ö'),
(0xD7, 'V'),
(0xD8, 'M', u'ø'),
(0xD9, 'M', u'ù'),
(0xDA, 'M', u'ú'),
(0xDB, 'M', u'û'),
(0xDC, 'M', u'ü'),
(0xDD, 'M', u'ý'),
(0xDE, 'M', u'þ'),
(0xDF, 'D', u'ss'),
(0xE0, 'V'),
(0xE1, 'V'),
(0xE2, 'V'),
(0xE3, 'V'),
(0xE4, 'V'),
(0xE5, 'V'),
(0xE6, 'V'),
(0xE7, 'V'),
(0xE8, 'V'),
(0xE9, 'V'),
(0xEA, 'V'),
(0xEB, 'V'),
(0xEC, 'V'),
(0xED, 'V'),
(0xEE, 'V'),
(0xEF, 'V'),
(0xF0, 'V'),
(0xF1, 'V'),
(0xF2, 'V'),
(0xF3, 'V'),
(0xF4, 'V'),
(0xF5, 'V'),
(0xF6, 'V'),
(0xF7, 'V'),
(0xF8, 'V'),
(0xF9, 'V'),
(0xFA, 'V'),
(0xFB, 'V'),
(0xFC, 'V'),
(0xFD, 'V'),
(0xFE, 'V'),
(0xFF, 'V'),
(0x100, 'M', u'ā'),
(0x101, 'V'),
(0x102, 'M', u'ă'),
(0x103, 'V'),
(0x104, 'M', u'ą'),
(0x105, 'V'),
(0x106, 'M', u'ć'),
(0x107, 'V'),
(0x108, 'M', u'ĉ'),
(0x109, 'V'),
(0x10A, 'M', u'ċ'),
(0x10B, 'V'),
(0x10C, 'M', u'č'),
(0x10D, 'V'),
(0x10E, 'M', u'ď'),
(0x10F, 'V'),
(0x110, 'M', u'đ'),
(0x111, 'V'),
(0x112, 'M', u'ē'),
(0x113, 'V'),
(0x114, 'M', u'ĕ'),
(0x115, 'V'),
(0x116, 'M', u'ė'),
(0x117, 'V'),
(0x118, 'M', u'ę'),
(0x119, 'V'),
(0x11A, 'M', u'ě'),
(0x11B, 'V'),
(0x11C, 'M', u'ĝ'),
(0x11D, 'V'),
(0x11E, 'M', u'ğ'),
(0x11F, 'V'),
(0x120, 'M', u'ġ'),
(0x121, 'V'),
(0x122, 'M', u'ģ'),
(0x123, 'V'),
(0x124, 'M', u'ĥ'),
(0x125, 'V'),
(0x126, 'M', u'ħ'),
(0x127, 'V'),
(0x128, 'M', u'ĩ'),
(0x129, 'V'),
(0x12A, 'M', u'ī'),
(0x12B, 'V'),
]
def _seg_3():
return [
(0x12C, 'M', u'ĭ'),
(0x12D, 'V'),
(0x12E, 'M', u'į'),
(0x12F, 'V'),
(0x130, 'M', u'i̇'),
(0x131, 'V'),
(0x132, 'M', u'ij'),
(0x134, 'M', u'ĵ'),
(0x135, 'V'),
(0x136, 'M', u'ķ'),
(0x137, 'V'),
(0x139, 'M', u'ĺ'),
(0x13A, 'V'),
(0x13B, 'M', u'ļ'),
(0x13C, 'V'),
(0x13D, 'M', u'ľ'),
(0x13E, 'V'),
(0x13F, 'M', u'l·'),
(0x141, 'M', u'ł'),
(0x142, 'V'),
(0x143, 'M', u'ń'),
(0x144, 'V'),
(0x145, 'M', u'ņ'),
(0x146, 'V'),
(0x147, 'M', u'ň'),
(0x148, 'V'),
(0x149, 'M', u'ʼn'),
(0x14A, 'M', u'ŋ'),
(0x14B, 'V'),
(0x14C, 'M', u'ō'),
(0x14D, 'V'),
(0x14E, 'M', u'ŏ'),
(0x14F, 'V'),
(0x150, 'M', u'ő'),
(0x151, 'V'),
(0x152, 'M', u'œ'),
(0x153, 'V'),
(0x154, 'M', u'ŕ'),
(0x155, 'V'),
(0x156, 'M', u'ŗ'),
(0x157, 'V'),
(0x158, 'M', u'ř'),
(0x159, 'V'),
(0x15A, 'M', u'ś'),
(0x15B, 'V'),
(0x15C, 'M', u'ŝ'),
(0x15D, 'V'),
(0x15E, 'M', u'ş'),
(0x15F, 'V'),
(0x160, 'M', u'š'),
(0x161, 'V'),
(0x162, 'M', u'ţ'),
(0x163, 'V'),
(0x164, 'M', u'ť'),
(0x165, 'V'),
(0x166, 'M', u'ŧ'),
(0x167, 'V'),
(0x168, 'M', u'ũ'),
(0x169, 'V'),
(0x16A, 'M', u'ū'),
(0x16B, 'V'),
(0x16C, 'M', u'ŭ'),
(0x16D, 'V'),
(0x16E, 'M', u'ů'),
(0x16F, 'V'),
(0x170, 'M', u'ű'),
(0x171, 'V'),
(0x172, 'M', u'ų'),
(0x173, 'V'),
(0x174, 'M', u'ŵ'),
(0x175, 'V'),
(0x176, 'M', u'ŷ'),
(0x177, 'V'),
(0x178, 'M', u'ÿ'),
(0x179, 'M', u'ź'),
(0x17A, 'V'),
(0x17B, 'M', u'ż'),
(0x17C, 'V'),
(0x17D, 'M', u'ž'),
(0x17E, 'V'),
(0x17F, 'M', u's'),
(0x180, 'V'),
(0x181, 'M', u'ɓ'),
(0x182, 'M', u'ƃ'),
(0x183, 'V'),
(0x184, 'M', u'ƅ'),
(0x185, 'V'),
(0x186, 'M', u'ɔ'),
(0x187, 'M', u'ƈ'),
(0x188, 'V'),
(0x189, 'M', u'ɖ'),
(0x18A, 'M', u'ɗ'),
(0x18B, 'M', u'ƌ'),
(0x18C, 'V'),
(0x18E, 'M', u'ǝ'),
(0x18F, 'M', u'ə'),
(0x190, 'M', u'ɛ'),
(0x191, 'M', u'ƒ'),
(0x192, 'V'),
(0x193, 'M', u'ɠ'),
]
def _seg_4():
return [
(0x194, 'M', u'ɣ'),
(0x195, 'V'),
(0x196, 'M', u'ɩ'),
(0x197, 'M', u'ɨ'),
(0x198, 'M', u'ƙ'),
(0x199, 'V'),
(0x19C, 'M', u'ɯ'),
(0x19D, 'M', u'ɲ'),
(0x19E, 'V'),
(0x19F, 'M', u'ɵ'),
(0x1A0, 'M', u'ơ'),
(0x1A1, 'V'),
(0x1A2, 'M', u'ƣ'),
(0x1A3, 'V'),
(0x1A4, 'M', u'ƥ'),
(0x1A5, 'V'),
(0x1A6, 'M', u'ʀ'),
(0x1A7, 'M', u'ƨ'),
(0x1A8, 'V'),
(0x1A9, 'M', u'ʃ'),
(0x1AA, 'V'),
(0x1AC, 'M', u'ƭ'),
(0x1AD, 'V'),
(0x1AE, 'M', u'ʈ'),
(0x1AF, 'M', u'ư'),
(0x1B0, 'V'),
(0x1B1, 'M', u'ʊ'),
(0x1B2, 'M', u'ʋ'),
(0x1B3, 'M', u'ƴ'),
(0x1B4, 'V'),
(0x1B5, 'M', u'ƶ'),
(0x1B6, 'V'),
(0x1B7, 'M', u'ʒ'),
(0x1B8, 'M', u'ƹ'),
(0x1B9, 'V'),
(0x1BC, 'M', u'ƽ'),
(0x1BD, 'V'),
(0x1C4, 'M', u'dž'),
(0x1C7, 'M', u'lj'),
(0x1CA, 'M', u'nj'),
(0x1CD, 'M', u'ǎ'),
(0x1CE, 'V'),
(0x1CF, 'M', u'ǐ'),
(0x1D0, 'V'),
(0x1D1, 'M', u'ǒ'),
(0x1D2, 'V'),
(0x1D3, 'M', u'ǔ'),
(0x1D4, 'V'),
(0x1D5, 'M', u'ǖ'),
(0x1D6, 'V'),
(0x1D7, 'M', u'ǘ'),
(0x1D8, 'V'),
(0x1D9, 'M', u'ǚ'),
(0x1DA, 'V'),
(0x1DB, 'M', u'ǜ'),
(0x1DC, 'V'),
(0x1DE, 'M', u'ǟ'),
(0x1DF, 'V'),
(0x1E0, 'M', u'ǡ'),
(0x1E1, 'V'),
(0x1E2, 'M', u'ǣ'),
(0x1E3, 'V'),
(0x1E4, 'M', u'ǥ'),
(0x1E5, 'V'),
(0x1E6, 'M', u'ǧ'),
(0x1E7, 'V'),
(0x1E8, 'M', u'ǩ'),
(0x1E9, 'V'),
(0x1EA, 'M', u'ǫ'),
(0x1EB, 'V'),
(0x1EC, 'M', u'ǭ'),
(0x1ED, 'V'),
(0x1EE, 'M', u'ǯ'),
(0x1EF, 'V'),
(0x1F1, 'M', u'dz'),
(0x1F4, 'M', u'ǵ'),
(0x1F5, 'V'),
(0x1F6, 'M', u'ƕ'),
(0x1F7, 'M', u'ƿ'),
(0x1F8, 'M', u'ǹ'),
(0x1F9, 'V'),
(0x1FA, 'M', u'ǻ'),
(0x1FB, 'V'),
(0x1FC, 'M', u'ǽ'),
(0x1FD, 'V'),
(0x1FE, 'M', u'ǿ'),
(0x1FF, 'V'),
(0x200, 'M', u'ȁ'),
(0x201, 'V'),
(0x202, 'M', u'ȃ'),
(0x203, 'V'),
(0x204, 'M', u'ȅ'),
(0x205, 'V'),
(0x206, 'M', u'ȇ'),
(0x207, 'V'),
(0x208, 'M', u'ȉ'),
(0x209, 'V'),
(0x20A, 'M', u'ȋ'),
(0x20B, 'V'),
(0x20C, 'M', u'ȍ'),
]
def _seg_5():
return [
(0x20D, 'V'),
(0x20E, 'M', u'ȏ'),
(0x20F, 'V'),
(0x210, 'M', u'ȑ'),
(0x211, 'V'),
(0x212, 'M', u'ȓ'),
(0x213, 'V'),
(0x214, 'M', u'ȕ'),
(0x215, 'V'),
(0x216, 'M', u'ȗ'),
(0x217, 'V'),
(0x218, 'M', u'ș'),
(0x219, 'V'),
(0x21A, 'M', u'ț'),
(0x21B, 'V'),
(0x21C, 'M', u'ȝ'),
(0x21D, 'V'),
(0x21E, 'M', u'ȟ'),
(0x21F, 'V'),
(0x220, 'M', u'ƞ'),
(0x221, 'V'),
(0x222, 'M', u'ȣ'),
(0x223, 'V'),
(0x224, 'M', u'ȥ'),
(0x225, 'V'),
(0x226, 'M', u'ȧ'),
(0x227, 'V'),
(0x228, 'M', u'ȩ'),
(0x229, 'V'),
(0x22A, 'M', u'ȫ'),
(0x22B, 'V'),
(0x22C, 'M', u'ȭ'),
(0x22D, 'V'),
(0x22E, 'M', u'ȯ'),
(0x22F, 'V'),
(0x230, 'M', u'ȱ'),
(0x231, 'V'),
(0x232, 'M', u'ȳ'),
(0x233, 'V'),
(0x23A, 'M', u'ⱥ'),
(0x23B, 'M', u'ȼ'),
(0x23C, 'V'),
(0x23D, 'M', u'ƚ'),
(0x23E, 'M', u'ⱦ'),
(0x23F, 'V'),
(0x241, 'M', u'ɂ'),
(0x242, 'V'),
(0x243, 'M', u'ƀ'),
(0x244, 'M', u'ʉ'),
(0x245, 'M', u'ʌ'),
(0x246, 'M', u'ɇ'),
(0x247, 'V'),
(0x248, 'M', u'ɉ'),
(0x249, 'V'),
(0x24A, 'M', u'ɋ'),
(0x24B, 'V'),
(0x24C, 'M', u'ɍ'),
(0x24D, 'V'),
(0x24E, 'M', u'ɏ'),
(0x24F, 'V'),
(0x2B0, 'M', u'h'),
(0x2B1, 'M', u'ɦ'),
(0x2B2, 'M', u'j'),
(0x2B3, 'M', u'r'),
(0x2B4, 'M', u'ɹ'),
(0x2B5, 'M', u'ɻ'),
(0x2B6, 'M', u'ʁ'),
(0x2B7, 'M', u'w'),
(0x2B8, 'M', u'y'),
(0x2B9, 'V'),
(0x2D8, '3', u' ̆'),
(0x2D9, '3', u' ̇'),
(0x2DA, '3', u' ̊'),
(0x2DB, '3', u' ̨'),
(0x2DC, '3', u' ̃'),
(0x2DD, '3', u' ̋'),
(0x2DE, 'V'),
(0x2E0, 'M', u'ɣ'),
(0x2E1, 'M', u'l'),
(0x2E2, 'M', u's'),
(0x2E3, 'M', u'x'),
(0x2E4, 'M', u'ʕ'),
(0x2E5, 'V'),
(0x340, 'M', u'̀'),
(0x341, 'M', u'́'),
(0x342, 'V'),
(0x343, 'M', u'̓'),
(0x344, 'M', u'̈́'),
(0x345, 'M', u'ι'),
(0x346, 'V'),
(0x34F, 'I'),
(0x350, 'V'),
(0x370, 'M', u'ͱ'),
(0x371, 'V'),
(0x372, 'M', u'ͳ'),
(0x373, 'V'),
(0x374, 'M', u'ʹ'),
(0x375, 'V'),
(0x376, 'M', u'ͷ'),
(0x377, 'V'),
]
def _seg_6():
return [
(0x378, 'X'),
(0x37A, '3', u' ι'),
(0x37B, 'V'),
(0x37E, '3', u';'),
(0x37F, 'M', u'ϳ'),
(0x380, 'X'),
(0x384, '3', u' ́'),
(0x385, '3', u' ̈́'),
(0x386, 'M', u'ά'),
(0x387, 'M', u'·'),
(0x388, 'M', u'έ'),
(0x389, 'M', u'ή'),
(0x38A, 'M', u'ί'),
(0x38B, 'X'),
(0x38C, 'M', u'ό'),
(0x38D, 'X'),
(0x38E, 'M', u'ύ'),
(0x38F, 'M', u'ώ'),
(0x390, 'V'),
(0x391, 'M', u'α'),
(0x392, 'M', u'β'),
(0x393, 'M', u'γ'),
(0x394, 'M', u'δ'),
(0x395, 'M', u'ε'),
(0x396, 'M', u'ζ'),
(0x397, 'M', u'η'),
(0x398, 'M', u'θ'),
(0x399, 'M', u'ι'),
(0x39A, 'M', u'κ'),
(0x39B, 'M', u'λ'),
(0x39C, 'M', u'μ'),
(0x39D, 'M', u'ν'),
(0x39E, 'M', u'ξ'),
(0x39F, 'M', u'ο'),
(0x3A0, 'M', u'π'),
(0x3A1, 'M', u'ρ'),
(0x3A2, 'X'),
(0x3A3, 'M', u'σ'),
(0x3A4, 'M', u'τ'),
(0x3A5, 'M', u'υ'),
(0x3A6, 'M', u'φ'),
(0x3A7, 'M', u'χ'),
(0x3A8, 'M', u'ψ'),
(0x3A9, 'M', u'ω'),
(0x3AA, 'M', u'ϊ'),
(0x3AB, 'M', u'ϋ'),
(0x3AC, 'V'),
(0x3C2, 'D', u'σ'),
(0x3C3, 'V'),
(0x3CF, 'M', u'ϗ'),
(0x3D0, 'M', u'β'),
(0x3D1, 'M', u'θ'),
(0x3D2, 'M', u'υ'),
(0x3D3, 'M', u'ύ'),
(0x3D4, 'M', u'ϋ'),
(0x3D5, 'M', u'φ'),
(0x3D6, 'M', u'π'),
(0x3D7, 'V'),
(0x3D8, 'M', u'ϙ'),
(0x3D9, 'V'),
(0x3DA, 'M', u'ϛ'),
(0x3DB, 'V'),
(0x3DC, 'M', u'ϝ'),
(0x3DD, 'V'),
(0x3DE, 'M', u'ϟ'),
(0x3DF, 'V'),
(0x3E0, 'M', u'ϡ'),
(0x3E1, 'V'),
(0x3E2, 'M', u'ϣ'),
(0x3E3, 'V'),
(0x3E4, 'M', u'ϥ'),
(0x3E5, 'V'),
(0x3E6, 'M', u'ϧ'),
(0x3E7, 'V'),
(0x3E8, 'M', u'ϩ'),
(0x3E9, 'V'),
(0x3EA, 'M', u'ϫ'),
(0x3EB, 'V'),
(0x3EC, 'M', u'ϭ'),
(0x3ED, 'V'),
(0x3EE, 'M', u'ϯ'),
(0x3EF, 'V'),
(0x3F0, 'M', u'κ'),
(0x3F1, 'M', u'ρ'),
(0x3F2, 'M', u'σ'),
(0x3F3, 'V'),
(0x3F4, 'M', u'θ'),
(0x3F5, 'M', u'ε'),
(0x3F6, 'V'),
(0x3F7, 'M', u'ϸ'),
(0x3F8, 'V'),
(0x3F9, 'M', u'σ'),
(0x3FA, 'M', u'ϻ'),
(0x3FB, 'V'),
(0x3FD, 'M', u'ͻ'),
(0x3FE, 'M', u'ͼ'),
(0x3FF, 'M', u'ͽ'),
(0x400, 'M', u'ѐ'),
(0x401, 'M', u'ё'),
(0x402, 'M', u'ђ'),
]
def _seg_7():
return [
(0x403, 'M', u'ѓ'),
(0x404, 'M', u'є'),
(0x405, 'M', u'ѕ'),
(0x406, 'M', u'і'),
(0x407, 'M', u'ї'),
(0x408, 'M', u'ј'),
(0x409, 'M', u'љ'),
(0x40A, 'M', u'њ'),
(0x40B, 'M', u'ћ'),
(0x40C, 'M', u'ќ'),
(0x40D, 'M', u'ѝ'),
(0x40E, 'M', u'ў'),
(0x40F, 'M', u'џ'),
(0x410, 'M', u'а'),
(0x411, 'M', u'б'),
(0x412, 'M', u'в'),
(0x413, 'M', u'г'),
(0x414, 'M', u'д'),
(0x415, 'M', u'е'),
(0x416, 'M', u'ж'),
(0x417, 'M', u'з'),
(0x418, 'M', u'и'),
(0x419, 'M', u'й'),
(0x41A, 'M', u'к'),
(0x41B, 'M', u'л'),
(0x41C, 'M', u'м'),
(0x41D, 'M', u'н'),
(0x41E, 'M', u'о'),
(0x41F, 'M', u'п'),
(0x420, 'M', u'р'),
(0x421, 'M', u'с'),
(0x422, 'M', u'т'),
(0x423, 'M', u'у'),
(0x424, 'M', u'ф'),
(0x425, 'M', u'х'),
(0x426, 'M', u'ц'),
(0x427, 'M', u'ч'),
(0x428, 'M', u'ш'),
(0x429, 'M', u'щ'),
(0x42A, 'M', u'ъ'),
(0x42B, 'M', u'ы'),
(0x42C, 'M', u'ь'),
(0x42D, 'M', u'э'),
(0x42E, 'M', u'ю'),
(0x42F, 'M', u'я'),
(0x430, 'V'),
(0x460, 'M', u'ѡ'),
(0x461, 'V'),
(0x462, 'M', u'ѣ'),
(0x463, 'V'),
(0x464, 'M', u'ѥ'),
(0x465, 'V'),
(0x466, 'M', u'ѧ'),
(0x467, 'V'),
(0x468, 'M', u'ѩ'),
(0x469, 'V'),
(0x46A, 'M', u'ѫ'),
(0x46B, 'V'),
(0x46C, 'M', u'ѭ'),
(0x46D, 'V'),
(0x46E, 'M', u'ѯ'),
(0x46F, 'V'),
(0x470, 'M', u'ѱ'),
(0x471, 'V'),
(0x472, 'M', u'ѳ'),
(0x473, 'V'),
(0x474, 'M', u'ѵ'),
(0x475, 'V'),
(0x476, 'M', u'ѷ'),
(0x477, 'V'),
(0x478, 'M', u'ѹ'),
(0x479, 'V'),
(0x47A, 'M', u'ѻ'),
(0x47B, 'V'),
(0x47C, 'M', u'ѽ'),
(0x47D, 'V'),
(0x47E, 'M', u'ѿ'),
(0x47F, 'V'),
(0x480, 'M', u'ҁ'),
(0x481, 'V'),
(0x48A, 'M', u'ҋ'),
(0x48B, 'V'),
(0x48C, 'M', u'ҍ'),
(0x48D, 'V'),
(0x48E, 'M', u'ҏ'),
(0x48F, 'V'),
(0x490, 'M', u'ґ'),
(0x491, 'V'),
(0x492, 'M', u'ғ'),
(0x493, 'V'),
(0x494, 'M', u'ҕ'),
(0x495, 'V'),
(0x496, 'M', u'җ'),
(0x497, 'V'),
(0x498, 'M', u'ҙ'),
(0x499, 'V'),
(0x49A, 'M', u'қ'),
(0x49B, 'V'),
(0x49C, 'M', u'ҝ'),
(0x49D, 'V'),
]
def _seg_8():
return [
(0x49E, 'M', u'ҟ'),
(0x49F, 'V'),
(0x4A0, 'M', u'ҡ'),
(0x4A1, 'V'),
(0x4A2, 'M', u'ң'),
(0x4A3, 'V'),
(0x4A4, 'M', u'ҥ'),
(0x4A5, 'V'),
(0x4A6, 'M', u'ҧ'),
(0x4A7, 'V'),
(0x4A8, 'M', u'ҩ'),
(0x4A9, 'V'),
(0x4AA, 'M', u'ҫ'),
(0x4AB, 'V'),
(0x4AC, 'M', u'ҭ'),
(0x4AD, 'V'),
(0x4AE, 'M', u'ү'),
(0x4AF, 'V'),
(0x4B0, 'M', u'ұ'),
(0x4B1, 'V'),
(0x4B2, 'M', u'ҳ'),
(0x4B3, 'V'),
(0x4B4, 'M', u'ҵ'),
(0x4B5, 'V'),
(0x4B6, 'M', u'ҷ'),
(0x4B7, 'V'),
(0x4B8, 'M', u'ҹ'),
(0x4B9, 'V'),
(0x4BA, 'M', u'һ'),
(0x4BB, 'V'),
(0x4BC, 'M', u'ҽ'),
(0x4BD, 'V'),
(0x4BE, 'M', u'ҿ'),
(0x4BF, 'V'),
(0x4C0, 'X'),
(0x4C1, 'M', u'ӂ'),
(0x4C2, 'V'),
(0x4C3, 'M', u'ӄ'),
(0x4C4, 'V'),
(0x4C5, 'M', u'ӆ'),
(0x4C6, 'V'),
(0x4C7, 'M', u'ӈ'),
(0x4C8, 'V'),
(0x4C9, 'M', u'ӊ'),
(0x4CA, 'V'),
(0x4CB, 'M', u'ӌ'),
(0x4CC, 'V'),
(0x4CD, 'M', u'ӎ'),
(0x4CE, 'V'),
(0x4D0, 'M', u'ӑ'),
(0x4D1, 'V'),
(0x4D2, 'M', u'ӓ'),
(0x4D3, 'V'),
(0x4D4, 'M', u'ӕ'),
(0x4D5, 'V'),
(0x4D6, 'M', u'ӗ'),
(0x4D7, 'V'),
(0x4D8, 'M', u'ә'),
(0x4D9, 'V'),
(0x4DA, 'M', u'ӛ'),
(0x4DB, 'V'),
(0x4DC, 'M', u'ӝ'),
(0x4DD, 'V'),
(0x4DE, 'M', u'ӟ'),
(0x4DF, 'V'),
(0x4E0, 'M', u'ӡ'),
(0x4E1, 'V'),
(0x4E2, 'M', u'ӣ'),
(0x4E3, 'V'),
(0x4E4, 'M', u'ӥ'),
(0x4E5, 'V'),
(0x4E6, 'M', u'ӧ'),
(0x4E7, 'V'),
(0x4E8, 'M', u'ө'),
(0x4E9, 'V'),
(0x4EA, 'M', u'ӫ'),
(0x4EB, 'V'),
(0x4EC, 'M', u'ӭ'),
(0x4ED, 'V'),
(0x4EE, 'M', u'ӯ'),
(0x4EF, 'V'),
(0x4F0, 'M', u'ӱ'),
(0x4F1, 'V'),
(0x4F2, 'M', u'ӳ'),
(0x4F3, 'V'),
(0x4F4, 'M', u'ӵ'),
(0x4F5, 'V'),
(0x4F6, 'M', u'ӷ'),
(0x4F7, 'V'),
(0x4F8, 'M', u'ӹ'),
(0x4F9, 'V'),
(0x4FA, 'M', u'ӻ'),
(0x4FB, 'V'),
(0x4FC, 'M', u'ӽ'),
(0x4FD, 'V'),
(0x4FE, 'M', u'ӿ'),
(0x4FF, 'V'),
(0x500, 'M', u'ԁ'),
(0x501, 'V'),
(0x502, 'M', u'ԃ'),
]
def _seg_9():
return [
(0x503, 'V'),
(0x504, 'M', u'ԅ'),
(0x505, 'V'),
(0x506, 'M', u'ԇ'),
(0x507, 'V'),
(0x508, 'M', u'ԉ'),
(0x509, 'V'),
(0x50A, 'M', u'ԋ'),
(0x50B, 'V'),
(0x50C, 'M', u'ԍ'),
(0x50D, 'V'),
(0x50E, 'M', u'ԏ'),
(0x50F, 'V'),
(0x510, 'M', u'ԑ'),
(0x511, 'V'),
(0x512, 'M', u'ԓ'),
(0x513, 'V'),
(0x514, 'M', u'ԕ'),
(0x515, 'V'),
(0x516, 'M', u'ԗ'),
(0x517, 'V'),
(0x518, 'M', u'ԙ'),
(0x519, 'V'),
(0x51A, 'M', u'ԛ'),
(0x51B, 'V'),
(0x51C, 'M', u'ԝ'),
(0x51D, 'V'),
(0x51E, 'M', u'ԟ'),
(0x51F, 'V'),
(0x520, 'M', u'ԡ'),
(0x521, 'V'),
(0x522, 'M', u'ԣ'),
(0x523, 'V'),
(0x524, 'M', u'ԥ'),
(0x525, 'V'),
(0x526, 'M', u'ԧ'),
(0x527, 'V'),
(0x528, 'M', u'ԩ'),
(0x529, 'V'),
(0x52A, 'M', u'ԫ'),
(0x52B, 'V'),
(0x52C, 'M', u'ԭ'),
(0x52D, 'V'),
(0x52E, 'M', u'ԯ'),
(0x52F, 'V'),
(0x530, 'X'),
(0x531, 'M', u'ա'),
(0x532, 'M', u'բ'),
(0x533, 'M', u'գ'),
(0x534, 'M', u'դ'),
(0x535, 'M', u'ե'),
(0x536, 'M', u'զ'),
(0x537, 'M', u'է'),
(0x538, 'M', u'ը'),
(0x539, 'M', u'թ'),
(0x53A, 'M', u'ժ'),
(0x53B, 'M', u'ի'),
(0x53C, 'M', u'լ'),
(0x53D, 'M', u'խ'),
(0x53E, 'M', u'ծ'),
(0x53F, 'M', u'կ'),
(0x540, 'M', u'հ'),
(0x541, 'M', u'ձ'),
(0x542, 'M', u'ղ'),
(0x543, 'M', u'ճ'),
(0x544, 'M', u'մ'),
(0x545, 'M', u'յ'),
(0x546, 'M', u'ն'),
(0x547, 'M', u'շ'),
(0x548, 'M', u'ո'),
(0x549, 'M', u'չ'),
(0x54A, 'M', u'պ'),
(0x54B, 'M', u'ջ'),
(0x54C, 'M', u'ռ'),
(0x54D, 'M', u'ս'),
(0x54E, 'M', u'վ'),
(0x54F, 'M', u'տ'),
(0x550, 'M', u'ր'),
(0x551, 'M', u'ց'),
(0x552, 'M', u'ւ'),
(0x553, 'M', u'փ'),
(0x554, 'M', u'ք'),
(0x555, 'M', u'օ'),
(0x556, 'M', u'ֆ'),
(0x557, 'X'),
(0x559, 'V'),
(0x587, 'M', u'եւ'),
(0x588, 'V'),
(0x58B, 'X'),
(0x58D, 'V'),
(0x590, 'X'),
(0x591, 'V'),
(0x5C8, 'X'),
(0x5D0, 'V'),
(0x5EB, 'X'),
(0x5EF, 'V'),
(0x5F5, 'X'),
(0x606, 'V'),
(0x61C, 'X'),
(0x61E, 'V'),
]
def _seg_10():
return [
(0x675, 'M', u'اٴ'),
(0x676, 'M', u'وٴ'),
(0x677, 'M', u'ۇٴ'),
(0x678, 'M', u'يٴ'),
(0x679, 'V'),
(0x6DD, 'X'),
(0x6DE, 'V'),
(0x70E, 'X'),
(0x710, 'V'),
(0x74B, 'X'),
(0x74D, 'V'),
(0x7B2, 'X'),
(0x7C0, 'V'),
(0x7FB, 'X'),
(0x7FD, 'V'),
(0x82E, 'X'),
(0x830, 'V'),
(0x83F, 'X'),
(0x840, 'V'),
(0x85C, 'X'),
(0x85E, 'V'),
(0x85F, 'X'),
(0x860, 'V'),
(0x86B, 'X'),
(0x8A0, 'V'),
(0x8B5, 'X'),
(0x8B6, 'V'),
(0x8BE, 'X'),
(0x8D3, 'V'),
(0x8E2, 'X'),
(0x8E3, 'V'),
(0x958, 'M', u'क़'),
(0x959, 'M', u'ख़'),
(0x95A, 'M', u'ग़'),
(0x95B, 'M', u'ज़'),
(0x95C, 'M', u'ड़'),
(0x95D, 'M', u'ढ़'),
(0x95E, 'M', u'फ़'),
(0x95F, 'M', u'य़'),
(0x960, 'V'),
(0x984, 'X'),
(0x985, 'V'),
(0x98D, 'X'),
(0x98F, 'V'),
(0x991, 'X'),
(0x993, 'V'),
(0x9A9, 'X'),
(0x9AA, 'V'),
(0x9B1, 'X'),
(0x9B2, 'V'),
(0x9B3, 'X'),
(0x9B6, 'V'),
(0x9BA, 'X'),
(0x9BC, 'V'),
(0x9C5, 'X'),
(0x9C7, 'V'),
(0x9C9, 'X'),
(0x9CB, 'V'),
(0x9CF, 'X'),
(0x9D7, 'V'),
(0x9D8, 'X'),
(0x9DC, 'M', u'ড়'),
(0x9DD, 'M', u'ঢ়'),
(0x9DE, 'X'),
(0x9DF, 'M', u'য়'),
(0x9E0, 'V'),
(0x9E4, 'X'),
(0x9E6, 'V'),
(0x9FF, 'X'),
(0xA01, 'V'),
(0xA04, 'X'),
(0xA05, 'V'),
(0xA0B, 'X'),
(0xA0F, 'V'),
(0xA11, 'X'),
(0xA13, 'V'),
(0xA29, 'X'),
(0xA2A, 'V'),
(0xA31, 'X'),
(0xA32, 'V'),
(0xA33, 'M', u'ਲ਼'),
(0xA34, 'X'),
(0xA35, 'V'),
(0xA36, 'M', u'ਸ਼'),
(0xA37, 'X'),
(0xA38, 'V'),
(0xA3A, 'X'),
(0xA3C, 'V'),
(0xA3D, 'X'),
(0xA3E, 'V'),
(0xA43, 'X'),
(0xA47, 'V'),
(0xA49, 'X'),
(0xA4B, 'V'),
(0xA4E, 'X'),
(0xA51, 'V'),
(0xA52, 'X'),
(0xA59, 'M', u'ਖ਼'),
(0xA5A, 'M', u'ਗ਼'),
(0xA5B, 'M', u'ਜ਼'),
]
def _seg_11():
return [
(0xA5C, 'V'),
(0xA5D, 'X'),
(0xA5E, 'M', u'ਫ਼'),
(0xA5F, 'X'),
(0xA66, 'V'),
(0xA77, 'X'),
(0xA81, 'V'),
(0xA84, 'X'),
(0xA85, 'V'),
(0xA8E, 'X'),
(0xA8F, 'V'),
(0xA92, 'X'),
(0xA93, 'V'),
(0xAA9, 'X'),
(0xAAA, 'V'),
(0xAB1, 'X'),
(0xAB2, 'V'),
(0xAB4, 'X'),
(0xAB5, 'V'),
(0xABA, 'X'),
(0xABC, 'V'),
(0xAC6, 'X'),
(0xAC7, 'V'),
(0xACA, 'X'),
(0xACB, 'V'),
(0xACE, 'X'),
(0xAD0, 'V'),
(0xAD1, 'X'),
(0xAE0, 'V'),
(0xAE4, 'X'),
(0xAE6, 'V'),
(0xAF2, 'X'),
(0xAF9, 'V'),
(0xB00, 'X'),
(0xB01, 'V'),
(0xB04, 'X'),
(0xB05, 'V'),
(0xB0D, 'X'),
(0xB0F, 'V'),
(0xB11, 'X'),
(0xB13, 'V'),
(0xB29, 'X'),
(0xB2A, 'V'),
(0xB31, 'X'),
(0xB32, 'V'),
(0xB34, 'X'),
(0xB35, 'V'),
(0xB3A, 'X'),
(0xB3C, 'V'),
(0xB45, 'X'),
(0xB47, 'V'),
(0xB49, 'X'),
(0xB4B, 'V'),
(0xB4E, 'X'),
(0xB56, 'V'),
(0xB58, 'X'),
(0xB5C, 'M', u'ଡ଼'),
(0xB5D, 'M', u'ଢ଼'),
(0xB5E, 'X'),
(0xB5F, 'V'),
(0xB64, 'X'),
(0xB66, 'V'),
(0xB78, 'X'),
(0xB82, 'V'),
(0xB84, 'X'),
(0xB85, 'V'),
(0xB8B, 'X'),
(0xB8E, 'V'),
(0xB91, 'X'),
(0xB92, 'V'),
(0xB96, 'X'),
(0xB99, 'V'),
(0xB9B, 'X'),
(0xB9C, 'V'),
(0xB9D, 'X'),
(0xB9E, 'V'),
(0xBA0, 'X'),
(0xBA3, 'V'),
(0xBA5, 'X'),
(0xBA8, 'V'),
(0xBAB, 'X'),
(0xBAE, 'V'),
(0xBBA, 'X'),
(0xBBE, 'V'),
(0xBC3, 'X'),
(0xBC6, 'V'),
(0xBC9, 'X'),
(0xBCA, 'V'),
(0xBCE, 'X'),
(0xBD0, 'V'),
(0xBD1, 'X'),
(0xBD7, 'V'),
(0xBD8, 'X'),
(0xBE6, 'V'),
(0xBFB, 'X'),
(0xC00, 'V'),
(0xC0D, 'X'),
(0xC0E, 'V'),
(0xC11, 'X'),
(0xC12, 'V'),
]
def _seg_12():
return [
(0xC29, 'X'),
(0xC2A, 'V'),
(0xC3A, 'X'),
(0xC3D, 'V'),
(0xC45, 'X'),
(0xC46, 'V'),
(0xC49, 'X'),
(0xC4A, 'V'),
(0xC4E, 'X'),
(0xC55, 'V'),
(0xC57, 'X'),
(0xC58, 'V'),
(0xC5B, 'X'),
(0xC60, 'V'),
(0xC64, 'X'),
(0xC66, 'V'),
(0xC70, 'X'),
(0xC78, 'V'),
(0xC8D, 'X'),
(0xC8E, 'V'),
(0xC91, 'X'),
(0xC92, 'V'),
(0xCA9, 'X'),
(0xCAA, 'V'),
(0xCB4, 'X'),
(0xCB5, 'V'),
(0xCBA, 'X'),
(0xCBC, 'V'),
(0xCC5, 'X'),
(0xCC6, 'V'),
(0xCC9, 'X'),
(0xCCA, 'V'),
(0xCCE, 'X'),
(0xCD5, 'V'),
(0xCD7, 'X'),
(0xCDE, 'V'),
(0xCDF, 'X'),
(0xCE0, 'V'),
(0xCE4, 'X'),
(0xCE6, 'V'),
(0xCF0, 'X'),
(0xCF1, 'V'),
(0xCF3, 'X'),
(0xD00, 'V'),
(0xD04, 'X'),
(0xD05, 'V'),
(0xD0D, 'X'),
(0xD0E, 'V'),
(0xD11, 'X'),
(0xD12, 'V'),
(0xD45, 'X'),
(0xD46, 'V'),
(0xD49, 'X'),
(0xD4A, 'V'),
(0xD50, 'X'),
(0xD54, 'V'),
(0xD64, 'X'),
(0xD66, 'V'),
(0xD80, 'X'),
(0xD82, 'V'),
(0xD84, 'X'),
(0xD85, 'V'),
(0xD97, 'X'),
(0xD9A, 'V'),
(0xDB2, 'X'),
(0xDB3, 'V'),
(0xDBC, 'X'),
(0xDBD, 'V'),
(0xDBE, 'X'),
(0xDC0, 'V'),
(0xDC7, 'X'),
(0xDCA, 'V'),
(0xDCB, 'X'),
(0xDCF, 'V'),
(0xDD5, 'X'),
(0xDD6, 'V'),
(0xDD7, 'X'),
(0xDD8, 'V'),
(0xDE0, 'X'),
(0xDE6, 'V'),
(0xDF0, 'X'),
(0xDF2, 'V'),
(0xDF5, 'X'),
(0xE01, 'V'),
(0xE33, 'M', u'ํา'),
(0xE34, 'V'),
(0xE3B, 'X'),
(0xE3F, 'V'),
(0xE5C, 'X'),
(0xE81, 'V'),
(0xE83, 'X'),
(0xE84, 'V'),
(0xE85, 'X'),
(0xE87, 'V'),
(0xE89, 'X'),
(0xE8A, 'V'),
(0xE8B, 'X'),
(0xE8D, 'V'),
(0xE8E, 'X'),
(0xE94, 'V'),
]
def _seg_13():
return [
(0xE98, 'X'),
(0xE99, 'V'),
(0xEA0, 'X'),
(0xEA1, 'V'),
(0xEA4, 'X'),
(0xEA5, 'V'),
(0xEA6, 'X'),
(0xEA7, 'V'),
(0xEA8, 'X'),
(0xEAA, 'V'),
(0xEAC, 'X'),
(0xEAD, 'V'),
(0xEB3, 'M', u'ໍາ'),
(0xEB4, 'V'),
(0xEBA, 'X'),
(0xEBB, 'V'),
(0xEBE, 'X'),
(0xEC0, 'V'),
(0xEC5, 'X'),
(0xEC6, 'V'),
(0xEC7, 'X'),
(0xEC8, 'V'),
(0xECE, 'X'),
(0xED0, 'V'),
(0xEDA, 'X'),
(0xEDC, 'M', u'ຫນ'),
(0xEDD, 'M', u'ຫມ'),
(0xEDE, 'V'),
(0xEE0, 'X'),
(0xF00, 'V'),
(0xF0C, 'M', u'་'),
(0xF0D, 'V'),
(0xF43, 'M', u'གྷ'),
(0xF44, 'V'),
(0xF48, 'X'),
(0xF49, 'V'),
(0xF4D, 'M', u'ཌྷ'),
(0xF4E, 'V'),
(0xF52, 'M', u'དྷ'),
(0xF53, 'V'),
(0xF57, 'M', u'བྷ'),
(0xF58, 'V'),
(0xF5C, 'M', u'ཛྷ'),
(0xF5D, 'V'),
(0xF69, 'M', u'ཀྵ'),
(0xF6A, 'V'),
(0xF6D, 'X'),
(0xF71, 'V'),
(0xF73, 'M', u'ཱི'),
(0xF74, 'V'),
(0xF75, 'M', u'ཱུ'),
(0xF76, 'M', u'ྲྀ'),
(0xF77, 'M', u'ྲཱྀ'),
(0xF78, 'M', u'ླྀ'),
(0xF79, 'M', u'ླཱྀ'),
(0xF7A, 'V'),
(0xF81, 'M', u'ཱྀ'),
(0xF82, 'V'),
(0xF93, 'M', u'ྒྷ'),
(0xF94, 'V'),
(0xF98, 'X'),
(0xF99, 'V'),
(0xF9D, 'M', u'ྜྷ'),
(0xF9E, 'V'),
(0xFA2, 'M', u'ྡྷ'),
(0xFA3, 'V'),
(0xFA7, 'M', u'ྦྷ'),
(0xFA8, 'V'),
(0xFAC, 'M', u'ྫྷ'),
(0xFAD, 'V'),
(0xFB9, 'M', u'ྐྵ'),
(0xFBA, 'V'),
(0xFBD, 'X'),
(0xFBE, 'V'),
(0xFCD, 'X'),
(0xFCE, 'V'),
(0xFDB, 'X'),
(0x1000, 'V'),
(0x10A0, 'X'),
(0x10C7, 'M', u'ⴧ'),
(0x10C8, 'X'),
(0x10CD, 'M', u'ⴭ'),
(0x10CE, 'X'),
(0x10D0, 'V'),
(0x10FC, 'M', u'ნ'),
(0x10FD, 'V'),
(0x115F, 'X'),
(0x1161, 'V'),
(0x1249, 'X'),
(0x124A, 'V'),
(0x124E, 'X'),
(0x1250, 'V'),
(0x1257, 'X'),
(0x1258, 'V'),
(0x1259, 'X'),
(0x125A, 'V'),
(0x125E, 'X'),
(0x1260, 'V'),
(0x1289, 'X'),
(0x128A, 'V'),
]
def _seg_14():
return [
(0x128E, 'X'),
(0x1290, 'V'),
(0x12B1, 'X'),
(0x12B2, 'V'),
(0x12B6, 'X'),
(0x12B8, 'V'),
(0x12BF, 'X'),
(0x12C0, 'V'),
(0x12C1, 'X'),
(0x12C2, 'V'),
(0x12C6, 'X'),
(0x12C8, 'V'),
(0x12D7, 'X'),
(0x12D8, 'V'),
(0x1311, 'X'),
(0x1312, 'V'),
(0x1316, 'X'),
(0x1318, 'V'),
(0x135B, 'X'),
(0x135D, 'V'),
(0x137D, 'X'),
(0x1380, 'V'),
(0x139A, 'X'),
(0x13A0, 'V'),
(0x13F6, 'X'),
(0x13F8, 'M', u'Ᏸ'),
(0x13F9, 'M', u'Ᏹ'),
(0x13FA, 'M', u'Ᏺ'),
(0x13FB, 'M', u'Ᏻ'),
(0x13FC, 'M', u'Ᏼ'),
(0x13FD, 'M', u'Ᏽ'),
(0x13FE, 'X'),
(0x1400, 'V'),
(0x1680, 'X'),
(0x1681, 'V'),
(0x169D, 'X'),
(0x16A0, 'V'),
(0x16F9, 'X'),
(0x1700, 'V'),
(0x170D, 'X'),
(0x170E, 'V'),
(0x1715, 'X'),
(0x1720, 'V'),
(0x1737, 'X'),
(0x1740, 'V'),
(0x1754, 'X'),
(0x1760, 'V'),
(0x176D, 'X'),
(0x176E, 'V'),
(0x1771, 'X'),
(0x1772, 'V'),
(0x1774, 'X'),
(0x1780, 'V'),
(0x17B4, 'X'),
(0x17B6, 'V'),
(0x17DE, 'X'),
(0x17E0, 'V'),
(0x17EA, 'X'),
(0x17F0, 'V'),
(0x17FA, 'X'),
(0x1800, 'V'),
(0x1806, 'X'),
(0x1807, 'V'),
(0x180B, 'I'),
(0x180E, 'X'),
(0x1810, 'V'),
(0x181A, 'X'),
(0x1820, 'V'),
(0x1879, 'X'),
(0x1880, 'V'),
(0x18AB, 'X'),
(0x18B0, 'V'),
(0x18F6, 'X'),
(0x1900, 'V'),
(0x191F, 'X'),
(0x1920, 'V'),
(0x192C, 'X'),
(0x1930, 'V'),
(0x193C, 'X'),
(0x1940, 'V'),
(0x1941, 'X'),
(0x1944, 'V'),
(0x196E, 'X'),
(0x1970, 'V'),
(0x1975, 'X'),
(0x1980, 'V'),
(0x19AC, 'X'),
(0x19B0, 'V'),
(0x19CA, 'X'),
(0x19D0, 'V'),
(0x19DB, 'X'),
(0x19DE, 'V'),
(0x1A1C, 'X'),
(0x1A1E, 'V'),
(0x1A5F, 'X'),
(0x1A60, 'V'),
(0x1A7D, 'X'),
(0x1A7F, 'V'),
(0x1A8A, 'X'),
(0x1A90, 'V'),
]
def _seg_15():
return [
(0x1A9A, 'X'),
(0x1AA0, 'V'),
(0x1AAE, 'X'),
(0x1AB0, 'V'),
(0x1ABF, 'X'),
(0x1B00, 'V'),
(0x1B4C, 'X'),
(0x1B50, 'V'),
(0x1B7D, 'X'),
(0x1B80, 'V'),
(0x1BF4, 'X'),
(0x1BFC, 'V'),
(0x1C38, 'X'),
(0x1C3B, 'V'),
(0x1C4A, 'X'),
(0x1C4D, 'V'),
(0x1C80, 'M', u'в'),
(0x1C81, 'M', u'д'),
(0x1C82, 'M', u'о'),
(0x1C83, 'M', u'с'),
(0x1C84, 'M', u'т'),
(0x1C86, 'M', u'ъ'),
(0x1C87, 'M', u'ѣ'),
(0x1C88, 'M', u'ꙋ'),
(0x1C89, 'X'),
(0x1CC0, 'V'),
(0x1CC8, 'X'),
(0x1CD0, 'V'),
(0x1CFA, 'X'),
(0x1D00, 'V'),
(0x1D2C, 'M', u'a'),
(0x1D2D, 'M', u'æ'),
(0x1D2E, 'M', u'b'),
(0x1D2F, 'V'),
(0x1D30, 'M', u'd'),
(0x1D31, 'M', u'e'),
(0x1D32, 'M', u'ǝ'),
(0x1D33, 'M', u'g'),
(0x1D34, 'M', u'h'),
(0x1D35, 'M', u'i'),
(0x1D36, 'M', u'j'),
(0x1D37, 'M', u'k'),
(0x1D38, 'M', u'l'),
(0x1D39, 'M', u'm'),
(0x1D3A, 'M', u'n'),
(0x1D3B, 'V'),
(0x1D3C, 'M', u'o'),
(0x1D3D, 'M', u'ȣ'),
(0x1D3E, 'M', u'p'),
(0x1D3F, 'M', u'r'),
(0x1D40, 'M', u't'),
(0x1D41, 'M', u'u'),
(0x1D42, 'M', u'w'),
(0x1D43, 'M', u'a'),
(0x1D44, 'M', u'ɐ'),
(0x1D45, 'M', u'ɑ'),
(0x1D46, 'M', u'ᴂ'),
(0x1D47, 'M', u'b'),
(0x1D48, 'M', u'd'),
(0x1D49, 'M', u'e'),
(0x1D4A, 'M', u'ə'),
(0x1D4B, 'M', u'ɛ'),
(0x1D4C, 'M', u'ɜ'),
(0x1D4D, 'M', u'g'),
(0x1D4E, 'V'),
(0x1D4F, 'M', u'k'),
(0x1D50, 'M', u'm'),
(0x1D51, 'M', u'ŋ'),
(0x1D52, 'M', u'o'),
(0x1D53, 'M', u'ɔ'),
(0x1D54, 'M', u'ᴖ'),
(0x1D55, 'M', u'ᴗ'),
(0x1D56, 'M', u'p'),
(0x1D57, 'M', u't'),
(0x1D58, 'M', u'u'),
(0x1D59, 'M', u'ᴝ'),
(0x1D5A, 'M', u'ɯ'),
(0x1D5B, 'M', u'v'),
(0x1D5C, 'M', u'ᴥ'),
(0x1D5D, 'M', u'β'),
(0x1D5E, 'M', u'γ'),
(0x1D5F, 'M', u'δ'),
(0x1D60, 'M', u'φ'),
(0x1D61, 'M', u'χ'),
(0x1D62, 'M', u'i'),
(0x1D63, 'M', u'r'),
(0x1D64, 'M', u'u'),
(0x1D65, 'M', u'v'),
(0x1D66, 'M', u'β'),
(0x1D67, 'M', u'γ'),
(0x1D68, 'M', u'ρ'),
(0x1D69, 'M', u'φ'),
(0x1D6A, 'M', u'χ'),
(0x1D6B, 'V'),
(0x1D78, 'M', u'н'),
(0x1D79, 'V'),
(0x1D9B, 'M', u'ɒ'),
(0x1D9C, 'M', u'c'),
(0x1D9D, 'M', u'ɕ'),
(0x1D9E, 'M', u'ð'),
]
def _seg_16():
return [
(0x1D9F, 'M', u'ɜ'),
(0x1DA0, 'M', u'f'),
(0x1DA1, 'M', u'ɟ'),
(0x1DA2, 'M', u'ɡ'),
(0x1DA3, 'M', u'ɥ'),
(0x1DA4, 'M', u'ɨ'),
(0x1DA5, 'M', u'ɩ'),
(0x1DA6, 'M', u'ɪ'),
(0x1DA7, 'M', u'ᵻ'),
(0x1DA8, 'M', u'ʝ'),
(0x1DA9, 'M', u'ɭ'),
(0x1DAA, 'M', u'ᶅ'),
(0x1DAB, 'M', u'ʟ'),
(0x1DAC, 'M', u'ɱ'),
(0x1DAD, 'M', u'ɰ'),
(0x1DAE, 'M', u'ɲ'),
(0x1DAF, 'M', u'ɳ'),
(0x1DB0, 'M', u'ɴ'),
(0x1DB1, 'M', u'ɵ'),
(0x1DB2, 'M', u'ɸ'),
(0x1DB3, 'M', u'ʂ'),
(0x1DB4, 'M', u'ʃ'),
(0x1DB5, 'M', u'ƫ'),
(0x1DB6, 'M', u'ʉ'),
(0x1DB7, 'M', u'ʊ'),
(0x1DB8, 'M', u'ᴜ'),
(0x1DB9, 'M', u'ʋ'),
(0x1DBA, 'M', u'ʌ'),
(0x1DBB, 'M', u'z'),
(0x1DBC, 'M', u'ʐ'),
(0x1DBD, 'M', u'ʑ'),
(0x1DBE, 'M', u'ʒ'),
(0x1DBF, 'M', u'θ'),
(0x1DC0, 'V'),
(0x1DFA, 'X'),
(0x1DFB, 'V'),
(0x1E00, 'M', u'ḁ'),
(0x1E01, 'V'),
(0x1E02, 'M', u'ḃ'),
(0x1E03, 'V'),
(0x1E04, 'M', u'ḅ'),
(0x1E05, 'V'),
(0x1E06, 'M', u'ḇ'),
(0x1E07, 'V'),
(0x1E08, 'M', u'ḉ'),
(0x1E09, 'V'),
(0x1E0A, 'M', u'ḋ'),
(0x1E0B, 'V'),
(0x1E0C, 'M', u'ḍ'),
(0x1E0D, 'V'),
(0x1E0E, 'M', u'ḏ'),
(0x1E0F, 'V'),
(0x1E10, 'M', u'ḑ'),
(0x1E11, 'V'),
(0x1E12, 'M', u'ḓ'),
(0x1E13, 'V'),
(0x1E14, 'M', u'ḕ'),
(0x1E15, 'V'),
(0x1E16, 'M', u'ḗ'),
(0x1E17, 'V'),
(0x1E18, 'M', u'ḙ'),
(0x1E19, 'V'),
(0x1E1A, 'M', u'ḛ'),
(0x1E1B, 'V'),
(0x1E1C, 'M', u'ḝ'),
(0x1E1D, 'V'),
(0x1E1E, 'M', u'ḟ'),
(0x1E1F, 'V'),
(0x1E20, 'M', u'ḡ'),
(0x1E21, 'V'),
(0x1E22, 'M', u'ḣ'),
(0x1E23, 'V'),
(0x1E24, 'M', u'ḥ'),
(0x1E25, 'V'),
(0x1E26, 'M', u'ḧ'),
(0x1E27, 'V'),
(0x1E28, 'M', u'ḩ'),
(0x1E29, 'V'),
(0x1E2A, 'M', u'ḫ'),
(0x1E2B, 'V'),
(0x1E2C, 'M', u'ḭ'),
(0x1E2D, 'V'),
(0x1E2E, 'M', u'ḯ'),
(0x1E2F, 'V'),
(0x1E30, 'M', u'ḱ'),
(0x1E31, 'V'),
(0x1E32, 'M', u'ḳ'),
(0x1E33, 'V'),
(0x1E34, 'M', u'ḵ'),
(0x1E35, 'V'),
(0x1E36, 'M', u'ḷ'),
(0x1E37, 'V'),
(0x1E38, 'M', u'ḹ'),
(0x1E39, 'V'),
(0x1E3A, 'M', u'ḻ'),
(0x1E3B, 'V'),
(0x1E3C, 'M', u'ḽ'),
(0x1E3D, 'V'),
(0x1E3E, 'M', u'ḿ'),
(0x1E3F, 'V'),
]
def _seg_17():
return [
(0x1E40, 'M', u'ṁ'),
(0x1E41, 'V'),
(0x1E42, 'M', u'ṃ'),
(0x1E43, 'V'),
(0x1E44, 'M', u'ṅ'),
(0x1E45, 'V'),
(0x1E46, 'M', u'ṇ'),
(0x1E47, 'V'),
(0x1E48, 'M', u'ṉ'),
(0x1E49, 'V'),
(0x1E4A, 'M', u'ṋ'),
(0x1E4B, 'V'),
(0x1E4C, 'M', u'ṍ'),
(0x1E4D, 'V'),
(0x1E4E, 'M', u'ṏ'),
(0x1E4F, 'V'),
(0x1E50, 'M', u'ṑ'),
(0x1E51, 'V'),
(0x1E52, 'M', u'ṓ'),
(0x1E53, 'V'),
(0x1E54, 'M', u'ṕ'),
(0x1E55, 'V'),
(0x1E56, 'M', u'ṗ'),
(0x1E57, 'V'),
(0x1E58, 'M', u'ṙ'),
(0x1E59, 'V'),
(0x1E5A, 'M', u'ṛ'),
(0x1E5B, 'V'),
(0x1E5C, 'M', u'ṝ'),
(0x1E5D, 'V'),
(0x1E5E, 'M', u'ṟ'),
(0x1E5F, 'V'),
(0x1E60, 'M', u'ṡ'),
(0x1E61, 'V'),
(0x1E62, 'M', u'ṣ'),
(0x1E63, 'V'),
(0x1E64, 'M', u'ṥ'),
(0x1E65, 'V'),
(0x1E66, 'M', u'ṧ'),
(0x1E67, 'V'),
(0x1E68, 'M', u'ṩ'),
(0x1E69, 'V'),
(0x1E6A, 'M', u'ṫ'),
(0x1E6B, 'V'),
(0x1E6C, 'M', u'ṭ'),
(0x1E6D, 'V'),
(0x1E6E, 'M', u'ṯ'),
(0x1E6F, 'V'),
(0x1E70, 'M', u'ṱ'),
(0x1E71, 'V'),
(0x1E72, 'M', u'ṳ'),
(0x1E73, 'V'),
(0x1E74, 'M', u'ṵ'),
(0x1E75, 'V'),
(0x1E76, 'M', u'ṷ'),
(0x1E77, 'V'),
(0x1E78, 'M', u'ṹ'),
(0x1E79, 'V'),
(0x1E7A, 'M', u'ṻ'),
(0x1E7B, 'V'),
(0x1E7C, 'M', u'ṽ'),
(0x1E7D, 'V'),
(0x1E7E, 'M', u'ṿ'),
(0x1E7F, 'V'),
(0x1E80, 'M', u'ẁ'),
(0x1E81, 'V'),
(0x1E82, 'M', u'ẃ'),
(0x1E83, 'V'),
(0x1E84, 'M', u'ẅ'),
(0x1E85, 'V'),
(0x1E86, 'M', u'ẇ'),
(0x1E87, 'V'),
(0x1E88, 'M', u'ẉ'),
(0x1E89, 'V'),
(0x1E8A, 'M', u'ẋ'),
(0x1E8B, 'V'),
(0x1E8C, 'M', u'ẍ'),
(0x1E8D, 'V'),
(0x1E8E, 'M', u'ẏ'),
(0x1E8F, 'V'),
(0x1E90, 'M', u'ẑ'),
(0x1E91, 'V'),
(0x1E92, 'M', u'ẓ'),
(0x1E93, 'V'),
(0x1E94, 'M', u'ẕ'),
(0x1E95, 'V'),
(0x1E9A, 'M', u'aʾ'),
(0x1E9B, 'M', u'ṡ'),
(0x1E9C, 'V'),
(0x1E9E, 'M', u'ss'),
(0x1E9F, 'V'),
(0x1EA0, 'M', u'ạ'),
(0x1EA1, 'V'),
(0x1EA2, 'M', u'ả'),
(0x1EA3, 'V'),
(0x1EA4, 'M', u'ấ'),
(0x1EA5, 'V'),
(0x1EA6, 'M', u'ầ'),
(0x1EA7, 'V'),
(0x1EA8, 'M', u'ẩ'),
]
def _seg_18():
return [
(0x1EA9, 'V'),
(0x1EAA, 'M', u'ẫ'),
(0x1EAB, 'V'),
(0x1EAC, 'M', u'ậ'),
(0x1EAD, 'V'),
(0x1EAE, 'M', u'ắ'),
(0x1EAF, 'V'),
(0x1EB0, 'M', u'ằ'),
(0x1EB1, 'V'),
(0x1EB2, 'M', u'ẳ'),
(0x1EB3, 'V'),
(0x1EB4, 'M', u'ẵ'),
(0x1EB5, 'V'),
(0x1EB6, 'M', u'ặ'),
(0x1EB7, 'V'),
(0x1EB8, 'M', u'ẹ'),
(0x1EB9, 'V'),
(0x1EBA, 'M', u'ẻ'),
(0x1EBB, 'V'),
(0x1EBC, 'M', u'ẽ'),
(0x1EBD, 'V'),
(0x1EBE, 'M', u'ế'),
(0x1EBF, 'V'),
(0x1EC0, 'M', u'ề'),
(0x1EC1, 'V'),
(0x1EC2, 'M', u'ể'),
(0x1EC3, 'V'),
(0x1EC4, 'M', u'ễ'),
(0x1EC5, 'V'),
(0x1EC6, 'M', u'ệ'),
(0x1EC7, 'V'),
(0x1EC8, 'M', u'ỉ'),
(0x1EC9, 'V'),
(0x1ECA, 'M', u'ị'),
(0x1ECB, 'V'),
(0x1ECC, 'M', u'ọ'),
(0x1ECD, 'V'),
(0x1ECE, 'M', u'ỏ'),
(0x1ECF, 'V'),
(0x1ED0, 'M', u'ố'),
(0x1ED1, 'V'),
(0x1ED2, 'M', u'ồ'),
(0x1ED3, 'V'),
(0x1ED4, 'M', u'ổ'),
(0x1ED5, 'V'),
(0x1ED6, 'M', u'ỗ'),
(0x1ED7, 'V'),
(0x1ED8, 'M', u'ộ'),
(0x1ED9, 'V'),
(0x1EDA, 'M', u'ớ'),
(0x1EDB, 'V'),
(0x1EDC, 'M', u'ờ'),
(0x1EDD, 'V'),
(0x1EDE, 'M', u'ở'),
(0x1EDF, 'V'),
(0x1EE0, 'M', u'ỡ'),
(0x1EE1, 'V'),
(0x1EE2, 'M', u'ợ'),
(0x1EE3, 'V'),
(0x1EE4, 'M', u'ụ'),
(0x1EE5, 'V'),
(0x1EE6, 'M', u'ủ'),
(0x1EE7, 'V'),
(0x1EE8, 'M', u'ứ'),
(0x1EE9, 'V'),
(0x1EEA, 'M', u'ừ'),
(0x1EEB, 'V'),
(0x1EEC, 'M', u'ử'),
(0x1EED, 'V'),
(0x1EEE, 'M', u'ữ'),
(0x1EEF, 'V'),
(0x1EF0, 'M', u'ự'),
(0x1EF1, 'V'),
(0x1EF2, 'M', u'ỳ'),
(0x1EF3, 'V'),
(0x1EF4, 'M', u'ỵ'),
(0x1EF5, 'V'),
(0x1EF6, 'M', u'ỷ'),
(0x1EF7, 'V'),
(0x1EF8, 'M', u'ỹ'),
(0x1EF9, 'V'),
(0x1EFA, 'M', u'ỻ'),
(0x1EFB, 'V'),
(0x1EFC, 'M', u'ỽ'),
(0x1EFD, 'V'),
(0x1EFE, 'M', u'ỿ'),
(0x1EFF, 'V'),
(0x1F08, 'M', u'ἀ'),
(0x1F09, 'M', u'ἁ'),
(0x1F0A, 'M', u'ἂ'),
(0x1F0B, 'M', u'ἃ'),
(0x1F0C, 'M', u'ἄ'),
(0x1F0D, 'M', u'ἅ'),
(0x1F0E, 'M', u'ἆ'),
(0x1F0F, 'M', u'ἇ'),
(0x1F10, 'V'),
(0x1F16, 'X'),
(0x1F18, 'M', u'ἐ'),
(0x1F19, 'M', u'ἑ'),
(0x1F1A, 'M', u'ἒ'),
]
def _seg_19():
return [
(0x1F1B, 'M', u'ἓ'),
(0x1F1C, 'M', u'ἔ'),
(0x1F1D, 'M', u'ἕ'),
(0x1F1E, 'X'),
(0x1F20, 'V'),
(0x1F28, 'M', u'ἠ'),
(0x1F29, 'M', u'ἡ'),
(0x1F2A, 'M', u'ἢ'),
(0x1F2B, 'M', u'ἣ'),
(0x1F2C, 'M', u'ἤ'),
(0x1F2D, 'M', u'ἥ'),
(0x1F2E, 'M', u'ἦ'),
(0x1F2F, 'M', u'ἧ'),
(0x1F30, 'V'),
(0x1F38, 'M', u'ἰ'),
(0x1F39, 'M', u'ἱ'),
(0x1F3A, 'M', u'ἲ'),
(0x1F3B, 'M', u'ἳ'),
(0x1F3C, 'M', u'ἴ'),
(0x1F3D, 'M', u'ἵ'),
(0x1F3E, 'M', u'ἶ'),
(0x1F3F, 'M', u'ἷ'),
(0x1F40, 'V'),
(0x1F46, 'X'),
(0x1F48, 'M', u'ὀ'),
(0x1F49, 'M', u'ὁ'),
(0x1F4A, 'M', u'ὂ'),
(0x1F4B, 'M', u'ὃ'),
(0x1F4C, 'M', u'ὄ'),
(0x1F4D, 'M', u'ὅ'),
(0x1F4E, 'X'),
(0x1F50, 'V'),
(0x1F58, 'X'),
(0x1F59, 'M', u'ὑ'),
(0x1F5A, 'X'),
(0x1F5B, 'M', u'ὓ'),
(0x1F5C, 'X'),
(0x1F5D, 'M', u'ὕ'),
(0x1F5E, 'X'),
(0x1F5F, 'M', u'ὗ'),
(0x1F60, 'V'),
(0x1F68, 'M', u'ὠ'),
(0x1F69, 'M', u'ὡ'),
(0x1F6A, 'M', u'ὢ'),
(0x1F6B, 'M', u'ὣ'),
(0x1F6C, 'M', u'ὤ'),
(0x1F6D, 'M', u'ὥ'),
(0x1F6E, 'M', u'ὦ'),
(0x1F6F, 'M', u'ὧ'),
(0x1F70, 'V'),
(0x1F71, 'M', u'ά'),
(0x1F72, 'V'),
(0x1F73, 'M', u'έ'),
(0x1F74, 'V'),
(0x1F75, 'M', u'ή'),
(0x1F76, 'V'),
(0x1F77, 'M', u'ί'),
(0x1F78, 'V'),
(0x1F79, 'M', u'ό'),
(0x1F7A, 'V'),
(0x1F7B, 'M', u'ύ'),
(0x1F7C, 'V'),
(0x1F7D, 'M', u'ώ'),
(0x1F7E, 'X'),
(0x1F80, 'M', u'ἀι'),
(0x1F81, 'M', u'ἁι'),
(0x1F82, 'M', u'ἂι'),
(0x1F83, 'M', u'ἃι'),
(0x1F84, 'M', u'ἄι'),
(0x1F85, 'M', u'ἅι'),
(0x1F86, 'M', u'ἆι'),
(0x1F87, 'M', u'ἇι'),
(0x1F88, 'M', u'ἀι'),
(0x1F89, 'M', u'ἁι'),
(0x1F8A, 'M', u'ἂι'),
(0x1F8B, 'M', u'ἃι'),
(0x1F8C, 'M', u'ἄι'),
(0x1F8D, 'M', u'ἅι'),
(0x1F8E, 'M', u'ἆι'),
(0x1F8F, 'M', u'ἇι'),
(0x1F90, 'M', u'ἠι'),
(0x1F91, 'M', u'ἡι'),
(0x1F92, 'M', u'ἢι'),
(0x1F93, 'M', u'ἣι'),
(0x1F94, 'M', u'ἤι'),
(0x1F95, 'M', u'ἥι'),
(0x1F96, 'M', u'ἦι'),
(0x1F97, 'M', u'ἧι'),
(0x1F98, 'M', u'ἠι'),
(0x1F99, 'M', u'ἡι'),
(0x1F9A, 'M', u'ἢι'),
(0x1F9B, 'M', u'ἣι'),
(0x1F9C, 'M', u'ἤι'),
(0x1F9D, 'M', u'ἥι'),
(0x1F9E, 'M', u'ἦι'),
(0x1F9F, 'M', u'ἧι'),
(0x1FA0, 'M', u'ὠι'),
(0x1FA1, 'M', u'ὡι'),
(0x1FA2, 'M', u'ὢι'),
(0x1FA3, 'M', u'ὣι'),
]
def _seg_20():
return [
(0x1FA4, 'M', u'ὤι'),
(0x1FA5, 'M', u'ὥι'),
(0x1FA6, 'M', u'ὦι'),
(0x1FA7, 'M', u'ὧι'),
(0x1FA8, 'M', u'ὠι'),
(0x1FA9, 'M', u'ὡι'),
(0x1FAA, 'M', u'ὢι'),
(0x1FAB, 'M', u'ὣι'),
(0x1FAC, 'M', u'ὤι'),
(0x1FAD, 'M', u'ὥι'),
(0x1FAE, 'M', u'ὦι'),
(0x1FAF, 'M', u'ὧι'),
(0x1FB0, 'V'),
(0x1FB2, 'M', u'ὰι'),
(0x1FB3, 'M', u'αι'),
(0x1FB4, 'M', u'άι'),
(0x1FB5, 'X'),
(0x1FB6, 'V'),
(0x1FB7, 'M', u'ᾶι'),
(0x1FB8, 'M', u'ᾰ'),
(0x1FB9, 'M', u'ᾱ'),
(0x1FBA, 'M', u'ὰ'),
(0x1FBB, 'M', u'ά'),
(0x1FBC, 'M', u'αι'),
(0x1FBD, '3', u' ̓'),
(0x1FBE, 'M', u'ι'),
(0x1FBF, '3', u' ̓'),
(0x1FC0, '3', u' ͂'),
(0x1FC1, '3', u' ̈͂'),
(0x1FC2, 'M', u'ὴι'),
(0x1FC3, 'M', u'ηι'),
(0x1FC4, 'M', u'ήι'),
(0x1FC5, 'X'),
(0x1FC6, 'V'),
(0x1FC7, 'M', u'ῆι'),
(0x1FC8, 'M', u'ὲ'),
(0x1FC9, 'M', u'έ'),
(0x1FCA, 'M', u'ὴ'),
(0x1FCB, 'M', u'ή'),
(0x1FCC, 'M', u'ηι'),
(0x1FCD, '3', u' ̓̀'),
(0x1FCE, '3', u' ̓́'),
(0x1FCF, '3', u' ̓͂'),
(0x1FD0, 'V'),
(0x1FD3, 'M', u'ΐ'),
(0x1FD4, 'X'),
(0x1FD6, 'V'),
(0x1FD8, 'M', u'ῐ'),
(0x1FD9, 'M', u'ῑ'),
(0x1FDA, 'M', u'ὶ'),
(0x1FDB, 'M', u'ί'),
(0x1FDC, 'X'),
(0x1FDD, '3', u' ̔̀'),
(0x1FDE, '3', u' ̔́'),
(0x1FDF, '3', u' ̔͂'),
(0x1FE0, 'V'),
(0x1FE3, 'M', u'ΰ'),
(0x1FE4, 'V'),
(0x1FE8, 'M', u'ῠ'),
(0x1FE9, 'M', u'ῡ'),
(0x1FEA, 'M', u'ὺ'),
(0x1FEB, 'M', u'ύ'),
(0x1FEC, 'M', u'ῥ'),
(0x1FED, '3', u' ̈̀'),
(0x1FEE, '3', u' ̈́'),
(0x1FEF, '3', u'`'),
(0x1FF0, 'X'),
(0x1FF2, 'M', u'ὼι'),
(0x1FF3, 'M', u'ωι'),
(0x1FF4, 'M', u'ώι'),
(0x1FF5, 'X'),
(0x1FF6, 'V'),
(0x1FF7, 'M', u'ῶι'),
(0x1FF8, 'M', u'ὸ'),
(0x1FF9, 'M', u'ό'),
(0x1FFA, 'M', u'ὼ'),
(0x1FFB, 'M', u'ώ'),
(0x1FFC, 'M', u'ωι'),
(0x1FFD, '3', u' ́'),
(0x1FFE, '3', u' ̔'),
(0x1FFF, 'X'),
(0x2000, '3', u' '),
(0x200B, 'I'),
(0x200C, 'D', u''),
(0x200E, 'X'),
(0x2010, 'V'),
(0x2011, 'M', u'‐'),
(0x2012, 'V'),
(0x2017, '3', u' ̳'),
(0x2018, 'V'),
(0x2024, 'X'),
(0x2027, 'V'),
(0x2028, 'X'),
(0x202F, '3', u' '),
(0x2030, 'V'),
(0x2033, 'M', u'′′'),
(0x2034, 'M', u'′′′'),
(0x2035, 'V'),
(0x2036, 'M', u'‵‵'),
(0x2037, 'M', u'‵‵‵'),
]
def _seg_21():
return [
(0x2038, 'V'),
(0x203C, '3', u'!!'),
(0x203D, 'V'),
(0x203E, '3', u' ̅'),
(0x203F, 'V'),
(0x2047, '3', u'??'),
(0x2048, '3', u'?!'),
(0x2049, '3', u'!?'),
(0x204A, 'V'),
(0x2057, 'M', u'′′′′'),
(0x2058, 'V'),
(0x205F, '3', u' '),
(0x2060, 'I'),
(0x2061, 'X'),
(0x2064, 'I'),
(0x2065, 'X'),
(0x2070, 'M', u'0'),
(0x2071, 'M', u'i'),
(0x2072, 'X'),
(0x2074, 'M', u'4'),
(0x2075, 'M', u'5'),
(0x2076, 'M', u'6'),
(0x2077, 'M', u'7'),
(0x2078, 'M', u'8'),
(0x2079, 'M', u'9'),
(0x207A, '3', u'+'),
(0x207B, 'M', u'−'),
(0x207C, '3', u'='),
(0x207D, '3', u'('),
(0x207E, '3', u')'),
(0x207F, 'M', u'n'),
(0x2080, 'M', u'0'),
(0x2081, 'M', u'1'),
(0x2082, 'M', u'2'),
(0x2083, 'M', u'3'),
(0x2084, 'M', u'4'),
(0x2085, 'M', u'5'),
(0x2086, 'M', u'6'),
(0x2087, 'M', u'7'),
(0x2088, 'M', u'8'),
(0x2089, 'M', u'9'),
(0x208A, '3', u'+'),
(0x208B, 'M', u'−'),
(0x208C, '3', u'='),
(0x208D, '3', u'('),
(0x208E, '3', u')'),
(0x208F, 'X'),
(0x2090, 'M', u'a'),
(0x2091, 'M', u'e'),
(0x2092, 'M', u'o'),
(0x2093, 'M', u'x'),
(0x2094, 'M', u'ə'),
(0x2095, 'M', u'h'),
(0x2096, 'M', u'k'),
(0x2097, 'M', u'l'),
(0x2098, 'M', u'm'),
(0x2099, 'M', u'n'),
(0x209A, 'M', u'p'),
(0x209B, 'M', u's'),
(0x209C, 'M', u't'),
(0x209D, 'X'),
(0x20A0, 'V'),
(0x20A8, 'M', u'rs'),
(0x20A9, 'V'),
(0x20C0, 'X'),
(0x20D0, 'V'),
(0x20F1, 'X'),
(0x2100, '3', u'a/c'),
(0x2101, '3', u'a/s'),
(0x2102, 'M', u'c'),
(0x2103, 'M', u'°c'),
(0x2104, 'V'),
(0x2105, '3', u'c/o'),
(0x2106, '3', u'c/u'),
(0x2107, 'M', u'ɛ'),
(0x2108, 'V'),
(0x2109, 'M', u'°f'),
(0x210A, 'M', u'g'),
(0x210B, 'M', u'h'),
(0x210F, 'M', u'ħ'),
(0x2110, 'M', u'i'),
(0x2112, 'M', u'l'),
(0x2114, 'V'),
(0x2115, 'M', u'n'),
(0x2116, 'M', u'no'),
(0x2117, 'V'),
(0x2119, 'M', u'p'),
(0x211A, 'M', u'q'),
(0x211B, 'M', u'r'),
(0x211E, 'V'),
(0x2120, 'M', u'sm'),
(0x2121, 'M', u'tel'),
(0x2122, 'M', u'tm'),
(0x2123, 'V'),
(0x2124, 'M', u'z'),
(0x2125, 'V'),
(0x2126, 'M', u'ω'),
(0x2127, 'V'),
(0x2128, 'M', u'z'),
(0x2129, 'V'),
]
def _seg_22():
return [
(0x212A, 'M', u'k'),
(0x212B, 'M', u'å'),
(0x212C, 'M', u'b'),
(0x212D, 'M', u'c'),
(0x212E, 'V'),
(0x212F, 'M', u'e'),
(0x2131, 'M', u'f'),
(0x2132, 'X'),
(0x2133, 'M', u'm'),
(0x2134, 'M', u'o'),
(0x2135, 'M', u'א'),
(0x2136, 'M', u'ב'),
(0x2137, 'M', u'ג'),
(0x2138, 'M', u'ד'),
(0x2139, 'M', u'i'),
(0x213A, 'V'),
(0x213B, 'M', u'fax'),
(0x213C, 'M', u'π'),
(0x213D, 'M', u'γ'),
(0x213F, 'M', u'π'),
(0x2140, 'M', u'∑'),
(0x2141, 'V'),
(0x2145, 'M', u'd'),
(0x2147, 'M', u'e'),
(0x2148, 'M', u'i'),
(0x2149, 'M', u'j'),
(0x214A, 'V'),
(0x2150, 'M', u'1⁄7'),
(0x2151, 'M', u'1⁄9'),
(0x2152, 'M', u'1⁄10'),
(0x2153, 'M', u'1⁄3'),
(0x2154, 'M', u'2⁄3'),
(0x2155, 'M', u'1⁄5'),
(0x2156, 'M', u'2⁄5'),
(0x2157, 'M', u'3⁄5'),
(0x2158, 'M', u'4⁄5'),
(0x2159, 'M', u'1⁄6'),
(0x215A, 'M', u'5⁄6'),
(0x215B, 'M', u'1⁄8'),
(0x215C, 'M', u'3⁄8'),
(0x215D, 'M', u'5⁄8'),
(0x215E, 'M', u'7⁄8'),
(0x215F, 'M', u'1⁄'),
(0x2160, 'M', u'i'),
(0x2161, 'M', u'ii'),
(0x2162, 'M', u'iii'),
(0x2163, 'M', u'iv'),
(0x2164, 'M', u'v'),
(0x2165, 'M', u'vi'),
(0x2166, 'M', u'vii'),
(0x2167, 'M', u'viii'),
(0x2168, 'M', u'ix'),
(0x2169, 'M', u'x'),
(0x216A, 'M', u'xi'),
(0x216B, 'M', u'xii'),
(0x216C, 'M', u'l'),
(0x216D, 'M', u'c'),
(0x216E, 'M', u'd'),
(0x216F, 'M', u'm'),
(0x2170, 'M', u'i'),
(0x2171, 'M', u'ii'),
(0x2172, 'M', u'iii'),
(0x2173, 'M', u'iv'),
(0x2174, 'M', u'v'),
(0x2175, 'M', u'vi'),
(0x2176, 'M', u'vii'),
(0x2177, 'M', u'viii'),
(0x2178, 'M', u'ix'),
(0x2179, 'M', u'x'),
(0x217A, 'M', u'xi'),
(0x217B, 'M', u'xii'),
(0x217C, 'M', u'l'),
(0x217D, 'M', u'c'),
(0x217E, 'M', u'd'),
(0x217F, 'M', u'm'),
(0x2180, 'V'),
(0x2183, 'X'),
(0x2184, 'V'),
(0x2189, 'M', u'0⁄3'),
(0x218A, 'V'),
(0x218C, 'X'),
(0x2190, 'V'),
(0x222C, 'M', u'∫∫'),
(0x222D, 'M', u'∫∫∫'),
(0x222E, 'V'),
(0x222F, 'M', u'∮∮'),
(0x2230, 'M', u'∮∮∮'),
(0x2231, 'V'),
(0x2260, '3'),
(0x2261, 'V'),
(0x226E, '3'),
(0x2270, 'V'),
(0x2329, 'M', u'〈'),
(0x232A, 'M', u'〉'),
(0x232B, 'V'),
(0x2427, 'X'),
(0x2440, 'V'),
(0x244B, 'X'),
(0x2460, 'M', u'1'),
(0x2461, 'M', u'2'),
]
def _seg_23():
return [
(0x2462, 'M', u'3'),
(0x2463, 'M', u'4'),
(0x2464, 'M', u'5'),
(0x2465, 'M', u'6'),
(0x2466, 'M', u'7'),
(0x2467, 'M', u'8'),
(0x2468, 'M', u'9'),
(0x2469, 'M', u'10'),
(0x246A, 'M', u'11'),
(0x246B, 'M', u'12'),
(0x246C, 'M', u'13'),
(0x246D, 'M', u'14'),
(0x246E, 'M', u'15'),
(0x246F, 'M', u'16'),
(0x2470, 'M', u'17'),
(0x2471, 'M', u'18'),
(0x2472, 'M', u'19'),
(0x2473, 'M', u'20'),
(0x2474, '3', u'(1)'),
(0x2475, '3', u'(2)'),
(0x2476, '3', u'(3)'),
(0x2477, '3', u'(4)'),
(0x2478, '3', u'(5)'),
(0x2479, '3', u'(6)'),
(0x247A, '3', u'(7)'),
(0x247B, '3', u'(8)'),
(0x247C, '3', u'(9)'),
(0x247D, '3', u'(10)'),
(0x247E, '3', u'(11)'),
(0x247F, '3', u'(12)'),
(0x2480, '3', u'(13)'),
(0x2481, '3', u'(14)'),
(0x2482, '3', u'(15)'),
(0x2483, '3', u'(16)'),
(0x2484, '3', u'(17)'),
(0x2485, '3', u'(18)'),
(0x2486, '3', u'(19)'),
(0x2487, '3', u'(20)'),
(0x2488, 'X'),
(0x249C, '3', u'(a)'),
(0x249D, '3', u'(b)'),
(0x249E, '3', u'(c)'),
(0x249F, '3', u'(d)'),
(0x24A0, '3', u'(e)'),
(0x24A1, '3', u'(f)'),
(0x24A2, '3', u'(g)'),
(0x24A3, '3', u'(h)'),
(0x24A4, '3', u'(i)'),
(0x24A5, '3', u'(j)'),
(0x24A6, '3', u'(k)'),
(0x24A7, '3', u'(l)'),
(0x24A8, '3', u'(m)'),
(0x24A9, '3', u'(n)'),
(0x24AA, '3', u'(o)'),
(0x24AB, '3', u'(p)'),
(0x24AC, '3', u'(q)'),
(0x24AD, '3', u'(r)'),
(0x24AE, '3', u'(s)'),
(0x24AF, '3', u'(t)'),
(0x24B0, '3', u'(u)'),
(0x24B1, '3', u'(v)'),
(0x24B2, '3', u'(w)'),
(0x24B3, '3', u'(x)'),
(0x24B4, '3', u'(y)'),
(0x24B5, '3', u'(z)'),
(0x24B6, 'M', u'a'),
(0x24B7, 'M', u'b'),
(0x24B8, 'M', u'c'),
(0x24B9, 'M', u'd'),
(0x24BA, 'M', u'e'),
(0x24BB, 'M', u'f'),
(0x24BC, 'M', u'g'),
(0x24BD, 'M', u'h'),
(0x24BE, 'M', u'i'),
(0x24BF, 'M', u'j'),
(0x24C0, 'M', u'k'),
(0x24C1, 'M', u'l'),
(0x24C2, 'M', u'm'),
(0x24C3, 'M', u'n'),
(0x24C4, 'M', u'o'),
(0x24C5, 'M', u'p'),
(0x24C6, 'M', u'q'),
(0x24C7, 'M', u'r'),
(0x24C8, 'M', u's'),
(0x24C9, 'M', u't'),
(0x24CA, 'M', u'u'),
(0x24CB, 'M', u'v'),
(0x24CC, 'M', u'w'),
(0x24CD, 'M', u'x'),
(0x24CE, 'M', u'y'),
(0x24CF, 'M', u'z'),
(0x24D0, 'M', u'a'),
(0x24D1, 'M', u'b'),
(0x24D2, 'M', u'c'),
(0x24D3, 'M', u'd'),
(0x24D4, 'M', u'e'),
(0x24D5, 'M', u'f'),
(0x24D6, 'M', u'g'),
(0x24D7, 'M', u'h'),
(0x24D8, 'M', u'i'),
]
def _seg_24():
return [
(0x24D9, 'M', u'j'),
(0x24DA, 'M', u'k'),
(0x24DB, 'M', u'l'),
(0x24DC, 'M', u'm'),
(0x24DD, 'M', u'n'),
(0x24DE, 'M', u'o'),
(0x24DF, 'M', u'p'),
(0x24E0, 'M', u'q'),
(0x24E1, 'M', u'r'),
(0x24E2, 'M', u's'),
(0x24E3, 'M', u't'),
(0x24E4, 'M', u'u'),
(0x24E5, 'M', u'v'),
(0x24E6, 'M', u'w'),
(0x24E7, 'M', u'x'),
(0x24E8, 'M', u'y'),
(0x24E9, 'M', u'z'),
(0x24EA, 'M', u'0'),
(0x24EB, 'V'),
(0x2A0C, 'M', u'∫∫∫∫'),
(0x2A0D, 'V'),
(0x2A74, '3', u'::='),
(0x2A75, '3', u'=='),
(0x2A76, '3', u'==='),
(0x2A77, 'V'),
(0x2ADC, 'M', u'⫝̸'),
(0x2ADD, 'V'),
(0x2B74, 'X'),
(0x2B76, 'V'),
(0x2B96, 'X'),
(0x2B98, 'V'),
(0x2BC9, 'X'),
(0x2BCA, 'V'),
(0x2BFF, 'X'),
(0x2C00, 'M', u'ⰰ'),
(0x2C01, 'M', u'ⰱ'),
(0x2C02, 'M', u'ⰲ'),
(0x2C03, 'M', u'ⰳ'),
(0x2C04, 'M', u'ⰴ'),
(0x2C05, 'M', u'ⰵ'),
(0x2C06, 'M', u'ⰶ'),
(0x2C07, 'M', u'ⰷ'),
(0x2C08, 'M', u'ⰸ'),
(0x2C09, 'M', u'ⰹ'),
(0x2C0A, 'M', u'ⰺ'),
(0x2C0B, 'M', u'ⰻ'),
(0x2C0C, 'M', u'ⰼ'),
(0x2C0D, 'M', u'ⰽ'),
(0x2C0E, 'M', u'ⰾ'),
(0x2C0F, 'M', u'ⰿ'),
(0x2C10, 'M', u'ⱀ'),
(0x2C11, 'M', u'ⱁ'),
(0x2C12, 'M', u'ⱂ'),
(0x2C13, 'M', u'ⱃ'),
(0x2C14, 'M', u'ⱄ'),
(0x2C15, 'M', u'ⱅ'),
(0x2C16, 'M', u'ⱆ'),
(0x2C17, 'M', u'ⱇ'),
(0x2C18, 'M', u'ⱈ'),
(0x2C19, 'M', u'ⱉ'),
(0x2C1A, 'M', u'ⱊ'),
(0x2C1B, 'M', u'ⱋ'),
(0x2C1C, 'M', u'ⱌ'),
(0x2C1D, 'M', u'ⱍ'),
(0x2C1E, 'M', u'ⱎ'),
(0x2C1F, 'M', u'ⱏ'),
(0x2C20, 'M', u'ⱐ'),
(0x2C21, 'M', u'ⱑ'),
(0x2C22, 'M', u'ⱒ'),
(0x2C23, 'M', u'ⱓ'),
(0x2C24, 'M', u'ⱔ'),
(0x2C25, 'M', u'ⱕ'),
(0x2C26, 'M', u'ⱖ'),
(0x2C27, 'M', u'ⱗ'),
(0x2C28, 'M', u'ⱘ'),
(0x2C29, 'M', u'ⱙ'),
(0x2C2A, 'M', u'ⱚ'),
(0x2C2B, 'M', u'ⱛ'),
(0x2C2C, 'M', u'ⱜ'),
(0x2C2D, 'M', u'ⱝ'),
(0x2C2E, 'M', u'ⱞ'),
(0x2C2F, 'X'),
(0x2C30, 'V'),
(0x2C5F, 'X'),
(0x2C60, 'M', u'ⱡ'),
(0x2C61, 'V'),
(0x2C62, 'M', u'ɫ'),
(0x2C63, 'M', u'ᵽ'),
(0x2C64, 'M', u'ɽ'),
(0x2C65, 'V'),
(0x2C67, 'M', u'ⱨ'),
(0x2C68, 'V'),
(0x2C69, 'M', u'ⱪ'),
(0x2C6A, 'V'),
(0x2C6B, 'M', u'ⱬ'),
(0x2C6C, 'V'),
(0x2C6D, 'M', u'ɑ'),
(0x2C6E, 'M', u'ɱ'),
(0x2C6F, 'M', u'ɐ'),
(0x2C70, 'M', u'ɒ'),
]
def _seg_25():
return [
(0x2C71, 'V'),
(0x2C72, 'M', u'ⱳ'),
(0x2C73, 'V'),
(0x2C75, 'M', u'ⱶ'),
(0x2C76, 'V'),
(0x2C7C, 'M', u'j'),
(0x2C7D, 'M', u'v'),
(0x2C7E, 'M', u'ȿ'),
(0x2C7F, 'M', u'ɀ'),
(0x2C80, 'M', u'ⲁ'),
(0x2C81, 'V'),
(0x2C82, 'M', u'ⲃ'),
(0x2C83, 'V'),
(0x2C84, 'M', u'ⲅ'),
(0x2C85, 'V'),
(0x2C86, 'M', u'ⲇ'),
(0x2C87, 'V'),
(0x2C88, 'M', u'ⲉ'),
(0x2C89, 'V'),
(0x2C8A, 'M', u'ⲋ'),
(0x2C8B, 'V'),
(0x2C8C, 'M', u'ⲍ'),
(0x2C8D, 'V'),
(0x2C8E, 'M', u'ⲏ'),
(0x2C8F, 'V'),
(0x2C90, 'M', u'ⲑ'),
(0x2C91, 'V'),
(0x2C92, 'M', u'ⲓ'),
(0x2C93, 'V'),
(0x2C94, 'M', u'ⲕ'),
(0x2C95, 'V'),
(0x2C96, 'M', u'ⲗ'),
(0x2C97, 'V'),
(0x2C98, 'M', u'ⲙ'),
(0x2C99, 'V'),
(0x2C9A, 'M', u'ⲛ'),
(0x2C9B, 'V'),
(0x2C9C, 'M', u'ⲝ'),
(0x2C9D, 'V'),
(0x2C9E, 'M', u'ⲟ'),
(0x2C9F, 'V'),
(0x2CA0, 'M', u'ⲡ'),
(0x2CA1, 'V'),
(0x2CA2, 'M', u'ⲣ'),
(0x2CA3, 'V'),
(0x2CA4, 'M', u'ⲥ'),
(0x2CA5, 'V'),
(0x2CA6, 'M', u'ⲧ'),
(0x2CA7, 'V'),
(0x2CA8, 'M', u'ⲩ'),
(0x2CA9, 'V'),
(0x2CAA, 'M', u'ⲫ'),
(0x2CAB, 'V'),
(0x2CAC, 'M', u'ⲭ'),
(0x2CAD, 'V'),
(0x2CAE, 'M', u'ⲯ'),
(0x2CAF, 'V'),
(0x2CB0, 'M', u'ⲱ'),
(0x2CB1, 'V'),
(0x2CB2, 'M', u'ⲳ'),
(0x2CB3, 'V'),
(0x2CB4, 'M', u'ⲵ'),
(0x2CB5, 'V'),
(0x2CB6, 'M', u'ⲷ'),
(0x2CB7, 'V'),
(0x2CB8, 'M', u'ⲹ'),
(0x2CB9, 'V'),
(0x2CBA, 'M', u'ⲻ'),
(0x2CBB, 'V'),
(0x2CBC, 'M', u'ⲽ'),
(0x2CBD, 'V'),
(0x2CBE, 'M', u'ⲿ'),
(0x2CBF, 'V'),
(0x2CC0, 'M', u'ⳁ'),
(0x2CC1, 'V'),
(0x2CC2, 'M', u'ⳃ'),
(0x2CC3, 'V'),
(0x2CC4, 'M', u'ⳅ'),
(0x2CC5, 'V'),
(0x2CC6, 'M', u'ⳇ'),
(0x2CC7, 'V'),
(0x2CC8, 'M', u'ⳉ'),
(0x2CC9, 'V'),
(0x2CCA, 'M', u'ⳋ'),
(0x2CCB, 'V'),
(0x2CCC, 'M', u'ⳍ'),
(0x2CCD, 'V'),
(0x2CCE, 'M', u'ⳏ'),
(0x2CCF, 'V'),
(0x2CD0, 'M', u'ⳑ'),
(0x2CD1, 'V'),
(0x2CD2, 'M', u'ⳓ'),
(0x2CD3, 'V'),
(0x2CD4, 'M', u'ⳕ'),
(0x2CD5, 'V'),
(0x2CD6, 'M', u'ⳗ'),
(0x2CD7, 'V'),
(0x2CD8, 'M', u'ⳙ'),
(0x2CD9, 'V'),
(0x2CDA, 'M', u'ⳛ'),
]
def _seg_26():
return [
(0x2CDB, 'V'),
(0x2CDC, 'M', u'ⳝ'),
(0x2CDD, 'V'),
(0x2CDE, 'M', u'ⳟ'),
(0x2CDF, 'V'),
(0x2CE0, 'M', u'ⳡ'),
(0x2CE1, 'V'),
(0x2CE2, 'M', u'ⳣ'),
(0x2CE3, 'V'),
(0x2CEB, 'M', u'ⳬ'),
(0x2CEC, 'V'),
(0x2CED, 'M', u'ⳮ'),
(0x2CEE, 'V'),
(0x2CF2, 'M', u'ⳳ'),
(0x2CF3, 'V'),
(0x2CF4, 'X'),
(0x2CF9, 'V'),
(0x2D26, 'X'),
(0x2D27, 'V'),
(0x2D28, 'X'),
(0x2D2D, 'V'),
(0x2D2E, 'X'),
(0x2D30, 'V'),
(0x2D68, 'X'),
(0x2D6F, 'M', u'ⵡ'),
(0x2D70, 'V'),
(0x2D71, 'X'),
(0x2D7F, 'V'),
(0x2D97, 'X'),
(0x2DA0, 'V'),
(0x2DA7, 'X'),
(0x2DA8, 'V'),
(0x2DAF, 'X'),
(0x2DB0, 'V'),
(0x2DB7, 'X'),
(0x2DB8, 'V'),
(0x2DBF, 'X'),
(0x2DC0, 'V'),
(0x2DC7, 'X'),
(0x2DC8, 'V'),
(0x2DCF, 'X'),
(0x2DD0, 'V'),
(0x2DD7, 'X'),
(0x2DD8, 'V'),
(0x2DDF, 'X'),
(0x2DE0, 'V'),
(0x2E4F, 'X'),
(0x2E80, 'V'),
(0x2E9A, 'X'),
(0x2E9B, 'V'),
(0x2E9F, 'M', u'母'),
(0x2EA0, 'V'),
(0x2EF3, 'M', u'龟'),
(0x2EF4, 'X'),
(0x2F00, 'M', u'一'),
(0x2F01, 'M', u'丨'),
(0x2F02, 'M', u'丶'),
(0x2F03, 'M', u'丿'),
(0x2F04, 'M', u'乙'),
(0x2F05, 'M', u'亅'),
(0x2F06, 'M', u'二'),
(0x2F07, 'M', u'亠'),
(0x2F08, 'M', u'人'),
(0x2F09, 'M', u'儿'),
(0x2F0A, 'M', u'入'),
(0x2F0B, 'M', u'八'),
(0x2F0C, 'M', u'冂'),
(0x2F0D, 'M', u'冖'),
(0x2F0E, 'M', u'冫'),
(0x2F0F, 'M', u'几'),
(0x2F10, 'M', u'凵'),
(0x2F11, 'M', u'刀'),
(0x2F12, 'M', u'力'),
(0x2F13, 'M', u'勹'),
(0x2F14, 'M', u'匕'),
(0x2F15, 'M', u'匚'),
(0x2F16, 'M', u'匸'),
(0x2F17, 'M', u'十'),
(0x2F18, 'M', u'卜'),
(0x2F19, 'M', u'卩'),
(0x2F1A, 'M', u'厂'),
(0x2F1B, 'M', u'厶'),
(0x2F1C, 'M', u'又'),
(0x2F1D, 'M', u'口'),
(0x2F1E, 'M', u'囗'),
(0x2F1F, 'M', u'土'),
(0x2F20, 'M', u'士'),
(0x2F21, 'M', u'夂'),
(0x2F22, 'M', u'夊'),
(0x2F23, 'M', u'夕'),
(0x2F24, 'M', u'大'),
(0x2F25, 'M', u'女'),
(0x2F26, 'M', u'子'),
(0x2F27, 'M', u'宀'),
(0x2F28, 'M', u'寸'),
(0x2F29, 'M', u'小'),
(0x2F2A, 'M', u'尢'),
(0x2F2B, 'M', u'尸'),
(0x2F2C, 'M', u'屮'),
(0x2F2D, 'M', u'山'),
]
def _seg_27():
return [
(0x2F2E, 'M', u'巛'),
(0x2F2F, 'M', u'工'),
(0x2F30, 'M', u'己'),
(0x2F31, 'M', u'巾'),
(0x2F32, 'M', u'干'),
(0x2F33, 'M', u'幺'),
(0x2F34, 'M', u'广'),
(0x2F35, 'M', u'廴'),
(0x2F36, 'M', u'廾'),
(0x2F37, 'M', u'弋'),
(0x2F38, 'M', u'弓'),
(0x2F39, 'M', u'彐'),
(0x2F3A, 'M', u'彡'),
(0x2F3B, 'M', u'彳'),
(0x2F3C, 'M', u'心'),
(0x2F3D, 'M', u'戈'),
(0x2F3E, 'M', u'戶'),
(0x2F3F, 'M', u'手'),
(0x2F40, 'M', u'支'),
(0x2F41, 'M', u'攴'),
(0x2F42, 'M', u'文'),
(0x2F43, 'M', u'斗'),
(0x2F44, 'M', u'斤'),
(0x2F45, 'M', u'方'),
(0x2F46, 'M', u'无'),
(0x2F47, 'M', u'日'),
(0x2F48, 'M', u'曰'),
(0x2F49, 'M', u'月'),
(0x2F4A, 'M', u'木'),
(0x2F4B, 'M', u'欠'),
(0x2F4C, 'M', u'止'),
(0x2F4D, 'M', u'歹'),
(0x2F4E, 'M', u'殳'),
(0x2F4F, 'M', u'毋'),
(0x2F50, 'M', u'比'),
(0x2F51, 'M', u'毛'),
(0x2F52, 'M', u'氏'),
(0x2F53, 'M', u'气'),
(0x2F54, 'M', u'水'),
(0x2F55, 'M', u'火'),
(0x2F56, 'M', u'爪'),
(0x2F57, 'M', u'父'),
(0x2F58, 'M', u'爻'),
(0x2F59, 'M', u'爿'),
(0x2F5A, 'M', u'片'),
(0x2F5B, 'M', u'牙'),
(0x2F5C, 'M', u'牛'),
(0x2F5D, 'M', u'犬'),
(0x2F5E, 'M', u'玄'),
(0x2F5F, 'M', u'玉'),
(0x2F60, 'M', u'瓜'),
(0x2F61, 'M', u'瓦'),
(0x2F62, 'M', u'甘'),
(0x2F63, 'M', u'生'),
(0x2F64, 'M', u'用'),
(0x2F65, 'M', u'田'),
(0x2F66, 'M', u'疋'),
(0x2F67, 'M', u'疒'),
(0x2F68, 'M', u'癶'),
(0x2F69, 'M', u'白'),
(0x2F6A, 'M', u'皮'),
(0x2F6B, 'M', u'皿'),
(0x2F6C, 'M', u'目'),
(0x2F6D, 'M', u'矛'),
(0x2F6E, 'M', u'矢'),
(0x2F6F, 'M', u'石'),
(0x2F70, 'M', u'示'),
(0x2F71, 'M', u'禸'),
(0x2F72, 'M', u'禾'),
(0x2F73, 'M', u'穴'),
(0x2F74, 'M', u'立'),
(0x2F75, 'M', u'竹'),
(0x2F76, 'M', u'米'),
(0x2F77, 'M', u'糸'),
(0x2F78, 'M', u'缶'),
(0x2F79, 'M', u'网'),
(0x2F7A, 'M', u'羊'),
(0x2F7B, 'M', u'羽'),
(0x2F7C, 'M', u'老'),
(0x2F7D, 'M', u'而'),
(0x2F7E, 'M', u'耒'),
(0x2F7F, 'M', u'耳'),
(0x2F80, 'M', u'聿'),
(0x2F81, 'M', u'肉'),
(0x2F82, 'M', u'臣'),
(0x2F83, 'M', u'自'),
(0x2F84, 'M', u'至'),
(0x2F85, 'M', u'臼'),
(0x2F86, 'M', u'舌'),
(0x2F87, 'M', u'舛'),
(0x2F88, 'M', u'舟'),
(0x2F89, 'M', u'艮'),
(0x2F8A, 'M', u'色'),
(0x2F8B, 'M', u'艸'),
(0x2F8C, 'M', u'虍'),
(0x2F8D, 'M', u'虫'),
(0x2F8E, 'M', u'血'),
(0x2F8F, 'M', u'行'),
(0x2F90, 'M', u'衣'),
(0x2F91, 'M', u'襾'),
]
def _seg_28():
return [
(0x2F92, 'M', u'見'),
(0x2F93, 'M', u'角'),
(0x2F94, 'M', u'言'),
(0x2F95, 'M', u'谷'),
(0x2F96, 'M', u'豆'),
(0x2F97, 'M', u'豕'),
(0x2F98, 'M', u'豸'),
(0x2F99, 'M', u'貝'),
(0x2F9A, 'M', u'赤'),
(0x2F9B, 'M', u'走'),
(0x2F9C, 'M', u'足'),
(0x2F9D, 'M', u'身'),
(0x2F9E, 'M', u'車'),
(0x2F9F, 'M', u'辛'),
(0x2FA0, 'M', u'辰'),
(0x2FA1, 'M', u'辵'),
(0x2FA2, 'M', u'邑'),
(0x2FA3, 'M', u'酉'),
(0x2FA4, 'M', u'釆'),
(0x2FA5, 'M', u'里'),
(0x2FA6, 'M', u'金'),
(0x2FA7, 'M', u'長'),
(0x2FA8, 'M', u'門'),
(0x2FA9, 'M', u'阜'),
(0x2FAA, 'M', u'隶'),
(0x2FAB, 'M', u'隹'),
(0x2FAC, 'M', u'雨'),
(0x2FAD, 'M', u'靑'),
(0x2FAE, 'M', u'非'),
(0x2FAF, 'M', u'面'),
(0x2FB0, 'M', u'革'),
(0x2FB1, 'M', u'韋'),
(0x2FB2, 'M', u'韭'),
(0x2FB3, 'M', u'音'),
(0x2FB4, 'M', u'頁'),
(0x2FB5, 'M', u'風'),
(0x2FB6, 'M', u'飛'),
(0x2FB7, 'M', u'食'),
(0x2FB8, 'M', u'首'),
(0x2FB9, 'M', u'香'),
(0x2FBA, 'M', u'馬'),
(0x2FBB, 'M', u'骨'),
(0x2FBC, 'M', u'高'),
(0x2FBD, 'M', u'髟'),
(0x2FBE, 'M', u'鬥'),
(0x2FBF, 'M', u'鬯'),
(0x2FC0, 'M', u'鬲'),
(0x2FC1, 'M', u'鬼'),
(0x2FC2, 'M', u'魚'),
(0x2FC3, 'M', u'鳥'),
(0x2FC4, 'M', u'鹵'),
(0x2FC5, 'M', u'鹿'),
(0x2FC6, 'M', u'麥'),
(0x2FC7, 'M', u'麻'),
(0x2FC8, 'M', u'黃'),
(0x2FC9, 'M', u'黍'),
(0x2FCA, 'M', u'黑'),
(0x2FCB, 'M', u'黹'),
(0x2FCC, 'M', u'黽'),
(0x2FCD, 'M', u'鼎'),
(0x2FCE, 'M', u'鼓'),
(0x2FCF, 'M', u'鼠'),
(0x2FD0, 'M', u'鼻'),
(0x2FD1, 'M', u'齊'),
(0x2FD2, 'M', u'齒'),
(0x2FD3, 'M', u'龍'),
(0x2FD4, 'M', u'龜'),
(0x2FD5, 'M', u'龠'),
(0x2FD6, 'X'),
(0x3000, '3', u' '),
(0x3001, 'V'),
(0x3002, 'M', u'.'),
(0x3003, 'V'),
(0x3036, 'M', u'〒'),
(0x3037, 'V'),
(0x3038, 'M', u'十'),
(0x3039, 'M', u'卄'),
(0x303A, 'M', u'卅'),
(0x303B, 'V'),
(0x3040, 'X'),
(0x3041, 'V'),
(0x3097, 'X'),
(0x3099, 'V'),
(0x309B, '3', u' ゙'),
(0x309C, '3', u' ゚'),
(0x309D, 'V'),
(0x309F, 'M', u'より'),
(0x30A0, 'V'),
(0x30FF, 'M', u'コト'),
(0x3100, 'X'),
(0x3105, 'V'),
(0x3130, 'X'),
(0x3131, 'M', u'ᄀ'),
(0x3132, 'M', u'ᄁ'),
(0x3133, 'M', u'ᆪ'),
(0x3134, 'M', u'ᄂ'),
(0x3135, 'M', u'ᆬ'),
(0x3136, 'M', u'ᆭ'),
(0x3137, 'M', u'ᄃ'),
(0x3138, 'M', u'ᄄ'),
]
def _seg_29():
return [
(0x3139, 'M', u'ᄅ'),
(0x313A, 'M', u'ᆰ'),
(0x313B, 'M', u'ᆱ'),
(0x313C, 'M', u'ᆲ'),
(0x313D, 'M', u'ᆳ'),
(0x313E, 'M', u'ᆴ'),
(0x313F, 'M', u'ᆵ'),
(0x3140, 'M', u'ᄚ'),
(0x3141, 'M', u'ᄆ'),
(0x3142, 'M', u'ᄇ'),
(0x3143, 'M', u'ᄈ'),
(0x3144, 'M', u'ᄡ'),
(0x3145, 'M', u'ᄉ'),
(0x3146, 'M', u'ᄊ'),
(0x3147, 'M', u'ᄋ'),
(0x3148, 'M', u'ᄌ'),
(0x3149, 'M', u'ᄍ'),
(0x314A, 'M', u'ᄎ'),
(0x314B, 'M', u'ᄏ'),
(0x314C, 'M', u'ᄐ'),
(0x314D, 'M', u'ᄑ'),
(0x314E, 'M', u'ᄒ'),
(0x314F, 'M', u'ᅡ'),
(0x3150, 'M', u'ᅢ'),
(0x3151, 'M', u'ᅣ'),
(0x3152, 'M', u'ᅤ'),
(0x3153, 'M', u'ᅥ'),
(0x3154, 'M', u'ᅦ'),
(0x3155, 'M', u'ᅧ'),
(0x3156, 'M', u'ᅨ'),
(0x3157, 'M', u'ᅩ'),
(0x3158, 'M', u'ᅪ'),
(0x3159, 'M', u'ᅫ'),
(0x315A, 'M', u'ᅬ'),
(0x315B, 'M', u'ᅭ'),
(0x315C, 'M', u'ᅮ'),
(0x315D, 'M', u'ᅯ'),
(0x315E, 'M', u'ᅰ'),
(0x315F, 'M', u'ᅱ'),
(0x3160, 'M', u'ᅲ'),
(0x3161, 'M', u'ᅳ'),
(0x3162, 'M', u'ᅴ'),
(0x3163, 'M', u'ᅵ'),
(0x3164, 'X'),
(0x3165, 'M', u'ᄔ'),
(0x3166, 'M', u'ᄕ'),
(0x3167, 'M', u'ᇇ'),
(0x3168, 'M', u'ᇈ'),
(0x3169, 'M', u'ᇌ'),
(0x316A, 'M', u'ᇎ'),
(0x316B, 'M', u'ᇓ'),
(0x316C, 'M', u'ᇗ'),
(0x316D, 'M', u'ᇙ'),
(0x316E, 'M', u'ᄜ'),
(0x316F, 'M', u'ᇝ'),
(0x3170, 'M', u'ᇟ'),
(0x3171, 'M', u'ᄝ'),
(0x3172, 'M', u'ᄞ'),
(0x3173, 'M', u'ᄠ'),
(0x3174, 'M', u'ᄢ'),
(0x3175, 'M', u'ᄣ'),
(0x3176, 'M', u'ᄧ'),
(0x3177, 'M', u'ᄩ'),
(0x3178, 'M', u'ᄫ'),
(0x3179, 'M', u'ᄬ'),
(0x317A, 'M', u'ᄭ'),
(0x317B, 'M', u'ᄮ'),
(0x317C, 'M', u'ᄯ'),
(0x317D, 'M', u'ᄲ'),
(0x317E, 'M', u'ᄶ'),
(0x317F, 'M', u'ᅀ'),
(0x3180, 'M', u'ᅇ'),
(0x3181, 'M', u'ᅌ'),
(0x3182, 'M', u'ᇱ'),
(0x3183, 'M', u'ᇲ'),
(0x3184, 'M', u'ᅗ'),
(0x3185, 'M', u'ᅘ'),
(0x3186, 'M', u'ᅙ'),
(0x3187, 'M', u'ᆄ'),
(0x3188, 'M', u'ᆅ'),
(0x3189, 'M', u'ᆈ'),
(0x318A, 'M', u'ᆑ'),
(0x318B, 'M', u'ᆒ'),
(0x318C, 'M', u'ᆔ'),
(0x318D, 'M', u'ᆞ'),
(0x318E, 'M', u'ᆡ'),
(0x318F, 'X'),
(0x3190, 'V'),
(0x3192, 'M', u'一'),
(0x3193, 'M', u'二'),
(0x3194, 'M', u'三'),
(0x3195, 'M', u'四'),
(0x3196, 'M', u'上'),
(0x3197, 'M', u'中'),
(0x3198, 'M', u'下'),
(0x3199, 'M', u'甲'),
(0x319A, 'M', u'乙'),
(0x319B, 'M', u'丙'),
(0x319C, 'M', u'丁'),
(0x319D, 'M', u'天'),
]
def _seg_30():
return [
(0x319E, 'M', u'地'),
(0x319F, 'M', u'人'),
(0x31A0, 'V'),
(0x31BB, 'X'),
(0x31C0, 'V'),
(0x31E4, 'X'),
(0x31F0, 'V'),
(0x3200, '3', u'(ᄀ)'),
(0x3201, '3', u'(ᄂ)'),
(0x3202, '3', u'(ᄃ)'),
(0x3203, '3', u'(ᄅ)'),
(0x3204, '3', u'(ᄆ)'),
(0x3205, '3', u'(ᄇ)'),
(0x3206, '3', u'(ᄉ)'),
(0x3207, '3', u'(ᄋ)'),
(0x3208, '3', u'(ᄌ)'),
(0x3209, '3', u'(ᄎ)'),
(0x320A, '3', u'(ᄏ)'),
(0x320B, '3', u'(ᄐ)'),
(0x320C, '3', u'(ᄑ)'),
(0x320D, '3', u'(ᄒ)'),
(0x320E, '3', u'(가)'),
(0x320F, '3', u'(나)'),
(0x3210, '3', u'(다)'),
(0x3211, '3', u'(라)'),
(0x3212, '3', u'(마)'),
(0x3213, '3', u'(바)'),
(0x3214, '3', u'(사)'),
(0x3215, '3', u'(아)'),
(0x3216, '3', u'(자)'),
(0x3217, '3', u'(차)'),
(0x3218, '3', u'(카)'),
(0x3219, '3', u'(타)'),
(0x321A, '3', u'(파)'),
(0x321B, '3', u'(하)'),
(0x321C, '3', u'(주)'),
(0x321D, '3', u'(오전)'),
(0x321E, '3', u'(오후)'),
(0x321F, 'X'),
(0x3220, '3', u'(一)'),
(0x3221, '3', u'(二)'),
(0x3222, '3', u'(三)'),
(0x3223, '3', u'(四)'),
(0x3224, '3', u'(五)'),
(0x3225, '3', u'(六)'),
(0x3226, '3', u'(七)'),
(0x3227, '3', u'(八)'),
(0x3228, '3', u'(九)'),
(0x3229, '3', u'(十)'),
(0x322A, '3', u'(月)'),
(0x322B, '3', u'(火)'),
(0x322C, '3', u'(水)'),
(0x322D, '3', u'(木)'),
(0x322E, '3', u'(金)'),
(0x322F, '3', u'(土)'),
(0x3230, '3', u'(日)'),
(0x3231, '3', u'(株)'),
(0x3232, '3', u'(有)'),
(0x3233, '3', u'(社)'),
(0x3234, '3', u'(名)'),
(0x3235, '3', u'(特)'),
(0x3236, '3', u'(財)'),
(0x3237, '3', u'(祝)'),
(0x3238, '3', u'(労)'),
(0x3239, '3', u'(代)'),
(0x323A, '3', u'(呼)'),
(0x323B, '3', u'(学)'),
(0x323C, '3', u'(監)'),
(0x323D, '3', u'(企)'),
(0x323E, '3', u'(資)'),
(0x323F, '3', u'(協)'),
(0x3240, '3', u'(祭)'),
(0x3241, '3', u'(休)'),
(0x3242, '3', u'(自)'),
(0x3243, '3', u'(至)'),
(0x3244, 'M', u'問'),
(0x3245, 'M', u'幼'),
(0x3246, 'M', u'文'),
(0x3247, 'M', u'箏'),
(0x3248, 'V'),
(0x3250, 'M', u'pte'),
(0x3251, 'M', u'21'),
(0x3252, 'M', u'22'),
(0x3253, 'M', u'23'),
(0x3254, 'M', u'24'),
(0x3255, 'M', u'25'),
(0x3256, 'M', u'26'),
(0x3257, 'M', u'27'),
(0x3258, 'M', u'28'),
(0x3259, 'M', u'29'),
(0x325A, 'M', u'30'),
(0x325B, 'M', u'31'),
(0x325C, 'M', u'32'),
(0x325D, 'M', u'33'),
(0x325E, 'M', u'34'),
(0x325F, 'M', u'35'),
(0x3260, 'M', u'ᄀ'),
(0x3261, 'M', u'ᄂ'),
(0x3262, 'M', u'ᄃ'),
(0x3263, 'M', u'ᄅ'),
]
def _seg_31():
return [
(0x3264, 'M', u'ᄆ'),
(0x3265, 'M', u'ᄇ'),
(0x3266, 'M', u'ᄉ'),
(0x3267, 'M', u'ᄋ'),
(0x3268, 'M', u'ᄌ'),
(0x3269, 'M', u'ᄎ'),
(0x326A, 'M', u'ᄏ'),
(0x326B, 'M', u'ᄐ'),
(0x326C, 'M', u'ᄑ'),
(0x326D, 'M', u'ᄒ'),
(0x326E, 'M', u'가'),
(0x326F, 'M', u'나'),
(0x3270, 'M', u'다'),
(0x3271, 'M', u'라'),
(0x3272, 'M', u'마'),
(0x3273, 'M', u'바'),
(0x3274, 'M', u'사'),
(0x3275, 'M', u'아'),
(0x3276, 'M', u'자'),
(0x3277, 'M', u'차'),
(0x3278, 'M', u'카'),
(0x3279, 'M', u'타'),
(0x327A, 'M', u'파'),
(0x327B, 'M', u'하'),
(0x327C, 'M', u'참고'),
(0x327D, 'M', u'주의'),
(0x327E, 'M', u'우'),
(0x327F, 'V'),
(0x3280, 'M', u'一'),
(0x3281, 'M', u'二'),
(0x3282, 'M', u'三'),
(0x3283, 'M', u'四'),
(0x3284, 'M', u'五'),
(0x3285, 'M', u'六'),
(0x3286, 'M', u'七'),
(0x3287, 'M', u'八'),
(0x3288, 'M', u'九'),
(0x3289, 'M', u'十'),
(0x328A, 'M', u'月'),
(0x328B, 'M', u'火'),
(0x328C, 'M', u'水'),
(0x328D, 'M', u'木'),
(0x328E, 'M', u'金'),
(0x328F, 'M', u'土'),
(0x3290, 'M', u'日'),
(0x3291, 'M', u'株'),
(0x3292, 'M', u'有'),
(0x3293, 'M', u'社'),
(0x3294, 'M', u'名'),
(0x3295, 'M', u'特'),
(0x3296, 'M', u'財'),
(0x3297, 'M', u'祝'),
(0x3298, 'M', u'労'),
(0x3299, 'M', u'秘'),
(0x329A, 'M', u'男'),
(0x329B, 'M', u'女'),
(0x329C, 'M', u'適'),
(0x329D, 'M', u'優'),
(0x329E, 'M', u'印'),
(0x329F, 'M', u'注'),
(0x32A0, 'M', u'項'),
(0x32A1, 'M', u'休'),
(0x32A2, 'M', u'写'),
(0x32A3, 'M', u'正'),
(0x32A4, 'M', u'上'),
(0x32A5, 'M', u'中'),
(0x32A6, 'M', u'下'),
(0x32A7, 'M', u'左'),
(0x32A8, 'M', u'右'),
(0x32A9, 'M', u'医'),
(0x32AA, 'M', u'宗'),
(0x32AB, 'M', u'学'),
(0x32AC, 'M', u'監'),
(0x32AD, 'M', u'企'),
(0x32AE, 'M', u'資'),
(0x32AF, 'M', u'協'),
(0x32B0, 'M', u'夜'),
(0x32B1, 'M', u'36'),
(0x32B2, 'M', u'37'),
(0x32B3, 'M', u'38'),
(0x32B4, 'M', u'39'),
(0x32B5, 'M', u'40'),
(0x32B6, 'M', u'41'),
(0x32B7, 'M', u'42'),
(0x32B8, 'M', u'43'),
(0x32B9, 'M', u'44'),
(0x32BA, 'M', u'45'),
(0x32BB, 'M', u'46'),
(0x32BC, 'M', u'47'),
(0x32BD, 'M', u'48'),
(0x32BE, 'M', u'49'),
(0x32BF, 'M', u'50'),
(0x32C0, 'M', u'1月'),
(0x32C1, 'M', u'2月'),
(0x32C2, 'M', u'3月'),
(0x32C3, 'M', u'4月'),
(0x32C4, 'M', u'5月'),
(0x32C5, 'M', u'6月'),
(0x32C6, 'M', u'7月'),
(0x32C7, 'M', u'8月'),
]
def _seg_32():
return [
(0x32C8, 'M', u'9月'),
(0x32C9, 'M', u'10月'),
(0x32CA, 'M', u'11月'),
(0x32CB, 'M', u'12月'),
(0x32CC, 'M', u'hg'),
(0x32CD, 'M', u'erg'),
(0x32CE, 'M', u'ev'),
(0x32CF, 'M', u'ltd'),
(0x32D0, 'M', u'ア'),
(0x32D1, 'M', u'イ'),
(0x32D2, 'M', u'ウ'),
(0x32D3, 'M', u'エ'),
(0x32D4, 'M', u'オ'),
(0x32D5, 'M', u'カ'),
(0x32D6, 'M', u'キ'),
(0x32D7, 'M', u'ク'),
(0x32D8, 'M', u'ケ'),
(0x32D9, 'M', u'コ'),
(0x32DA, 'M', u'サ'),
(0x32DB, 'M', u'シ'),
(0x32DC, 'M', u'ス'),
(0x32DD, 'M', u'セ'),
(0x32DE, 'M', u'ソ'),
(0x32DF, 'M', u'タ'),
(0x32E0, 'M', u'チ'),
(0x32E1, 'M', u'ツ'),
(0x32E2, 'M', u'テ'),
(0x32E3, 'M', u'ト'),
(0x32E4, 'M', u'ナ'),
(0x32E5, 'M', u'ニ'),
(0x32E6, 'M', u'ヌ'),
(0x32E7, 'M', u'ネ'),
(0x32E8, 'M', u'ノ'),
(0x32E9, 'M', u'ハ'),
(0x32EA, 'M', u'ヒ'),
(0x32EB, 'M', u'フ'),
(0x32EC, 'M', u'ヘ'),
(0x32ED, 'M', u'ホ'),
(0x32EE, 'M', u'マ'),
(0x32EF, 'M', u'ミ'),
(0x32F0, 'M', u'ム'),
(0x32F1, 'M', u'メ'),
(0x32F2, 'M', u'モ'),
(0x32F3, 'M', u'ヤ'),
(0x32F4, 'M', u'ユ'),
(0x32F5, 'M', u'ヨ'),
(0x32F6, 'M', u'ラ'),
(0x32F7, 'M', u'リ'),
(0x32F8, 'M', u'ル'),
(0x32F9, 'M', u'レ'),
(0x32FA, 'M', u'ロ'),
(0x32FB, 'M', u'ワ'),
(0x32FC, 'M', u'ヰ'),
(0x32FD, 'M', u'ヱ'),
(0x32FE, 'M', u'ヲ'),
(0x32FF, 'X'),
(0x3300, 'M', u'アパート'),
(0x3301, 'M', u'アルファ'),
(0x3302, 'M', u'アンペア'),
(0x3303, 'M', u'アール'),
(0x3304, 'M', u'イニング'),
(0x3305, 'M', u'インチ'),
(0x3306, 'M', u'ウォン'),
(0x3307, 'M', u'エスクード'),
(0x3308, 'M', u'エーカー'),
(0x3309, 'M', u'オンス'),
(0x330A, 'M', u'オーム'),
(0x330B, 'M', u'カイリ'),
(0x330C, 'M', u'カラット'),
(0x330D, 'M', u'カロリー'),
(0x330E, 'M', u'ガロン'),
(0x330F, 'M', u'ガンマ'),
(0x3310, 'M', u'ギガ'),
(0x3311, 'M', u'ギニー'),
(0x3312, 'M', u'キュリー'),
(0x3313, 'M', u'ギルダー'),
(0x3314, 'M', u'キロ'),
(0x3315, 'M', u'キログラム'),
(0x3316, 'M', u'キロメートル'),
(0x3317, 'M', u'キロワット'),
(0x3318, 'M', u'グラム'),
(0x3319, 'M', u'グラムトン'),
(0x331A, 'M', u'クルゼイロ'),
(0x331B, 'M', u'クローネ'),
(0x331C, 'M', u'ケース'),
(0x331D, 'M', u'コルナ'),
(0x331E, 'M', u'コーポ'),
(0x331F, 'M', u'サイクル'),
(0x3320, 'M', u'サンチーム'),
(0x3321, 'M', u'シリング'),
(0x3322, 'M', u'センチ'),
(0x3323, 'M', u'セント'),
(0x3324, 'M', u'ダース'),
(0x3325, 'M', u'デシ'),
(0x3326, 'M', u'ドル'),
(0x3327, 'M', u'トン'),
(0x3328, 'M', u'ナノ'),
(0x3329, 'M', u'ノット'),
(0x332A, 'M', u'ハイツ'),
(0x332B, 'M', u'パーセント'),
]
def _seg_33():
return [
(0x332C, 'M', u'パーツ'),
(0x332D, 'M', u'バーレル'),
(0x332E, 'M', u'ピアストル'),
(0x332F, 'M', u'ピクル'),
(0x3330, 'M', u'ピコ'),
(0x3331, 'M', u'ビル'),
(0x3332, 'M', u'ファラッド'),
(0x3333, 'M', u'フィート'),
(0x3334, 'M', u'ブッシェル'),
(0x3335, 'M', u'フラン'),
(0x3336, 'M', u'ヘクタール'),
(0x3337, 'M', u'ペソ'),
(0x3338, 'M', u'ペニヒ'),
(0x3339, 'M', u'ヘルツ'),
(0x333A, 'M', u'ペンス'),
(0x333B, 'M', u'ページ'),
(0x333C, 'M', u'ベータ'),
(0x333D, 'M', u'ポイント'),
(0x333E, 'M', u'ボルト'),
(0x333F, 'M', u'ホン'),
(0x3340, 'M', u'ポンド'),
(0x3341, 'M', u'ホール'),
(0x3342, 'M', u'ホーン'),
(0x3343, 'M', u'マイクロ'),
(0x3344, 'M', u'マイル'),
(0x3345, 'M', u'マッハ'),
(0x3346, 'M', u'マルク'),
(0x3347, 'M', u'マンション'),
(0x3348, 'M', u'ミクロン'),
(0x3349, 'M', u'ミリ'),
(0x334A, 'M', u'ミリバール'),
(0x334B, 'M', u'メガ'),
(0x334C, 'M', u'メガトン'),
(0x334D, 'M', u'メートル'),
(0x334E, 'M', u'ヤード'),
(0x334F, 'M', u'ヤール'),
(0x3350, 'M', u'ユアン'),
(0x3351, 'M', u'リットル'),
(0x3352, 'M', u'リラ'),
(0x3353, 'M', u'ルピー'),
(0x3354, 'M', u'ルーブル'),
(0x3355, 'M', u'レム'),
(0x3356, 'M', u'レントゲン'),
(0x3357, 'M', u'ワット'),
(0x3358, 'M', u'0点'),
(0x3359, 'M', u'1点'),
(0x335A, 'M', u'2点'),
(0x335B, 'M', u'3点'),
(0x335C, 'M', u'4点'),
(0x335D, 'M', u'5点'),
(0x335E, 'M', u'6点'),
(0x335F, 'M', u'7点'),
(0x3360, 'M', u'8点'),
(0x3361, 'M', u'9点'),
(0x3362, 'M', u'10点'),
(0x3363, 'M', u'11点'),
(0x3364, 'M', u'12点'),
(0x3365, 'M', u'13点'),
(0x3366, 'M', u'14点'),
(0x3367, 'M', u'15点'),
(0x3368, 'M', u'16点'),
(0x3369, 'M', u'17点'),
(0x336A, 'M', u'18点'),
(0x336B, 'M', u'19点'),
(0x336C, 'M', u'20点'),
(0x336D, 'M', u'21点'),
(0x336E, 'M', u'22点'),
(0x336F, 'M', u'23点'),
(0x3370, 'M', u'24点'),
(0x3371, 'M', u'hpa'),
(0x3372, 'M', u'da'),
(0x3373, 'M', u'au'),
(0x3374, 'M', u'bar'),
(0x3375, 'M', u'ov'),
(0x3376, 'M', u'pc'),
(0x3377, 'M', u'dm'),
(0x3378, 'M', u'dm2'),
(0x3379, 'M', u'dm3'),
(0x337A, 'M', u'iu'),
(0x337B, 'M', u'平成'),
(0x337C, 'M', u'昭和'),
(0x337D, 'M', u'大正'),
(0x337E, 'M', u'明治'),
(0x337F, 'M', u'株式会社'),
(0x3380, 'M', u'pa'),
(0x3381, 'M', u'na'),
(0x3382, 'M', u'μa'),
(0x3383, 'M', u'ma'),
(0x3384, 'M', u'ka'),
(0x3385, 'M', u'kb'),
(0x3386, 'M', u'mb'),
(0x3387, 'M', u'gb'),
(0x3388, 'M', u'cal'),
(0x3389, 'M', u'kcal'),
(0x338A, 'M', u'pf'),
(0x338B, 'M', u'nf'),
(0x338C, 'M', u'μf'),
(0x338D, 'M', u'μg'),
(0x338E, 'M', u'mg'),
(0x338F, 'M', u'kg'),
]
def _seg_34():
return [
(0x3390, 'M', u'hz'),
(0x3391, 'M', u'khz'),
(0x3392, 'M', u'mhz'),
(0x3393, 'M', u'ghz'),
(0x3394, 'M', u'thz'),
(0x3395, 'M', u'μl'),
(0x3396, 'M', u'ml'),
(0x3397, 'M', u'dl'),
(0x3398, 'M', u'kl'),
(0x3399, 'M', u'fm'),
(0x339A, 'M', u'nm'),
(0x339B, 'M', u'μm'),
(0x339C, 'M', u'mm'),
(0x339D, 'M', u'cm'),
(0x339E, 'M', u'km'),
(0x339F, 'M', u'mm2'),
(0x33A0, 'M', u'cm2'),
(0x33A1, 'M', u'm2'),
(0x33A2, 'M', u'km2'),
(0x33A3, 'M', u'mm3'),
(0x33A4, 'M', u'cm3'),
(0x33A5, 'M', u'm3'),
(0x33A6, 'M', u'km3'),
(0x33A7, 'M', u'm∕s'),
(0x33A8, 'M', u'm∕s2'),
(0x33A9, 'M', u'pa'),
(0x33AA, 'M', u'kpa'),
(0x33AB, 'M', u'mpa'),
(0x33AC, 'M', u'gpa'),
(0x33AD, 'M', u'rad'),
(0x33AE, 'M', u'rad∕s'),
(0x33AF, 'M', u'rad∕s2'),
(0x33B0, 'M', u'ps'),
(0x33B1, 'M', u'ns'),
(0x33B2, 'M', u'μs'),
(0x33B3, 'M', u'ms'),
(0x33B4, 'M', u'pv'),
(0x33B5, 'M', u'nv'),
(0x33B6, 'M', u'μv'),
(0x33B7, 'M', u'mv'),
(0x33B8, 'M', u'kv'),
(0x33B9, 'M', u'mv'),
(0x33BA, 'M', u'pw'),
(0x33BB, 'M', u'nw'),
(0x33BC, 'M', u'μw'),
(0x33BD, 'M', u'mw'),
(0x33BE, 'M', u'kw'),
(0x33BF, 'M', u'mw'),
(0x33C0, 'M', u'kω'),
(0x33C1, 'M', u'mω'),
(0x33C2, 'X'),
(0x33C3, 'M', u'bq'),
(0x33C4, 'M', u'cc'),
(0x33C5, 'M', u'cd'),
(0x33C6, 'M', u'c∕kg'),
(0x33C7, 'X'),
(0x33C8, 'M', u'db'),
(0x33C9, 'M', u'gy'),
(0x33CA, 'M', u'ha'),
(0x33CB, 'M', u'hp'),
(0x33CC, 'M', u'in'),
(0x33CD, 'M', u'kk'),
(0x33CE, 'M', u'km'),
(0x33CF, 'M', u'kt'),
(0x33D0, 'M', u'lm'),
(0x33D1, 'M', u'ln'),
(0x33D2, 'M', u'log'),
(0x33D3, 'M', u'lx'),
(0x33D4, 'M', u'mb'),
(0x33D5, 'M', u'mil'),
(0x33D6, 'M', u'mol'),
(0x33D7, 'M', u'ph'),
(0x33D8, 'X'),
(0x33D9, 'M', u'ppm'),
(0x33DA, 'M', u'pr'),
(0x33DB, 'M', u'sr'),
(0x33DC, 'M', u'sv'),
(0x33DD, 'M', u'wb'),
(0x33DE, 'M', u'v∕m'),
(0x33DF, 'M', u'a∕m'),
(0x33E0, 'M', u'1日'),
(0x33E1, 'M', u'2日'),
(0x33E2, 'M', u'3日'),
(0x33E3, 'M', u'4日'),
(0x33E4, 'M', u'5日'),
(0x33E5, 'M', u'6日'),
(0x33E6, 'M', u'7日'),
(0x33E7, 'M', u'8日'),
(0x33E8, 'M', u'9日'),
(0x33E9, 'M', u'10日'),
(0x33EA, 'M', u'11日'),
(0x33EB, 'M', u'12日'),
(0x33EC, 'M', u'13日'),
(0x33ED, 'M', u'14日'),
(0x33EE, 'M', u'15日'),
(0x33EF, 'M', u'16日'),
(0x33F0, 'M', u'17日'),
(0x33F1, 'M', u'18日'),
(0x33F2, 'M', u'19日'),
(0x33F3, 'M', u'20日'),
]
def _seg_35():
return [
(0x33F4, 'M', u'21日'),
(0x33F5, 'M', u'22日'),
(0x33F6, 'M', u'23日'),
(0x33F7, 'M', u'24日'),
(0x33F8, 'M', u'25日'),
(0x33F9, 'M', u'26日'),
(0x33FA, 'M', u'27日'),
(0x33FB, 'M', u'28日'),
(0x33FC, 'M', u'29日'),
(0x33FD, 'M', u'30日'),
(0x33FE, 'M', u'31日'),
(0x33FF, 'M', u'gal'),
(0x3400, 'V'),
(0x4DB6, 'X'),
(0x4DC0, 'V'),
(0x9FF0, 'X'),
(0xA000, 'V'),
(0xA48D, 'X'),
(0xA490, 'V'),
(0xA4C7, 'X'),
(0xA4D0, 'V'),
(0xA62C, 'X'),
(0xA640, 'M', u'ꙁ'),
(0xA641, 'V'),
(0xA642, 'M', u'ꙃ'),
(0xA643, 'V'),
(0xA644, 'M', u'ꙅ'),
(0xA645, 'V'),
(0xA646, 'M', u'ꙇ'),
(0xA647, 'V'),
(0xA648, 'M', u'ꙉ'),
(0xA649, 'V'),
(0xA64A, 'M', u'ꙋ'),
(0xA64B, 'V'),
(0xA64C, 'M', u'ꙍ'),
(0xA64D, 'V'),
(0xA64E, 'M', u'ꙏ'),
(0xA64F, 'V'),
(0xA650, 'M', u'ꙑ'),
(0xA651, 'V'),
(0xA652, 'M', u'ꙓ'),
(0xA653, 'V'),
(0xA654, 'M', u'ꙕ'),
(0xA655, 'V'),
(0xA656, 'M', u'ꙗ'),
(0xA657, 'V'),
(0xA658, 'M', u'ꙙ'),
(0xA659, 'V'),
(0xA65A, 'M', u'ꙛ'),
(0xA65B, 'V'),
(0xA65C, 'M', u'ꙝ'),
(0xA65D, 'V'),
(0xA65E, 'M', u'ꙟ'),
(0xA65F, 'V'),
(0xA660, 'M', u'ꙡ'),
(0xA661, 'V'),
(0xA662, 'M', u'ꙣ'),
(0xA663, 'V'),
(0xA664, 'M', u'ꙥ'),
(0xA665, 'V'),
(0xA666, 'M', u'ꙧ'),
(0xA667, 'V'),
(0xA668, 'M', u'ꙩ'),
(0xA669, 'V'),
(0xA66A, 'M', u'ꙫ'),
(0xA66B, 'V'),
(0xA66C, 'M', u'ꙭ'),
(0xA66D, 'V'),
(0xA680, 'M', u'ꚁ'),
(0xA681, 'V'),
(0xA682, 'M', u'ꚃ'),
(0xA683, 'V'),
(0xA684, 'M', u'ꚅ'),
(0xA685, 'V'),
(0xA686, 'M', u'ꚇ'),
(0xA687, 'V'),
(0xA688, 'M', u'ꚉ'),
(0xA689, 'V'),
(0xA68A, 'M', u'ꚋ'),
(0xA68B, 'V'),
(0xA68C, 'M', u'ꚍ'),
(0xA68D, 'V'),
(0xA68E, 'M', u'ꚏ'),
(0xA68F, 'V'),
(0xA690, 'M', u'ꚑ'),
(0xA691, 'V'),
(0xA692, 'M', u'ꚓ'),
(0xA693, 'V'),
(0xA694, 'M', u'ꚕ'),
(0xA695, 'V'),
(0xA696, 'M', u'ꚗ'),
(0xA697, 'V'),
(0xA698, 'M', u'ꚙ'),
(0xA699, 'V'),
(0xA69A, 'M', u'ꚛ'),
(0xA69B, 'V'),
(0xA69C, 'M', u'ъ'),
(0xA69D, 'M', u'ь'),
(0xA69E, 'V'),
(0xA6F8, 'X'),
]
def _seg_36():
return [
(0xA700, 'V'),
(0xA722, 'M', u'ꜣ'),
(0xA723, 'V'),
(0xA724, 'M', u'ꜥ'),
(0xA725, 'V'),
(0xA726, 'M', u'ꜧ'),
(0xA727, 'V'),
(0xA728, 'M', u'ꜩ'),
(0xA729, 'V'),
(0xA72A, 'M', u'ꜫ'),
(0xA72B, 'V'),
(0xA72C, 'M', u'ꜭ'),
(0xA72D, 'V'),
(0xA72E, 'M', u'ꜯ'),
(0xA72F, 'V'),
(0xA732, 'M', u'ꜳ'),
(0xA733, 'V'),
(0xA734, 'M', u'ꜵ'),
(0xA735, 'V'),
(0xA736, 'M', u'ꜷ'),
(0xA737, 'V'),
(0xA738, 'M', u'ꜹ'),
(0xA739, 'V'),
(0xA73A, 'M', u'ꜻ'),
(0xA73B, 'V'),
(0xA73C, 'M', u'ꜽ'),
(0xA73D, 'V'),
(0xA73E, 'M', u'ꜿ'),
(0xA73F, 'V'),
(0xA740, 'M', u'ꝁ'),
(0xA741, 'V'),
(0xA742, 'M', u'ꝃ'),
(0xA743, 'V'),
(0xA744, 'M', u'ꝅ'),
(0xA745, 'V'),
(0xA746, 'M', u'ꝇ'),
(0xA747, 'V'),
(0xA748, 'M', u'ꝉ'),
(0xA749, 'V'),
(0xA74A, 'M', u'ꝋ'),
(0xA74B, 'V'),
(0xA74C, 'M', u'ꝍ'),
(0xA74D, 'V'),
(0xA74E, 'M', u'ꝏ'),
(0xA74F, 'V'),
(0xA750, 'M', u'ꝑ'),
(0xA751, 'V'),
(0xA752, 'M', u'ꝓ'),
(0xA753, 'V'),
(0xA754, 'M', u'ꝕ'),
(0xA755, 'V'),
(0xA756, 'M', u'ꝗ'),
(0xA757, 'V'),
(0xA758, 'M', u'ꝙ'),
(0xA759, 'V'),
(0xA75A, 'M', u'ꝛ'),
(0xA75B, 'V'),
(0xA75C, 'M', u'ꝝ'),
(0xA75D, 'V'),
(0xA75E, 'M', u'ꝟ'),
(0xA75F, 'V'),
(0xA760, 'M', u'ꝡ'),
(0xA761, 'V'),
(0xA762, 'M', u'ꝣ'),
(0xA763, 'V'),
(0xA764, 'M', u'ꝥ'),
(0xA765, 'V'),
(0xA766, 'M', u'ꝧ'),
(0xA767, 'V'),
(0xA768, 'M', u'ꝩ'),
(0xA769, 'V'),
(0xA76A, 'M', u'ꝫ'),
(0xA76B, 'V'),
(0xA76C, 'M', u'ꝭ'),
(0xA76D, 'V'),
(0xA76E, 'M', u'ꝯ'),
(0xA76F, 'V'),
(0xA770, 'M', u'ꝯ'),
(0xA771, 'V'),
(0xA779, 'M', u'ꝺ'),
(0xA77A, 'V'),
(0xA77B, 'M', u'ꝼ'),
(0xA77C, 'V'),
(0xA77D, 'M', u'ᵹ'),
(0xA77E, 'M', u'ꝿ'),
(0xA77F, 'V'),
(0xA780, 'M', u'ꞁ'),
(0xA781, 'V'),
(0xA782, 'M', u'ꞃ'),
(0xA783, 'V'),
(0xA784, 'M', u'ꞅ'),
(0xA785, 'V'),
(0xA786, 'M', u'ꞇ'),
(0xA787, 'V'),
(0xA78B, 'M', u'ꞌ'),
(0xA78C, 'V'),
(0xA78D, 'M', u'ɥ'),
(0xA78E, 'V'),
(0xA790, 'M', u'ꞑ'),
(0xA791, 'V'),
]
def _seg_37():
return [
(0xA792, 'M', u'ꞓ'),
(0xA793, 'V'),
(0xA796, 'M', u'ꞗ'),
(0xA797, 'V'),
(0xA798, 'M', u'ꞙ'),
(0xA799, 'V'),
(0xA79A, 'M', u'ꞛ'),
(0xA79B, 'V'),
(0xA79C, 'M', u'ꞝ'),
(0xA79D, 'V'),
(0xA79E, 'M', u'ꞟ'),
(0xA79F, 'V'),
(0xA7A0, 'M', u'ꞡ'),
(0xA7A1, 'V'),
(0xA7A2, 'M', u'ꞣ'),
(0xA7A3, 'V'),
(0xA7A4, 'M', u'ꞥ'),
(0xA7A5, 'V'),
(0xA7A6, 'M', u'ꞧ'),
(0xA7A7, 'V'),
(0xA7A8, 'M', u'ꞩ'),
(0xA7A9, 'V'),
(0xA7AA, 'M', u'ɦ'),
(0xA7AB, 'M', u'ɜ'),
(0xA7AC, 'M', u'ɡ'),
(0xA7AD, 'M', u'ɬ'),
(0xA7AE, 'M', u'ɪ'),
(0xA7AF, 'V'),
(0xA7B0, 'M', u'ʞ'),
(0xA7B1, 'M', u'ʇ'),
(0xA7B2, 'M', u'ʝ'),
(0xA7B3, 'M', u'ꭓ'),
(0xA7B4, 'M', u'ꞵ'),
(0xA7B5, 'V'),
(0xA7B6, 'M', u'ꞷ'),
(0xA7B7, 'V'),
(0xA7B8, 'X'),
(0xA7B9, 'V'),
(0xA7BA, 'X'),
(0xA7F7, 'V'),
(0xA7F8, 'M', u'ħ'),
(0xA7F9, 'M', u'œ'),
(0xA7FA, 'V'),
(0xA82C, 'X'),
(0xA830, 'V'),
(0xA83A, 'X'),
(0xA840, 'V'),
(0xA878, 'X'),
(0xA880, 'V'),
(0xA8C6, 'X'),
(0xA8CE, 'V'),
(0xA8DA, 'X'),
(0xA8E0, 'V'),
(0xA954, 'X'),
(0xA95F, 'V'),
(0xA97D, 'X'),
(0xA980, 'V'),
(0xA9CE, 'X'),
(0xA9CF, 'V'),
(0xA9DA, 'X'),
(0xA9DE, 'V'),
(0xA9FF, 'X'),
(0xAA00, 'V'),
(0xAA37, 'X'),
(0xAA40, 'V'),
(0xAA4E, 'X'),
(0xAA50, 'V'),
(0xAA5A, 'X'),
(0xAA5C, 'V'),
(0xAAC3, 'X'),
(0xAADB, 'V'),
(0xAAF7, 'X'),
(0xAB01, 'V'),
(0xAB07, 'X'),
(0xAB09, 'V'),
(0xAB0F, 'X'),
(0xAB11, 'V'),
(0xAB17, 'X'),
(0xAB20, 'V'),
(0xAB27, 'X'),
(0xAB28, 'V'),
(0xAB2F, 'X'),
(0xAB30, 'V'),
(0xAB5C, 'M', u'ꜧ'),
(0xAB5D, 'M', u'ꬷ'),
(0xAB5E, 'M', u'ɫ'),
(0xAB5F, 'M', u'ꭒ'),
(0xAB60, 'V'),
(0xAB66, 'X'),
(0xAB70, 'M', u'Ꭰ'),
(0xAB71, 'M', u'Ꭱ'),
(0xAB72, 'M', u'Ꭲ'),
(0xAB73, 'M', u'Ꭳ'),
(0xAB74, 'M', u'Ꭴ'),
(0xAB75, 'M', u'Ꭵ'),
(0xAB76, 'M', u'Ꭶ'),
(0xAB77, 'M', u'Ꭷ'),
(0xAB78, 'M', u'Ꭸ'),
(0xAB79, 'M', u'Ꭹ'),
(0xAB7A, 'M', u'Ꭺ'),
]
def _seg_38():
return [
(0xAB7B, 'M', u'Ꭻ'),
(0xAB7C, 'M', u'Ꭼ'),
(0xAB7D, 'M', u'Ꭽ'),
(0xAB7E, 'M', u'Ꭾ'),
(0xAB7F, 'M', u'Ꭿ'),
(0xAB80, 'M', u'Ꮀ'),
(0xAB81, 'M', u'Ꮁ'),
(0xAB82, 'M', u'Ꮂ'),
(0xAB83, 'M', u'Ꮃ'),
(0xAB84, 'M', u'Ꮄ'),
(0xAB85, 'M', u'Ꮅ'),
(0xAB86, 'M', u'Ꮆ'),
(0xAB87, 'M', u'Ꮇ'),
(0xAB88, 'M', u'Ꮈ'),
(0xAB89, 'M', u'Ꮉ'),
(0xAB8A, 'M', u'Ꮊ'),
(0xAB8B, 'M', u'Ꮋ'),
(0xAB8C, 'M', u'Ꮌ'),
(0xAB8D, 'M', u'Ꮍ'),
(0xAB8E, 'M', u'Ꮎ'),
(0xAB8F, 'M', u'Ꮏ'),
(0xAB90, 'M', u'Ꮐ'),
(0xAB91, 'M', u'Ꮑ'),
(0xAB92, 'M', u'Ꮒ'),
(0xAB93, 'M', u'Ꮓ'),
(0xAB94, 'M', u'Ꮔ'),
(0xAB95, 'M', u'Ꮕ'),
(0xAB96, 'M', u'Ꮖ'),
(0xAB97, 'M', u'Ꮗ'),
(0xAB98, 'M', u'Ꮘ'),
(0xAB99, 'M', u'Ꮙ'),
(0xAB9A, 'M', u'Ꮚ'),
(0xAB9B, 'M', u'Ꮛ'),
(0xAB9C, 'M', u'Ꮜ'),
(0xAB9D, 'M', u'Ꮝ'),
(0xAB9E, 'M', u'Ꮞ'),
(0xAB9F, 'M', u'Ꮟ'),
(0xABA0, 'M', u'Ꮠ'),
(0xABA1, 'M', u'Ꮡ'),
(0xABA2, 'M', u'Ꮢ'),
(0xABA3, 'M', u'Ꮣ'),
(0xABA4, 'M', u'Ꮤ'),
(0xABA5, 'M', u'Ꮥ'),
(0xABA6, 'M', u'Ꮦ'),
(0xABA7, 'M', u'Ꮧ'),
(0xABA8, 'M', u'Ꮨ'),
(0xABA9, 'M', u'Ꮩ'),
(0xABAA, 'M', u'Ꮪ'),
(0xABAB, 'M', u'Ꮫ'),
(0xABAC, 'M', u'Ꮬ'),
(0xABAD, 'M', u'Ꮭ'),
(0xABAE, 'M', u'Ꮮ'),
(0xABAF, 'M', u'Ꮯ'),
(0xABB0, 'M', u'Ꮰ'),
(0xABB1, 'M', u'Ꮱ'),
(0xABB2, 'M', u'Ꮲ'),
(0xABB3, 'M', u'Ꮳ'),
(0xABB4, 'M', u'Ꮴ'),
(0xABB5, 'M', u'Ꮵ'),
(0xABB6, 'M', u'Ꮶ'),
(0xABB7, 'M', u'Ꮷ'),
(0xABB8, 'M', u'Ꮸ'),
(0xABB9, 'M', u'Ꮹ'),
(0xABBA, 'M', u'Ꮺ'),
(0xABBB, 'M', u'Ꮻ'),
(0xABBC, 'M', u'Ꮼ'),
(0xABBD, 'M', u'Ꮽ'),
(0xABBE, 'M', u'Ꮾ'),
(0xABBF, 'M', u'Ꮿ'),
(0xABC0, 'V'),
(0xABEE, 'X'),
(0xABF0, 'V'),
(0xABFA, 'X'),
(0xAC00, 'V'),
(0xD7A4, 'X'),
(0xD7B0, 'V'),
(0xD7C7, 'X'),
(0xD7CB, 'V'),
(0xD7FC, 'X'),
(0xF900, 'M', u'豈'),
(0xF901, 'M', u'更'),
(0xF902, 'M', u'車'),
(0xF903, 'M', u'賈'),
(0xF904, 'M', u'滑'),
(0xF905, 'M', u'串'),
(0xF906, 'M', u'句'),
(0xF907, 'M', u'龜'),
(0xF909, 'M', u'契'),
(0xF90A, 'M', u'金'),
(0xF90B, 'M', u'喇'),
(0xF90C, 'M', u'奈'),
(0xF90D, 'M', u'懶'),
(0xF90E, 'M', u'癩'),
(0xF90F, 'M', u'羅'),
(0xF910, 'M', u'蘿'),
(0xF911, 'M', u'螺'),
(0xF912, 'M', u'裸'),
(0xF913, 'M', u'邏'),
(0xF914, 'M', u'樂'),
(0xF915, 'M', u'洛'),
]
def _seg_39():
return [
(0xF916, 'M', u'烙'),
(0xF917, 'M', u'珞'),
(0xF918, 'M', u'落'),
(0xF919, 'M', u'酪'),
(0xF91A, 'M', u'駱'),
(0xF91B, 'M', u'亂'),
(0xF91C, 'M', u'卵'),
(0xF91D, 'M', u'欄'),
(0xF91E, 'M', u'爛'),
(0xF91F, 'M', u'蘭'),
(0xF920, 'M', u'鸞'),
(0xF921, 'M', u'嵐'),
(0xF922, 'M', u'濫'),
(0xF923, 'M', u'藍'),
(0xF924, 'M', u'襤'),
(0xF925, 'M', u'拉'),
(0xF926, 'M', u'臘'),
(0xF927, 'M', u'蠟'),
(0xF928, 'M', u'廊'),
(0xF929, 'M', u'朗'),
(0xF92A, 'M', u'浪'),
(0xF92B, 'M', u'狼'),
(0xF92C, 'M', u'郎'),
(0xF92D, 'M', u'來'),
(0xF92E, 'M', u'冷'),
(0xF92F, 'M', u'勞'),
(0xF930, 'M', u'擄'),
(0xF931, 'M', u'櫓'),
(0xF932, 'M', u'爐'),
(0xF933, 'M', u'盧'),
(0xF934, 'M', u'老'),
(0xF935, 'M', u'蘆'),
(0xF936, 'M', u'虜'),
(0xF937, 'M', u'路'),
(0xF938, 'M', u'露'),
(0xF939, 'M', u'魯'),
(0xF93A, 'M', u'鷺'),
(0xF93B, 'M', u'碌'),
(0xF93C, 'M', u'祿'),
(0xF93D, 'M', u'綠'),
(0xF93E, 'M', u'菉'),
(0xF93F, 'M', u'錄'),
(0xF940, 'M', u'鹿'),
(0xF941, 'M', u'論'),
(0xF942, 'M', u'壟'),
(0xF943, 'M', u'弄'),
(0xF944, 'M', u'籠'),
(0xF945, 'M', u'聾'),
(0xF946, 'M', u'牢'),
(0xF947, 'M', u'磊'),
(0xF948, 'M', u'賂'),
(0xF949, 'M', u'雷'),
(0xF94A, 'M', u'壘'),
(0xF94B, 'M', u'屢'),
(0xF94C, 'M', u'樓'),
(0xF94D, 'M', u'淚'),
(0xF94E, 'M', u'漏'),
(0xF94F, 'M', u'累'),
(0xF950, 'M', u'縷'),
(0xF951, 'M', u'陋'),
(0xF952, 'M', u'勒'),
(0xF953, 'M', u'肋'),
(0xF954, 'M', u'凜'),
(0xF955, 'M', u'凌'),
(0xF956, 'M', u'稜'),
(0xF957, 'M', u'綾'),
(0xF958, 'M', u'菱'),
(0xF959, 'M', u'陵'),
(0xF95A, 'M', u'讀'),
(0xF95B, 'M', u'拏'),
(0xF95C, 'M', u'樂'),
(0xF95D, 'M', u'諾'),
(0xF95E, 'M', u'丹'),
(0xF95F, 'M', u'寧'),
(0xF960, 'M', u'怒'),
(0xF961, 'M', u'率'),
(0xF962, 'M', u'異'),
(0xF963, 'M', u'北'),
(0xF964, 'M', u'磻'),
(0xF965, 'M', u'便'),
(0xF966, 'M', u'復'),
(0xF967, 'M', u'不'),
(0xF968, 'M', u'泌'),
(0xF969, 'M', u'數'),
(0xF96A, 'M', u'索'),
(0xF96B, 'M', u'參'),
(0xF96C, 'M', u'塞'),
(0xF96D, 'M', u'省'),
(0xF96E, 'M', u'葉'),
(0xF96F, 'M', u'說'),
(0xF970, 'M', u'殺'),
(0xF971, 'M', u'辰'),
(0xF972, 'M', u'沈'),
(0xF973, 'M', u'拾'),
(0xF974, 'M', u'若'),
(0xF975, 'M', u'掠'),
(0xF976, 'M', u'略'),
(0xF977, 'M', u'亮'),
(0xF978, 'M', u'兩'),
(0xF979, 'M', u'凉'),
]
def _seg_40():
return [
(0xF97A, 'M', u'梁'),
(0xF97B, 'M', u'糧'),
(0xF97C, 'M', u'良'),
(0xF97D, 'M', u'諒'),
(0xF97E, 'M', u'量'),
(0xF97F, 'M', u'勵'),
(0xF980, 'M', u'呂'),
(0xF981, 'M', u'女'),
(0xF982, 'M', u'廬'),
(0xF983, 'M', u'旅'),
(0xF984, 'M', u'濾'),
(0xF985, 'M', u'礪'),
(0xF986, 'M', u'閭'),
(0xF987, 'M', u'驪'),
(0xF988, 'M', u'麗'),
(0xF989, 'M', u'黎'),
(0xF98A, 'M', u'力'),
(0xF98B, 'M', u'曆'),
(0xF98C, 'M', u'歷'),
(0xF98D, 'M', u'轢'),
(0xF98E, 'M', u'年'),
(0xF98F, 'M', u'憐'),
(0xF990, 'M', u'戀'),
(0xF991, 'M', u'撚'),
(0xF992, 'M', u'漣'),
(0xF993, 'M', u'煉'),
(0xF994, 'M', u'璉'),
(0xF995, 'M', u'秊'),
(0xF996, 'M', u'練'),
(0xF997, 'M', u'聯'),
(0xF998, 'M', u'輦'),
(0xF999, 'M', u'蓮'),
(0xF99A, 'M', u'連'),
(0xF99B, 'M', u'鍊'),
(0xF99C, 'M', u'列'),
(0xF99D, 'M', u'劣'),
(0xF99E, 'M', u'咽'),
(0xF99F, 'M', u'烈'),
(0xF9A0, 'M', u'裂'),
(0xF9A1, 'M', u'說'),
(0xF9A2, 'M', u'廉'),
(0xF9A3, 'M', u'念'),
(0xF9A4, 'M', u'捻'),
(0xF9A5, 'M', u'殮'),
(0xF9A6, 'M', u'簾'),
(0xF9A7, 'M', u'獵'),
(0xF9A8, 'M', u'令'),
(0xF9A9, 'M', u'囹'),
(0xF9AA, 'M', u'寧'),
(0xF9AB, 'M', u'嶺'),
(0xF9AC, 'M', u'怜'),
(0xF9AD, 'M', u'玲'),
(0xF9AE, 'M', u'瑩'),
(0xF9AF, 'M', u'羚'),
(0xF9B0, 'M', u'聆'),
(0xF9B1, 'M', u'鈴'),
(0xF9B2, 'M', u'零'),
(0xF9B3, 'M', u'靈'),
(0xF9B4, 'M', u'領'),
(0xF9B5, 'M', u'例'),
(0xF9B6, 'M', u'禮'),
(0xF9B7, 'M', u'醴'),
(0xF9B8, 'M', u'隸'),
(0xF9B9, 'M', u'惡'),
(0xF9BA, 'M', u'了'),
(0xF9BB, 'M', u'僚'),
(0xF9BC, 'M', u'寮'),
(0xF9BD, 'M', u'尿'),
(0xF9BE, 'M', u'料'),
(0xF9BF, 'M', u'樂'),
(0xF9C0, 'M', u'燎'),
(0xF9C1, 'M', u'療'),
(0xF9C2, 'M', u'蓼'),
(0xF9C3, 'M', u'遼'),
(0xF9C4, 'M', u'龍'),
(0xF9C5, 'M', u'暈'),
(0xF9C6, 'M', u'阮'),
(0xF9C7, 'M', u'劉'),
(0xF9C8, 'M', u'杻'),
(0xF9C9, 'M', u'柳'),
(0xF9CA, 'M', u'流'),
(0xF9CB, 'M', u'溜'),
(0xF9CC, 'M', u'琉'),
(0xF9CD, 'M', u'留'),
(0xF9CE, 'M', u'硫'),
(0xF9CF, 'M', u'紐'),
(0xF9D0, 'M', u'類'),
(0xF9D1, 'M', u'六'),
(0xF9D2, 'M', u'戮'),
(0xF9D3, 'M', u'陸'),
(0xF9D4, 'M', u'倫'),
(0xF9D5, 'M', u'崙'),
(0xF9D6, 'M', u'淪'),
(0xF9D7, 'M', u'輪'),
(0xF9D8, 'M', u'律'),
(0xF9D9, 'M', u'慄'),
(0xF9DA, 'M', u'栗'),
(0xF9DB, 'M', u'率'),
(0xF9DC, 'M', u'隆'),
(0xF9DD, 'M', u'利'),
]
def _seg_41():
return [
(0xF9DE, 'M', u'吏'),
(0xF9DF, 'M', u'履'),
(0xF9E0, 'M', u'易'),
(0xF9E1, 'M', u'李'),
(0xF9E2, 'M', u'梨'),
(0xF9E3, 'M', u'泥'),
(0xF9E4, 'M', u'理'),
(0xF9E5, 'M', u'痢'),
(0xF9E6, 'M', u'罹'),
(0xF9E7, 'M', u'裏'),
(0xF9E8, 'M', u'裡'),
(0xF9E9, 'M', u'里'),
(0xF9EA, 'M', u'離'),
(0xF9EB, 'M', u'匿'),
(0xF9EC, 'M', u'溺'),
(0xF9ED, 'M', u'吝'),
(0xF9EE, 'M', u'燐'),
(0xF9EF, 'M', u'璘'),
(0xF9F0, 'M', u'藺'),
(0xF9F1, 'M', u'隣'),
(0xF9F2, 'M', u'鱗'),
(0xF9F3, 'M', u'麟'),
(0xF9F4, 'M', u'林'),
(0xF9F5, 'M', u'淋'),
(0xF9F6, 'M', u'臨'),
(0xF9F7, 'M', u'立'),
(0xF9F8, 'M', u'笠'),
(0xF9F9, 'M', u'粒'),
(0xF9FA, 'M', u'狀'),
(0xF9FB, 'M', u'炙'),
(0xF9FC, 'M', u'識'),
(0xF9FD, 'M', u'什'),
(0xF9FE, 'M', u'茶'),
(0xF9FF, 'M', u'刺'),
(0xFA00, 'M', u'切'),
(0xFA01, 'M', u'度'),
(0xFA02, 'M', u'拓'),
(0xFA03, 'M', u'糖'),
(0xFA04, 'M', u'宅'),
(0xFA05, 'M', u'洞'),
(0xFA06, 'M', u'暴'),
(0xFA07, 'M', u'輻'),
(0xFA08, 'M', u'行'),
(0xFA09, 'M', u'降'),
(0xFA0A, 'M', u'見'),
(0xFA0B, 'M', u'廓'),
(0xFA0C, 'M', u'兀'),
(0xFA0D, 'M', u'嗀'),
(0xFA0E, 'V'),
(0xFA10, 'M', u'塚'),
(0xFA11, 'V'),
(0xFA12, 'M', u'晴'),
(0xFA13, 'V'),
(0xFA15, 'M', u'凞'),
(0xFA16, 'M', u'猪'),
(0xFA17, 'M', u'益'),
(0xFA18, 'M', u'礼'),
(0xFA19, 'M', u'神'),
(0xFA1A, 'M', u'祥'),
(0xFA1B, 'M', u'福'),
(0xFA1C, 'M', u'靖'),
(0xFA1D, 'M', u'精'),
(0xFA1E, 'M', u'羽'),
(0xFA1F, 'V'),
(0xFA20, 'M', u'蘒'),
(0xFA21, 'V'),
(0xFA22, 'M', u'諸'),
(0xFA23, 'V'),
(0xFA25, 'M', u'逸'),
(0xFA26, 'M', u'都'),
(0xFA27, 'V'),
(0xFA2A, 'M', u'飯'),
(0xFA2B, 'M', u'飼'),
(0xFA2C, 'M', u'館'),
(0xFA2D, 'M', u'鶴'),
(0xFA2E, 'M', u'郞'),
(0xFA2F, 'M', u'隷'),
(0xFA30, 'M', u'侮'),
(0xFA31, 'M', u'僧'),
(0xFA32, 'M', u'免'),
(0xFA33, 'M', u'勉'),
(0xFA34, 'M', u'勤'),
(0xFA35, 'M', u'卑'),
(0xFA36, 'M', u'喝'),
(0xFA37, 'M', u'嘆'),
(0xFA38, 'M', u'器'),
(0xFA39, 'M', u'塀'),
(0xFA3A, 'M', u'墨'),
(0xFA3B, 'M', u'層'),
(0xFA3C, 'M', u'屮'),
(0xFA3D, 'M', u'悔'),
(0xFA3E, 'M', u'慨'),
(0xFA3F, 'M', u'憎'),
(0xFA40, 'M', u'懲'),
(0xFA41, 'M', u'敏'),
(0xFA42, 'M', u'既'),
(0xFA43, 'M', u'暑'),
(0xFA44, 'M', u'梅'),
(0xFA45, 'M', u'海'),
(0xFA46, 'M', u'渚'),
]
def _seg_42():
return [
(0xFA47, 'M', u'漢'),
(0xFA48, 'M', u'煮'),
(0xFA49, 'M', u'爫'),
(0xFA4A, 'M', u'琢'),
(0xFA4B, 'M', u'碑'),
(0xFA4C, 'M', u'社'),
(0xFA4D, 'M', u'祉'),
(0xFA4E, 'M', u'祈'),
(0xFA4F, 'M', u'祐'),
(0xFA50, 'M', u'祖'),
(0xFA51, 'M', u'祝'),
(0xFA52, 'M', u'禍'),
(0xFA53, 'M', u'禎'),
(0xFA54, 'M', u'穀'),
(0xFA55, 'M', u'突'),
(0xFA56, 'M', u'節'),
(0xFA57, 'M', u'練'),
(0xFA58, 'M', u'縉'),
(0xFA59, 'M', u'繁'),
(0xFA5A, 'M', u'署'),
(0xFA5B, 'M', u'者'),
(0xFA5C, 'M', u'臭'),
(0xFA5D, 'M', u'艹'),
(0xFA5F, 'M', u'著'),
(0xFA60, 'M', u'褐'),
(0xFA61, 'M', u'視'),
(0xFA62, 'M', u'謁'),
(0xFA63, 'M', u'謹'),
(0xFA64, 'M', u'賓'),
(0xFA65, 'M', u'贈'),
(0xFA66, 'M', u'辶'),
(0xFA67, 'M', u'逸'),
(0xFA68, 'M', u'難'),
(0xFA69, 'M', u'響'),
(0xFA6A, 'M', u'頻'),
(0xFA6B, 'M', u'恵'),
(0xFA6C, 'M', u'𤋮'),
(0xFA6D, 'M', u'舘'),
(0xFA6E, 'X'),
(0xFA70, 'M', u'並'),
(0xFA71, 'M', u'况'),
(0xFA72, 'M', u'全'),
(0xFA73, 'M', u'侀'),
(0xFA74, 'M', u'充'),
(0xFA75, 'M', u'冀'),
(0xFA76, 'M', u'勇'),
(0xFA77, 'M', u'勺'),
(0xFA78, 'M', u'喝'),
(0xFA79, 'M', u'啕'),
(0xFA7A, 'M', u'喙'),
(0xFA7B, 'M', u'嗢'),
(0xFA7C, 'M', u'塚'),
(0xFA7D, 'M', u'墳'),
(0xFA7E, 'M', u'奄'),
(0xFA7F, 'M', u'奔'),
(0xFA80, 'M', u'婢'),
(0xFA81, 'M', u'嬨'),
(0xFA82, 'M', u'廒'),
(0xFA83, 'M', u'廙'),
(0xFA84, 'M', u'彩'),
(0xFA85, 'M', u'徭'),
(0xFA86, 'M', u'惘'),
(0xFA87, 'M', u'慎'),
(0xFA88, 'M', u'愈'),
(0xFA89, 'M', u'憎'),
(0xFA8A, 'M', u'慠'),
(0xFA8B, 'M', u'懲'),
(0xFA8C, 'M', u'戴'),
(0xFA8D, 'M', u'揄'),
(0xFA8E, 'M', u'搜'),
(0xFA8F, 'M', u'摒'),
(0xFA90, 'M', u'敖'),
(0xFA91, 'M', u'晴'),
(0xFA92, 'M', u'朗'),
(0xFA93, 'M', u'望'),
(0xFA94, 'M', u'杖'),
(0xFA95, 'M', u'歹'),
(0xFA96, 'M', u'殺'),
(0xFA97, 'M', u'流'),
(0xFA98, 'M', u'滛'),
(0xFA99, 'M', u'滋'),
(0xFA9A, 'M', u'漢'),
(0xFA9B, 'M', u'瀞'),
(0xFA9C, 'M', u'煮'),
(0xFA9D, 'M', u'瞧'),
(0xFA9E, 'M', u'爵'),
(0xFA9F, 'M', u'犯'),
(0xFAA0, 'M', u'猪'),
(0xFAA1, 'M', u'瑱'),
(0xFAA2, 'M', u'甆'),
(0xFAA3, 'M', u'画'),
(0xFAA4, 'M', u'瘝'),
(0xFAA5, 'M', u'瘟'),
(0xFAA6, 'M', u'益'),
(0xFAA7, 'M', u'盛'),
(0xFAA8, 'M', u'直'),
(0xFAA9, 'M', u'睊'),
(0xFAAA, 'M', u'着'),
(0xFAAB, 'M', u'磌'),
(0xFAAC, 'M', u'窱'),
]
def _seg_43():
return [
(0xFAAD, 'M', u'節'),
(0xFAAE, 'M', u'类'),
(0xFAAF, 'M', u'絛'),
(0xFAB0, 'M', u'練'),
(0xFAB1, 'M', u'缾'),
(0xFAB2, 'M', u'者'),
(0xFAB3, 'M', u'荒'),
(0xFAB4, 'M', u'華'),
(0xFAB5, 'M', u'蝹'),
(0xFAB6, 'M', u'襁'),
(0xFAB7, 'M', u'覆'),
(0xFAB8, 'M', u'視'),
(0xFAB9, 'M', u'調'),
(0xFABA, 'M', u'諸'),
(0xFABB, 'M', u'請'),
(0xFABC, 'M', u'謁'),
(0xFABD, 'M', u'諾'),
(0xFABE, 'M', u'諭'),
(0xFABF, 'M', u'謹'),
(0xFAC0, 'M', u'變'),
(0xFAC1, 'M', u'贈'),
(0xFAC2, 'M', u'輸'),
(0xFAC3, 'M', u'遲'),
(0xFAC4, 'M', u'醙'),
(0xFAC5, 'M', u'鉶'),
(0xFAC6, 'M', u'陼'),
(0xFAC7, 'M', u'難'),
(0xFAC8, 'M', u'靖'),
(0xFAC9, 'M', u'韛'),
(0xFACA, 'M', u'響'),
(0xFACB, 'M', u'頋'),
(0xFACC, 'M', u'頻'),
(0xFACD, 'M', u'鬒'),
(0xFACE, 'M', u'龜'),
(0xFACF, 'M', u'𢡊'),
(0xFAD0, 'M', u'𢡄'),
(0xFAD1, 'M', u'𣏕'),
(0xFAD2, 'M', u'㮝'),
(0xFAD3, 'M', u'䀘'),
(0xFAD4, 'M', u'䀹'),
(0xFAD5, 'M', u'𥉉'),
(0xFAD6, 'M', u'𥳐'),
(0xFAD7, 'M', u'𧻓'),
(0xFAD8, 'M', u'齃'),
(0xFAD9, 'M', u'龎'),
(0xFADA, 'X'),
(0xFB00, 'M', u'ff'),
(0xFB01, 'M', u'fi'),
(0xFB02, 'M', u'fl'),
(0xFB03, 'M', u'ffi'),
(0xFB04, 'M', u'ffl'),
(0xFB05, 'M', u'st'),
(0xFB07, 'X'),
(0xFB13, 'M', u'մն'),
(0xFB14, 'M', u'մե'),
(0xFB15, 'M', u'մի'),
(0xFB16, 'M', u'վն'),
(0xFB17, 'M', u'մխ'),
(0xFB18, 'X'),
(0xFB1D, 'M', u'יִ'),
(0xFB1E, 'V'),
(0xFB1F, 'M', u'ײַ'),
(0xFB20, 'M', u'ע'),
(0xFB21, 'M', u'א'),
(0xFB22, 'M', u'ד'),
(0xFB23, 'M', u'ה'),
(0xFB24, 'M', u'כ'),
(0xFB25, 'M', u'ל'),
(0xFB26, 'M', u'ם'),
(0xFB27, 'M', u'ר'),
(0xFB28, 'M', u'ת'),
(0xFB29, '3', u'+'),
(0xFB2A, 'M', u'שׁ'),
(0xFB2B, 'M', u'שׂ'),
(0xFB2C, 'M', u'שּׁ'),
(0xFB2D, 'M', u'שּׂ'),
(0xFB2E, 'M', u'אַ'),
(0xFB2F, 'M', u'אָ'),
(0xFB30, 'M', u'אּ'),
(0xFB31, 'M', u'בּ'),
(0xFB32, 'M', u'גּ'),
(0xFB33, 'M', u'דּ'),
(0xFB34, 'M', u'הּ'),
(0xFB35, 'M', u'וּ'),
(0xFB36, 'M', u'זּ'),
(0xFB37, 'X'),
(0xFB38, 'M', u'טּ'),
(0xFB39, 'M', u'יּ'),
(0xFB3A, 'M', u'ךּ'),
(0xFB3B, 'M', u'כּ'),
(0xFB3C, 'M', u'לּ'),
(0xFB3D, 'X'),
(0xFB3E, 'M', u'מּ'),
(0xFB3F, 'X'),
(0xFB40, 'M', u'נּ'),
(0xFB41, 'M', u'סּ'),
(0xFB42, 'X'),
(0xFB43, 'M', u'ףּ'),
(0xFB44, 'M', u'פּ'),
(0xFB45, 'X'),
]
def _seg_44():
return [
(0xFB46, 'M', u'צּ'),
(0xFB47, 'M', u'קּ'),
(0xFB48, 'M', u'רּ'),
(0xFB49, 'M', u'שּ'),
(0xFB4A, 'M', u'תּ'),
(0xFB4B, 'M', u'וֹ'),
(0xFB4C, 'M', u'בֿ'),
(0xFB4D, 'M', u'כֿ'),
(0xFB4E, 'M', u'פֿ'),
(0xFB4F, 'M', u'אל'),
(0xFB50, 'M', u'ٱ'),
(0xFB52, 'M', u'ٻ'),
(0xFB56, 'M', u'پ'),
(0xFB5A, 'M', u'ڀ'),
(0xFB5E, 'M', u'ٺ'),
(0xFB62, 'M', u'ٿ'),
(0xFB66, 'M', u'ٹ'),
(0xFB6A, 'M', u'ڤ'),
(0xFB6E, 'M', u'ڦ'),
(0xFB72, 'M', u'ڄ'),
(0xFB76, 'M', u'ڃ'),
(0xFB7A, 'M', u'چ'),
(0xFB7E, 'M', u'ڇ'),
(0xFB82, 'M', u'ڍ'),
(0xFB84, 'M', u'ڌ'),
(0xFB86, 'M', u'ڎ'),
(0xFB88, 'M', u'ڈ'),
(0xFB8A, 'M', u'ژ'),
(0xFB8C, 'M', u'ڑ'),
(0xFB8E, 'M', u'ک'),
(0xFB92, 'M', u'گ'),
(0xFB96, 'M', u'ڳ'),
(0xFB9A, 'M', u'ڱ'),
(0xFB9E, 'M', u'ں'),
(0xFBA0, 'M', u'ڻ'),
(0xFBA4, 'M', u'ۀ'),
(0xFBA6, 'M', u'ہ'),
(0xFBAA, 'M', u'ھ'),
(0xFBAE, 'M', u'ے'),
(0xFBB0, 'M', u'ۓ'),
(0xFBB2, 'V'),
(0xFBC2, 'X'),
(0xFBD3, 'M', u'ڭ'),
(0xFBD7, 'M', u'ۇ'),
(0xFBD9, 'M', u'ۆ'),
(0xFBDB, 'M', u'ۈ'),
(0xFBDD, 'M', u'ۇٴ'),
(0xFBDE, 'M', u'ۋ'),
(0xFBE0, 'M', u'ۅ'),
(0xFBE2, 'M', u'ۉ'),
(0xFBE4, 'M', u'ې'),
(0xFBE8, 'M', u'ى'),
(0xFBEA, 'M', u'ئا'),
(0xFBEC, 'M', u'ئە'),
(0xFBEE, 'M', u'ئو'),
(0xFBF0, 'M', u'ئۇ'),
(0xFBF2, 'M', u'ئۆ'),
(0xFBF4, 'M', u'ئۈ'),
(0xFBF6, 'M', u'ئې'),
(0xFBF9, 'M', u'ئى'),
(0xFBFC, 'M', u'ی'),
(0xFC00, 'M', u'ئج'),
(0xFC01, 'M', u'ئح'),
(0xFC02, 'M', u'ئم'),
(0xFC03, 'M', u'ئى'),
(0xFC04, 'M', u'ئي'),
(0xFC05, 'M', u'بج'),
(0xFC06, 'M', u'بح'),
(0xFC07, 'M', u'بخ'),
(0xFC08, 'M', u'بم'),
(0xFC09, 'M', u'بى'),
(0xFC0A, 'M', u'بي'),
(0xFC0B, 'M', u'تج'),
(0xFC0C, 'M', u'تح'),
(0xFC0D, 'M', u'تخ'),
(0xFC0E, 'M', u'تم'),
(0xFC0F, 'M', u'تى'),
(0xFC10, 'M', u'تي'),
(0xFC11, 'M', u'ثج'),
(0xFC12, 'M', u'ثم'),
(0xFC13, 'M', u'ثى'),
(0xFC14, 'M', u'ثي'),
(0xFC15, 'M', u'جح'),
(0xFC16, 'M', u'جم'),
(0xFC17, 'M', u'حج'),
(0xFC18, 'M', u'حم'),
(0xFC19, 'M', u'خج'),
(0xFC1A, 'M', u'خح'),
(0xFC1B, 'M', u'خم'),
(0xFC1C, 'M', u'سج'),
(0xFC1D, 'M', u'سح'),
(0xFC1E, 'M', u'سخ'),
(0xFC1F, 'M', u'سم'),
(0xFC20, 'M', u'صح'),
(0xFC21, 'M', u'صم'),
(0xFC22, 'M', u'ضج'),
(0xFC23, 'M', u'ضح'),
(0xFC24, 'M', u'ضخ'),
(0xFC25, 'M', u'ضم'),
(0xFC26, 'M', u'طح'),
]
def _seg_45():
return [
(0xFC27, 'M', u'طم'),
(0xFC28, 'M', u'ظم'),
(0xFC29, 'M', u'عج'),
(0xFC2A, 'M', u'عم'),
(0xFC2B, 'M', u'غج'),
(0xFC2C, 'M', u'غم'),
(0xFC2D, 'M', u'فج'),
(0xFC2E, 'M', u'فح'),
(0xFC2F, 'M', u'فخ'),
(0xFC30, 'M', u'فم'),
(0xFC31, 'M', u'فى'),
(0xFC32, 'M', u'في'),
(0xFC33, 'M', u'قح'),
(0xFC34, 'M', u'قم'),
(0xFC35, 'M', u'قى'),
(0xFC36, 'M', u'قي'),
(0xFC37, 'M', u'كا'),
(0xFC38, 'M', u'كج'),
(0xFC39, 'M', u'كح'),
(0xFC3A, 'M', u'كخ'),
(0xFC3B, 'M', u'كل'),
(0xFC3C, 'M', u'كم'),
(0xFC3D, 'M', u'كى'),
(0xFC3E, 'M', u'كي'),
(0xFC3F, 'M', u'لج'),
(0xFC40, 'M', u'لح'),
(0xFC41, 'M', u'لخ'),
(0xFC42, 'M', u'لم'),
(0xFC43, 'M', u'لى'),
(0xFC44, 'M', u'لي'),
(0xFC45, 'M', u'مج'),
(0xFC46, 'M', u'مح'),
(0xFC47, 'M', u'مخ'),
(0xFC48, 'M', u'مم'),
(0xFC49, 'M', u'مى'),
(0xFC4A, 'M', u'مي'),
(0xFC4B, 'M', u'نج'),
(0xFC4C, 'M', u'نح'),
(0xFC4D, 'M', u'نخ'),
(0xFC4E, 'M', u'نم'),
(0xFC4F, 'M', u'نى'),
(0xFC50, 'M', u'ني'),
(0xFC51, 'M', u'هج'),
(0xFC52, 'M', u'هم'),
(0xFC53, 'M', u'هى'),
(0xFC54, 'M', u'هي'),
(0xFC55, 'M', u'يج'),
(0xFC56, 'M', u'يح'),
(0xFC57, 'M', u'يخ'),
(0xFC58, 'M', u'يم'),
(0xFC59, 'M', u'يى'),
(0xFC5A, 'M', u'يي'),
(0xFC5B, 'M', u'ذٰ'),
(0xFC5C, 'M', u'رٰ'),
(0xFC5D, 'M', u'ىٰ'),
(0xFC5E, '3', u' ٌّ'),
(0xFC5F, '3', u' ٍّ'),
(0xFC60, '3', u' َّ'),
(0xFC61, '3', u' ُّ'),
(0xFC62, '3', u' ِّ'),
(0xFC63, '3', u' ّٰ'),
(0xFC64, 'M', u'ئر'),
(0xFC65, 'M', u'ئز'),
(0xFC66, 'M', u'ئم'),
(0xFC67, 'M', u'ئن'),
(0xFC68, 'M', u'ئى'),
(0xFC69, 'M', u'ئي'),
(0xFC6A, 'M', u'بر'),
(0xFC6B, 'M', u'بز'),
(0xFC6C, 'M', u'بم'),
(0xFC6D, 'M', u'بن'),
(0xFC6E, 'M', u'بى'),
(0xFC6F, 'M', u'بي'),
(0xFC70, 'M', u'تر'),
(0xFC71, 'M', u'تز'),
(0xFC72, 'M', u'تم'),
(0xFC73, 'M', u'تن'),
(0xFC74, 'M', u'تى'),
(0xFC75, 'M', u'تي'),
(0xFC76, 'M', u'ثر'),
(0xFC77, 'M', u'ثز'),
(0xFC78, 'M', u'ثم'),
(0xFC79, 'M', u'ثن'),
(0xFC7A, 'M', u'ثى'),
(0xFC7B, 'M', u'ثي'),
(0xFC7C, 'M', u'فى'),
(0xFC7D, 'M', u'في'),
(0xFC7E, 'M', u'قى'),
(0xFC7F, 'M', u'قي'),
(0xFC80, 'M', u'كا'),
(0xFC81, 'M', u'كل'),
(0xFC82, 'M', u'كم'),
(0xFC83, 'M', u'كى'),
(0xFC84, 'M', u'كي'),
(0xFC85, 'M', u'لم'),
(0xFC86, 'M', u'لى'),
(0xFC87, 'M', u'لي'),
(0xFC88, 'M', u'ما'),
(0xFC89, 'M', u'مم'),
(0xFC8A, 'M', u'نر'),
]
def _seg_46():
return [
(0xFC8B, 'M', u'نز'),
(0xFC8C, 'M', u'نم'),
(0xFC8D, 'M', u'نن'),
(0xFC8E, 'M', u'نى'),
(0xFC8F, 'M', u'ني'),
(0xFC90, 'M', u'ىٰ'),
(0xFC91, 'M', u'ير'),
(0xFC92, 'M', u'يز'),
(0xFC93, 'M', u'يم'),
(0xFC94, 'M', u'ين'),
(0xFC95, 'M', u'يى'),
(0xFC96, 'M', u'يي'),
(0xFC97, 'M', u'ئج'),
(0xFC98, 'M', u'ئح'),
(0xFC99, 'M', u'ئخ'),
(0xFC9A, 'M', u'ئم'),
(0xFC9B, 'M', u'ئه'),
(0xFC9C, 'M', u'بج'),
(0xFC9D, 'M', u'بح'),
(0xFC9E, 'M', u'بخ'),
(0xFC9F, 'M', u'بم'),
(0xFCA0, 'M', u'به'),
(0xFCA1, 'M', u'تج'),
(0xFCA2, 'M', u'تح'),
(0xFCA3, 'M', u'تخ'),
(0xFCA4, 'M', u'تم'),
(0xFCA5, 'M', u'ته'),
(0xFCA6, 'M', u'ثم'),
(0xFCA7, 'M', u'جح'),
(0xFCA8, 'M', u'جم'),
(0xFCA9, 'M', u'حج'),
(0xFCAA, 'M', u'حم'),
(0xFCAB, 'M', u'خج'),
(0xFCAC, 'M', u'خم'),
(0xFCAD, 'M', u'سج'),
(0xFCAE, 'M', u'سح'),
(0xFCAF, 'M', u'سخ'),
(0xFCB0, 'M', u'سم'),
(0xFCB1, 'M', u'صح'),
(0xFCB2, 'M', u'صخ'),
(0xFCB3, 'M', u'صم'),
(0xFCB4, 'M', u'ضج'),
(0xFCB5, 'M', u'ضح'),
(0xFCB6, 'M', u'ضخ'),
(0xFCB7, 'M', u'ضم'),
(0xFCB8, 'M', u'طح'),
(0xFCB9, 'M', u'ظم'),
(0xFCBA, 'M', u'عج'),
(0xFCBB, 'M', u'عم'),
(0xFCBC, 'M', u'غج'),
(0xFCBD, 'M', u'غم'),
(0xFCBE, 'M', u'فج'),
(0xFCBF, 'M', u'فح'),
(0xFCC0, 'M', u'فخ'),
(0xFCC1, 'M', u'فم'),
(0xFCC2, 'M', u'قح'),
(0xFCC3, 'M', u'قم'),
(0xFCC4, 'M', u'كج'),
(0xFCC5, 'M', u'كح'),
(0xFCC6, 'M', u'كخ'),
(0xFCC7, 'M', u'كل'),
(0xFCC8, 'M', u'كم'),
(0xFCC9, 'M', u'لج'),
(0xFCCA, 'M', u'لح'),
(0xFCCB, 'M', u'لخ'),
(0xFCCC, 'M', u'لم'),
(0xFCCD, 'M', u'له'),
(0xFCCE, 'M', u'مج'),
(0xFCCF, 'M', u'مح'),
(0xFCD0, 'M', u'مخ'),
(0xFCD1, 'M', u'مم'),
(0xFCD2, 'M', u'نج'),
(0xFCD3, 'M', u'نح'),
(0xFCD4, 'M', u'نخ'),
(0xFCD5, 'M', u'نم'),
(0xFCD6, 'M', u'نه'),
(0xFCD7, 'M', u'هج'),
(0xFCD8, 'M', u'هم'),
(0xFCD9, 'M', u'هٰ'),
(0xFCDA, 'M', u'يج'),
(0xFCDB, 'M', u'يح'),
(0xFCDC, 'M', u'يخ'),
(0xFCDD, 'M', u'يم'),
(0xFCDE, 'M', u'يه'),
(0xFCDF, 'M', u'ئم'),
(0xFCE0, 'M', u'ئه'),
(0xFCE1, 'M', u'بم'),
(0xFCE2, 'M', u'به'),
(0xFCE3, 'M', u'تم'),
(0xFCE4, 'M', u'ته'),
(0xFCE5, 'M', u'ثم'),
(0xFCE6, 'M', u'ثه'),
(0xFCE7, 'M', u'سم'),
(0xFCE8, 'M', u'سه'),
(0xFCE9, 'M', u'شم'),
(0xFCEA, 'M', u'شه'),
(0xFCEB, 'M', u'كل'),
(0xFCEC, 'M', u'كم'),
(0xFCED, 'M', u'لم'),
(0xFCEE, 'M', u'نم'),
]
def _seg_47():
return [
(0xFCEF, 'M', u'نه'),
(0xFCF0, 'M', u'يم'),
(0xFCF1, 'M', u'يه'),
(0xFCF2, 'M', u'ـَّ'),
(0xFCF3, 'M', u'ـُّ'),
(0xFCF4, 'M', u'ـِّ'),
(0xFCF5, 'M', u'طى'),
(0xFCF6, 'M', u'طي'),
(0xFCF7, 'M', u'عى'),
(0xFCF8, 'M', u'عي'),
(0xFCF9, 'M', u'غى'),
(0xFCFA, 'M', u'غي'),
(0xFCFB, 'M', u'سى'),
(0xFCFC, 'M', u'سي'),
(0xFCFD, 'M', u'شى'),
(0xFCFE, 'M', u'شي'),
(0xFCFF, 'M', u'حى'),
(0xFD00, 'M', u'حي'),
(0xFD01, 'M', u'جى'),
(0xFD02, 'M', u'جي'),
(0xFD03, 'M', u'خى'),
(0xFD04, 'M', u'خي'),
(0xFD05, 'M', u'صى'),
(0xFD06, 'M', u'صي'),
(0xFD07, 'M', u'ضى'),
(0xFD08, 'M', u'ضي'),
(0xFD09, 'M', u'شج'),
(0xFD0A, 'M', u'شح'),
(0xFD0B, 'M', u'شخ'),
(0xFD0C, 'M', u'شم'),
(0xFD0D, 'M', u'شر'),
(0xFD0E, 'M', u'سر'),
(0xFD0F, 'M', u'صر'),
(0xFD10, 'M', u'ضر'),
(0xFD11, 'M', u'طى'),
(0xFD12, 'M', u'طي'),
(0xFD13, 'M', u'عى'),
(0xFD14, 'M', u'عي'),
(0xFD15, 'M', u'غى'),
(0xFD16, 'M', u'غي'),
(0xFD17, 'M', u'سى'),
(0xFD18, 'M', u'سي'),
(0xFD19, 'M', u'شى'),
(0xFD1A, 'M', u'شي'),
(0xFD1B, 'M', u'حى'),
(0xFD1C, 'M', u'حي'),
(0xFD1D, 'M', u'جى'),
(0xFD1E, 'M', u'جي'),
(0xFD1F, 'M', u'خى'),
(0xFD20, 'M', u'خي'),
(0xFD21, 'M', u'صى'),
(0xFD22, 'M', u'صي'),
(0xFD23, 'M', u'ضى'),
(0xFD24, 'M', u'ضي'),
(0xFD25, 'M', u'شج'),
(0xFD26, 'M', u'شح'),
(0xFD27, 'M', u'شخ'),
(0xFD28, 'M', u'شم'),
(0xFD29, 'M', u'شر'),
(0xFD2A, 'M', u'سر'),
(0xFD2B, 'M', u'صر'),
(0xFD2C, 'M', u'ضر'),
(0xFD2D, 'M', u'شج'),
(0xFD2E, 'M', u'شح'),
(0xFD2F, 'M', u'شخ'),
(0xFD30, 'M', u'شم'),
(0xFD31, 'M', u'سه'),
(0xFD32, 'M', u'شه'),
(0xFD33, 'M', u'طم'),
(0xFD34, 'M', u'سج'),
(0xFD35, 'M', u'سح'),
(0xFD36, 'M', u'سخ'),
(0xFD37, 'M', u'شج'),
(0xFD38, 'M', u'شح'),
(0xFD39, 'M', u'شخ'),
(0xFD3A, 'M', u'طم'),
(0xFD3B, 'M', u'ظم'),
(0xFD3C, 'M', u'اً'),
(0xFD3E, 'V'),
(0xFD40, 'X'),
(0xFD50, 'M', u'تجم'),
(0xFD51, 'M', u'تحج'),
(0xFD53, 'M', u'تحم'),
(0xFD54, 'M', u'تخم'),
(0xFD55, 'M', u'تمج'),
(0xFD56, 'M', u'تمح'),
(0xFD57, 'M', u'تمخ'),
(0xFD58, 'M', u'جمح'),
(0xFD5A, 'M', u'حمي'),
(0xFD5B, 'M', u'حمى'),
(0xFD5C, 'M', u'سحج'),
(0xFD5D, 'M', u'سجح'),
(0xFD5E, 'M', u'سجى'),
(0xFD5F, 'M', u'سمح'),
(0xFD61, 'M', u'سمج'),
(0xFD62, 'M', u'سمم'),
(0xFD64, 'M', u'صحح'),
(0xFD66, 'M', u'صمم'),
(0xFD67, 'M', u'شحم'),
(0xFD69, 'M', u'شجي'),
]
def _seg_48():
return [
(0xFD6A, 'M', u'شمخ'),
(0xFD6C, 'M', u'شمم'),
(0xFD6E, 'M', u'ضحى'),
(0xFD6F, 'M', u'ضخم'),
(0xFD71, 'M', u'طمح'),
(0xFD73, 'M', u'طمم'),
(0xFD74, 'M', u'طمي'),
(0xFD75, 'M', u'عجم'),
(0xFD76, 'M', u'عمم'),
(0xFD78, 'M', u'عمى'),
(0xFD79, 'M', u'غمم'),
(0xFD7A, 'M', u'غمي'),
(0xFD7B, 'M', u'غمى'),
(0xFD7C, 'M', u'فخم'),
(0xFD7E, 'M', u'قمح'),
(0xFD7F, 'M', u'قمم'),
(0xFD80, 'M', u'لحم'),
(0xFD81, 'M', u'لحي'),
(0xFD82, 'M', u'لحى'),
(0xFD83, 'M', u'لجج'),
(0xFD85, 'M', u'لخم'),
(0xFD87, 'M', u'لمح'),
(0xFD89, 'M', u'محج'),
(0xFD8A, 'M', u'محم'),
(0xFD8B, 'M', u'محي'),
(0xFD8C, 'M', u'مجح'),
(0xFD8D, 'M', u'مجم'),
(0xFD8E, 'M', u'مخج'),
(0xFD8F, 'M', u'مخم'),
(0xFD90, 'X'),
(0xFD92, 'M', u'مجخ'),
(0xFD93, 'M', u'همج'),
(0xFD94, 'M', u'همم'),
(0xFD95, 'M', u'نحم'),
(0xFD96, 'M', u'نحى'),
(0xFD97, 'M', u'نجم'),
(0xFD99, 'M', u'نجى'),
(0xFD9A, 'M', u'نمي'),
(0xFD9B, 'M', u'نمى'),
(0xFD9C, 'M', u'يمم'),
(0xFD9E, 'M', u'بخي'),
(0xFD9F, 'M', u'تجي'),
(0xFDA0, 'M', u'تجى'),
(0xFDA1, 'M', u'تخي'),
(0xFDA2, 'M', u'تخى'),
(0xFDA3, 'M', u'تمي'),
(0xFDA4, 'M', u'تمى'),
(0xFDA5, 'M', u'جمي'),
(0xFDA6, 'M', u'جحى'),
(0xFDA7, 'M', u'جمى'),
(0xFDA8, 'M', u'سخى'),
(0xFDA9, 'M', u'صحي'),
(0xFDAA, 'M', u'شحي'),
(0xFDAB, 'M', u'ضحي'),
(0xFDAC, 'M', u'لجي'),
(0xFDAD, 'M', u'لمي'),
(0xFDAE, 'M', u'يحي'),
(0xFDAF, 'M', u'يجي'),
(0xFDB0, 'M', u'يمي'),
(0xFDB1, 'M', u'ممي'),
(0xFDB2, 'M', u'قمي'),
(0xFDB3, 'M', u'نحي'),
(0xFDB4, 'M', u'قمح'),
(0xFDB5, 'M', u'لحم'),
(0xFDB6, 'M', u'عمي'),
(0xFDB7, 'M', u'كمي'),
(0xFDB8, 'M', u'نجح'),
(0xFDB9, 'M', u'مخي'),
(0xFDBA, 'M', u'لجم'),
(0xFDBB, 'M', u'كمم'),
(0xFDBC, 'M', u'لجم'),
(0xFDBD, 'M', u'نجح'),
(0xFDBE, 'M', u'جحي'),
(0xFDBF, 'M', u'حجي'),
(0xFDC0, 'M', u'مجي'),
(0xFDC1, 'M', u'فمي'),
(0xFDC2, 'M', u'بحي'),
(0xFDC3, 'M', u'كمم'),
(0xFDC4, 'M', u'عجم'),
(0xFDC5, 'M', u'صمم'),
(0xFDC6, 'M', u'سخي'),
(0xFDC7, 'M', u'نجي'),
(0xFDC8, 'X'),
(0xFDF0, 'M', u'صلے'),
(0xFDF1, 'M', u'قلے'),
(0xFDF2, 'M', u'الله'),
(0xFDF3, 'M', u'اكبر'),
(0xFDF4, 'M', u'محمد'),
(0xFDF5, 'M', u'صلعم'),
(0xFDF6, 'M', u'رسول'),
(0xFDF7, 'M', u'عليه'),
(0xFDF8, 'M', u'وسلم'),
(0xFDF9, 'M', u'صلى'),
(0xFDFA, '3', u'صلى الله عليه وسلم'),
(0xFDFB, '3', u'جل جلاله'),
(0xFDFC, 'M', u'ریال'),
(0xFDFD, 'V'),
(0xFDFE, 'X'),
(0xFE00, 'I'),
(0xFE10, '3', u','),
]
def _seg_49():
return [
(0xFE11, 'M', u'、'),
(0xFE12, 'X'),
(0xFE13, '3', u':'),
(0xFE14, '3', u';'),
(0xFE15, '3', u'!'),
(0xFE16, '3', u'?'),
(0xFE17, 'M', u'〖'),
(0xFE18, 'M', u'〗'),
(0xFE19, 'X'),
(0xFE20, 'V'),
(0xFE30, 'X'),
(0xFE31, 'M', u'—'),
(0xFE32, 'M', u'–'),
(0xFE33, '3', u'_'),
(0xFE35, '3', u'('),
(0xFE36, '3', u')'),
(0xFE37, '3', u'{'),
(0xFE38, '3', u'}'),
(0xFE39, 'M', u'〔'),
(0xFE3A, 'M', u'〕'),
(0xFE3B, 'M', u'【'),
(0xFE3C, 'M', u'】'),
(0xFE3D, 'M', u'《'),
(0xFE3E, 'M', u'》'),
(0xFE3F, 'M', u'〈'),
(0xFE40, 'M', u'〉'),
(0xFE41, 'M', u'「'),
(0xFE42, 'M', u'」'),
(0xFE43, 'M', u'『'),
(0xFE44, 'M', u'』'),
(0xFE45, 'V'),
(0xFE47, '3', u'['),
(0xFE48, '3', u']'),
(0xFE49, '3', u' ̅'),
(0xFE4D, '3', u'_'),
(0xFE50, '3', u','),
(0xFE51, 'M', u'、'),
(0xFE52, 'X'),
(0xFE54, '3', u';'),
(0xFE55, '3', u':'),
(0xFE56, '3', u'?'),
(0xFE57, '3', u'!'),
(0xFE58, 'M', u'—'),
(0xFE59, '3', u'('),
(0xFE5A, '3', u')'),
(0xFE5B, '3', u'{'),
(0xFE5C, '3', u'}'),
(0xFE5D, 'M', u'〔'),
(0xFE5E, 'M', u'〕'),
(0xFE5F, '3', u'#'),
(0xFE60, '3', u'&'),
(0xFE61, '3', u'*'),
(0xFE62, '3', u'+'),
(0xFE63, 'M', u'-'),
(0xFE64, '3', u'<'),
(0xFE65, '3', u'>'),
(0xFE66, '3', u'='),
(0xFE67, 'X'),
(0xFE68, '3', u'\\'),
(0xFE69, '3', u'$'),
(0xFE6A, '3', u'%'),
(0xFE6B, '3', u'@'),
(0xFE6C, 'X'),
(0xFE70, '3', u' ً'),
(0xFE71, 'M', u'ـً'),
(0xFE72, '3', u' ٌ'),
(0xFE73, 'V'),
(0xFE74, '3', u' ٍ'),
(0xFE75, 'X'),
(0xFE76, '3', u' َ'),
(0xFE77, 'M', u'ـَ'),
(0xFE78, '3', u' ُ'),
(0xFE79, 'M', u'ـُ'),
(0xFE7A, '3', u' ِ'),
(0xFE7B, 'M', u'ـِ'),
(0xFE7C, '3', u' ّ'),
(0xFE7D, 'M', u'ـّ'),
(0xFE7E, '3', u' ْ'),
(0xFE7F, 'M', u'ـْ'),
(0xFE80, 'M', u'ء'),
(0xFE81, 'M', u'آ'),
(0xFE83, 'M', u'أ'),
(0xFE85, 'M', u'ؤ'),
(0xFE87, 'M', u'إ'),
(0xFE89, 'M', u'ئ'),
(0xFE8D, 'M', u'ا'),
(0xFE8F, 'M', u'ب'),
(0xFE93, 'M', u'ة'),
(0xFE95, 'M', u'ت'),
(0xFE99, 'M', u'ث'),
(0xFE9D, 'M', u'ج'),
(0xFEA1, 'M', u'ح'),
(0xFEA5, 'M', u'خ'),
(0xFEA9, 'M', u'د'),
(0xFEAB, 'M', u'ذ'),
(0xFEAD, 'M', u'ر'),
(0xFEAF, 'M', u'ز'),
(0xFEB1, 'M', u'س'),
(0xFEB5, 'M', u'ش'),
(0xFEB9, 'M', u'ص'),
]
def _seg_50():
return [
(0xFEBD, 'M', u'ض'),
(0xFEC1, 'M', u'ط'),
(0xFEC5, 'M', u'ظ'),
(0xFEC9, 'M', u'ع'),
(0xFECD, 'M', u'غ'),
(0xFED1, 'M', u'ف'),
(0xFED5, 'M', u'ق'),
(0xFED9, 'M', u'ك'),
(0xFEDD, 'M', u'ل'),
(0xFEE1, 'M', u'م'),
(0xFEE5, 'M', u'ن'),
(0xFEE9, 'M', u'ه'),
(0xFEED, 'M', u'و'),
(0xFEEF, 'M', u'ى'),
(0xFEF1, 'M', u'ي'),
(0xFEF5, 'M', u'لآ'),
(0xFEF7, 'M', u'لأ'),
(0xFEF9, 'M', u'لإ'),
(0xFEFB, 'M', u'لا'),
(0xFEFD, 'X'),
(0xFEFF, 'I'),
(0xFF00, 'X'),
(0xFF01, '3', u'!'),
(0xFF02, '3', u'"'),
(0xFF03, '3', u'#'),
(0xFF04, '3', u'$'),
(0xFF05, '3', u'%'),
(0xFF06, '3', u'&'),
(0xFF07, '3', u'\''),
(0xFF08, '3', u'('),
(0xFF09, '3', u')'),
(0xFF0A, '3', u'*'),
(0xFF0B, '3', u'+'),
(0xFF0C, '3', u','),
(0xFF0D, 'M', u'-'),
(0xFF0E, 'M', u'.'),
(0xFF0F, '3', u'/'),
(0xFF10, 'M', u'0'),
(0xFF11, 'M', u'1'),
(0xFF12, 'M', u'2'),
(0xFF13, 'M', u'3'),
(0xFF14, 'M', u'4'),
(0xFF15, 'M', u'5'),
(0xFF16, 'M', u'6'),
(0xFF17, 'M', u'7'),
(0xFF18, 'M', u'8'),
(0xFF19, 'M', u'9'),
(0xFF1A, '3', u':'),
(0xFF1B, '3', u';'),
(0xFF1C, '3', u'<'),
(0xFF1D, '3', u'='),
(0xFF1E, '3', u'>'),
(0xFF1F, '3', u'?'),
(0xFF20, '3', u'@'),
(0xFF21, 'M', u'a'),
(0xFF22, 'M', u'b'),
(0xFF23, 'M', u'c'),
(0xFF24, 'M', u'd'),
(0xFF25, 'M', u'e'),
(0xFF26, 'M', u'f'),
(0xFF27, 'M', u'g'),
(0xFF28, 'M', u'h'),
(0xFF29, 'M', u'i'),
(0xFF2A, 'M', u'j'),
(0xFF2B, 'M', u'k'),
(0xFF2C, 'M', u'l'),
(0xFF2D, 'M', u'm'),
(0xFF2E, 'M', u'n'),
(0xFF2F, 'M', u'o'),
(0xFF30, 'M', u'p'),
(0xFF31, 'M', u'q'),
(0xFF32, 'M', u'r'),
(0xFF33, 'M', u's'),
(0xFF34, 'M', u't'),
(0xFF35, 'M', u'u'),
(0xFF36, 'M', u'v'),
(0xFF37, 'M', u'w'),
(0xFF38, 'M', u'x'),
(0xFF39, 'M', u'y'),
(0xFF3A, 'M', u'z'),
(0xFF3B, '3', u'['),
(0xFF3C, '3', u'\\'),
(0xFF3D, '3', u']'),
(0xFF3E, '3', u'^'),
(0xFF3F, '3', u'_'),
(0xFF40, '3', u'`'),
(0xFF41, 'M', u'a'),
(0xFF42, 'M', u'b'),
(0xFF43, 'M', u'c'),
(0xFF44, 'M', u'd'),
(0xFF45, 'M', u'e'),
(0xFF46, 'M', u'f'),
(0xFF47, 'M', u'g'),
(0xFF48, 'M', u'h'),
(0xFF49, 'M', u'i'),
(0xFF4A, 'M', u'j'),
(0xFF4B, 'M', u'k'),
(0xFF4C, 'M', u'l'),
(0xFF4D, 'M', u'm'),
(0xFF4E, 'M', u'n'),
]
def _seg_51():
return [
(0xFF4F, 'M', u'o'),
(0xFF50, 'M', u'p'),
(0xFF51, 'M', u'q'),
(0xFF52, 'M', u'r'),
(0xFF53, 'M', u's'),
(0xFF54, 'M', u't'),
(0xFF55, 'M', u'u'),
(0xFF56, 'M', u'v'),
(0xFF57, 'M', u'w'),
(0xFF58, 'M', u'x'),
(0xFF59, 'M', u'y'),
(0xFF5A, 'M', u'z'),
(0xFF5B, '3', u'{'),
(0xFF5C, '3', u'|'),
(0xFF5D, '3', u'}'),
(0xFF5E, '3', u'~'),
(0xFF5F, 'M', u'⦅'),
(0xFF60, 'M', u'⦆'),
(0xFF61, 'M', u'.'),
(0xFF62, 'M', u'「'),
(0xFF63, 'M', u'」'),
(0xFF64, 'M', u'、'),
(0xFF65, 'M', u'・'),
(0xFF66, 'M', u'ヲ'),
(0xFF67, 'M', u'ァ'),
(0xFF68, 'M', u'ィ'),
(0xFF69, 'M', u'ゥ'),
(0xFF6A, 'M', u'ェ'),
(0xFF6B, 'M', u'ォ'),
(0xFF6C, 'M', u'ャ'),
(0xFF6D, 'M', u'ュ'),
(0xFF6E, 'M', u'ョ'),
(0xFF6F, 'M', u'ッ'),
(0xFF70, 'M', u'ー'),
(0xFF71, 'M', u'ア'),
(0xFF72, 'M', u'イ'),
(0xFF73, 'M', u'ウ'),
(0xFF74, 'M', u'エ'),
(0xFF75, 'M', u'オ'),
(0xFF76, 'M', u'カ'),
(0xFF77, 'M', u'キ'),
(0xFF78, 'M', u'ク'),
(0xFF79, 'M', u'ケ'),
(0xFF7A, 'M', u'コ'),
(0xFF7B, 'M', u'サ'),
(0xFF7C, 'M', u'シ'),
(0xFF7D, 'M', u'ス'),
(0xFF7E, 'M', u'セ'),
(0xFF7F, 'M', u'ソ'),
(0xFF80, 'M', u'タ'),
(0xFF81, 'M', u'チ'),
(0xFF82, 'M', u'ツ'),
(0xFF83, 'M', u'テ'),
(0xFF84, 'M', u'ト'),
(0xFF85, 'M', u'ナ'),
(0xFF86, 'M', u'ニ'),
(0xFF87, 'M', u'ヌ'),
(0xFF88, 'M', u'ネ'),
(0xFF89, 'M', u'ノ'),
(0xFF8A, 'M', u'ハ'),
(0xFF8B, 'M', u'ヒ'),
(0xFF8C, 'M', u'フ'),
(0xFF8D, 'M', u'ヘ'),
(0xFF8E, 'M', u'ホ'),
(0xFF8F, 'M', u'マ'),
(0xFF90, 'M', u'ミ'),
(0xFF91, 'M', u'ム'),
(0xFF92, 'M', u'メ'),
(0xFF93, 'M', u'モ'),
(0xFF94, 'M', u'ヤ'),
(0xFF95, 'M', u'ユ'),
(0xFF96, 'M', u'ヨ'),
(0xFF97, 'M', u'ラ'),
(0xFF98, 'M', u'リ'),
(0xFF99, 'M', u'ル'),
(0xFF9A, 'M', u'レ'),
(0xFF9B, 'M', u'ロ'),
(0xFF9C, 'M', u'ワ'),
(0xFF9D, 'M', u'ン'),
(0xFF9E, 'M', u'゙'),
(0xFF9F, 'M', u'゚'),
(0xFFA0, 'X'),
(0xFFA1, 'M', u'ᄀ'),
(0xFFA2, 'M', u'ᄁ'),
(0xFFA3, 'M', u'ᆪ'),
(0xFFA4, 'M', u'ᄂ'),
(0xFFA5, 'M', u'ᆬ'),
(0xFFA6, 'M', u'ᆭ'),
(0xFFA7, 'M', u'ᄃ'),
(0xFFA8, 'M', u'ᄄ'),
(0xFFA9, 'M', u'ᄅ'),
(0xFFAA, 'M', u'ᆰ'),
(0xFFAB, 'M', u'ᆱ'),
(0xFFAC, 'M', u'ᆲ'),
(0xFFAD, 'M', u'ᆳ'),
(0xFFAE, 'M', u'ᆴ'),
(0xFFAF, 'M', u'ᆵ'),
(0xFFB0, 'M', u'ᄚ'),
(0xFFB1, 'M', u'ᄆ'),
(0xFFB2, 'M', u'ᄇ'),
]
def _seg_52():
return [
(0xFFB3, 'M', u'ᄈ'),
(0xFFB4, 'M', u'ᄡ'),
(0xFFB5, 'M', u'ᄉ'),
(0xFFB6, 'M', u'ᄊ'),
(0xFFB7, 'M', u'ᄋ'),
(0xFFB8, 'M', u'ᄌ'),
(0xFFB9, 'M', u'ᄍ'),
(0xFFBA, 'M', u'ᄎ'),
(0xFFBB, 'M', u'ᄏ'),
(0xFFBC, 'M', u'ᄐ'),
(0xFFBD, 'M', u'ᄑ'),
(0xFFBE, 'M', u'ᄒ'),
(0xFFBF, 'X'),
(0xFFC2, 'M', u'ᅡ'),
(0xFFC3, 'M', u'ᅢ'),
(0xFFC4, 'M', u'ᅣ'),
(0xFFC5, 'M', u'ᅤ'),
(0xFFC6, 'M', u'ᅥ'),
(0xFFC7, 'M', u'ᅦ'),
(0xFFC8, 'X'),
(0xFFCA, 'M', u'ᅧ'),
(0xFFCB, 'M', u'ᅨ'),
(0xFFCC, 'M', u'ᅩ'),
(0xFFCD, 'M', u'ᅪ'),
(0xFFCE, 'M', u'ᅫ'),
(0xFFCF, 'M', u'ᅬ'),
(0xFFD0, 'X'),
(0xFFD2, 'M', u'ᅭ'),
(0xFFD3, 'M', u'ᅮ'),
(0xFFD4, 'M', u'ᅯ'),
(0xFFD5, 'M', u'ᅰ'),
(0xFFD6, 'M', u'ᅱ'),
(0xFFD7, 'M', u'ᅲ'),
(0xFFD8, 'X'),
(0xFFDA, 'M', u'ᅳ'),
(0xFFDB, 'M', u'ᅴ'),
(0xFFDC, 'M', u'ᅵ'),
(0xFFDD, 'X'),
(0xFFE0, 'M', u'¢'),
(0xFFE1, 'M', u'£'),
(0xFFE2, 'M', u'¬'),
(0xFFE3, '3', u' ̄'),
(0xFFE4, 'M', u'¦'),
(0xFFE5, 'M', u'¥'),
(0xFFE6, 'M', u'₩'),
(0xFFE7, 'X'),
(0xFFE8, 'M', u'│'),
(0xFFE9, 'M', u'←'),
(0xFFEA, 'M', u'↑'),
(0xFFEB, 'M', u'→'),
(0xFFEC, 'M', u'↓'),
(0xFFED, 'M', u'■'),
(0xFFEE, 'M', u'○'),
(0xFFEF, 'X'),
(0x10000, 'V'),
(0x1000C, 'X'),
(0x1000D, 'V'),
(0x10027, 'X'),
(0x10028, 'V'),
(0x1003B, 'X'),
(0x1003C, 'V'),
(0x1003E, 'X'),
(0x1003F, 'V'),
(0x1004E, 'X'),
(0x10050, 'V'),
(0x1005E, 'X'),
(0x10080, 'V'),
(0x100FB, 'X'),
(0x10100, 'V'),
(0x10103, 'X'),
(0x10107, 'V'),
(0x10134, 'X'),
(0x10137, 'V'),
(0x1018F, 'X'),
(0x10190, 'V'),
(0x1019C, 'X'),
(0x101A0, 'V'),
(0x101A1, 'X'),
(0x101D0, 'V'),
(0x101FE, 'X'),
(0x10280, 'V'),
(0x1029D, 'X'),
(0x102A0, 'V'),
(0x102D1, 'X'),
(0x102E0, 'V'),
(0x102FC, 'X'),
(0x10300, 'V'),
(0x10324, 'X'),
(0x1032D, 'V'),
(0x1034B, 'X'),
(0x10350, 'V'),
(0x1037B, 'X'),
(0x10380, 'V'),
(0x1039E, 'X'),
(0x1039F, 'V'),
(0x103C4, 'X'),
(0x103C8, 'V'),
(0x103D6, 'X'),
(0x10400, 'M', u'𐐨'),
(0x10401, 'M', u'𐐩'),
]
def _seg_53():
return [
(0x10402, 'M', u'𐐪'),
(0x10403, 'M', u'𐐫'),
(0x10404, 'M', u'𐐬'),
(0x10405, 'M', u'𐐭'),
(0x10406, 'M', u'𐐮'),
(0x10407, 'M', u'𐐯'),
(0x10408, 'M', u'𐐰'),
(0x10409, 'M', u'𐐱'),
(0x1040A, 'M', u'𐐲'),
(0x1040B, 'M', u'𐐳'),
(0x1040C, 'M', u'𐐴'),
(0x1040D, 'M', u'𐐵'),
(0x1040E, 'M', u'𐐶'),
(0x1040F, 'M', u'𐐷'),
(0x10410, 'M', u'𐐸'),
(0x10411, 'M', u'𐐹'),
(0x10412, 'M', u'𐐺'),
(0x10413, 'M', u'𐐻'),
(0x10414, 'M', u'𐐼'),
(0x10415, 'M', u'𐐽'),
(0x10416, 'M', u'𐐾'),
(0x10417, 'M', u'𐐿'),
(0x10418, 'M', u'𐑀'),
(0x10419, 'M', u'𐑁'),
(0x1041A, 'M', u'𐑂'),
(0x1041B, 'M', u'𐑃'),
(0x1041C, 'M', u'𐑄'),
(0x1041D, 'M', u'𐑅'),
(0x1041E, 'M', u'𐑆'),
(0x1041F, 'M', u'𐑇'),
(0x10420, 'M', u'𐑈'),
(0x10421, 'M', u'𐑉'),
(0x10422, 'M', u'𐑊'),
(0x10423, 'M', u'𐑋'),
(0x10424, 'M', u'𐑌'),
(0x10425, 'M', u'𐑍'),
(0x10426, 'M', u'𐑎'),
(0x10427, 'M', u'𐑏'),
(0x10428, 'V'),
(0x1049E, 'X'),
(0x104A0, 'V'),
(0x104AA, 'X'),
(0x104B0, 'M', u'𐓘'),
(0x104B1, 'M', u'𐓙'),
(0x104B2, 'M', u'𐓚'),
(0x104B3, 'M', u'𐓛'),
(0x104B4, 'M', u'𐓜'),
(0x104B5, 'M', u'𐓝'),
(0x104B6, 'M', u'𐓞'),
(0x104B7, 'M', u'𐓟'),
(0x104B8, 'M', u'𐓠'),
(0x104B9, 'M', u'𐓡'),
(0x104BA, 'M', u'𐓢'),
(0x104BB, 'M', u'𐓣'),
(0x104BC, 'M', u'𐓤'),
(0x104BD, 'M', u'𐓥'),
(0x104BE, 'M', u'𐓦'),
(0x104BF, 'M', u'𐓧'),
(0x104C0, 'M', u'𐓨'),
(0x104C1, 'M', u'𐓩'),
(0x104C2, 'M', u'𐓪'),
(0x104C3, 'M', u'𐓫'),
(0x104C4, 'M', u'𐓬'),
(0x104C5, 'M', u'𐓭'),
(0x104C6, 'M', u'𐓮'),
(0x104C7, 'M', u'𐓯'),
(0x104C8, 'M', u'𐓰'),
(0x104C9, 'M', u'𐓱'),
(0x104CA, 'M', u'𐓲'),
(0x104CB, 'M', u'𐓳'),
(0x104CC, 'M', u'𐓴'),
(0x104CD, 'M', u'𐓵'),
(0x104CE, 'M', u'𐓶'),
(0x104CF, 'M', u'𐓷'),
(0x104D0, 'M', u'𐓸'),
(0x104D1, 'M', u'𐓹'),
(0x104D2, 'M', u'𐓺'),
(0x104D3, 'M', u'𐓻'),
(0x104D4, 'X'),
(0x104D8, 'V'),
(0x104FC, 'X'),
(0x10500, 'V'),
(0x10528, 'X'),
(0x10530, 'V'),
(0x10564, 'X'),
(0x1056F, 'V'),
(0x10570, 'X'),
(0x10600, 'V'),
(0x10737, 'X'),
(0x10740, 'V'),
(0x10756, 'X'),
(0x10760, 'V'),
(0x10768, 'X'),
(0x10800, 'V'),
(0x10806, 'X'),
(0x10808, 'V'),
(0x10809, 'X'),
(0x1080A, 'V'),
(0x10836, 'X'),
(0x10837, 'V'),
]
def _seg_54():
return [
(0x10839, 'X'),
(0x1083C, 'V'),
(0x1083D, 'X'),
(0x1083F, 'V'),
(0x10856, 'X'),
(0x10857, 'V'),
(0x1089F, 'X'),
(0x108A7, 'V'),
(0x108B0, 'X'),
(0x108E0, 'V'),
(0x108F3, 'X'),
(0x108F4, 'V'),
(0x108F6, 'X'),
(0x108FB, 'V'),
(0x1091C, 'X'),
(0x1091F, 'V'),
(0x1093A, 'X'),
(0x1093F, 'V'),
(0x10940, 'X'),
(0x10980, 'V'),
(0x109B8, 'X'),
(0x109BC, 'V'),
(0x109D0, 'X'),
(0x109D2, 'V'),
(0x10A04, 'X'),
(0x10A05, 'V'),
(0x10A07, 'X'),
(0x10A0C, 'V'),
(0x10A14, 'X'),
(0x10A15, 'V'),
(0x10A18, 'X'),
(0x10A19, 'V'),
(0x10A36, 'X'),
(0x10A38, 'V'),
(0x10A3B, 'X'),
(0x10A3F, 'V'),
(0x10A49, 'X'),
(0x10A50, 'V'),
(0x10A59, 'X'),
(0x10A60, 'V'),
(0x10AA0, 'X'),
(0x10AC0, 'V'),
(0x10AE7, 'X'),
(0x10AEB, 'V'),
(0x10AF7, 'X'),
(0x10B00, 'V'),
(0x10B36, 'X'),
(0x10B39, 'V'),
(0x10B56, 'X'),
(0x10B58, 'V'),
(0x10B73, 'X'),
(0x10B78, 'V'),
(0x10B92, 'X'),
(0x10B99, 'V'),
(0x10B9D, 'X'),
(0x10BA9, 'V'),
(0x10BB0, 'X'),
(0x10C00, 'V'),
(0x10C49, 'X'),
(0x10C80, 'M', u'𐳀'),
(0x10C81, 'M', u'𐳁'),
(0x10C82, 'M', u'𐳂'),
(0x10C83, 'M', u'𐳃'),
(0x10C84, 'M', u'𐳄'),
(0x10C85, 'M', u'𐳅'),
(0x10C86, 'M', u'𐳆'),
(0x10C87, 'M', u'𐳇'),
(0x10C88, 'M', u'𐳈'),
(0x10C89, 'M', u'𐳉'),
(0x10C8A, 'M', u'𐳊'),
(0x10C8B, 'M', u'𐳋'),
(0x10C8C, 'M', u'𐳌'),
(0x10C8D, 'M', u'𐳍'),
(0x10C8E, 'M', u'𐳎'),
(0x10C8F, 'M', u'𐳏'),
(0x10C90, 'M', u'𐳐'),
(0x10C91, 'M', u'𐳑'),
(0x10C92, 'M', u'𐳒'),
(0x10C93, 'M', u'𐳓'),
(0x10C94, 'M', u'𐳔'),
(0x10C95, 'M', u'𐳕'),
(0x10C96, 'M', u'𐳖'),
(0x10C97, 'M', u'𐳗'),
(0x10C98, 'M', u'𐳘'),
(0x10C99, 'M', u'𐳙'),
(0x10C9A, 'M', u'𐳚'),
(0x10C9B, 'M', u'𐳛'),
(0x10C9C, 'M', u'𐳜'),
(0x10C9D, 'M', u'𐳝'),
(0x10C9E, 'M', u'𐳞'),
(0x10C9F, 'M', u'𐳟'),
(0x10CA0, 'M', u'𐳠'),
(0x10CA1, 'M', u'𐳡'),
(0x10CA2, 'M', u'𐳢'),
(0x10CA3, 'M', u'𐳣'),
(0x10CA4, 'M', u'𐳤'),
(0x10CA5, 'M', u'𐳥'),
(0x10CA6, 'M', u'𐳦'),
(0x10CA7, 'M', u'𐳧'),
(0x10CA8, 'M', u'𐳨'),
]
def _seg_55():
return [
(0x10CA9, 'M', u'𐳩'),
(0x10CAA, 'M', u'𐳪'),
(0x10CAB, 'M', u'𐳫'),
(0x10CAC, 'M', u'𐳬'),
(0x10CAD, 'M', u'𐳭'),
(0x10CAE, 'M', u'𐳮'),
(0x10CAF, 'M', u'𐳯'),
(0x10CB0, 'M', u'𐳰'),
(0x10CB1, 'M', u'𐳱'),
(0x10CB2, 'M', u'𐳲'),
(0x10CB3, 'X'),
(0x10CC0, 'V'),
(0x10CF3, 'X'),
(0x10CFA, 'V'),
(0x10D28, 'X'),
(0x10D30, 'V'),
(0x10D3A, 'X'),
(0x10E60, 'V'),
(0x10E7F, 'X'),
(0x10F00, 'V'),
(0x10F28, 'X'),
(0x10F30, 'V'),
(0x10F5A, 'X'),
(0x11000, 'V'),
(0x1104E, 'X'),
(0x11052, 'V'),
(0x11070, 'X'),
(0x1107F, 'V'),
(0x110BD, 'X'),
(0x110BE, 'V'),
(0x110C2, 'X'),
(0x110D0, 'V'),
(0x110E9, 'X'),
(0x110F0, 'V'),
(0x110FA, 'X'),
(0x11100, 'V'),
(0x11135, 'X'),
(0x11136, 'V'),
(0x11147, 'X'),
(0x11150, 'V'),
(0x11177, 'X'),
(0x11180, 'V'),
(0x111CE, 'X'),
(0x111D0, 'V'),
(0x111E0, 'X'),
(0x111E1, 'V'),
(0x111F5, 'X'),
(0x11200, 'V'),
(0x11212, 'X'),
(0x11213, 'V'),
(0x1123F, 'X'),
(0x11280, 'V'),
(0x11287, 'X'),
(0x11288, 'V'),
(0x11289, 'X'),
(0x1128A, 'V'),
(0x1128E, 'X'),
(0x1128F, 'V'),
(0x1129E, 'X'),
(0x1129F, 'V'),
(0x112AA, 'X'),
(0x112B0, 'V'),
(0x112EB, 'X'),
(0x112F0, 'V'),
(0x112FA, 'X'),
(0x11300, 'V'),
(0x11304, 'X'),
(0x11305, 'V'),
(0x1130D, 'X'),
(0x1130F, 'V'),
(0x11311, 'X'),
(0x11313, 'V'),
(0x11329, 'X'),
(0x1132A, 'V'),
(0x11331, 'X'),
(0x11332, 'V'),
(0x11334, 'X'),
(0x11335, 'V'),
(0x1133A, 'X'),
(0x1133B, 'V'),
(0x11345, 'X'),
(0x11347, 'V'),
(0x11349, 'X'),
(0x1134B, 'V'),
(0x1134E, 'X'),
(0x11350, 'V'),
(0x11351, 'X'),
(0x11357, 'V'),
(0x11358, 'X'),
(0x1135D, 'V'),
(0x11364, 'X'),
(0x11366, 'V'),
(0x1136D, 'X'),
(0x11370, 'V'),
(0x11375, 'X'),
(0x11400, 'V'),
(0x1145A, 'X'),
(0x1145B, 'V'),
(0x1145C, 'X'),
(0x1145D, 'V'),
]
def _seg_56():
return [
(0x1145F, 'X'),
(0x11480, 'V'),
(0x114C8, 'X'),
(0x114D0, 'V'),
(0x114DA, 'X'),
(0x11580, 'V'),
(0x115B6, 'X'),
(0x115B8, 'V'),
(0x115DE, 'X'),
(0x11600, 'V'),
(0x11645, 'X'),
(0x11650, 'V'),
(0x1165A, 'X'),
(0x11660, 'V'),
(0x1166D, 'X'),
(0x11680, 'V'),
(0x116B8, 'X'),
(0x116C0, 'V'),
(0x116CA, 'X'),
(0x11700, 'V'),
(0x1171B, 'X'),
(0x1171D, 'V'),
(0x1172C, 'X'),
(0x11730, 'V'),
(0x11740, 'X'),
(0x11800, 'V'),
(0x1183C, 'X'),
(0x118A0, 'M', u'𑣀'),
(0x118A1, 'M', u'𑣁'),
(0x118A2, 'M', u'𑣂'),
(0x118A3, 'M', u'𑣃'),
(0x118A4, 'M', u'𑣄'),
(0x118A5, 'M', u'𑣅'),
(0x118A6, 'M', u'𑣆'),
(0x118A7, 'M', u'𑣇'),
(0x118A8, 'M', u'𑣈'),
(0x118A9, 'M', u'𑣉'),
(0x118AA, 'M', u'𑣊'),
(0x118AB, 'M', u'𑣋'),
(0x118AC, 'M', u'𑣌'),
(0x118AD, 'M', u'𑣍'),
(0x118AE, 'M', u'𑣎'),
(0x118AF, 'M', u'𑣏'),
(0x118B0, 'M', u'𑣐'),
(0x118B1, 'M', u'𑣑'),
(0x118B2, 'M', u'𑣒'),
(0x118B3, 'M', u'𑣓'),
(0x118B4, 'M', u'𑣔'),
(0x118B5, 'M', u'𑣕'),
(0x118B6, 'M', u'𑣖'),
(0x118B7, 'M', u'𑣗'),
(0x118B8, 'M', u'𑣘'),
(0x118B9, 'M', u'𑣙'),
(0x118BA, 'M', u'𑣚'),
(0x118BB, 'M', u'𑣛'),
(0x118BC, 'M', u'𑣜'),
(0x118BD, 'M', u'𑣝'),
(0x118BE, 'M', u'𑣞'),
(0x118BF, 'M', u'𑣟'),
(0x118C0, 'V'),
(0x118F3, 'X'),
(0x118FF, 'V'),
(0x11900, 'X'),
(0x11A00, 'V'),
(0x11A48, 'X'),
(0x11A50, 'V'),
(0x11A84, 'X'),
(0x11A86, 'V'),
(0x11AA3, 'X'),
(0x11AC0, 'V'),
(0x11AF9, 'X'),
(0x11C00, 'V'),
(0x11C09, 'X'),
(0x11C0A, 'V'),
(0x11C37, 'X'),
(0x11C38, 'V'),
(0x11C46, 'X'),
(0x11C50, 'V'),
(0x11C6D, 'X'),
(0x11C70, 'V'),
(0x11C90, 'X'),
(0x11C92, 'V'),
(0x11CA8, 'X'),
(0x11CA9, 'V'),
(0x11CB7, 'X'),
(0x11D00, 'V'),
(0x11D07, 'X'),
(0x11D08, 'V'),
(0x11D0A, 'X'),
(0x11D0B, 'V'),
(0x11D37, 'X'),
(0x11D3A, 'V'),
(0x11D3B, 'X'),
(0x11D3C, 'V'),
(0x11D3E, 'X'),
(0x11D3F, 'V'),
(0x11D48, 'X'),
(0x11D50, 'V'),
(0x11D5A, 'X'),
(0x11D60, 'V'),
]
def _seg_57():
return [
(0x11D66, 'X'),
(0x11D67, 'V'),
(0x11D69, 'X'),
(0x11D6A, 'V'),
(0x11D8F, 'X'),
(0x11D90, 'V'),
(0x11D92, 'X'),
(0x11D93, 'V'),
(0x11D99, 'X'),
(0x11DA0, 'V'),
(0x11DAA, 'X'),
(0x11EE0, 'V'),
(0x11EF9, 'X'),
(0x12000, 'V'),
(0x1239A, 'X'),
(0x12400, 'V'),
(0x1246F, 'X'),
(0x12470, 'V'),
(0x12475, 'X'),
(0x12480, 'V'),
(0x12544, 'X'),
(0x13000, 'V'),
(0x1342F, 'X'),
(0x14400, 'V'),
(0x14647, 'X'),
(0x16800, 'V'),
(0x16A39, 'X'),
(0x16A40, 'V'),
(0x16A5F, 'X'),
(0x16A60, 'V'),
(0x16A6A, 'X'),
(0x16A6E, 'V'),
(0x16A70, 'X'),
(0x16AD0, 'V'),
(0x16AEE, 'X'),
(0x16AF0, 'V'),
(0x16AF6, 'X'),
(0x16B00, 'V'),
(0x16B46, 'X'),
(0x16B50, 'V'),
(0x16B5A, 'X'),
(0x16B5B, 'V'),
(0x16B62, 'X'),
(0x16B63, 'V'),
(0x16B78, 'X'),
(0x16B7D, 'V'),
(0x16B90, 'X'),
(0x16E60, 'V'),
(0x16E9B, 'X'),
(0x16F00, 'V'),
(0x16F45, 'X'),
(0x16F50, 'V'),
(0x16F7F, 'X'),
(0x16F8F, 'V'),
(0x16FA0, 'X'),
(0x16FE0, 'V'),
(0x16FE2, 'X'),
(0x17000, 'V'),
(0x187F2, 'X'),
(0x18800, 'V'),
(0x18AF3, 'X'),
(0x1B000, 'V'),
(0x1B11F, 'X'),
(0x1B170, 'V'),
(0x1B2FC, 'X'),
(0x1BC00, 'V'),
(0x1BC6B, 'X'),
(0x1BC70, 'V'),
(0x1BC7D, 'X'),
(0x1BC80, 'V'),
(0x1BC89, 'X'),
(0x1BC90, 'V'),
(0x1BC9A, 'X'),
(0x1BC9C, 'V'),
(0x1BCA0, 'I'),
(0x1BCA4, 'X'),
(0x1D000, 'V'),
(0x1D0F6, 'X'),
(0x1D100, 'V'),
(0x1D127, 'X'),
(0x1D129, 'V'),
(0x1D15E, 'M', u'𝅗𝅥'),
(0x1D15F, 'M', u'𝅘𝅥'),
(0x1D160, 'M', u'𝅘𝅥𝅮'),
(0x1D161, 'M', u'𝅘𝅥𝅯'),
(0x1D162, 'M', u'𝅘𝅥𝅰'),
(0x1D163, 'M', u'𝅘𝅥𝅱'),
(0x1D164, 'M', u'𝅘𝅥𝅲'),
(0x1D165, 'V'),
(0x1D173, 'X'),
(0x1D17B, 'V'),
(0x1D1BB, 'M', u'𝆹𝅥'),
(0x1D1BC, 'M', u'𝆺𝅥'),
(0x1D1BD, 'M', u'𝆹𝅥𝅮'),
(0x1D1BE, 'M', u'𝆺𝅥𝅮'),
(0x1D1BF, 'M', u'𝆹𝅥𝅯'),
(0x1D1C0, 'M', u'𝆺𝅥𝅯'),
(0x1D1C1, 'V'),
(0x1D1E9, 'X'),
(0x1D200, 'V'),
]
def _seg_58():
return [
(0x1D246, 'X'),
(0x1D2E0, 'V'),
(0x1D2F4, 'X'),
(0x1D300, 'V'),
(0x1D357, 'X'),
(0x1D360, 'V'),
(0x1D379, 'X'),
(0x1D400, 'M', u'a'),
(0x1D401, 'M', u'b'),
(0x1D402, 'M', u'c'),
(0x1D403, 'M', u'd'),
(0x1D404, 'M', u'e'),
(0x1D405, 'M', u'f'),
(0x1D406, 'M', u'g'),
(0x1D407, 'M', u'h'),
(0x1D408, 'M', u'i'),
(0x1D409, 'M', u'j'),
(0x1D40A, 'M', u'k'),
(0x1D40B, 'M', u'l'),
(0x1D40C, 'M', u'm'),
(0x1D40D, 'M', u'n'),
(0x1D40E, 'M', u'o'),
(0x1D40F, 'M', u'p'),
(0x1D410, 'M', u'q'),
(0x1D411, 'M', u'r'),
(0x1D412, 'M', u's'),
(0x1D413, 'M', u't'),
(0x1D414, 'M', u'u'),
(0x1D415, 'M', u'v'),
(0x1D416, 'M', u'w'),
(0x1D417, 'M', u'x'),
(0x1D418, 'M', u'y'),
(0x1D419, 'M', u'z'),
(0x1D41A, 'M', u'a'),
(0x1D41B, 'M', u'b'),
(0x1D41C, 'M', u'c'),
(0x1D41D, 'M', u'd'),
(0x1D41E, 'M', u'e'),
(0x1D41F, 'M', u'f'),
(0x1D420, 'M', u'g'),
(0x1D421, 'M', u'h'),
(0x1D422, 'M', u'i'),
(0x1D423, 'M', u'j'),
(0x1D424, 'M', u'k'),
(0x1D425, 'M', u'l'),
(0x1D426, 'M', u'm'),
(0x1D427, 'M', u'n'),
(0x1D428, 'M', u'o'),
(0x1D429, 'M', u'p'),
(0x1D42A, 'M', u'q'),
(0x1D42B, 'M', u'r'),
(0x1D42C, 'M', u's'),
(0x1D42D, 'M', u't'),
(0x1D42E, 'M', u'u'),
(0x1D42F, 'M', u'v'),
(0x1D430, 'M', u'w'),
(0x1D431, 'M', u'x'),
(0x1D432, 'M', u'y'),
(0x1D433, 'M', u'z'),
(0x1D434, 'M', u'a'),
(0x1D435, 'M', u'b'),
(0x1D436, 'M', u'c'),
(0x1D437, 'M', u'd'),
(0x1D438, 'M', u'e'),
(0x1D439, 'M', u'f'),
(0x1D43A, 'M', u'g'),
(0x1D43B, 'M', u'h'),
(0x1D43C, 'M', u'i'),
(0x1D43D, 'M', u'j'),
(0x1D43E, 'M', u'k'),
(0x1D43F, 'M', u'l'),
(0x1D440, 'M', u'm'),
(0x1D441, 'M', u'n'),
(0x1D442, 'M', u'o'),
(0x1D443, 'M', u'p'),
(0x1D444, 'M', u'q'),
(0x1D445, 'M', u'r'),
(0x1D446, 'M', u's'),
(0x1D447, 'M', u't'),
(0x1D448, 'M', u'u'),
(0x1D449, 'M', u'v'),
(0x1D44A, 'M', u'w'),
(0x1D44B, 'M', u'x'),
(0x1D44C, 'M', u'y'),
(0x1D44D, 'M', u'z'),
(0x1D44E, 'M', u'a'),
(0x1D44F, 'M', u'b'),
(0x1D450, 'M', u'c'),
(0x1D451, 'M', u'd'),
(0x1D452, 'M', u'e'),
(0x1D453, 'M', u'f'),
(0x1D454, 'M', u'g'),
(0x1D455, 'X'),
(0x1D456, 'M', u'i'),
(0x1D457, 'M', u'j'),
(0x1D458, 'M', u'k'),
(0x1D459, 'M', u'l'),
(0x1D45A, 'M', u'm'),
(0x1D45B, 'M', u'n'),
(0x1D45C, 'M', u'o'),
]
def _seg_59():
return [
(0x1D45D, 'M', u'p'),
(0x1D45E, 'M', u'q'),
(0x1D45F, 'M', u'r'),
(0x1D460, 'M', u's'),
(0x1D461, 'M', u't'),
(0x1D462, 'M', u'u'),
(0x1D463, 'M', u'v'),
(0x1D464, 'M', u'w'),
(0x1D465, 'M', u'x'),
(0x1D466, 'M', u'y'),
(0x1D467, 'M', u'z'),
(0x1D468, 'M', u'a'),
(0x1D469, 'M', u'b'),
(0x1D46A, 'M', u'c'),
(0x1D46B, 'M', u'd'),
(0x1D46C, 'M', u'e'),
(0x1D46D, 'M', u'f'),
(0x1D46E, 'M', u'g'),
(0x1D46F, 'M', u'h'),
(0x1D470, 'M', u'i'),
(0x1D471, 'M', u'j'),
(0x1D472, 'M', u'k'),
(0x1D473, 'M', u'l'),
(0x1D474, 'M', u'm'),
(0x1D475, 'M', u'n'),
(0x1D476, 'M', u'o'),
(0x1D477, 'M', u'p'),
(0x1D478, 'M', u'q'),
(0x1D479, 'M', u'r'),
(0x1D47A, 'M', u's'),
(0x1D47B, 'M', u't'),
(0x1D47C, 'M', u'u'),
(0x1D47D, 'M', u'v'),
(0x1D47E, 'M', u'w'),
(0x1D47F, 'M', u'x'),
(0x1D480, 'M', u'y'),
(0x1D481, 'M', u'z'),
(0x1D482, 'M', u'a'),
(0x1D483, 'M', u'b'),
(0x1D484, 'M', u'c'),
(0x1D485, 'M', u'd'),
(0x1D486, 'M', u'e'),
(0x1D487, 'M', u'f'),
(0x1D488, 'M', u'g'),
(0x1D489, 'M', u'h'),
(0x1D48A, 'M', u'i'),
(0x1D48B, 'M', u'j'),
(0x1D48C, 'M', u'k'),
(0x1D48D, 'M', u'l'),
(0x1D48E, 'M', u'm'),
(0x1D48F, 'M', u'n'),
(0x1D490, 'M', u'o'),
(0x1D491, 'M', u'p'),
(0x1D492, 'M', u'q'),
(0x1D493, 'M', u'r'),
(0x1D494, 'M', u's'),
(0x1D495, 'M', u't'),
(0x1D496, 'M', u'u'),
(0x1D497, 'M', u'v'),
(0x1D498, 'M', u'w'),
(0x1D499, 'M', u'x'),
(0x1D49A, 'M', u'y'),
(0x1D49B, 'M', u'z'),
(0x1D49C, 'M', u'a'),
(0x1D49D, 'X'),
(0x1D49E, 'M', u'c'),
(0x1D49F, 'M', u'd'),
(0x1D4A0, 'X'),
(0x1D4A2, 'M', u'g'),
(0x1D4A3, 'X'),
(0x1D4A5, 'M', u'j'),
(0x1D4A6, 'M', u'k'),
(0x1D4A7, 'X'),
(0x1D4A9, 'M', u'n'),
(0x1D4AA, 'M', u'o'),
(0x1D4AB, 'M', u'p'),
(0x1D4AC, 'M', u'q'),
(0x1D4AD, 'X'),
(0x1D4AE, 'M', u's'),
(0x1D4AF, 'M', u't'),
(0x1D4B0, 'M', u'u'),
(0x1D4B1, 'M', u'v'),
(0x1D4B2, 'M', u'w'),
(0x1D4B3, 'M', u'x'),
(0x1D4B4, 'M', u'y'),
(0x1D4B5, 'M', u'z'),
(0x1D4B6, 'M', u'a'),
(0x1D4B7, 'M', u'b'),
(0x1D4B8, 'M', u'c'),
(0x1D4B9, 'M', u'd'),
(0x1D4BA, 'X'),
(0x1D4BB, 'M', u'f'),
(0x1D4BC, 'X'),
(0x1D4BD, 'M', u'h'),
(0x1D4BE, 'M', u'i'),
(0x1D4BF, 'M', u'j'),
(0x1D4C0, 'M', u'k'),
(0x1D4C1, 'M', u'l'),
(0x1D4C2, 'M', u'm'),
(0x1D4C3, 'M', u'n'),
]
def _seg_60():
return [
(0x1D4C4, 'X'),
(0x1D4C5, 'M', u'p'),
(0x1D4C6, 'M', u'q'),
(0x1D4C7, 'M', u'r'),
(0x1D4C8, 'M', u's'),
(0x1D4C9, 'M', u't'),
(0x1D4CA, 'M', u'u'),
(0x1D4CB, 'M', u'v'),
(0x1D4CC, 'M', u'w'),
(0x1D4CD, 'M', u'x'),
(0x1D4CE, 'M', u'y'),
(0x1D4CF, 'M', u'z'),
(0x1D4D0, 'M', u'a'),
(0x1D4D1, 'M', u'b'),
(0x1D4D2, 'M', u'c'),
(0x1D4D3, 'M', u'd'),
(0x1D4D4, 'M', u'e'),
(0x1D4D5, 'M', u'f'),
(0x1D4D6, 'M', u'g'),
(0x1D4D7, 'M', u'h'),
(0x1D4D8, 'M', u'i'),
(0x1D4D9, 'M', u'j'),
(0x1D4DA, 'M', u'k'),
(0x1D4DB, 'M', u'l'),
(0x1D4DC, 'M', u'm'),
(0x1D4DD, 'M', u'n'),
(0x1D4DE, 'M', u'o'),
(0x1D4DF, 'M', u'p'),
(0x1D4E0, 'M', u'q'),
(0x1D4E1, 'M', u'r'),
(0x1D4E2, 'M', u's'),
(0x1D4E3, 'M', u't'),
(0x1D4E4, 'M', u'u'),
(0x1D4E5, 'M', u'v'),
(0x1D4E6, 'M', u'w'),
(0x1D4E7, 'M', u'x'),
(0x1D4E8, 'M', u'y'),
(0x1D4E9, 'M', u'z'),
(0x1D4EA, 'M', u'a'),
(0x1D4EB, 'M', u'b'),
(0x1D4EC, 'M', u'c'),
(0x1D4ED, 'M', u'd'),
(0x1D4EE, 'M', u'e'),
(0x1D4EF, 'M', u'f'),
(0x1D4F0, 'M', u'g'),
(0x1D4F1, 'M', u'h'),
(0x1D4F2, 'M', u'i'),
(0x1D4F3, 'M', u'j'),
(0x1D4F4, 'M', u'k'),
(0x1D4F5, 'M', u'l'),
(0x1D4F6, 'M', u'm'),
(0x1D4F7, 'M', u'n'),
(0x1D4F8, 'M', u'o'),
(0x1D4F9, 'M', u'p'),
(0x1D4FA, 'M', u'q'),
(0x1D4FB, 'M', u'r'),
(0x1D4FC, 'M', u's'),
(0x1D4FD, 'M', u't'),
(0x1D4FE, 'M', u'u'),
(0x1D4FF, 'M', u'v'),
(0x1D500, 'M', u'w'),
(0x1D501, 'M', u'x'),
(0x1D502, 'M', u'y'),
(0x1D503, 'M', u'z'),
(0x1D504, 'M', u'a'),
(0x1D505, 'M', u'b'),
(0x1D506, 'X'),
(0x1D507, 'M', u'd'),
(0x1D508, 'M', u'e'),
(0x1D509, 'M', u'f'),
(0x1D50A, 'M', u'g'),
(0x1D50B, 'X'),
(0x1D50D, 'M', u'j'),
(0x1D50E, 'M', u'k'),
(0x1D50F, 'M', u'l'),
(0x1D510, 'M', u'm'),
(0x1D511, 'M', u'n'),
(0x1D512, 'M', u'o'),
(0x1D513, 'M', u'p'),
(0x1D514, 'M', u'q'),
(0x1D515, 'X'),
(0x1D516, 'M', u's'),
(0x1D517, 'M', u't'),
(0x1D518, 'M', u'u'),
(0x1D519, 'M', u'v'),
(0x1D51A, 'M', u'w'),
(0x1D51B, 'M', u'x'),
(0x1D51C, 'M', u'y'),
(0x1D51D, 'X'),
(0x1D51E, 'M', u'a'),
(0x1D51F, 'M', u'b'),
(0x1D520, 'M', u'c'),
(0x1D521, 'M', u'd'),
(0x1D522, 'M', u'e'),
(0x1D523, 'M', u'f'),
(0x1D524, 'M', u'g'),
(0x1D525, 'M', u'h'),
(0x1D526, 'M', u'i'),
(0x1D527, 'M', u'j'),
(0x1D528, 'M', u'k'),
]
def _seg_61():
return [
(0x1D529, 'M', u'l'),
(0x1D52A, 'M', u'm'),
(0x1D52B, 'M', u'n'),
(0x1D52C, 'M', u'o'),
(0x1D52D, 'M', u'p'),
(0x1D52E, 'M', u'q'),
(0x1D52F, 'M', u'r'),
(0x1D530, 'M', u's'),
(0x1D531, 'M', u't'),
(0x1D532, 'M', u'u'),
(0x1D533, 'M', u'v'),
(0x1D534, 'M', u'w'),
(0x1D535, 'M', u'x'),
(0x1D536, 'M', u'y'),
(0x1D537, 'M', u'z'),
(0x1D538, 'M', u'a'),
(0x1D539, 'M', u'b'),
(0x1D53A, 'X'),
(0x1D53B, 'M', u'd'),
(0x1D53C, 'M', u'e'),
(0x1D53D, 'M', u'f'),
(0x1D53E, 'M', u'g'),
(0x1D53F, 'X'),
(0x1D540, 'M', u'i'),
(0x1D541, 'M', u'j'),
(0x1D542, 'M', u'k'),
(0x1D543, 'M', u'l'),
(0x1D544, 'M', u'm'),
(0x1D545, 'X'),
(0x1D546, 'M', u'o'),
(0x1D547, 'X'),
(0x1D54A, 'M', u's'),
(0x1D54B, 'M', u't'),
(0x1D54C, 'M', u'u'),
(0x1D54D, 'M', u'v'),
(0x1D54E, 'M', u'w'),
(0x1D54F, 'M', u'x'),
(0x1D550, 'M', u'y'),
(0x1D551, 'X'),
(0x1D552, 'M', u'a'),
(0x1D553, 'M', u'b'),
(0x1D554, 'M', u'c'),
(0x1D555, 'M', u'd'),
(0x1D556, 'M', u'e'),
(0x1D557, 'M', u'f'),
(0x1D558, 'M', u'g'),
(0x1D559, 'M', u'h'),
(0x1D55A, 'M', u'i'),
(0x1D55B, 'M', u'j'),
(0x1D55C, 'M', u'k'),
(0x1D55D, 'M', u'l'),
(0x1D55E, 'M', u'm'),
(0x1D55F, 'M', u'n'),
(0x1D560, 'M', u'o'),
(0x1D561, 'M', u'p'),
(0x1D562, 'M', u'q'),
(0x1D563, 'M', u'r'),
(0x1D564, 'M', u's'),
(0x1D565, 'M', u't'),
(0x1D566, 'M', u'u'),
(0x1D567, 'M', u'v'),
(0x1D568, 'M', u'w'),
(0x1D569, 'M', u'x'),
(0x1D56A, 'M', u'y'),
(0x1D56B, 'M', u'z'),
(0x1D56C, 'M', u'a'),
(0x1D56D, 'M', u'b'),
(0x1D56E, 'M', u'c'),
(0x1D56F, 'M', u'd'),
(0x1D570, 'M', u'e'),
(0x1D571, 'M', u'f'),
(0x1D572, 'M', u'g'),
(0x1D573, 'M', u'h'),
(0x1D574, 'M', u'i'),
(0x1D575, 'M', u'j'),
(0x1D576, 'M', u'k'),
(0x1D577, 'M', u'l'),
(0x1D578, 'M', u'm'),
(0x1D579, 'M', u'n'),
(0x1D57A, 'M', u'o'),
(0x1D57B, 'M', u'p'),
(0x1D57C, 'M', u'q'),
(0x1D57D, 'M', u'r'),
(0x1D57E, 'M', u's'),
(0x1D57F, 'M', u't'),
(0x1D580, 'M', u'u'),
(0x1D581, 'M', u'v'),
(0x1D582, 'M', u'w'),
(0x1D583, 'M', u'x'),
(0x1D584, 'M', u'y'),
(0x1D585, 'M', u'z'),
(0x1D586, 'M', u'a'),
(0x1D587, 'M', u'b'),
(0x1D588, 'M', u'c'),
(0x1D589, 'M', u'd'),
(0x1D58A, 'M', u'e'),
(0x1D58B, 'M', u'f'),
(0x1D58C, 'M', u'g'),
(0x1D58D, 'M', u'h'),
(0x1D58E, 'M', u'i'),
]
def _seg_62():
return [
(0x1D58F, 'M', u'j'),
(0x1D590, 'M', u'k'),
(0x1D591, 'M', u'l'),
(0x1D592, 'M', u'm'),
(0x1D593, 'M', u'n'),
(0x1D594, 'M', u'o'),
(0x1D595, 'M', u'p'),
(0x1D596, 'M', u'q'),
(0x1D597, 'M', u'r'),
(0x1D598, 'M', u's'),
(0x1D599, 'M', u't'),
(0x1D59A, 'M', u'u'),
(0x1D59B, 'M', u'v'),
(0x1D59C, 'M', u'w'),
(0x1D59D, 'M', u'x'),
(0x1D59E, 'M', u'y'),
(0x1D59F, 'M', u'z'),
(0x1D5A0, 'M', u'a'),
(0x1D5A1, 'M', u'b'),
(0x1D5A2, 'M', u'c'),
(0x1D5A3, 'M', u'd'),
(0x1D5A4, 'M', u'e'),
(0x1D5A5, 'M', u'f'),
(0x1D5A6, 'M', u'g'),
(0x1D5A7, 'M', u'h'),
(0x1D5A8, 'M', u'i'),
(0x1D5A9, 'M', u'j'),
(0x1D5AA, 'M', u'k'),
(0x1D5AB, 'M', u'l'),
(0x1D5AC, 'M', u'm'),
(0x1D5AD, 'M', u'n'),
(0x1D5AE, 'M', u'o'),
(0x1D5AF, 'M', u'p'),
(0x1D5B0, 'M', u'q'),
(0x1D5B1, 'M', u'r'),
(0x1D5B2, 'M', u's'),
(0x1D5B3, 'M', u't'),
(0x1D5B4, 'M', u'u'),
(0x1D5B5, 'M', u'v'),
(0x1D5B6, 'M', u'w'),
(0x1D5B7, 'M', u'x'),
(0x1D5B8, 'M', u'y'),
(0x1D5B9, 'M', u'z'),
(0x1D5BA, 'M', u'a'),
(0x1D5BB, 'M', u'b'),
(0x1D5BC, 'M', u'c'),
(0x1D5BD, 'M', u'd'),
(0x1D5BE, 'M', u'e'),
(0x1D5BF, 'M', u'f'),
(0x1D5C0, 'M', u'g'),
(0x1D5C1, 'M', u'h'),
(0x1D5C2, 'M', u'i'),
(0x1D5C3, 'M', u'j'),
(0x1D5C4, 'M', u'k'),
(0x1D5C5, 'M', u'l'),
(0x1D5C6, 'M', u'm'),
(0x1D5C7, 'M', u'n'),
(0x1D5C8, 'M', u'o'),
(0x1D5C9, 'M', u'p'),
(0x1D5CA, 'M', u'q'),
(0x1D5CB, 'M', u'r'),
(0x1D5CC, 'M', u's'),
(0x1D5CD, 'M', u't'),
(0x1D5CE, 'M', u'u'),
(0x1D5CF, 'M', u'v'),
(0x1D5D0, 'M', u'w'),
(0x1D5D1, 'M', u'x'),
(0x1D5D2, 'M', u'y'),
(0x1D5D3, 'M', u'z'),
(0x1D5D4, 'M', u'a'),
(0x1D5D5, 'M', u'b'),
(0x1D5D6, 'M', u'c'),
(0x1D5D7, 'M', u'd'),
(0x1D5D8, 'M', u'e'),
(0x1D5D9, 'M', u'f'),
(0x1D5DA, 'M', u'g'),
(0x1D5DB, 'M', u'h'),
(0x1D5DC, 'M', u'i'),
(0x1D5DD, 'M', u'j'),
(0x1D5DE, 'M', u'k'),
(0x1D5DF, 'M', u'l'),
(0x1D5E0, 'M', u'm'),
(0x1D5E1, 'M', u'n'),
(0x1D5E2, 'M', u'o'),
(0x1D5E3, 'M', u'p'),
(0x1D5E4, 'M', u'q'),
(0x1D5E5, 'M', u'r'),
(0x1D5E6, 'M', u's'),
(0x1D5E7, 'M', u't'),
(0x1D5E8, 'M', u'u'),
(0x1D5E9, 'M', u'v'),
(0x1D5EA, 'M', u'w'),
(0x1D5EB, 'M', u'x'),
(0x1D5EC, 'M', u'y'),
(0x1D5ED, 'M', u'z'),
(0x1D5EE, 'M', u'a'),
(0x1D5EF, 'M', u'b'),
(0x1D5F0, 'M', u'c'),
(0x1D5F1, 'M', u'd'),
(0x1D5F2, 'M', u'e'),
]
def _seg_63():
return [
(0x1D5F3, 'M', u'f'),
(0x1D5F4, 'M', u'g'),
(0x1D5F5, 'M', u'h'),
(0x1D5F6, 'M', u'i'),
(0x1D5F7, 'M', u'j'),
(0x1D5F8, 'M', u'k'),
(0x1D5F9, 'M', u'l'),
(0x1D5FA, 'M', u'm'),
(0x1D5FB, 'M', u'n'),
(0x1D5FC, 'M', u'o'),
(0x1D5FD, 'M', u'p'),
(0x1D5FE, 'M', u'q'),
(0x1D5FF, 'M', u'r'),
(0x1D600, 'M', u's'),
(0x1D601, 'M', u't'),
(0x1D602, 'M', u'u'),
(0x1D603, 'M', u'v'),
(0x1D604, 'M', u'w'),
(0x1D605, 'M', u'x'),
(0x1D606, 'M', u'y'),
(0x1D607, 'M', u'z'),
(0x1D608, 'M', u'a'),
(0x1D609, 'M', u'b'),
(0x1D60A, 'M', u'c'),
(0x1D60B, 'M', u'd'),
(0x1D60C, 'M', u'e'),
(0x1D60D, 'M', u'f'),
(0x1D60E, 'M', u'g'),
(0x1D60F, 'M', u'h'),
(0x1D610, 'M', u'i'),
(0x1D611, 'M', u'j'),
(0x1D612, 'M', u'k'),
(0x1D613, 'M', u'l'),
(0x1D614, 'M', u'm'),
(0x1D615, 'M', u'n'),
(0x1D616, 'M', u'o'),
(0x1D617, 'M', u'p'),
(0x1D618, 'M', u'q'),
(0x1D619, 'M', u'r'),
(0x1D61A, 'M', u's'),
(0x1D61B, 'M', u't'),
(0x1D61C, 'M', u'u'),
(0x1D61D, 'M', u'v'),
(0x1D61E, 'M', u'w'),
(0x1D61F, 'M', u'x'),
(0x1D620, 'M', u'y'),
(0x1D621, 'M', u'z'),
(0x1D622, 'M', u'a'),
(0x1D623, 'M', u'b'),
(0x1D624, 'M', u'c'),
(0x1D625, 'M', u'd'),
(0x1D626, 'M', u'e'),
(0x1D627, 'M', u'f'),
(0x1D628, 'M', u'g'),
(0x1D629, 'M', u'h'),
(0x1D62A, 'M', u'i'),
(0x1D62B, 'M', u'j'),
(0x1D62C, 'M', u'k'),
(0x1D62D, 'M', u'l'),
(0x1D62E, 'M', u'm'),
(0x1D62F, 'M', u'n'),
(0x1D630, 'M', u'o'),
(0x1D631, 'M', u'p'),
(0x1D632, 'M', u'q'),
(0x1D633, 'M', u'r'),
(0x1D634, 'M', u's'),
(0x1D635, 'M', u't'),
(0x1D636, 'M', u'u'),
(0x1D637, 'M', u'v'),
(0x1D638, 'M', u'w'),
(0x1D639, 'M', u'x'),
(0x1D63A, 'M', u'y'),
(0x1D63B, 'M', u'z'),
(0x1D63C, 'M', u'a'),
(0x1D63D, 'M', u'b'),
(0x1D63E, 'M', u'c'),
(0x1D63F, 'M', u'd'),
(0x1D640, 'M', u'e'),
(0x1D641, 'M', u'f'),
(0x1D642, 'M', u'g'),
(0x1D643, 'M', u'h'),
(0x1D644, 'M', u'i'),
(0x1D645, 'M', u'j'),
(0x1D646, 'M', u'k'),
(0x1D647, 'M', u'l'),
(0x1D648, 'M', u'm'),
(0x1D649, 'M', u'n'),
(0x1D64A, 'M', u'o'),
(0x1D64B, 'M', u'p'),
(0x1D64C, 'M', u'q'),
(0x1D64D, 'M', u'r'),
(0x1D64E, 'M', u's'),
(0x1D64F, 'M', u't'),
(0x1D650, 'M', u'u'),
(0x1D651, 'M', u'v'),
(0x1D652, 'M', u'w'),
(0x1D653, 'M', u'x'),
(0x1D654, 'M', u'y'),
(0x1D655, 'M', u'z'),
(0x1D656, 'M', u'a'),
]
def _seg_64():
return [
(0x1D657, 'M', u'b'),
(0x1D658, 'M', u'c'),
(0x1D659, 'M', u'd'),
(0x1D65A, 'M', u'e'),
(0x1D65B, 'M', u'f'),
(0x1D65C, 'M', u'g'),
(0x1D65D, 'M', u'h'),
(0x1D65E, 'M', u'i'),
(0x1D65F, 'M', u'j'),
(0x1D660, 'M', u'k'),
(0x1D661, 'M', u'l'),
(0x1D662, 'M', u'm'),
(0x1D663, 'M', u'n'),
(0x1D664, 'M', u'o'),
(0x1D665, 'M', u'p'),
(0x1D666, 'M', u'q'),
(0x1D667, 'M', u'r'),
(0x1D668, 'M', u's'),
(0x1D669, 'M', u't'),
(0x1D66A, 'M', u'u'),
(0x1D66B, 'M', u'v'),
(0x1D66C, 'M', u'w'),
(0x1D66D, 'M', u'x'),
(0x1D66E, 'M', u'y'),
(0x1D66F, 'M', u'z'),
(0x1D670, 'M', u'a'),
(0x1D671, 'M', u'b'),
(0x1D672, 'M', u'c'),
(0x1D673, 'M', u'd'),
(0x1D674, 'M', u'e'),
(0x1D675, 'M', u'f'),
(0x1D676, 'M', u'g'),
(0x1D677, 'M', u'h'),
(0x1D678, 'M', u'i'),
(0x1D679, 'M', u'j'),
(0x1D67A, 'M', u'k'),
(0x1D67B, 'M', u'l'),
(0x1D67C, 'M', u'm'),
(0x1D67D, 'M', u'n'),
(0x1D67E, 'M', u'o'),
(0x1D67F, 'M', u'p'),
(0x1D680, 'M', u'q'),
(0x1D681, 'M', u'r'),
(0x1D682, 'M', u's'),
(0x1D683, 'M', u't'),
(0x1D684, 'M', u'u'),
(0x1D685, 'M', u'v'),
(0x1D686, 'M', u'w'),
(0x1D687, 'M', u'x'),
(0x1D688, 'M', u'y'),
(0x1D689, 'M', u'z'),
(0x1D68A, 'M', u'a'),
(0x1D68B, 'M', u'b'),
(0x1D68C, 'M', u'c'),
(0x1D68D, 'M', u'd'),
(0x1D68E, 'M', u'e'),
(0x1D68F, 'M', u'f'),
(0x1D690, 'M', u'g'),
(0x1D691, 'M', u'h'),
(0x1D692, 'M', u'i'),
(0x1D693, 'M', u'j'),
(0x1D694, 'M', u'k'),
(0x1D695, 'M', u'l'),
(0x1D696, 'M', u'm'),
(0x1D697, 'M', u'n'),
(0x1D698, 'M', u'o'),
(0x1D699, 'M', u'p'),
(0x1D69A, 'M', u'q'),
(0x1D69B, 'M', u'r'),
(0x1D69C, 'M', u's'),
(0x1D69D, 'M', u't'),
(0x1D69E, 'M', u'u'),
(0x1D69F, 'M', u'v'),
(0x1D6A0, 'M', u'w'),
(0x1D6A1, 'M', u'x'),
(0x1D6A2, 'M', u'y'),
(0x1D6A3, 'M', u'z'),
(0x1D6A4, 'M', u'ı'),
(0x1D6A5, 'M', u'ȷ'),
(0x1D6A6, 'X'),
(0x1D6A8, 'M', u'α'),
(0x1D6A9, 'M', u'β'),
(0x1D6AA, 'M', u'γ'),
(0x1D6AB, 'M', u'δ'),
(0x1D6AC, 'M', u'ε'),
(0x1D6AD, 'M', u'ζ'),
(0x1D6AE, 'M', u'η'),
(0x1D6AF, 'M', u'θ'),
(0x1D6B0, 'M', u'ι'),
(0x1D6B1, 'M', u'κ'),
(0x1D6B2, 'M', u'λ'),
(0x1D6B3, 'M', u'μ'),
(0x1D6B4, 'M', u'ν'),
(0x1D6B5, 'M', u'ξ'),
(0x1D6B6, 'M', u'ο'),
(0x1D6B7, 'M', u'π'),
(0x1D6B8, 'M', u'ρ'),
(0x1D6B9, 'M', u'θ'),
(0x1D6BA, 'M', u'σ'),
(0x1D6BB, 'M', u'τ'),
]
def _seg_65():
return [
(0x1D6BC, 'M', u'υ'),
(0x1D6BD, 'M', u'φ'),
(0x1D6BE, 'M', u'χ'),
(0x1D6BF, 'M', u'ψ'),
(0x1D6C0, 'M', u'ω'),
(0x1D6C1, 'M', u'∇'),
(0x1D6C2, 'M', u'α'),
(0x1D6C3, 'M', u'β'),
(0x1D6C4, 'M', u'γ'),
(0x1D6C5, 'M', u'δ'),
(0x1D6C6, 'M', u'ε'),
(0x1D6C7, 'M', u'ζ'),
(0x1D6C8, 'M', u'η'),
(0x1D6C9, 'M', u'θ'),
(0x1D6CA, 'M', u'ι'),
(0x1D6CB, 'M', u'κ'),
(0x1D6CC, 'M', u'λ'),
(0x1D6CD, 'M', u'μ'),
(0x1D6CE, 'M', u'ν'),
(0x1D6CF, 'M', u'ξ'),
(0x1D6D0, 'M', u'ο'),
(0x1D6D1, 'M', u'π'),
(0x1D6D2, 'M', u'ρ'),
(0x1D6D3, 'M', u'σ'),
(0x1D6D5, 'M', u'τ'),
(0x1D6D6, 'M', u'υ'),
(0x1D6D7, 'M', u'φ'),
(0x1D6D8, 'M', u'χ'),
(0x1D6D9, 'M', u'ψ'),
(0x1D6DA, 'M', u'ω'),
(0x1D6DB, 'M', u'∂'),
(0x1D6DC, 'M', u'ε'),
(0x1D6DD, 'M', u'θ'),
(0x1D6DE, 'M', u'κ'),
(0x1D6DF, 'M', u'φ'),
(0x1D6E0, 'M', u'ρ'),
(0x1D6E1, 'M', u'π'),
(0x1D6E2, 'M', u'α'),
(0x1D6E3, 'M', u'β'),
(0x1D6E4, 'M', u'γ'),
(0x1D6E5, 'M', u'δ'),
(0x1D6E6, 'M', u'ε'),
(0x1D6E7, 'M', u'ζ'),
(0x1D6E8, 'M', u'η'),
(0x1D6E9, 'M', u'θ'),
(0x1D6EA, 'M', u'ι'),
(0x1D6EB, 'M', u'κ'),
(0x1D6EC, 'M', u'λ'),
(0x1D6ED, 'M', u'μ'),
(0x1D6EE, 'M', u'ν'),
(0x1D6EF, 'M', u'ξ'),
(0x1D6F0, 'M', u'ο'),
(0x1D6F1, 'M', u'π'),
(0x1D6F2, 'M', u'ρ'),
(0x1D6F3, 'M', u'θ'),
(0x1D6F4, 'M', u'σ'),
(0x1D6F5, 'M', u'τ'),
(0x1D6F6, 'M', u'υ'),
(0x1D6F7, 'M', u'φ'),
(0x1D6F8, 'M', u'χ'),
(0x1D6F9, 'M', u'ψ'),
(0x1D6FA, 'M', u'ω'),
(0x1D6FB, 'M', u'∇'),
(0x1D6FC, 'M', u'α'),
(0x1D6FD, 'M', u'β'),
(0x1D6FE, 'M', u'γ'),
(0x1D6FF, 'M', u'δ'),
(0x1D700, 'M', u'ε'),
(0x1D701, 'M', u'ζ'),
(0x1D702, 'M', u'η'),
(0x1D703, 'M', u'θ'),
(0x1D704, 'M', u'ι'),
(0x1D705, 'M', u'κ'),
(0x1D706, 'M', u'λ'),
(0x1D707, 'M', u'μ'),
(0x1D708, 'M', u'ν'),
(0x1D709, 'M', u'ξ'),
(0x1D70A, 'M', u'ο'),
(0x1D70B, 'M', u'π'),
(0x1D70C, 'M', u'ρ'),
(0x1D70D, 'M', u'σ'),
(0x1D70F, 'M', u'τ'),
(0x1D710, 'M', u'υ'),
(0x1D711, 'M', u'φ'),
(0x1D712, 'M', u'χ'),
(0x1D713, 'M', u'ψ'),
(0x1D714, 'M', u'ω'),
(0x1D715, 'M', u'∂'),
(0x1D716, 'M', u'ε'),
(0x1D717, 'M', u'θ'),
(0x1D718, 'M', u'κ'),
(0x1D719, 'M', u'φ'),
(0x1D71A, 'M', u'ρ'),
(0x1D71B, 'M', u'π'),
(0x1D71C, 'M', u'α'),
(0x1D71D, 'M', u'β'),
(0x1D71E, 'M', u'γ'),
(0x1D71F, 'M', u'δ'),
(0x1D720, 'M', u'ε'),
(0x1D721, 'M', u'ζ'),
]
def _seg_66():
return [
(0x1D722, 'M', u'η'),
(0x1D723, 'M', u'θ'),
(0x1D724, 'M', u'ι'),
(0x1D725, 'M', u'κ'),
(0x1D726, 'M', u'λ'),
(0x1D727, 'M', u'μ'),
(0x1D728, 'M', u'ν'),
(0x1D729, 'M', u'ξ'),
(0x1D72A, 'M', u'ο'),
(0x1D72B, 'M', u'π'),
(0x1D72C, 'M', u'ρ'),
(0x1D72D, 'M', u'θ'),
(0x1D72E, 'M', u'σ'),
(0x1D72F, 'M', u'τ'),
(0x1D730, 'M', u'υ'),
(0x1D731, 'M', u'φ'),
(0x1D732, 'M', u'χ'),
(0x1D733, 'M', u'ψ'),
(0x1D734, 'M', u'ω'),
(0x1D735, 'M', u'∇'),
(0x1D736, 'M', u'α'),
(0x1D737, 'M', u'β'),
(0x1D738, 'M', u'γ'),
(0x1D739, 'M', u'δ'),
(0x1D73A, 'M', u'ε'),
(0x1D73B, 'M', u'ζ'),
(0x1D73C, 'M', u'η'),
(0x1D73D, 'M', u'θ'),
(0x1D73E, 'M', u'ι'),
(0x1D73F, 'M', u'κ'),
(0x1D740, 'M', u'λ'),
(0x1D741, 'M', u'μ'),
(0x1D742, 'M', u'ν'),
(0x1D743, 'M', u'ξ'),
(0x1D744, 'M', u'ο'),
(0x1D745, 'M', u'π'),
(0x1D746, 'M', u'ρ'),
(0x1D747, 'M', u'σ'),
(0x1D749, 'M', u'τ'),
(0x1D74A, 'M', u'υ'),
(0x1D74B, 'M', u'φ'),
(0x1D74C, 'M', u'χ'),
(0x1D74D, 'M', u'ψ'),
(0x1D74E, 'M', u'ω'),
(0x1D74F, 'M', u'∂'),
(0x1D750, 'M', u'ε'),
(0x1D751, 'M', u'θ'),
(0x1D752, 'M', u'κ'),
(0x1D753, 'M', u'φ'),
(0x1D754, 'M', u'ρ'),
(0x1D755, 'M', u'π'),
(0x1D756, 'M', u'α'),
(0x1D757, 'M', u'β'),
(0x1D758, 'M', u'γ'),
(0x1D759, 'M', u'δ'),
(0x1D75A, 'M', u'ε'),
(0x1D75B, 'M', u'ζ'),
(0x1D75C, 'M', u'η'),
(0x1D75D, 'M', u'θ'),
(0x1D75E, 'M', u'ι'),
(0x1D75F, 'M', u'κ'),
(0x1D760, 'M', u'λ'),
(0x1D761, 'M', u'μ'),
(0x1D762, 'M', u'ν'),
(0x1D763, 'M', u'ξ'),
(0x1D764, 'M', u'ο'),
(0x1D765, 'M', u'π'),
(0x1D766, 'M', u'ρ'),
(0x1D767, 'M', u'θ'),
(0x1D768, 'M', u'σ'),
(0x1D769, 'M', u'τ'),
(0x1D76A, 'M', u'υ'),
(0x1D76B, 'M', u'φ'),
(0x1D76C, 'M', u'χ'),
(0x1D76D, 'M', u'ψ'),
(0x1D76E, 'M', u'ω'),
(0x1D76F, 'M', u'∇'),
(0x1D770, 'M', u'α'),
(0x1D771, 'M', u'β'),
(0x1D772, 'M', u'γ'),
(0x1D773, 'M', u'δ'),
(0x1D774, 'M', u'ε'),
(0x1D775, 'M', u'ζ'),
(0x1D776, 'M', u'η'),
(0x1D777, 'M', u'θ'),
(0x1D778, 'M', u'ι'),
(0x1D779, 'M', u'κ'),
(0x1D77A, 'M', u'λ'),
(0x1D77B, 'M', u'μ'),
(0x1D77C, 'M', u'ν'),
(0x1D77D, 'M', u'ξ'),
(0x1D77E, 'M', u'ο'),
(0x1D77F, 'M', u'π'),
(0x1D780, 'M', u'ρ'),
(0x1D781, 'M', u'σ'),
(0x1D783, 'M', u'τ'),
(0x1D784, 'M', u'υ'),
(0x1D785, 'M', u'φ'),
(0x1D786, 'M', u'χ'),
(0x1D787, 'M', u'ψ'),
]
def _seg_67():
return [
(0x1D788, 'M', u'ω'),
(0x1D789, 'M', u'∂'),
(0x1D78A, 'M', u'ε'),
(0x1D78B, 'M', u'θ'),
(0x1D78C, 'M', u'κ'),
(0x1D78D, 'M', u'φ'),
(0x1D78E, 'M', u'ρ'),
(0x1D78F, 'M', u'π'),
(0x1D790, 'M', u'α'),
(0x1D791, 'M', u'β'),
(0x1D792, 'M', u'γ'),
(0x1D793, 'M', u'δ'),
(0x1D794, 'M', u'ε'),
(0x1D795, 'M', u'ζ'),
(0x1D796, 'M', u'η'),
(0x1D797, 'M', u'θ'),
(0x1D798, 'M', u'ι'),
(0x1D799, 'M', u'κ'),
(0x1D79A, 'M', u'λ'),
(0x1D79B, 'M', u'μ'),
(0x1D79C, 'M', u'ν'),
(0x1D79D, 'M', u'ξ'),
(0x1D79E, 'M', u'ο'),
(0x1D79F, 'M', u'π'),
(0x1D7A0, 'M', u'ρ'),
(0x1D7A1, 'M', u'θ'),
(0x1D7A2, 'M', u'σ'),
(0x1D7A3, 'M', u'τ'),
(0x1D7A4, 'M', u'υ'),
(0x1D7A5, 'M', u'φ'),
(0x1D7A6, 'M', u'χ'),
(0x1D7A7, 'M', u'ψ'),
(0x1D7A8, 'M', u'ω'),
(0x1D7A9, 'M', u'∇'),
(0x1D7AA, 'M', u'α'),
(0x1D7AB, 'M', u'β'),
(0x1D7AC, 'M', u'γ'),
(0x1D7AD, 'M', u'δ'),
(0x1D7AE, 'M', u'ε'),
(0x1D7AF, 'M', u'ζ'),
(0x1D7B0, 'M', u'η'),
(0x1D7B1, 'M', u'θ'),
(0x1D7B2, 'M', u'ι'),
(0x1D7B3, 'M', u'κ'),
(0x1D7B4, 'M', u'λ'),
(0x1D7B5, 'M', u'μ'),
(0x1D7B6, 'M', u'ν'),
(0x1D7B7, 'M', u'ξ'),
(0x1D7B8, 'M', u'ο'),
(0x1D7B9, 'M', u'π'),
(0x1D7BA, 'M', u'ρ'),
(0x1D7BB, 'M', u'σ'),
(0x1D7BD, 'M', u'τ'),
(0x1D7BE, 'M', u'υ'),
(0x1D7BF, 'M', u'φ'),
(0x1D7C0, 'M', u'χ'),
(0x1D7C1, 'M', u'ψ'),
(0x1D7C2, 'M', u'ω'),
(0x1D7C3, 'M', u'∂'),
(0x1D7C4, 'M', u'ε'),
(0x1D7C5, 'M', u'θ'),
(0x1D7C6, 'M', u'κ'),
(0x1D7C7, 'M', u'φ'),
(0x1D7C8, 'M', u'ρ'),
(0x1D7C9, 'M', u'π'),
(0x1D7CA, 'M', u'ϝ'),
(0x1D7CC, 'X'),
(0x1D7CE, 'M', u'0'),
(0x1D7CF, 'M', u'1'),
(0x1D7D0, 'M', u'2'),
(0x1D7D1, 'M', u'3'),
(0x1D7D2, 'M', u'4'),
(0x1D7D3, 'M', u'5'),
(0x1D7D4, 'M', u'6'),
(0x1D7D5, 'M', u'7'),
(0x1D7D6, 'M', u'8'),
(0x1D7D7, 'M', u'9'),
(0x1D7D8, 'M', u'0'),
(0x1D7D9, 'M', u'1'),
(0x1D7DA, 'M', u'2'),
(0x1D7DB, 'M', u'3'),
(0x1D7DC, 'M', u'4'),
(0x1D7DD, 'M', u'5'),
(0x1D7DE, 'M', u'6'),
(0x1D7DF, 'M', u'7'),
(0x1D7E0, 'M', u'8'),
(0x1D7E1, 'M', u'9'),
(0x1D7E2, 'M', u'0'),
(0x1D7E3, 'M', u'1'),
(0x1D7E4, 'M', u'2'),
(0x1D7E5, 'M', u'3'),
(0x1D7E6, 'M', u'4'),
(0x1D7E7, 'M', u'5'),
(0x1D7E8, 'M', u'6'),
(0x1D7E9, 'M', u'7'),
(0x1D7EA, 'M', u'8'),
(0x1D7EB, 'M', u'9'),
(0x1D7EC, 'M', u'0'),
(0x1D7ED, 'M', u'1'),
(0x1D7EE, 'M', u'2'),
]
def _seg_68():
return [
(0x1D7EF, 'M', u'3'),
(0x1D7F0, 'M', u'4'),
(0x1D7F1, 'M', u'5'),
(0x1D7F2, 'M', u'6'),
(0x1D7F3, 'M', u'7'),
(0x1D7F4, 'M', u'8'),
(0x1D7F5, 'M', u'9'),
(0x1D7F6, 'M', u'0'),
(0x1D7F7, 'M', u'1'),
(0x1D7F8, 'M', u'2'),
(0x1D7F9, 'M', u'3'),
(0x1D7FA, 'M', u'4'),
(0x1D7FB, 'M', u'5'),
(0x1D7FC, 'M', u'6'),
(0x1D7FD, 'M', u'7'),
(0x1D7FE, 'M', u'8'),
(0x1D7FF, 'M', u'9'),
(0x1D800, 'V'),
(0x1DA8C, 'X'),
(0x1DA9B, 'V'),
(0x1DAA0, 'X'),
(0x1DAA1, 'V'),
(0x1DAB0, 'X'),
(0x1E000, 'V'),
(0x1E007, 'X'),
(0x1E008, 'V'),
(0x1E019, 'X'),
(0x1E01B, 'V'),
(0x1E022, 'X'),
(0x1E023, 'V'),
(0x1E025, 'X'),
(0x1E026, 'V'),
(0x1E02B, 'X'),
(0x1E800, 'V'),
(0x1E8C5, 'X'),
(0x1E8C7, 'V'),
(0x1E8D7, 'X'),
(0x1E900, 'M', u'𞤢'),
(0x1E901, 'M', u'𞤣'),
(0x1E902, 'M', u'𞤤'),
(0x1E903, 'M', u'𞤥'),
(0x1E904, 'M', u'𞤦'),
(0x1E905, 'M', u'𞤧'),
(0x1E906, 'M', u'𞤨'),
(0x1E907, 'M', u'𞤩'),
(0x1E908, 'M', u'𞤪'),
(0x1E909, 'M', u'𞤫'),
(0x1E90A, 'M', u'𞤬'),
(0x1E90B, 'M', u'𞤭'),
(0x1E90C, 'M', u'𞤮'),
(0x1E90D, 'M', u'𞤯'),
(0x1E90E, 'M', u'𞤰'),
(0x1E90F, 'M', u'𞤱'),
(0x1E910, 'M', u'𞤲'),
(0x1E911, 'M', u'𞤳'),
(0x1E912, 'M', u'𞤴'),
(0x1E913, 'M', u'𞤵'),
(0x1E914, 'M', u'𞤶'),
(0x1E915, 'M', u'𞤷'),
(0x1E916, 'M', u'𞤸'),
(0x1E917, 'M', u'𞤹'),
(0x1E918, 'M', u'𞤺'),
(0x1E919, 'M', u'𞤻'),
(0x1E91A, 'M', u'𞤼'),
(0x1E91B, 'M', u'𞤽'),
(0x1E91C, 'M', u'𞤾'),
(0x1E91D, 'M', u'𞤿'),
(0x1E91E, 'M', u'𞥀'),
(0x1E91F, 'M', u'𞥁'),
(0x1E920, 'M', u'𞥂'),
(0x1E921, 'M', u'𞥃'),
(0x1E922, 'V'),
(0x1E94B, 'X'),
(0x1E950, 'V'),
(0x1E95A, 'X'),
(0x1E95E, 'V'),
(0x1E960, 'X'),
(0x1EC71, 'V'),
(0x1ECB5, 'X'),
(0x1EE00, 'M', u'ا'),
(0x1EE01, 'M', u'ب'),
(0x1EE02, 'M', u'ج'),
(0x1EE03, 'M', u'د'),
(0x1EE04, 'X'),
(0x1EE05, 'M', u'و'),
(0x1EE06, 'M', u'ز'),
(0x1EE07, 'M', u'ح'),
(0x1EE08, 'M', u'ط'),
(0x1EE09, 'M', u'ي'),
(0x1EE0A, 'M', u'ك'),
(0x1EE0B, 'M', u'ل'),
(0x1EE0C, 'M', u'م'),
(0x1EE0D, 'M', u'ن'),
(0x1EE0E, 'M', u'س'),
(0x1EE0F, 'M', u'ع'),
(0x1EE10, 'M', u'ف'),
(0x1EE11, 'M', u'ص'),
(0x1EE12, 'M', u'ق'),
(0x1EE13, 'M', u'ر'),
(0x1EE14, 'M', u'ش'),
]
def _seg_69():
return [
(0x1EE15, 'M', u'ت'),
(0x1EE16, 'M', u'ث'),
(0x1EE17, 'M', u'خ'),
(0x1EE18, 'M', u'ذ'),
(0x1EE19, 'M', u'ض'),
(0x1EE1A, 'M', u'ظ'),
(0x1EE1B, 'M', u'غ'),
(0x1EE1C, 'M', u'ٮ'),
(0x1EE1D, 'M', u'ں'),
(0x1EE1E, 'M', u'ڡ'),
(0x1EE1F, 'M', u'ٯ'),
(0x1EE20, 'X'),
(0x1EE21, 'M', u'ب'),
(0x1EE22, 'M', u'ج'),
(0x1EE23, 'X'),
(0x1EE24, 'M', u'ه'),
(0x1EE25, 'X'),
(0x1EE27, 'M', u'ح'),
(0x1EE28, 'X'),
(0x1EE29, 'M', u'ي'),
(0x1EE2A, 'M', u'ك'),
(0x1EE2B, 'M', u'ل'),
(0x1EE2C, 'M', u'م'),
(0x1EE2D, 'M', u'ن'),
(0x1EE2E, 'M', u'س'),
(0x1EE2F, 'M', u'ع'),
(0x1EE30, 'M', u'ف'),
(0x1EE31, 'M', u'ص'),
(0x1EE32, 'M', u'ق'),
(0x1EE33, 'X'),
(0x1EE34, 'M', u'ش'),
(0x1EE35, 'M', u'ت'),
(0x1EE36, 'M', u'ث'),
(0x1EE37, 'M', u'خ'),
(0x1EE38, 'X'),
(0x1EE39, 'M', u'ض'),
(0x1EE3A, 'X'),
(0x1EE3B, 'M', u'غ'),
(0x1EE3C, 'X'),
(0x1EE42, 'M', u'ج'),
(0x1EE43, 'X'),
(0x1EE47, 'M', u'ح'),
(0x1EE48, 'X'),
(0x1EE49, 'M', u'ي'),
(0x1EE4A, 'X'),
(0x1EE4B, 'M', u'ل'),
(0x1EE4C, 'X'),
(0x1EE4D, 'M', u'ن'),
(0x1EE4E, 'M', u'س'),
(0x1EE4F, 'M', u'ع'),
(0x1EE50, 'X'),
(0x1EE51, 'M', u'ص'),
(0x1EE52, 'M', u'ق'),
(0x1EE53, 'X'),
(0x1EE54, 'M', u'ش'),
(0x1EE55, 'X'),
(0x1EE57, 'M', u'خ'),
(0x1EE58, 'X'),
(0x1EE59, 'M', u'ض'),
(0x1EE5A, 'X'),
(0x1EE5B, 'M', u'غ'),
(0x1EE5C, 'X'),
(0x1EE5D, 'M', u'ں'),
(0x1EE5E, 'X'),
(0x1EE5F, 'M', u'ٯ'),
(0x1EE60, 'X'),
(0x1EE61, 'M', u'ب'),
(0x1EE62, 'M', u'ج'),
(0x1EE63, 'X'),
(0x1EE64, 'M', u'ه'),
(0x1EE65, 'X'),
(0x1EE67, 'M', u'ح'),
(0x1EE68, 'M', u'ط'),
(0x1EE69, 'M', u'ي'),
(0x1EE6A, 'M', u'ك'),
(0x1EE6B, 'X'),
(0x1EE6C, 'M', u'م'),
(0x1EE6D, 'M', u'ن'),
(0x1EE6E, 'M', u'س'),
(0x1EE6F, 'M', u'ع'),
(0x1EE70, 'M', u'ف'),
(0x1EE71, 'M', u'ص'),
(0x1EE72, 'M', u'ق'),
(0x1EE73, 'X'),
(0x1EE74, 'M', u'ش'),
(0x1EE75, 'M', u'ت'),
(0x1EE76, 'M', u'ث'),
(0x1EE77, 'M', u'خ'),
(0x1EE78, 'X'),
(0x1EE79, 'M', u'ض'),
(0x1EE7A, 'M', u'ظ'),
(0x1EE7B, 'M', u'غ'),
(0x1EE7C, 'M', u'ٮ'),
(0x1EE7D, 'X'),
(0x1EE7E, 'M', u'ڡ'),
(0x1EE7F, 'X'),
(0x1EE80, 'M', u'ا'),
(0x1EE81, 'M', u'ب'),
(0x1EE82, 'M', u'ج'),
(0x1EE83, 'M', u'د'),
]
def _seg_70():
return [
(0x1EE84, 'M', u'ه'),
(0x1EE85, 'M', u'و'),
(0x1EE86, 'M', u'ز'),
(0x1EE87, 'M', u'ح'),
(0x1EE88, 'M', u'ط'),
(0x1EE89, 'M', u'ي'),
(0x1EE8A, 'X'),
(0x1EE8B, 'M', u'ل'),
(0x1EE8C, 'M', u'م'),
(0x1EE8D, 'M', u'ن'),
(0x1EE8E, 'M', u'س'),
(0x1EE8F, 'M', u'ع'),
(0x1EE90, 'M', u'ف'),
(0x1EE91, 'M', u'ص'),
(0x1EE92, 'M', u'ق'),
(0x1EE93, 'M', u'ر'),
(0x1EE94, 'M', u'ش'),
(0x1EE95, 'M', u'ت'),
(0x1EE96, 'M', u'ث'),
(0x1EE97, 'M', u'خ'),
(0x1EE98, 'M', u'ذ'),
(0x1EE99, 'M', u'ض'),
(0x1EE9A, 'M', u'ظ'),
(0x1EE9B, 'M', u'غ'),
(0x1EE9C, 'X'),
(0x1EEA1, 'M', u'ب'),
(0x1EEA2, 'M', u'ج'),
(0x1EEA3, 'M', u'د'),
(0x1EEA4, 'X'),
(0x1EEA5, 'M', u'و'),
(0x1EEA6, 'M', u'ز'),
(0x1EEA7, 'M', u'ح'),
(0x1EEA8, 'M', u'ط'),
(0x1EEA9, 'M', u'ي'),
(0x1EEAA, 'X'),
(0x1EEAB, 'M', u'ل'),
(0x1EEAC, 'M', u'م'),
(0x1EEAD, 'M', u'ن'),
(0x1EEAE, 'M', u'س'),
(0x1EEAF, 'M', u'ع'),
(0x1EEB0, 'M', u'ف'),
(0x1EEB1, 'M', u'ص'),
(0x1EEB2, 'M', u'ق'),
(0x1EEB3, 'M', u'ر'),
(0x1EEB4, 'M', u'ش'),
(0x1EEB5, 'M', u'ت'),
(0x1EEB6, 'M', u'ث'),
(0x1EEB7, 'M', u'خ'),
(0x1EEB8, 'M', u'ذ'),
(0x1EEB9, 'M', u'ض'),
(0x1EEBA, 'M', u'ظ'),
(0x1EEBB, 'M', u'غ'),
(0x1EEBC, 'X'),
(0x1EEF0, 'V'),
(0x1EEF2, 'X'),
(0x1F000, 'V'),
(0x1F02C, 'X'),
(0x1F030, 'V'),
(0x1F094, 'X'),
(0x1F0A0, 'V'),
(0x1F0AF, 'X'),
(0x1F0B1, 'V'),
(0x1F0C0, 'X'),
(0x1F0C1, 'V'),
(0x1F0D0, 'X'),
(0x1F0D1, 'V'),
(0x1F0F6, 'X'),
(0x1F101, '3', u'0,'),
(0x1F102, '3', u'1,'),
(0x1F103, '3', u'2,'),
(0x1F104, '3', u'3,'),
(0x1F105, '3', u'4,'),
(0x1F106, '3', u'5,'),
(0x1F107, '3', u'6,'),
(0x1F108, '3', u'7,'),
(0x1F109, '3', u'8,'),
(0x1F10A, '3', u'9,'),
(0x1F10B, 'V'),
(0x1F10D, 'X'),
(0x1F110, '3', u'(a)'),
(0x1F111, '3', u'(b)'),
(0x1F112, '3', u'(c)'),
(0x1F113, '3', u'(d)'),
(0x1F114, '3', u'(e)'),
(0x1F115, '3', u'(f)'),
(0x1F116, '3', u'(g)'),
(0x1F117, '3', u'(h)'),
(0x1F118, '3', u'(i)'),
(0x1F119, '3', u'(j)'),
(0x1F11A, '3', u'(k)'),
(0x1F11B, '3', u'(l)'),
(0x1F11C, '3', u'(m)'),
(0x1F11D, '3', u'(n)'),
(0x1F11E, '3', u'(o)'),
(0x1F11F, '3', u'(p)'),
(0x1F120, '3', u'(q)'),
(0x1F121, '3', u'(r)'),
(0x1F122, '3', u'(s)'),
(0x1F123, '3', u'(t)'),
(0x1F124, '3', u'(u)'),
]
def _seg_71():
return [
(0x1F125, '3', u'(v)'),
(0x1F126, '3', u'(w)'),
(0x1F127, '3', u'(x)'),
(0x1F128, '3', u'(y)'),
(0x1F129, '3', u'(z)'),
(0x1F12A, 'M', u'〔s〕'),
(0x1F12B, 'M', u'c'),
(0x1F12C, 'M', u'r'),
(0x1F12D, 'M', u'cd'),
(0x1F12E, 'M', u'wz'),
(0x1F12F, 'V'),
(0x1F130, 'M', u'a'),
(0x1F131, 'M', u'b'),
(0x1F132, 'M', u'c'),
(0x1F133, 'M', u'd'),
(0x1F134, 'M', u'e'),
(0x1F135, 'M', u'f'),
(0x1F136, 'M', u'g'),
(0x1F137, 'M', u'h'),
(0x1F138, 'M', u'i'),
(0x1F139, 'M', u'j'),
(0x1F13A, 'M', u'k'),
(0x1F13B, 'M', u'l'),
(0x1F13C, 'M', u'm'),
(0x1F13D, 'M', u'n'),
(0x1F13E, 'M', u'o'),
(0x1F13F, 'M', u'p'),
(0x1F140, 'M', u'q'),
(0x1F141, 'M', u'r'),
(0x1F142, 'M', u's'),
(0x1F143, 'M', u't'),
(0x1F144, 'M', u'u'),
(0x1F145, 'M', u'v'),
(0x1F146, 'M', u'w'),
(0x1F147, 'M', u'x'),
(0x1F148, 'M', u'y'),
(0x1F149, 'M', u'z'),
(0x1F14A, 'M', u'hv'),
(0x1F14B, 'M', u'mv'),
(0x1F14C, 'M', u'sd'),
(0x1F14D, 'M', u'ss'),
(0x1F14E, 'M', u'ppv'),
(0x1F14F, 'M', u'wc'),
(0x1F150, 'V'),
(0x1F16A, 'M', u'mc'),
(0x1F16B, 'M', u'md'),
(0x1F16C, 'X'),
(0x1F170, 'V'),
(0x1F190, 'M', u'dj'),
(0x1F191, 'V'),
(0x1F1AD, 'X'),
(0x1F1E6, 'V'),
(0x1F200, 'M', u'ほか'),
(0x1F201, 'M', u'ココ'),
(0x1F202, 'M', u'サ'),
(0x1F203, 'X'),
(0x1F210, 'M', u'手'),
(0x1F211, 'M', u'字'),
(0x1F212, 'M', u'双'),
(0x1F213, 'M', u'デ'),
(0x1F214, 'M', u'二'),
(0x1F215, 'M', u'多'),
(0x1F216, 'M', u'解'),
(0x1F217, 'M', u'天'),
(0x1F218, 'M', u'交'),
(0x1F219, 'M', u'映'),
(0x1F21A, 'M', u'無'),
(0x1F21B, 'M', u'料'),
(0x1F21C, 'M', u'前'),
(0x1F21D, 'M', u'後'),
(0x1F21E, 'M', u'再'),
(0x1F21F, 'M', u'新'),
(0x1F220, 'M', u'初'),
(0x1F221, 'M', u'終'),
(0x1F222, 'M', u'生'),
(0x1F223, 'M', u'販'),
(0x1F224, 'M', u'声'),
(0x1F225, 'M', u'吹'),
(0x1F226, 'M', u'演'),
(0x1F227, 'M', u'投'),
(0x1F228, 'M', u'捕'),
(0x1F229, 'M', u'一'),
(0x1F22A, 'M', u'三'),
(0x1F22B, 'M', u'遊'),
(0x1F22C, 'M', u'左'),
(0x1F22D, 'M', u'中'),
(0x1F22E, 'M', u'右'),
(0x1F22F, 'M', u'指'),
(0x1F230, 'M', u'走'),
(0x1F231, 'M', u'打'),
(0x1F232, 'M', u'禁'),
(0x1F233, 'M', u'空'),
(0x1F234, 'M', u'合'),
(0x1F235, 'M', u'満'),
(0x1F236, 'M', u'有'),
(0x1F237, 'M', u'月'),
(0x1F238, 'M', u'申'),
(0x1F239, 'M', u'割'),
(0x1F23A, 'M', u'営'),
(0x1F23B, 'M', u'配'),
]
def _seg_72():
return [
(0x1F23C, 'X'),
(0x1F240, 'M', u'〔本〕'),
(0x1F241, 'M', u'〔三〕'),
(0x1F242, 'M', u'〔二〕'),
(0x1F243, 'M', u'〔安〕'),
(0x1F244, 'M', u'〔点〕'),
(0x1F245, 'M', u'〔打〕'),
(0x1F246, 'M', u'〔盗〕'),
(0x1F247, 'M', u'〔勝〕'),
(0x1F248, 'M', u'〔敗〕'),
(0x1F249, 'X'),
(0x1F250, 'M', u'得'),
(0x1F251, 'M', u'可'),
(0x1F252, 'X'),
(0x1F260, 'V'),
(0x1F266, 'X'),
(0x1F300, 'V'),
(0x1F6D5, 'X'),
(0x1F6E0, 'V'),
(0x1F6ED, 'X'),
(0x1F6F0, 'V'),
(0x1F6FA, 'X'),
(0x1F700, 'V'),
(0x1F774, 'X'),
(0x1F780, 'V'),
(0x1F7D9, 'X'),
(0x1F800, 'V'),
(0x1F80C, 'X'),
(0x1F810, 'V'),
(0x1F848, 'X'),
(0x1F850, 'V'),
(0x1F85A, 'X'),
(0x1F860, 'V'),
(0x1F888, 'X'),
(0x1F890, 'V'),
(0x1F8AE, 'X'),
(0x1F900, 'V'),
(0x1F90C, 'X'),
(0x1F910, 'V'),
(0x1F93F, 'X'),
(0x1F940, 'V'),
(0x1F971, 'X'),
(0x1F973, 'V'),
(0x1F977, 'X'),
(0x1F97A, 'V'),
(0x1F97B, 'X'),
(0x1F97C, 'V'),
(0x1F9A3, 'X'),
(0x1F9B0, 'V'),
(0x1F9BA, 'X'),
(0x1F9C0, 'V'),
(0x1F9C3, 'X'),
(0x1F9D0, 'V'),
(0x1FA00, 'X'),
(0x1FA60, 'V'),
(0x1FA6E, 'X'),
(0x20000, 'V'),
(0x2A6D7, 'X'),
(0x2A700, 'V'),
(0x2B735, 'X'),
(0x2B740, 'V'),
(0x2B81E, 'X'),
(0x2B820, 'V'),
(0x2CEA2, 'X'),
(0x2CEB0, 'V'),
(0x2EBE1, 'X'),
(0x2F800, 'M', u'丽'),
(0x2F801, 'M', u'丸'),
(0x2F802, 'M', u'乁'),
(0x2F803, 'M', u'𠄢'),
(0x2F804, 'M', u'你'),
(0x2F805, 'M', u'侮'),
(0x2F806, 'M', u'侻'),
(0x2F807, 'M', u'倂'),
(0x2F808, 'M', u'偺'),
(0x2F809, 'M', u'備'),
(0x2F80A, 'M', u'僧'),
(0x2F80B, 'M', u'像'),
(0x2F80C, 'M', u'㒞'),
(0x2F80D, 'M', u'𠘺'),
(0x2F80E, 'M', u'免'),
(0x2F80F, 'M', u'兔'),
(0x2F810, 'M', u'兤'),
(0x2F811, 'M', u'具'),
(0x2F812, 'M', u'𠔜'),
(0x2F813, 'M', u'㒹'),
(0x2F814, 'M', u'內'),
(0x2F815, 'M', u'再'),
(0x2F816, 'M', u'𠕋'),
(0x2F817, 'M', u'冗'),
(0x2F818, 'M', u'冤'),
(0x2F819, 'M', u'仌'),
(0x2F81A, 'M', u'冬'),
(0x2F81B, 'M', u'况'),
(0x2F81C, 'M', u'𩇟'),
(0x2F81D, 'M', u'凵'),
(0x2F81E, 'M', u'刃'),
(0x2F81F, 'M', u'㓟'),
(0x2F820, 'M', u'刻'),
(0x2F821, 'M', u'剆'),
]
def _seg_73():
return [
(0x2F822, 'M', u'割'),
(0x2F823, 'M', u'剷'),
(0x2F824, 'M', u'㔕'),
(0x2F825, 'M', u'勇'),
(0x2F826, 'M', u'勉'),
(0x2F827, 'M', u'勤'),
(0x2F828, 'M', u'勺'),
(0x2F829, 'M', u'包'),
(0x2F82A, 'M', u'匆'),
(0x2F82B, 'M', u'北'),
(0x2F82C, 'M', u'卉'),
(0x2F82D, 'M', u'卑'),
(0x2F82E, 'M', u'博'),
(0x2F82F, 'M', u'即'),
(0x2F830, 'M', u'卽'),
(0x2F831, 'M', u'卿'),
(0x2F834, 'M', u'𠨬'),
(0x2F835, 'M', u'灰'),
(0x2F836, 'M', u'及'),
(0x2F837, 'M', u'叟'),
(0x2F838, 'M', u'𠭣'),
(0x2F839, 'M', u'叫'),
(0x2F83A, 'M', u'叱'),
(0x2F83B, 'M', u'吆'),
(0x2F83C, 'M', u'咞'),
(0x2F83D, 'M', u'吸'),
(0x2F83E, 'M', u'呈'),
(0x2F83F, 'M', u'周'),
(0x2F840, 'M', u'咢'),
(0x2F841, 'M', u'哶'),
(0x2F842, 'M', u'唐'),
(0x2F843, 'M', u'啓'),
(0x2F844, 'M', u'啣'),
(0x2F845, 'M', u'善'),
(0x2F847, 'M', u'喙'),
(0x2F848, 'M', u'喫'),
(0x2F849, 'M', u'喳'),
(0x2F84A, 'M', u'嗂'),
(0x2F84B, 'M', u'圖'),
(0x2F84C, 'M', u'嘆'),
(0x2F84D, 'M', u'圗'),
(0x2F84E, 'M', u'噑'),
(0x2F84F, 'M', u'噴'),
(0x2F850, 'M', u'切'),
(0x2F851, 'M', u'壮'),
(0x2F852, 'M', u'城'),
(0x2F853, 'M', u'埴'),
(0x2F854, 'M', u'堍'),
(0x2F855, 'M', u'型'),
(0x2F856, 'M', u'堲'),
(0x2F857, 'M', u'報'),
(0x2F858, 'M', u'墬'),
(0x2F859, 'M', u'𡓤'),
(0x2F85A, 'M', u'売'),
(0x2F85B, 'M', u'壷'),
(0x2F85C, 'M', u'夆'),
(0x2F85D, 'M', u'多'),
(0x2F85E, 'M', u'夢'),
(0x2F85F, 'M', u'奢'),
(0x2F860, 'M', u'𡚨'),
(0x2F861, 'M', u'𡛪'),
(0x2F862, 'M', u'姬'),
(0x2F863, 'M', u'娛'),
(0x2F864, 'M', u'娧'),
(0x2F865, 'M', u'姘'),
(0x2F866, 'M', u'婦'),
(0x2F867, 'M', u'㛮'),
(0x2F868, 'X'),
(0x2F869, 'M', u'嬈'),
(0x2F86A, 'M', u'嬾'),
(0x2F86C, 'M', u'𡧈'),
(0x2F86D, 'M', u'寃'),
(0x2F86E, 'M', u'寘'),
(0x2F86F, 'M', u'寧'),
(0x2F870, 'M', u'寳'),
(0x2F871, 'M', u'𡬘'),
(0x2F872, 'M', u'寿'),
(0x2F873, 'M', u'将'),
(0x2F874, 'X'),
(0x2F875, 'M', u'尢'),
(0x2F876, 'M', u'㞁'),
(0x2F877, 'M', u'屠'),
(0x2F878, 'M', u'屮'),
(0x2F879, 'M', u'峀'),
(0x2F87A, 'M', u'岍'),
(0x2F87B, 'M', u'𡷤'),
(0x2F87C, 'M', u'嵃'),
(0x2F87D, 'M', u'𡷦'),
(0x2F87E, 'M', u'嵮'),
(0x2F87F, 'M', u'嵫'),
(0x2F880, 'M', u'嵼'),
(0x2F881, 'M', u'巡'),
(0x2F882, 'M', u'巢'),
(0x2F883, 'M', u'㠯'),
(0x2F884, 'M', u'巽'),
(0x2F885, 'M', u'帨'),
(0x2F886, 'M', u'帽'),
(0x2F887, 'M', u'幩'),
(0x2F888, 'M', u'㡢'),
(0x2F889, 'M', u'𢆃'),
]
def _seg_74():
return [
(0x2F88A, 'M', u'㡼'),
(0x2F88B, 'M', u'庰'),
(0x2F88C, 'M', u'庳'),
(0x2F88D, 'M', u'庶'),
(0x2F88E, 'M', u'廊'),
(0x2F88F, 'M', u'𪎒'),
(0x2F890, 'M', u'廾'),
(0x2F891, 'M', u'𢌱'),
(0x2F893, 'M', u'舁'),
(0x2F894, 'M', u'弢'),
(0x2F896, 'M', u'㣇'),
(0x2F897, 'M', u'𣊸'),
(0x2F898, 'M', u'𦇚'),
(0x2F899, 'M', u'形'),
(0x2F89A, 'M', u'彫'),
(0x2F89B, 'M', u'㣣'),
(0x2F89C, 'M', u'徚'),
(0x2F89D, 'M', u'忍'),
(0x2F89E, 'M', u'志'),
(0x2F89F, 'M', u'忹'),
(0x2F8A0, 'M', u'悁'),
(0x2F8A1, 'M', u'㤺'),
(0x2F8A2, 'M', u'㤜'),
(0x2F8A3, 'M', u'悔'),
(0x2F8A4, 'M', u'𢛔'),
(0x2F8A5, 'M', u'惇'),
(0x2F8A6, 'M', u'慈'),
(0x2F8A7, 'M', u'慌'),
(0x2F8A8, 'M', u'慎'),
(0x2F8A9, 'M', u'慌'),
(0x2F8AA, 'M', u'慺'),
(0x2F8AB, 'M', u'憎'),
(0x2F8AC, 'M', u'憲'),
(0x2F8AD, 'M', u'憤'),
(0x2F8AE, 'M', u'憯'),
(0x2F8AF, 'M', u'懞'),
(0x2F8B0, 'M', u'懲'),
(0x2F8B1, 'M', u'懶'),
(0x2F8B2, 'M', u'成'),
(0x2F8B3, 'M', u'戛'),
(0x2F8B4, 'M', u'扝'),
(0x2F8B5, 'M', u'抱'),
(0x2F8B6, 'M', u'拔'),
(0x2F8B7, 'M', u'捐'),
(0x2F8B8, 'M', u'𢬌'),
(0x2F8B9, 'M', u'挽'),
(0x2F8BA, 'M', u'拼'),
(0x2F8BB, 'M', u'捨'),
(0x2F8BC, 'M', u'掃'),
(0x2F8BD, 'M', u'揤'),
(0x2F8BE, 'M', u'𢯱'),
(0x2F8BF, 'M', u'搢'),
(0x2F8C0, 'M', u'揅'),
(0x2F8C1, 'M', u'掩'),
(0x2F8C2, 'M', u'㨮'),
(0x2F8C3, 'M', u'摩'),
(0x2F8C4, 'M', u'摾'),
(0x2F8C5, 'M', u'撝'),
(0x2F8C6, 'M', u'摷'),
(0x2F8C7, 'M', u'㩬'),
(0x2F8C8, 'M', u'敏'),
(0x2F8C9, 'M', u'敬'),
(0x2F8CA, 'M', u'𣀊'),
(0x2F8CB, 'M', u'旣'),
(0x2F8CC, 'M', u'書'),
(0x2F8CD, 'M', u'晉'),
(0x2F8CE, 'M', u'㬙'),
(0x2F8CF, 'M', u'暑'),
(0x2F8D0, 'M', u'㬈'),
(0x2F8D1, 'M', u'㫤'),
(0x2F8D2, 'M', u'冒'),
(0x2F8D3, 'M', u'冕'),
(0x2F8D4, 'M', u'最'),
(0x2F8D5, 'M', u'暜'),
(0x2F8D6, 'M', u'肭'),
(0x2F8D7, 'M', u'䏙'),
(0x2F8D8, 'M', u'朗'),
(0x2F8D9, 'M', u'望'),
(0x2F8DA, 'M', u'朡'),
(0x2F8DB, 'M', u'杞'),
(0x2F8DC, 'M', u'杓'),
(0x2F8DD, 'M', u'𣏃'),
(0x2F8DE, 'M', u'㭉'),
(0x2F8DF, 'M', u'柺'),
(0x2F8E0, 'M', u'枅'),
(0x2F8E1, 'M', u'桒'),
(0x2F8E2, 'M', u'梅'),
(0x2F8E3, 'M', u'𣑭'),
(0x2F8E4, 'M', u'梎'),
(0x2F8E5, 'M', u'栟'),
(0x2F8E6, 'M', u'椔'),
(0x2F8E7, 'M', u'㮝'),
(0x2F8E8, 'M', u'楂'),
(0x2F8E9, 'M', u'榣'),
(0x2F8EA, 'M', u'槪'),
(0x2F8EB, 'M', u'檨'),
(0x2F8EC, 'M', u'𣚣'),
(0x2F8ED, 'M', u'櫛'),
(0x2F8EE, 'M', u'㰘'),
(0x2F8EF, 'M', u'次'),
]
def _seg_75():
return [
(0x2F8F0, 'M', u'𣢧'),
(0x2F8F1, 'M', u'歔'),
(0x2F8F2, 'M', u'㱎'),
(0x2F8F3, 'M', u'歲'),
(0x2F8F4, 'M', u'殟'),
(0x2F8F5, 'M', u'殺'),
(0x2F8F6, 'M', u'殻'),
(0x2F8F7, 'M', u'𣪍'),
(0x2F8F8, 'M', u'𡴋'),
(0x2F8F9, 'M', u'𣫺'),
(0x2F8FA, 'M', u'汎'),
(0x2F8FB, 'M', u'𣲼'),
(0x2F8FC, 'M', u'沿'),
(0x2F8FD, 'M', u'泍'),
(0x2F8FE, 'M', u'汧'),
(0x2F8FF, 'M', u'洖'),
(0x2F900, 'M', u'派'),
(0x2F901, 'M', u'海'),
(0x2F902, 'M', u'流'),
(0x2F903, 'M', u'浩'),
(0x2F904, 'M', u'浸'),
(0x2F905, 'M', u'涅'),
(0x2F906, 'M', u'𣴞'),
(0x2F907, 'M', u'洴'),
(0x2F908, 'M', u'港'),
(0x2F909, 'M', u'湮'),
(0x2F90A, 'M', u'㴳'),
(0x2F90B, 'M', u'滋'),
(0x2F90C, 'M', u'滇'),
(0x2F90D, 'M', u'𣻑'),
(0x2F90E, 'M', u'淹'),
(0x2F90F, 'M', u'潮'),
(0x2F910, 'M', u'𣽞'),
(0x2F911, 'M', u'𣾎'),
(0x2F912, 'M', u'濆'),
(0x2F913, 'M', u'瀹'),
(0x2F914, 'M', u'瀞'),
(0x2F915, 'M', u'瀛'),
(0x2F916, 'M', u'㶖'),
(0x2F917, 'M', u'灊'),
(0x2F918, 'M', u'災'),
(0x2F919, 'M', u'灷'),
(0x2F91A, 'M', u'炭'),
(0x2F91B, 'M', u'𠔥'),
(0x2F91C, 'M', u'煅'),
(0x2F91D, 'M', u'𤉣'),
(0x2F91E, 'M', u'熜'),
(0x2F91F, 'X'),
(0x2F920, 'M', u'爨'),
(0x2F921, 'M', u'爵'),
(0x2F922, 'M', u'牐'),
(0x2F923, 'M', u'𤘈'),
(0x2F924, 'M', u'犀'),
(0x2F925, 'M', u'犕'),
(0x2F926, 'M', u'𤜵'),
(0x2F927, 'M', u'𤠔'),
(0x2F928, 'M', u'獺'),
(0x2F929, 'M', u'王'),
(0x2F92A, 'M', u'㺬'),
(0x2F92B, 'M', u'玥'),
(0x2F92C, 'M', u'㺸'),
(0x2F92E, 'M', u'瑇'),
(0x2F92F, 'M', u'瑜'),
(0x2F930, 'M', u'瑱'),
(0x2F931, 'M', u'璅'),
(0x2F932, 'M', u'瓊'),
(0x2F933, 'M', u'㼛'),
(0x2F934, 'M', u'甤'),
(0x2F935, 'M', u'𤰶'),
(0x2F936, 'M', u'甾'),
(0x2F937, 'M', u'𤲒'),
(0x2F938, 'M', u'異'),
(0x2F939, 'M', u'𢆟'),
(0x2F93A, 'M', u'瘐'),
(0x2F93B, 'M', u'𤾡'),
(0x2F93C, 'M', u'𤾸'),
(0x2F93D, 'M', u'𥁄'),
(0x2F93E, 'M', u'㿼'),
(0x2F93F, 'M', u'䀈'),
(0x2F940, 'M', u'直'),
(0x2F941, 'M', u'𥃳'),
(0x2F942, 'M', u'𥃲'),
(0x2F943, 'M', u'𥄙'),
(0x2F944, 'M', u'𥄳'),
(0x2F945, 'M', u'眞'),
(0x2F946, 'M', u'真'),
(0x2F948, 'M', u'睊'),
(0x2F949, 'M', u'䀹'),
(0x2F94A, 'M', u'瞋'),
(0x2F94B, 'M', u'䁆'),
(0x2F94C, 'M', u'䂖'),
(0x2F94D, 'M', u'𥐝'),
(0x2F94E, 'M', u'硎'),
(0x2F94F, 'M', u'碌'),
(0x2F950, 'M', u'磌'),
(0x2F951, 'M', u'䃣'),
(0x2F952, 'M', u'𥘦'),
(0x2F953, 'M', u'祖'),
(0x2F954, 'M', u'𥚚'),
(0x2F955, 'M', u'𥛅'),
]
def _seg_76():
return [
(0x2F956, 'M', u'福'),
(0x2F957, 'M', u'秫'),
(0x2F958, 'M', u'䄯'),
(0x2F959, 'M', u'穀'),
(0x2F95A, 'M', u'穊'),
(0x2F95B, 'M', u'穏'),
(0x2F95C, 'M', u'𥥼'),
(0x2F95D, 'M', u'𥪧'),
(0x2F95F, 'X'),
(0x2F960, 'M', u'䈂'),
(0x2F961, 'M', u'𥮫'),
(0x2F962, 'M', u'篆'),
(0x2F963, 'M', u'築'),
(0x2F964, 'M', u'䈧'),
(0x2F965, 'M', u'𥲀'),
(0x2F966, 'M', u'糒'),
(0x2F967, 'M', u'䊠'),
(0x2F968, 'M', u'糨'),
(0x2F969, 'M', u'糣'),
(0x2F96A, 'M', u'紀'),
(0x2F96B, 'M', u'𥾆'),
(0x2F96C, 'M', u'絣'),
(0x2F96D, 'M', u'䌁'),
(0x2F96E, 'M', u'緇'),
(0x2F96F, 'M', u'縂'),
(0x2F970, 'M', u'繅'),
(0x2F971, 'M', u'䌴'),
(0x2F972, 'M', u'𦈨'),
(0x2F973, 'M', u'𦉇'),
(0x2F974, 'M', u'䍙'),
(0x2F975, 'M', u'𦋙'),
(0x2F976, 'M', u'罺'),
(0x2F977, 'M', u'𦌾'),
(0x2F978, 'M', u'羕'),
(0x2F979, 'M', u'翺'),
(0x2F97A, 'M', u'者'),
(0x2F97B, 'M', u'𦓚'),
(0x2F97C, 'M', u'𦔣'),
(0x2F97D, 'M', u'聠'),
(0x2F97E, 'M', u'𦖨'),
(0x2F97F, 'M', u'聰'),
(0x2F980, 'M', u'𣍟'),
(0x2F981, 'M', u'䏕'),
(0x2F982, 'M', u'育'),
(0x2F983, 'M', u'脃'),
(0x2F984, 'M', u'䐋'),
(0x2F985, 'M', u'脾'),
(0x2F986, 'M', u'媵'),
(0x2F987, 'M', u'𦞧'),
(0x2F988, 'M', u'𦞵'),
(0x2F989, 'M', u'𣎓'),
(0x2F98A, 'M', u'𣎜'),
(0x2F98B, 'M', u'舁'),
(0x2F98C, 'M', u'舄'),
(0x2F98D, 'M', u'辞'),
(0x2F98E, 'M', u'䑫'),
(0x2F98F, 'M', u'芑'),
(0x2F990, 'M', u'芋'),
(0x2F991, 'M', u'芝'),
(0x2F992, 'M', u'劳'),
(0x2F993, 'M', u'花'),
(0x2F994, 'M', u'芳'),
(0x2F995, 'M', u'芽'),
(0x2F996, 'M', u'苦'),
(0x2F997, 'M', u'𦬼'),
(0x2F998, 'M', u'若'),
(0x2F999, 'M', u'茝'),
(0x2F99A, 'M', u'荣'),
(0x2F99B, 'M', u'莭'),
(0x2F99C, 'M', u'茣'),
(0x2F99D, 'M', u'莽'),
(0x2F99E, 'M', u'菧'),
(0x2F99F, 'M', u'著'),
(0x2F9A0, 'M', u'荓'),
(0x2F9A1, 'M', u'菊'),
(0x2F9A2, 'M', u'菌'),
(0x2F9A3, 'M', u'菜'),
(0x2F9A4, 'M', u'𦰶'),
(0x2F9A5, 'M', u'𦵫'),
(0x2F9A6, 'M', u'𦳕'),
(0x2F9A7, 'M', u'䔫'),
(0x2F9A8, 'M', u'蓱'),
(0x2F9A9, 'M', u'蓳'),
(0x2F9AA, 'M', u'蔖'),
(0x2F9AB, 'M', u'𧏊'),
(0x2F9AC, 'M', u'蕤'),
(0x2F9AD, 'M', u'𦼬'),
(0x2F9AE, 'M', u'䕝'),
(0x2F9AF, 'M', u'䕡'),
(0x2F9B0, 'M', u'𦾱'),
(0x2F9B1, 'M', u'𧃒'),
(0x2F9B2, 'M', u'䕫'),
(0x2F9B3, 'M', u'虐'),
(0x2F9B4, 'M', u'虜'),
(0x2F9B5, 'M', u'虧'),
(0x2F9B6, 'M', u'虩'),
(0x2F9B7, 'M', u'蚩'),
(0x2F9B8, 'M', u'蚈'),
(0x2F9B9, 'M', u'蜎'),
(0x2F9BA, 'M', u'蛢'),
]
def _seg_77():
return [
(0x2F9BB, 'M', u'蝹'),
(0x2F9BC, 'M', u'蜨'),
(0x2F9BD, 'M', u'蝫'),
(0x2F9BE, 'M', u'螆'),
(0x2F9BF, 'X'),
(0x2F9C0, 'M', u'蟡'),
(0x2F9C1, 'M', u'蠁'),
(0x2F9C2, 'M', u'䗹'),
(0x2F9C3, 'M', u'衠'),
(0x2F9C4, 'M', u'衣'),
(0x2F9C5, 'M', u'𧙧'),
(0x2F9C6, 'M', u'裗'),
(0x2F9C7, 'M', u'裞'),
(0x2F9C8, 'M', u'䘵'),
(0x2F9C9, 'M', u'裺'),
(0x2F9CA, 'M', u'㒻'),
(0x2F9CB, 'M', u'𧢮'),
(0x2F9CC, 'M', u'𧥦'),
(0x2F9CD, 'M', u'䚾'),
(0x2F9CE, 'M', u'䛇'),
(0x2F9CF, 'M', u'誠'),
(0x2F9D0, 'M', u'諭'),
(0x2F9D1, 'M', u'變'),
(0x2F9D2, 'M', u'豕'),
(0x2F9D3, 'M', u'𧲨'),
(0x2F9D4, 'M', u'貫'),
(0x2F9D5, 'M', u'賁'),
(0x2F9D6, 'M', u'贛'),
(0x2F9D7, 'M', u'起'),
(0x2F9D8, 'M', u'𧼯'),
(0x2F9D9, 'M', u'𠠄'),
(0x2F9DA, 'M', u'跋'),
(0x2F9DB, 'M', u'趼'),
(0x2F9DC, 'M', u'跰'),
(0x2F9DD, 'M', u'𠣞'),
(0x2F9DE, 'M', u'軔'),
(0x2F9DF, 'M', u'輸'),
(0x2F9E0, 'M', u'𨗒'),
(0x2F9E1, 'M', u'𨗭'),
(0x2F9E2, 'M', u'邔'),
(0x2F9E3, 'M', u'郱'),
(0x2F9E4, 'M', u'鄑'),
(0x2F9E5, 'M', u'𨜮'),
(0x2F9E6, 'M', u'鄛'),
(0x2F9E7, 'M', u'鈸'),
(0x2F9E8, 'M', u'鋗'),
(0x2F9E9, 'M', u'鋘'),
(0x2F9EA, 'M', u'鉼'),
(0x2F9EB, 'M', u'鏹'),
(0x2F9EC, 'M', u'鐕'),
(0x2F9ED, 'M', u'𨯺'),
(0x2F9EE, 'M', u'開'),
(0x2F9EF, 'M', u'䦕'),
(0x2F9F0, 'M', u'閷'),
(0x2F9F1, 'M', u'𨵷'),
(0x2F9F2, 'M', u'䧦'),
(0x2F9F3, 'M', u'雃'),
(0x2F9F4, 'M', u'嶲'),
(0x2F9F5, 'M', u'霣'),
(0x2F9F6, 'M', u'𩅅'),
(0x2F9F7, 'M', u'𩈚'),
(0x2F9F8, 'M', u'䩮'),
(0x2F9F9, 'M', u'䩶'),
(0x2F9FA, 'M', u'韠'),
(0x2F9FB, 'M', u'𩐊'),
(0x2F9FC, 'M', u'䪲'),
(0x2F9FD, 'M', u'𩒖'),
(0x2F9FE, 'M', u'頋'),
(0x2FA00, 'M', u'頩'),
(0x2FA01, 'M', u'𩖶'),
(0x2FA02, 'M', u'飢'),
(0x2FA03, 'M', u'䬳'),
(0x2FA04, 'M', u'餩'),
(0x2FA05, 'M', u'馧'),
(0x2FA06, 'M', u'駂'),
(0x2FA07, 'M', u'駾'),
(0x2FA08, 'M', u'䯎'),
(0x2FA09, 'M', u'𩬰'),
(0x2FA0A, 'M', u'鬒'),
(0x2FA0B, 'M', u'鱀'),
(0x2FA0C, 'M', u'鳽'),
(0x2FA0D, 'M', u'䳎'),
(0x2FA0E, 'M', u'䳭'),
(0x2FA0F, 'M', u'鵧'),
(0x2FA10, 'M', u'𪃎'),
(0x2FA11, 'M', u'䳸'),
(0x2FA12, 'M', u'𪄅'),
(0x2FA13, 'M', u'𪈎'),
(0x2FA14, 'M', u'𪊑'),
(0x2FA15, 'M', u'麻'),
(0x2FA16, 'M', u'䵖'),
(0x2FA17, 'M', u'黹'),
(0x2FA18, 'M', u'黾'),
(0x2FA19, 'M', u'鼅'),
(0x2FA1A, 'M', u'鼏'),
(0x2FA1B, 'M', u'鼖'),
(0x2FA1C, 'M', u'鼻'),
(0x2FA1D, 'M', u'𪘀'),
(0x2FA1E, 'X'),
(0xE0100, 'I'),
]
def _seg_78():
return [
(0xE01F0, 'X'),
]
uts46data = tuple(
_seg_0()
+ _seg_1()
+ _seg_2()
+ _seg_3()
+ _seg_4()
+ _seg_5()
+ _seg_6()
+ _seg_7()
+ _seg_8()
+ _seg_9()
+ _seg_10()
+ _seg_11()
+ _seg_12()
+ _seg_13()
+ _seg_14()
+ _seg_15()
+ _seg_16()
+ _seg_17()
+ _seg_18()
+ _seg_19()
+ _seg_20()
+ _seg_21()
+ _seg_22()
+ _seg_23()
+ _seg_24()
+ _seg_25()
+ _seg_26()
+ _seg_27()
+ _seg_28()
+ _seg_29()
+ _seg_30()
+ _seg_31()
+ _seg_32()
+ _seg_33()
+ _seg_34()
+ _seg_35()
+ _seg_36()
+ _seg_37()
+ _seg_38()
+ _seg_39()
+ _seg_40()
+ _seg_41()
+ _seg_42()
+ _seg_43()
+ _seg_44()
+ _seg_45()
+ _seg_46()
+ _seg_47()
+ _seg_48()
+ _seg_49()
+ _seg_50()
+ _seg_51()
+ _seg_52()
+ _seg_53()
+ _seg_54()
+ _seg_55()
+ _seg_56()
+ _seg_57()
+ _seg_58()
+ _seg_59()
+ _seg_60()
+ _seg_61()
+ _seg_62()
+ _seg_63()
+ _seg_64()
+ _seg_65()
+ _seg_66()
+ _seg_67()
+ _seg_68()
+ _seg_69()
+ _seg_70()
+ _seg_71()
+ _seg_72()
+ _seg_73()
+ _seg_74()
+ _seg_75()
+ _seg_76()
+ _seg_77()
+ _seg_78()
)
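
The assembled `uts46data` tuple above is sorted by starting code point; each row is `(start, status[, mapping])`, and a row governs every code point from its start up to the next row's start. A minimal sketch of how such a range table can be queried with `bisect` follows. The `lookup` helper and the shortened `demo` table are illustrative only and are not part of the generated module; the real idna package performs an equivalent bisect-based lookup internally.

import bisect

def lookup(table, code_point):
    # Rows are (start, status[, mapping]) sorted by start. Find the last
    # row whose start is <= code_point; that row governs the code point.
    # Assumes code_point >= table[0][0]; real code would precompute starts.
    starts = [row[0] for row in table]
    idx = bisect.bisect_right(starts, code_point) - 1
    return table[idx]

# Shortened excerpt of the ASCII portion of the table.
demo = ((0x0, '3'), (0x2D, 'V'), (0x2F, '3'), (0x30, 'V'), (0x3A, '3'),
        (0x41, 'M', u'a'), (0x42, 'M', u'b'), (0x43, 'M', u'c'))
print(lookup(demo, 0x42))  # row (0x42, 'M', u'b'): 'B' is mapped to 'b'
print(lookup(demo, 0x39))  # row (0x30, 'V'): digits '0'-'9' are valid as-is
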
# functions/source/GreengrassLambda/idna/uts46data.py
# This file is automatically generated by tools/idna-data
# vim: set fileencoding=utf-8 :
"""IDNA Mapping Table from UTS46."""
__version__ = "11.0.0"
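# Each row below is (start_code_point, status[, mapping]), sorted by start;
# a row governs the range from its start up to the next row's start. The
# status flags appear to follow the UTS #46 statuses used by the idna
# package: 'V' valid, 'M' mapped to the given replacement, 'D' deviation,
# 'I' ignored, 'X' disallowed, and '3' disallowed_STD3 (treated as valid,
# or as mapped when a replacement is present).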
def _seg_0():
return [
(0x0, '3'),
(0x1, '3'),
(0x2, '3'),
(0x3, '3'),
(0x4, '3'),
(0x5, '3'),
(0x6, '3'),
(0x7, '3'),
(0x8, '3'),
(0x9, '3'),
(0xA, '3'),
(0xB, '3'),
(0xC, '3'),
(0xD, '3'),
(0xE, '3'),
(0xF, '3'),
(0x10, '3'),
(0x11, '3'),
(0x12, '3'),
(0x13, '3'),
(0x14, '3'),
(0x15, '3'),
(0x16, '3'),
(0x17, '3'),
(0x18, '3'),
(0x19, '3'),
(0x1A, '3'),
(0x1B, '3'),
(0x1C, '3'),
(0x1D, '3'),
(0x1E, '3'),
(0x1F, '3'),
(0x20, '3'),
(0x21, '3'),
(0x22, '3'),
(0x23, '3'),
(0x24, '3'),
(0x25, '3'),
(0x26, '3'),
(0x27, '3'),
(0x28, '3'),
(0x29, '3'),
(0x2A, '3'),
(0x2B, '3'),
(0x2C, '3'),
(0x2D, 'V'),
(0x2E, 'V'),
(0x2F, '3'),
(0x30, 'V'),
(0x31, 'V'),
(0x32, 'V'),
(0x33, 'V'),
(0x34, 'V'),
(0x35, 'V'),
(0x36, 'V'),
(0x37, 'V'),
(0x38, 'V'),
(0x39, 'V'),
(0x3A, '3'),
(0x3B, '3'),
(0x3C, '3'),
(0x3D, '3'),
(0x3E, '3'),
(0x3F, '3'),
(0x40, '3'),
(0x41, 'M', u'a'),
(0x42, 'M', u'b'),
(0x43, 'M', u'c'),
(0x44, 'M', u'd'),
(0x45, 'M', u'e'),
(0x46, 'M', u'f'),
(0x47, 'M', u'g'),
(0x48, 'M', u'h'),
(0x49, 'M', u'i'),
(0x4A, 'M', u'j'),
(0x4B, 'M', u'k'),
(0x4C, 'M', u'l'),
(0x4D, 'M', u'm'),
(0x4E, 'M', u'n'),
(0x4F, 'M', u'o'),
(0x50, 'M', u'p'),
(0x51, 'M', u'q'),
(0x52, 'M', u'r'),
(0x53, 'M', u's'),
(0x54, 'M', u't'),
(0x55, 'M', u'u'),
(0x56, 'M', u'v'),
(0x57, 'M', u'w'),
(0x58, 'M', u'x'),
(0x59, 'M', u'y'),
(0x5A, 'M', u'z'),
(0x5B, '3'),
(0x5C, '3'),
(0x5D, '3'),
(0x5E, '3'),
(0x5F, '3'),
(0x60, '3'),
(0x61, 'V'),
(0x62, 'V'),
(0x63, 'V'),
]
def _seg_1():
return [
(0x64, 'V'),
(0x65, 'V'),
(0x66, 'V'),
(0x67, 'V'),
(0x68, 'V'),
(0x69, 'V'),
(0x6A, 'V'),
(0x6B, 'V'),
(0x6C, 'V'),
(0x6D, 'V'),
(0x6E, 'V'),
(0x6F, 'V'),
(0x70, 'V'),
(0x71, 'V'),
(0x72, 'V'),
(0x73, 'V'),
(0x74, 'V'),
(0x75, 'V'),
(0x76, 'V'),
(0x77, 'V'),
(0x78, 'V'),
(0x79, 'V'),
(0x7A, 'V'),
(0x7B, '3'),
(0x7C, '3'),
(0x7D, '3'),
(0x7E, '3'),
(0x7F, '3'),
(0x80, 'X'),
(0x81, 'X'),
(0x82, 'X'),
(0x83, 'X'),
(0x84, 'X'),
(0x85, 'X'),
(0x86, 'X'),
(0x87, 'X'),
(0x88, 'X'),
(0x89, 'X'),
(0x8A, 'X'),
(0x8B, 'X'),
(0x8C, 'X'),
(0x8D, 'X'),
(0x8E, 'X'),
(0x8F, 'X'),
(0x90, 'X'),
(0x91, 'X'),
(0x92, 'X'),
(0x93, 'X'),
(0x94, 'X'),
(0x95, 'X'),
(0x96, 'X'),
(0x97, 'X'),
(0x98, 'X'),
(0x99, 'X'),
(0x9A, 'X'),
(0x9B, 'X'),
(0x9C, 'X'),
(0x9D, 'X'),
(0x9E, 'X'),
(0x9F, 'X'),
(0xA0, '3', u' '),
(0xA1, 'V'),
(0xA2, 'V'),
(0xA3, 'V'),
(0xA4, 'V'),
(0xA5, 'V'),
(0xA6, 'V'),
(0xA7, 'V'),
(0xA8, '3', u' ̈'),
(0xA9, 'V'),
(0xAA, 'M', u'a'),
(0xAB, 'V'),
(0xAC, 'V'),
(0xAD, 'I'),
(0xAE, 'V'),
(0xAF, '3', u' ̄'),
(0xB0, 'V'),
(0xB1, 'V'),
(0xB2, 'M', u'2'),
(0xB3, 'M', u'3'),
(0xB4, '3', u' ́'),
(0xB5, 'M', u'μ'),
(0xB6, 'V'),
(0xB7, 'V'),
(0xB8, '3', u' ̧'),
(0xB9, 'M', u'1'),
(0xBA, 'M', u'o'),
(0xBB, 'V'),
(0xBC, 'M', u'1⁄4'),
(0xBD, 'M', u'1⁄2'),
(0xBE, 'M', u'3⁄4'),
(0xBF, 'V'),
(0xC0, 'M', u'à'),
(0xC1, 'M', u'á'),
(0xC2, 'M', u'â'),
(0xC3, 'M', u'ã'),
(0xC4, 'M', u'ä'),
(0xC5, 'M', u'å'),
(0xC6, 'M', u'æ'),
(0xC7, 'M', u'ç'),
]
def _seg_2():
return [
(0xC8, 'M', u'è'),
(0xC9, 'M', u'é'),
(0xCA, 'M', u'ê'),
(0xCB, 'M', u'ë'),
(0xCC, 'M', u'ì'),
(0xCD, 'M', u'í'),
(0xCE, 'M', u'î'),
(0xCF, 'M', u'ï'),
(0xD0, 'M', u'ð'),
(0xD1, 'M', u'ñ'),
(0xD2, 'M', u'ò'),
(0xD3, 'M', u'ó'),
(0xD4, 'M', u'ô'),
(0xD5, 'M', u'õ'),
(0xD6, 'M', u'ö'),
(0xD7, 'V'),
(0xD8, 'M', u'ø'),
(0xD9, 'M', u'ù'),
(0xDA, 'M', u'ú'),
(0xDB, 'M', u'û'),
(0xDC, 'M', u'ü'),
(0xDD, 'M', u'ý'),
(0xDE, 'M', u'þ'),
(0xDF, 'D', u'ss'),
(0xE0, 'V'),
(0xE1, 'V'),
(0xE2, 'V'),
(0xE3, 'V'),
(0xE4, 'V'),
(0xE5, 'V'),
(0xE6, 'V'),
(0xE7, 'V'),
(0xE8, 'V'),
(0xE9, 'V'),
(0xEA, 'V'),
(0xEB, 'V'),
(0xEC, 'V'),
(0xED, 'V'),
(0xEE, 'V'),
(0xEF, 'V'),
(0xF0, 'V'),
(0xF1, 'V'),
(0xF2, 'V'),
(0xF3, 'V'),
(0xF4, 'V'),
(0xF5, 'V'),
(0xF6, 'V'),
(0xF7, 'V'),
(0xF8, 'V'),
(0xF9, 'V'),
(0xFA, 'V'),
(0xFB, 'V'),
(0xFC, 'V'),
(0xFD, 'V'),
(0xFE, 'V'),
(0xFF, 'V'),
(0x100, 'M', u'ā'),
(0x101, 'V'),
(0x102, 'M', u'ă'),
(0x103, 'V'),
(0x104, 'M', u'ą'),
(0x105, 'V'),
(0x106, 'M', u'ć'),
(0x107, 'V'),
(0x108, 'M', u'ĉ'),
(0x109, 'V'),
(0x10A, 'M', u'ċ'),
(0x10B, 'V'),
(0x10C, 'M', u'č'),
(0x10D, 'V'),
(0x10E, 'M', u'ď'),
(0x10F, 'V'),
(0x110, 'M', u'đ'),
(0x111, 'V'),
(0x112, 'M', u'ē'),
(0x113, 'V'),
(0x114, 'M', u'ĕ'),
(0x115, 'V'),
(0x116, 'M', u'ė'),
(0x117, 'V'),
(0x118, 'M', u'ę'),
(0x119, 'V'),
(0x11A, 'M', u'ě'),
(0x11B, 'V'),
(0x11C, 'M', u'ĝ'),
(0x11D, 'V'),
(0x11E, 'M', u'ğ'),
(0x11F, 'V'),
(0x120, 'M', u'ġ'),
(0x121, 'V'),
(0x122, 'M', u'ģ'),
(0x123, 'V'),
(0x124, 'M', u'ĥ'),
(0x125, 'V'),
(0x126, 'M', u'ħ'),
(0x127, 'V'),
(0x128, 'M', u'ĩ'),
(0x129, 'V'),
(0x12A, 'M', u'ī'),
(0x12B, 'V'),
]
def _seg_3():
return [
(0x12C, 'M', u'ĭ'),
(0x12D, 'V'),
(0x12E, 'M', u'į'),
(0x12F, 'V'),
(0x130, 'M', u'i̇'),
(0x131, 'V'),
(0x132, 'M', u'ij'),
(0x134, 'M', u'ĵ'),
(0x135, 'V'),
(0x136, 'M', u'ķ'),
(0x137, 'V'),
(0x139, 'M', u'ĺ'),
(0x13A, 'V'),
(0x13B, 'M', u'ļ'),
(0x13C, 'V'),
(0x13D, 'M', u'ľ'),
(0x13E, 'V'),
(0x13F, 'M', u'l·'),
(0x141, 'M', u'ł'),
(0x142, 'V'),
(0x143, 'M', u'ń'),
(0x144, 'V'),
(0x145, 'M', u'ņ'),
(0x146, 'V'),
(0x147, 'M', u'ň'),
(0x148, 'V'),
(0x149, 'M', u'ʼn'),
(0x14A, 'M', u'ŋ'),
(0x14B, 'V'),
(0x14C, 'M', u'ō'),
(0x14D, 'V'),
(0x14E, 'M', u'ŏ'),
(0x14F, 'V'),
(0x150, 'M', u'ő'),
(0x151, 'V'),
(0x152, 'M', u'œ'),
(0x153, 'V'),
(0x154, 'M', u'ŕ'),
(0x155, 'V'),
(0x156, 'M', u'ŗ'),
(0x157, 'V'),
(0x158, 'M', u'ř'),
(0x159, 'V'),
(0x15A, 'M', u'ś'),
(0x15B, 'V'),
(0x15C, 'M', u'ŝ'),
(0x15D, 'V'),
(0x15E, 'M', u'ş'),
(0x15F, 'V'),
(0x160, 'M', u'š'),
(0x161, 'V'),
(0x162, 'M', u'ţ'),
(0x163, 'V'),
(0x164, 'M', u'ť'),
(0x165, 'V'),
(0x166, 'M', u'ŧ'),
(0x167, 'V'),
(0x168, 'M', u'ũ'),
(0x169, 'V'),
(0x16A, 'M', u'ū'),
(0x16B, 'V'),
(0x16C, 'M', u'ŭ'),
(0x16D, 'V'),
(0x16E, 'M', u'ů'),
(0x16F, 'V'),
(0x170, 'M', u'ű'),
(0x171, 'V'),
(0x172, 'M', u'ų'),
(0x173, 'V'),
(0x174, 'M', u'ŵ'),
(0x175, 'V'),
(0x176, 'M', u'ŷ'),
(0x177, 'V'),
(0x178, 'M', u'ÿ'),
(0x179, 'M', u'ź'),
(0x17A, 'V'),
(0x17B, 'M', u'ż'),
(0x17C, 'V'),
(0x17D, 'M', u'ž'),
(0x17E, 'V'),
(0x17F, 'M', u's'),
(0x180, 'V'),
(0x181, 'M', u'ɓ'),
(0x182, 'M', u'ƃ'),
(0x183, 'V'),
(0x184, 'M', u'ƅ'),
(0x185, 'V'),
(0x186, 'M', u'ɔ'),
(0x187, 'M', u'ƈ'),
(0x188, 'V'),
(0x189, 'M', u'ɖ'),
(0x18A, 'M', u'ɗ'),
(0x18B, 'M', u'ƌ'),
(0x18C, 'V'),
(0x18E, 'M', u'ǝ'),
(0x18F, 'M', u'ə'),
(0x190, 'M', u'ɛ'),
(0x191, 'M', u'ƒ'),
(0x192, 'V'),
(0x193, 'M', u'ɠ'),
]
def _seg_4():
return [
(0x194, 'M', u'ɣ'),
(0x195, 'V'),
(0x196, 'M', u'ɩ'),
(0x197, 'M', u'ɨ'),
(0x198, 'M', u'ƙ'),
(0x199, 'V'),
(0x19C, 'M', u'ɯ'),
(0x19D, 'M', u'ɲ'),
(0x19E, 'V'),
(0x19F, 'M', u'ɵ'),
(0x1A0, 'M', u'ơ'),
(0x1A1, 'V'),
(0x1A2, 'M', u'ƣ'),
(0x1A3, 'V'),
(0x1A4, 'M', u'ƥ'),
(0x1A5, 'V'),
(0x1A6, 'M', u'ʀ'),
(0x1A7, 'M', u'ƨ'),
(0x1A8, 'V'),
(0x1A9, 'M', u'ʃ'),
(0x1AA, 'V'),
(0x1AC, 'M', u'ƭ'),
(0x1AD, 'V'),
(0x1AE, 'M', u'ʈ'),
(0x1AF, 'M', u'ư'),
(0x1B0, 'V'),
(0x1B1, 'M', u'ʊ'),
(0x1B2, 'M', u'ʋ'),
(0x1B3, 'M', u'ƴ'),
(0x1B4, 'V'),
(0x1B5, 'M', u'ƶ'),
(0x1B6, 'V'),
(0x1B7, 'M', u'ʒ'),
(0x1B8, 'M', u'ƹ'),
(0x1B9, 'V'),
(0x1BC, 'M', u'ƽ'),
(0x1BD, 'V'),
(0x1C4, 'M', u'dž'),
(0x1C7, 'M', u'lj'),
(0x1CA, 'M', u'nj'),
(0x1CD, 'M', u'ǎ'),
(0x1CE, 'V'),
(0x1CF, 'M', u'ǐ'),
(0x1D0, 'V'),
(0x1D1, 'M', u'ǒ'),
(0x1D2, 'V'),
(0x1D3, 'M', u'ǔ'),
(0x1D4, 'V'),
(0x1D5, 'M', u'ǖ'),
(0x1D6, 'V'),
(0x1D7, 'M', u'ǘ'),
(0x1D8, 'V'),
(0x1D9, 'M', u'ǚ'),
(0x1DA, 'V'),
(0x1DB, 'M', u'ǜ'),
(0x1DC, 'V'),
(0x1DE, 'M', u'ǟ'),
(0x1DF, 'V'),
(0x1E0, 'M', u'ǡ'),
(0x1E1, 'V'),
(0x1E2, 'M', u'ǣ'),
(0x1E3, 'V'),
(0x1E4, 'M', u'ǥ'),
(0x1E5, 'V'),
(0x1E6, 'M', u'ǧ'),
(0x1E7, 'V'),
(0x1E8, 'M', u'ǩ'),
(0x1E9, 'V'),
(0x1EA, 'M', u'ǫ'),
(0x1EB, 'V'),
(0x1EC, 'M', u'ǭ'),
(0x1ED, 'V'),
(0x1EE, 'M', u'ǯ'),
(0x1EF, 'V'),
(0x1F1, 'M', u'dz'),
(0x1F4, 'M', u'ǵ'),
(0x1F5, 'V'),
(0x1F6, 'M', u'ƕ'),
(0x1F7, 'M', u'ƿ'),
(0x1F8, 'M', u'ǹ'),
(0x1F9, 'V'),
(0x1FA, 'M', u'ǻ'),
(0x1FB, 'V'),
(0x1FC, 'M', u'ǽ'),
(0x1FD, 'V'),
(0x1FE, 'M', u'ǿ'),
(0x1FF, 'V'),
(0x200, 'M', u'ȁ'),
(0x201, 'V'),
(0x202, 'M', u'ȃ'),
(0x203, 'V'),
(0x204, 'M', u'ȅ'),
(0x205, 'V'),
(0x206, 'M', u'ȇ'),
(0x207, 'V'),
(0x208, 'M', u'ȉ'),
(0x209, 'V'),
(0x20A, 'M', u'ȋ'),
(0x20B, 'V'),
(0x20C, 'M', u'ȍ'),
]
def _seg_5():
return [
(0x20D, 'V'),
(0x20E, 'M', u'ȏ'),
(0x20F, 'V'),
(0x210, 'M', u'ȑ'),
(0x211, 'V'),
(0x212, 'M', u'ȓ'),
(0x213, 'V'),
(0x214, 'M', u'ȕ'),
(0x215, 'V'),
(0x216, 'M', u'ȗ'),
(0x217, 'V'),
(0x218, 'M', u'ș'),
(0x219, 'V'),
(0x21A, 'M', u'ț'),
(0x21B, 'V'),
(0x21C, 'M', u'ȝ'),
(0x21D, 'V'),
(0x21E, 'M', u'ȟ'),
(0x21F, 'V'),
(0x220, 'M', u'ƞ'),
(0x221, 'V'),
(0x222, 'M', u'ȣ'),
(0x223, 'V'),
(0x224, 'M', u'ȥ'),
(0x225, 'V'),
(0x226, 'M', u'ȧ'),
(0x227, 'V'),
(0x228, 'M', u'ȩ'),
(0x229, 'V'),
(0x22A, 'M', u'ȫ'),
(0x22B, 'V'),
(0x22C, 'M', u'ȭ'),
(0x22D, 'V'),
(0x22E, 'M', u'ȯ'),
(0x22F, 'V'),
(0x230, 'M', u'ȱ'),
(0x231, 'V'),
(0x232, 'M', u'ȳ'),
(0x233, 'V'),
(0x23A, 'M', u'ⱥ'),
(0x23B, 'M', u'ȼ'),
(0x23C, 'V'),
(0x23D, 'M', u'ƚ'),
(0x23E, 'M', u'ⱦ'),
(0x23F, 'V'),
(0x241, 'M', u'ɂ'),
(0x242, 'V'),
(0x243, 'M', u'ƀ'),
(0x244, 'M', u'ʉ'),
(0x245, 'M', u'ʌ'),
(0x246, 'M', u'ɇ'),
(0x247, 'V'),
(0x248, 'M', u'ɉ'),
(0x249, 'V'),
(0x24A, 'M', u'ɋ'),
(0x24B, 'V'),
(0x24C, 'M', u'ɍ'),
(0x24D, 'V'),
(0x24E, 'M', u'ɏ'),
(0x24F, 'V'),
(0x2B0, 'M', u'h'),
(0x2B1, 'M', u'ɦ'),
(0x2B2, 'M', u'j'),
(0x2B3, 'M', u'r'),
(0x2B4, 'M', u'ɹ'),
(0x2B5, 'M', u'ɻ'),
(0x2B6, 'M', u'ʁ'),
(0x2B7, 'M', u'w'),
(0x2B8, 'M', u'y'),
(0x2B9, 'V'),
(0x2D8, '3', u' ̆'),
(0x2D9, '3', u' ̇'),
(0x2DA, '3', u' ̊'),
(0x2DB, '3', u' ̨'),
(0x2DC, '3', u' ̃'),
(0x2DD, '3', u' ̋'),
(0x2DE, 'V'),
(0x2E0, 'M', u'ɣ'),
(0x2E1, 'M', u'l'),
(0x2E2, 'M', u's'),
(0x2E3, 'M', u'x'),
(0x2E4, 'M', u'ʕ'),
(0x2E5, 'V'),
(0x340, 'M', u'̀'),
(0x341, 'M', u'́'),
(0x342, 'V'),
(0x343, 'M', u'̓'),
(0x344, 'M', u'̈́'),
(0x345, 'M', u'ι'),
(0x346, 'V'),
(0x34F, 'I'),
(0x350, 'V'),
(0x370, 'M', u'ͱ'),
(0x371, 'V'),
(0x372, 'M', u'ͳ'),
(0x373, 'V'),
(0x374, 'M', u'ʹ'),
(0x375, 'V'),
(0x376, 'M', u'ͷ'),
(0x377, 'V'),
]
def _seg_6():
return [
(0x378, 'X'),
(0x37A, '3', u' ι'),
(0x37B, 'V'),
(0x37E, '3', u';'),
(0x37F, 'M', u'ϳ'),
(0x380, 'X'),
(0x384, '3', u' ́'),
(0x385, '3', u' ̈́'),
(0x386, 'M', u'ά'),
(0x387, 'M', u'·'),
(0x388, 'M', u'έ'),
(0x389, 'M', u'ή'),
(0x38A, 'M', u'ί'),
(0x38B, 'X'),
(0x38C, 'M', u'ό'),
(0x38D, 'X'),
(0x38E, 'M', u'ύ'),
(0x38F, 'M', u'ώ'),
(0x390, 'V'),
(0x391, 'M', u'α'),
(0x392, 'M', u'β'),
(0x393, 'M', u'γ'),
(0x394, 'M', u'δ'),
(0x395, 'M', u'ε'),
(0x396, 'M', u'ζ'),
(0x397, 'M', u'η'),
(0x398, 'M', u'θ'),
(0x399, 'M', u'ι'),
(0x39A, 'M', u'κ'),
(0x39B, 'M', u'λ'),
(0x39C, 'M', u'μ'),
(0x39D, 'M', u'ν'),
(0x39E, 'M', u'ξ'),
(0x39F, 'M', u'ο'),
(0x3A0, 'M', u'π'),
(0x3A1, 'M', u'ρ'),
(0x3A2, 'X'),
(0x3A3, 'M', u'σ'),
(0x3A4, 'M', u'τ'),
(0x3A5, 'M', u'υ'),
(0x3A6, 'M', u'φ'),
(0x3A7, 'M', u'χ'),
(0x3A8, 'M', u'ψ'),
(0x3A9, 'M', u'ω'),
(0x3AA, 'M', u'ϊ'),
(0x3AB, 'M', u'ϋ'),
(0x3AC, 'V'),
(0x3C2, 'D', u'σ'),
(0x3C3, 'V'),
(0x3CF, 'M', u'ϗ'),
(0x3D0, 'M', u'β'),
(0x3D1, 'M', u'θ'),
(0x3D2, 'M', u'υ'),
(0x3D3, 'M', u'ύ'),
(0x3D4, 'M', u'ϋ'),
(0x3D5, 'M', u'φ'),
(0x3D6, 'M', u'π'),
(0x3D7, 'V'),
(0x3D8, 'M', u'ϙ'),
(0x3D9, 'V'),
(0x3DA, 'M', u'ϛ'),
(0x3DB, 'V'),
(0x3DC, 'M', u'ϝ'),
(0x3DD, 'V'),
(0x3DE, 'M', u'ϟ'),
(0x3DF, 'V'),
(0x3E0, 'M', u'ϡ'),
(0x3E1, 'V'),
(0x3E2, 'M', u'ϣ'),
(0x3E3, 'V'),
(0x3E4, 'M', u'ϥ'),
(0x3E5, 'V'),
(0x3E6, 'M', u'ϧ'),
(0x3E7, 'V'),
(0x3E8, 'M', u'ϩ'),
(0x3E9, 'V'),
(0x3EA, 'M', u'ϫ'),
(0x3EB, 'V'),
(0x3EC, 'M', u'ϭ'),
(0x3ED, 'V'),
(0x3EE, 'M', u'ϯ'),
(0x3EF, 'V'),
(0x3F0, 'M', u'κ'),
(0x3F1, 'M', u'ρ'),
(0x3F2, 'M', u'σ'),
(0x3F3, 'V'),
(0x3F4, 'M', u'θ'),
(0x3F5, 'M', u'ε'),
(0x3F6, 'V'),
(0x3F7, 'M', u'ϸ'),
(0x3F8, 'V'),
(0x3F9, 'M', u'σ'),
(0x3FA, 'M', u'ϻ'),
(0x3FB, 'V'),
(0x3FD, 'M', u'ͻ'),
(0x3FE, 'M', u'ͼ'),
(0x3FF, 'M', u'ͽ'),
(0x400, 'M', u'ѐ'),
(0x401, 'M', u'ё'),
(0x402, 'M', u'ђ'),
]
def _seg_7():
return [
(0x403, 'M', u'ѓ'),
(0x404, 'M', u'є'),
(0x405, 'M', u'ѕ'),
(0x406, 'M', u'і'),
(0x407, 'M', u'ї'),
(0x408, 'M', u'ј'),
(0x409, 'M', u'љ'),
(0x40A, 'M', u'њ'),
(0x40B, 'M', u'ћ'),
(0x40C, 'M', u'ќ'),
(0x40D, 'M', u'ѝ'),
(0x40E, 'M', u'ў'),
(0x40F, 'M', u'џ'),
(0x410, 'M', u'а'),
(0x411, 'M', u'б'),
(0x412, 'M', u'в'),
(0x413, 'M', u'г'),
(0x414, 'M', u'д'),
(0x415, 'M', u'е'),
(0x416, 'M', u'ж'),
(0x417, 'M', u'з'),
(0x418, 'M', u'и'),
(0x419, 'M', u'й'),
(0x41A, 'M', u'к'),
(0x41B, 'M', u'л'),
(0x41C, 'M', u'м'),
(0x41D, 'M', u'н'),
(0x41E, 'M', u'о'),
(0x41F, 'M', u'п'),
(0x420, 'M', u'р'),
(0x421, 'M', u'с'),
(0x422, 'M', u'т'),
(0x423, 'M', u'у'),
(0x424, 'M', u'ф'),
(0x425, 'M', u'х'),
(0x426, 'M', u'ц'),
(0x427, 'M', u'ч'),
(0x428, 'M', u'ш'),
(0x429, 'M', u'щ'),
(0x42A, 'M', u'ъ'),
(0x42B, 'M', u'ы'),
(0x42C, 'M', u'ь'),
(0x42D, 'M', u'э'),
(0x42E, 'M', u'ю'),
(0x42F, 'M', u'я'),
(0x430, 'V'),
(0x460, 'M', u'ѡ'),
(0x461, 'V'),
(0x462, 'M', u'ѣ'),
(0x463, 'V'),
(0x464, 'M', u'ѥ'),
(0x465, 'V'),
(0x466, 'M', u'ѧ'),
(0x467, 'V'),
(0x468, 'M', u'ѩ'),
(0x469, 'V'),
(0x46A, 'M', u'ѫ'),
(0x46B, 'V'),
(0x46C, 'M', u'ѭ'),
(0x46D, 'V'),
(0x46E, 'M', u'ѯ'),
(0x46F, 'V'),
(0x470, 'M', u'ѱ'),
(0x471, 'V'),
(0x472, 'M', u'ѳ'),
(0x473, 'V'),
(0x474, 'M', u'ѵ'),
(0x475, 'V'),
(0x476, 'M', u'ѷ'),
(0x477, 'V'),
(0x478, 'M', u'ѹ'),
(0x479, 'V'),
(0x47A, 'M', u'ѻ'),
(0x47B, 'V'),
(0x47C, 'M', u'ѽ'),
(0x47D, 'V'),
(0x47E, 'M', u'ѿ'),
(0x47F, 'V'),
(0x480, 'M', u'ҁ'),
(0x481, 'V'),
(0x48A, 'M', u'ҋ'),
(0x48B, 'V'),
(0x48C, 'M', u'ҍ'),
(0x48D, 'V'),
(0x48E, 'M', u'ҏ'),
(0x48F, 'V'),
(0x490, 'M', u'ґ'),
(0x491, 'V'),
(0x492, 'M', u'ғ'),
(0x493, 'V'),
(0x494, 'M', u'ҕ'),
(0x495, 'V'),
(0x496, 'M', u'җ'),
(0x497, 'V'),
(0x498, 'M', u'ҙ'),
(0x499, 'V'),
(0x49A, 'M', u'қ'),
(0x49B, 'V'),
(0x49C, 'M', u'ҝ'),
(0x49D, 'V'),
]
def _seg_8():
return [
(0x49E, 'M', u'ҟ'),
(0x49F, 'V'),
(0x4A0, 'M', u'ҡ'),
(0x4A1, 'V'),
(0x4A2, 'M', u'ң'),
(0x4A3, 'V'),
(0x4A4, 'M', u'ҥ'),
(0x4A5, 'V'),
(0x4A6, 'M', u'ҧ'),
(0x4A7, 'V'),
(0x4A8, 'M', u'ҩ'),
(0x4A9, 'V'),
(0x4AA, 'M', u'ҫ'),
(0x4AB, 'V'),
(0x4AC, 'M', u'ҭ'),
(0x4AD, 'V'),
(0x4AE, 'M', u'ү'),
(0x4AF, 'V'),
(0x4B0, 'M', u'ұ'),
(0x4B1, 'V'),
(0x4B2, 'M', u'ҳ'),
(0x4B3, 'V'),
(0x4B4, 'M', u'ҵ'),
(0x4B5, 'V'),
(0x4B6, 'M', u'ҷ'),
(0x4B7, 'V'),
(0x4B8, 'M', u'ҹ'),
(0x4B9, 'V'),
(0x4BA, 'M', u'һ'),
(0x4BB, 'V'),
(0x4BC, 'M', u'ҽ'),
(0x4BD, 'V'),
(0x4BE, 'M', u'ҿ'),
(0x4BF, 'V'),
(0x4C0, 'X'),
(0x4C1, 'M', u'ӂ'),
(0x4C2, 'V'),
(0x4C3, 'M', u'ӄ'),
(0x4C4, 'V'),
(0x4C5, 'M', u'ӆ'),
(0x4C6, 'V'),
(0x4C7, 'M', u'ӈ'),
(0x4C8, 'V'),
(0x4C9, 'M', u'ӊ'),
(0x4CA, 'V'),
(0x4CB, 'M', u'ӌ'),
(0x4CC, 'V'),
(0x4CD, 'M', u'ӎ'),
(0x4CE, 'V'),
(0x4D0, 'M', u'ӑ'),
(0x4D1, 'V'),
(0x4D2, 'M', u'ӓ'),
(0x4D3, 'V'),
(0x4D4, 'M', u'ӕ'),
(0x4D5, 'V'),
(0x4D6, 'M', u'ӗ'),
(0x4D7, 'V'),
(0x4D8, 'M', u'ә'),
(0x4D9, 'V'),
(0x4DA, 'M', u'ӛ'),
(0x4DB, 'V'),
(0x4DC, 'M', u'ӝ'),
(0x4DD, 'V'),
(0x4DE, 'M', u'ӟ'),
(0x4DF, 'V'),
(0x4E0, 'M', u'ӡ'),
(0x4E1, 'V'),
(0x4E2, 'M', u'ӣ'),
(0x4E3, 'V'),
(0x4E4, 'M', u'ӥ'),
(0x4E5, 'V'),
(0x4E6, 'M', u'ӧ'),
(0x4E7, 'V'),
(0x4E8, 'M', u'ө'),
(0x4E9, 'V'),
(0x4EA, 'M', u'ӫ'),
(0x4EB, 'V'),
(0x4EC, 'M', u'ӭ'),
(0x4ED, 'V'),
(0x4EE, 'M', u'ӯ'),
(0x4EF, 'V'),
(0x4F0, 'M', u'ӱ'),
(0x4F1, 'V'),
(0x4F2, 'M', u'ӳ'),
(0x4F3, 'V'),
(0x4F4, 'M', u'ӵ'),
(0x4F5, 'V'),
(0x4F6, 'M', u'ӷ'),
(0x4F7, 'V'),
(0x4F8, 'M', u'ӹ'),
(0x4F9, 'V'),
(0x4FA, 'M', u'ӻ'),
(0x4FB, 'V'),
(0x4FC, 'M', u'ӽ'),
(0x4FD, 'V'),
(0x4FE, 'M', u'ӿ'),
(0x4FF, 'V'),
(0x500, 'M', u'ԁ'),
(0x501, 'V'),
(0x502, 'M', u'ԃ'),
]
def _seg_9():
return [
(0x503, 'V'),
(0x504, 'M', u'ԅ'),
(0x505, 'V'),
(0x506, 'M', u'ԇ'),
(0x507, 'V'),
(0x508, 'M', u'ԉ'),
(0x509, 'V'),
(0x50A, 'M', u'ԋ'),
(0x50B, 'V'),
(0x50C, 'M', u'ԍ'),
(0x50D, 'V'),
(0x50E, 'M', u'ԏ'),
(0x50F, 'V'),
(0x510, 'M', u'ԑ'),
(0x511, 'V'),
(0x512, 'M', u'ԓ'),
(0x513, 'V'),
(0x514, 'M', u'ԕ'),
(0x515, 'V'),
(0x516, 'M', u'ԗ'),
(0x517, 'V'),
(0x518, 'M', u'ԙ'),
(0x519, 'V'),
(0x51A, 'M', u'ԛ'),
(0x51B, 'V'),
(0x51C, 'M', u'ԝ'),
(0x51D, 'V'),
(0x51E, 'M', u'ԟ'),
(0x51F, 'V'),
(0x520, 'M', u'ԡ'),
(0x521, 'V'),
(0x522, 'M', u'ԣ'),
(0x523, 'V'),
(0x524, 'M', u'ԥ'),
(0x525, 'V'),
(0x526, 'M', u'ԧ'),
(0x527, 'V'),
(0x528, 'M', u'ԩ'),
(0x529, 'V'),
(0x52A, 'M', u'ԫ'),
(0x52B, 'V'),
(0x52C, 'M', u'ԭ'),
(0x52D, 'V'),
(0x52E, 'M', u'ԯ'),
(0x52F, 'V'),
(0x530, 'X'),
(0x531, 'M', u'ա'),
(0x532, 'M', u'բ'),
(0x533, 'M', u'գ'),
(0x534, 'M', u'դ'),
(0x535, 'M', u'ե'),
(0x536, 'M', u'զ'),
(0x537, 'M', u'է'),
(0x538, 'M', u'ը'),
(0x539, 'M', u'թ'),
(0x53A, 'M', u'ժ'),
(0x53B, 'M', u'ի'),
(0x53C, 'M', u'լ'),
(0x53D, 'M', u'խ'),
(0x53E, 'M', u'ծ'),
(0x53F, 'M', u'կ'),
(0x540, 'M', u'հ'),
(0x541, 'M', u'ձ'),
(0x542, 'M', u'ղ'),
(0x543, 'M', u'ճ'),
(0x544, 'M', u'մ'),
(0x545, 'M', u'յ'),
(0x546, 'M', u'ն'),
(0x547, 'M', u'շ'),
(0x548, 'M', u'ո'),
(0x549, 'M', u'չ'),
(0x54A, 'M', u'պ'),
(0x54B, 'M', u'ջ'),
(0x54C, 'M', u'ռ'),
(0x54D, 'M', u'ս'),
(0x54E, 'M', u'վ'),
(0x54F, 'M', u'տ'),
(0x550, 'M', u'ր'),
(0x551, 'M', u'ց'),
(0x552, 'M', u'ւ'),
(0x553, 'M', u'փ'),
(0x554, 'M', u'ք'),
(0x555, 'M', u'օ'),
(0x556, 'M', u'ֆ'),
(0x557, 'X'),
(0x559, 'V'),
(0x587, 'M', u'եւ'),
(0x588, 'V'),
(0x58B, 'X'),
(0x58D, 'V'),
(0x590, 'X'),
(0x591, 'V'),
(0x5C8, 'X'),
(0x5D0, 'V'),
(0x5EB, 'X'),
(0x5EF, 'V'),
(0x5F5, 'X'),
(0x606, 'V'),
(0x61C, 'X'),
(0x61E, 'V'),
]
def _seg_10():
return [
(0x675, 'M', u'اٴ'),
(0x676, 'M', u'وٴ'),
(0x677, 'M', u'ۇٴ'),
(0x678, 'M', u'يٴ'),
(0x679, 'V'),
(0x6DD, 'X'),
(0x6DE, 'V'),
(0x70E, 'X'),
(0x710, 'V'),
(0x74B, 'X'),
(0x74D, 'V'),
(0x7B2, 'X'),
(0x7C0, 'V'),
(0x7FB, 'X'),
(0x7FD, 'V'),
(0x82E, 'X'),
(0x830, 'V'),
(0x83F, 'X'),
(0x840, 'V'),
(0x85C, 'X'),
(0x85E, 'V'),
(0x85F, 'X'),
(0x860, 'V'),
(0x86B, 'X'),
(0x8A0, 'V'),
(0x8B5, 'X'),
(0x8B6, 'V'),
(0x8BE, 'X'),
(0x8D3, 'V'),
(0x8E2, 'X'),
(0x8E3, 'V'),
(0x958, 'M', u'क़'),
(0x959, 'M', u'ख़'),
(0x95A, 'M', u'ग़'),
(0x95B, 'M', u'ज़'),
(0x95C, 'M', u'ड़'),
(0x95D, 'M', u'ढ़'),
(0x95E, 'M', u'फ़'),
(0x95F, 'M', u'य़'),
(0x960, 'V'),
(0x984, 'X'),
(0x985, 'V'),
(0x98D, 'X'),
(0x98F, 'V'),
(0x991, 'X'),
(0x993, 'V'),
(0x9A9, 'X'),
(0x9AA, 'V'),
(0x9B1, 'X'),
(0x9B2, 'V'),
(0x9B3, 'X'),
(0x9B6, 'V'),
(0x9BA, 'X'),
(0x9BC, 'V'),
(0x9C5, 'X'),
(0x9C7, 'V'),
(0x9C9, 'X'),
(0x9CB, 'V'),
(0x9CF, 'X'),
(0x9D7, 'V'),
(0x9D8, 'X'),
(0x9DC, 'M', u'ড়'),
(0x9DD, 'M', u'ঢ়'),
(0x9DE, 'X'),
(0x9DF, 'M', u'য়'),
(0x9E0, 'V'),
(0x9E4, 'X'),
(0x9E6, 'V'),
(0x9FF, 'X'),
(0xA01, 'V'),
(0xA04, 'X'),
(0xA05, 'V'),
(0xA0B, 'X'),
(0xA0F, 'V'),
(0xA11, 'X'),
(0xA13, 'V'),
(0xA29, 'X'),
(0xA2A, 'V'),
(0xA31, 'X'),
(0xA32, 'V'),
(0xA33, 'M', u'ਲ਼'),
(0xA34, 'X'),
(0xA35, 'V'),
(0xA36, 'M', u'ਸ਼'),
(0xA37, 'X'),
(0xA38, 'V'),
(0xA3A, 'X'),
(0xA3C, 'V'),
(0xA3D, 'X'),
(0xA3E, 'V'),
(0xA43, 'X'),
(0xA47, 'V'),
(0xA49, 'X'),
(0xA4B, 'V'),
(0xA4E, 'X'),
(0xA51, 'V'),
(0xA52, 'X'),
(0xA59, 'M', u'ਖ਼'),
(0xA5A, 'M', u'ਗ਼'),
(0xA5B, 'M', u'ਜ਼'),
]
def _seg_11():
return [
(0xA5C, 'V'),
(0xA5D, 'X'),
(0xA5E, 'M', u'ਫ਼'),
(0xA5F, 'X'),
(0xA66, 'V'),
(0xA77, 'X'),
(0xA81, 'V'),
(0xA84, 'X'),
(0xA85, 'V'),
(0xA8E, 'X'),
(0xA8F, 'V'),
(0xA92, 'X'),
(0xA93, 'V'),
(0xAA9, 'X'),
(0xAAA, 'V'),
(0xAB1, 'X'),
(0xAB2, 'V'),
(0xAB4, 'X'),
(0xAB5, 'V'),
(0xABA, 'X'),
(0xABC, 'V'),
(0xAC6, 'X'),
(0xAC7, 'V'),
(0xACA, 'X'),
(0xACB, 'V'),
(0xACE, 'X'),
(0xAD0, 'V'),
(0xAD1, 'X'),
(0xAE0, 'V'),
(0xAE4, 'X'),
(0xAE6, 'V'),
(0xAF2, 'X'),
(0xAF9, 'V'),
(0xB00, 'X'),
(0xB01, 'V'),
(0xB04, 'X'),
(0xB05, 'V'),
(0xB0D, 'X'),
(0xB0F, 'V'),
(0xB11, 'X'),
(0xB13, 'V'),
(0xB29, 'X'),
(0xB2A, 'V'),
(0xB31, 'X'),
(0xB32, 'V'),
(0xB34, 'X'),
(0xB35, 'V'),
(0xB3A, 'X'),
(0xB3C, 'V'),
(0xB45, 'X'),
(0xB47, 'V'),
(0xB49, 'X'),
(0xB4B, 'V'),
(0xB4E, 'X'),
(0xB56, 'V'),
(0xB58, 'X'),
(0xB5C, 'M', u'ଡ଼'),
(0xB5D, 'M', u'ଢ଼'),
(0xB5E, 'X'),
(0xB5F, 'V'),
(0xB64, 'X'),
(0xB66, 'V'),
(0xB78, 'X'),
(0xB82, 'V'),
(0xB84, 'X'),
(0xB85, 'V'),
(0xB8B, 'X'),
(0xB8E, 'V'),
(0xB91, 'X'),
(0xB92, 'V'),
(0xB96, 'X'),
(0xB99, 'V'),
(0xB9B, 'X'),
(0xB9C, 'V'),
(0xB9D, 'X'),
(0xB9E, 'V'),
(0xBA0, 'X'),
(0xBA3, 'V'),
(0xBA5, 'X'),
(0xBA8, 'V'),
(0xBAB, 'X'),
(0xBAE, 'V'),
(0xBBA, 'X'),
(0xBBE, 'V'),
(0xBC3, 'X'),
(0xBC6, 'V'),
(0xBC9, 'X'),
(0xBCA, 'V'),
(0xBCE, 'X'),
(0xBD0, 'V'),
(0xBD1, 'X'),
(0xBD7, 'V'),
(0xBD8, 'X'),
(0xBE6, 'V'),
(0xBFB, 'X'),
(0xC00, 'V'),
(0xC0D, 'X'),
(0xC0E, 'V'),
(0xC11, 'X'),
(0xC12, 'V'),
]
def _seg_12():
return [
(0xC29, 'X'),
(0xC2A, 'V'),
(0xC3A, 'X'),
(0xC3D, 'V'),
(0xC45, 'X'),
(0xC46, 'V'),
(0xC49, 'X'),
(0xC4A, 'V'),
(0xC4E, 'X'),
(0xC55, 'V'),
(0xC57, 'X'),
(0xC58, 'V'),
(0xC5B, 'X'),
(0xC60, 'V'),
(0xC64, 'X'),
(0xC66, 'V'),
(0xC70, 'X'),
(0xC78, 'V'),
(0xC8D, 'X'),
(0xC8E, 'V'),
(0xC91, 'X'),
(0xC92, 'V'),
(0xCA9, 'X'),
(0xCAA, 'V'),
(0xCB4, 'X'),
(0xCB5, 'V'),
(0xCBA, 'X'),
(0xCBC, 'V'),
(0xCC5, 'X'),
(0xCC6, 'V'),
(0xCC9, 'X'),
(0xCCA, 'V'),
(0xCCE, 'X'),
(0xCD5, 'V'),
(0xCD7, 'X'),
(0xCDE, 'V'),
(0xCDF, 'X'),
(0xCE0, 'V'),
(0xCE4, 'X'),
(0xCE6, 'V'),
(0xCF0, 'X'),
(0xCF1, 'V'),
(0xCF3, 'X'),
(0xD00, 'V'),
(0xD04, 'X'),
(0xD05, 'V'),
(0xD0D, 'X'),
(0xD0E, 'V'),
(0xD11, 'X'),
(0xD12, 'V'),
(0xD45, 'X'),
(0xD46, 'V'),
(0xD49, 'X'),
(0xD4A, 'V'),
(0xD50, 'X'),
(0xD54, 'V'),
(0xD64, 'X'),
(0xD66, 'V'),
(0xD80, 'X'),
(0xD82, 'V'),
(0xD84, 'X'),
(0xD85, 'V'),
(0xD97, 'X'),
(0xD9A, 'V'),
(0xDB2, 'X'),
(0xDB3, 'V'),
(0xDBC, 'X'),
(0xDBD, 'V'),
(0xDBE, 'X'),
(0xDC0, 'V'),
(0xDC7, 'X'),
(0xDCA, 'V'),
(0xDCB, 'X'),
(0xDCF, 'V'),
(0xDD5, 'X'),
(0xDD6, 'V'),
(0xDD7, 'X'),
(0xDD8, 'V'),
(0xDE0, 'X'),
(0xDE6, 'V'),
(0xDF0, 'X'),
(0xDF2, 'V'),
(0xDF5, 'X'),
(0xE01, 'V'),
(0xE33, 'M', u'ํา'),
(0xE34, 'V'),
(0xE3B, 'X'),
(0xE3F, 'V'),
(0xE5C, 'X'),
(0xE81, 'V'),
(0xE83, 'X'),
(0xE84, 'V'),
(0xE85, 'X'),
(0xE87, 'V'),
(0xE89, 'X'),
(0xE8A, 'V'),
(0xE8B, 'X'),
(0xE8D, 'V'),
(0xE8E, 'X'),
(0xE94, 'V'),
]
def _seg_13():
return [
(0xE98, 'X'),
(0xE99, 'V'),
(0xEA0, 'X'),
(0xEA1, 'V'),
(0xEA4, 'X'),
(0xEA5, 'V'),
(0xEA6, 'X'),
(0xEA7, 'V'),
(0xEA8, 'X'),
(0xEAA, 'V'),
(0xEAC, 'X'),
(0xEAD, 'V'),
(0xEB3, 'M', u'ໍາ'),
(0xEB4, 'V'),
(0xEBA, 'X'),
(0xEBB, 'V'),
(0xEBE, 'X'),
(0xEC0, 'V'),
(0xEC5, 'X'),
(0xEC6, 'V'),
(0xEC7, 'X'),
(0xEC8, 'V'),
(0xECE, 'X'),
(0xED0, 'V'),
(0xEDA, 'X'),
(0xEDC, 'M', u'ຫນ'),
(0xEDD, 'M', u'ຫມ'),
(0xEDE, 'V'),
(0xEE0, 'X'),
(0xF00, 'V'),
(0xF0C, 'M', u'་'),
(0xF0D, 'V'),
(0xF43, 'M', u'གྷ'),
(0xF44, 'V'),
(0xF48, 'X'),
(0xF49, 'V'),
(0xF4D, 'M', u'ཌྷ'),
(0xF4E, 'V'),
(0xF52, 'M', u'དྷ'),
(0xF53, 'V'),
(0xF57, 'M', u'བྷ'),
(0xF58, 'V'),
(0xF5C, 'M', u'ཛྷ'),
(0xF5D, 'V'),
(0xF69, 'M', u'ཀྵ'),
(0xF6A, 'V'),
(0xF6D, 'X'),
(0xF71, 'V'),
(0xF73, 'M', u'ཱི'),
(0xF74, 'V'),
(0xF75, 'M', u'ཱུ'),
(0xF76, 'M', u'ྲྀ'),
(0xF77, 'M', u'ྲཱྀ'),
(0xF78, 'M', u'ླྀ'),
(0xF79, 'M', u'ླཱྀ'),
(0xF7A, 'V'),
(0xF81, 'M', u'ཱྀ'),
(0xF82, 'V'),
(0xF93, 'M', u'ྒྷ'),
(0xF94, 'V'),
(0xF98, 'X'),
(0xF99, 'V'),
(0xF9D, 'M', u'ྜྷ'),
(0xF9E, 'V'),
(0xFA2, 'M', u'ྡྷ'),
(0xFA3, 'V'),
(0xFA7, 'M', u'ྦྷ'),
(0xFA8, 'V'),
(0xFAC, 'M', u'ྫྷ'),
(0xFAD, 'V'),
(0xFB9, 'M', u'ྐྵ'),
(0xFBA, 'V'),
(0xFBD, 'X'),
(0xFBE, 'V'),
(0xFCD, 'X'),
(0xFCE, 'V'),
(0xFDB, 'X'),
(0x1000, 'V'),
(0x10A0, 'X'),
(0x10C7, 'M', u'ⴧ'),
(0x10C8, 'X'),
(0x10CD, 'M', u'ⴭ'),
(0x10CE, 'X'),
(0x10D0, 'V'),
(0x10FC, 'M', u'ნ'),
(0x10FD, 'V'),
(0x115F, 'X'),
(0x1161, 'V'),
(0x1249, 'X'),
(0x124A, 'V'),
(0x124E, 'X'),
(0x1250, 'V'),
(0x1257, 'X'),
(0x1258, 'V'),
(0x1259, 'X'),
(0x125A, 'V'),
(0x125E, 'X'),
(0x1260, 'V'),
(0x1289, 'X'),
(0x128A, 'V'),
]
def _seg_14():
return [
(0x128E, 'X'),
(0x1290, 'V'),
(0x12B1, 'X'),
(0x12B2, 'V'),
(0x12B6, 'X'),
(0x12B8, 'V'),
(0x12BF, 'X'),
(0x12C0, 'V'),
(0x12C1, 'X'),
(0x12C2, 'V'),
(0x12C6, 'X'),
(0x12C8, 'V'),
(0x12D7, 'X'),
(0x12D8, 'V'),
(0x1311, 'X'),
(0x1312, 'V'),
(0x1316, 'X'),
(0x1318, 'V'),
(0x135B, 'X'),
(0x135D, 'V'),
(0x137D, 'X'),
(0x1380, 'V'),
(0x139A, 'X'),
(0x13A0, 'V'),
(0x13F6, 'X'),
(0x13F8, 'M', u'Ᏸ'),
(0x13F9, 'M', u'Ᏹ'),
(0x13FA, 'M', u'Ᏺ'),
(0x13FB, 'M', u'Ᏻ'),
(0x13FC, 'M', u'Ᏼ'),
(0x13FD, 'M', u'Ᏽ'),
(0x13FE, 'X'),
(0x1400, 'V'),
(0x1680, 'X'),
(0x1681, 'V'),
(0x169D, 'X'),
(0x16A0, 'V'),
(0x16F9, 'X'),
(0x1700, 'V'),
(0x170D, 'X'),
(0x170E, 'V'),
(0x1715, 'X'),
(0x1720, 'V'),
(0x1737, 'X'),
(0x1740, 'V'),
(0x1754, 'X'),
(0x1760, 'V'),
(0x176D, 'X'),
(0x176E, 'V'),
(0x1771, 'X'),
(0x1772, 'V'),
(0x1774, 'X'),
(0x1780, 'V'),
(0x17B4, 'X'),
(0x17B6, 'V'),
(0x17DE, 'X'),
(0x17E0, 'V'),
(0x17EA, 'X'),
(0x17F0, 'V'),
(0x17FA, 'X'),
(0x1800, 'V'),
(0x1806, 'X'),
(0x1807, 'V'),
(0x180B, 'I'),
(0x180E, 'X'),
(0x1810, 'V'),
(0x181A, 'X'),
(0x1820, 'V'),
(0x1879, 'X'),
(0x1880, 'V'),
(0x18AB, 'X'),
(0x18B0, 'V'),
(0x18F6, 'X'),
(0x1900, 'V'),
(0x191F, 'X'),
(0x1920, 'V'),
(0x192C, 'X'),
(0x1930, 'V'),
(0x193C, 'X'),
(0x1940, 'V'),
(0x1941, 'X'),
(0x1944, 'V'),
(0x196E, 'X'),
(0x1970, 'V'),
(0x1975, 'X'),
(0x1980, 'V'),
(0x19AC, 'X'),
(0x19B0, 'V'),
(0x19CA, 'X'),
(0x19D0, 'V'),
(0x19DB, 'X'),
(0x19DE, 'V'),
(0x1A1C, 'X'),
(0x1A1E, 'V'),
(0x1A5F, 'X'),
(0x1A60, 'V'),
(0x1A7D, 'X'),
(0x1A7F, 'V'),
(0x1A8A, 'X'),
(0x1A90, 'V'),
]
def _seg_15():
return [
(0x1A9A, 'X'),
(0x1AA0, 'V'),
(0x1AAE, 'X'),
(0x1AB0, 'V'),
(0x1ABF, 'X'),
(0x1B00, 'V'),
(0x1B4C, 'X'),
(0x1B50, 'V'),
(0x1B7D, 'X'),
(0x1B80, 'V'),
(0x1BF4, 'X'),
(0x1BFC, 'V'),
(0x1C38, 'X'),
(0x1C3B, 'V'),
(0x1C4A, 'X'),
(0x1C4D, 'V'),
(0x1C80, 'M', u'в'),
(0x1C81, 'M', u'д'),
(0x1C82, 'M', u'о'),
(0x1C83, 'M', u'с'),
(0x1C84, 'M', u'т'),
(0x1C86, 'M', u'ъ'),
(0x1C87, 'M', u'ѣ'),
(0x1C88, 'M', u'ꙋ'),
(0x1C89, 'X'),
(0x1CC0, 'V'),
(0x1CC8, 'X'),
(0x1CD0, 'V'),
(0x1CFA, 'X'),
(0x1D00, 'V'),
(0x1D2C, 'M', u'a'),
(0x1D2D, 'M', u'æ'),
(0x1D2E, 'M', u'b'),
(0x1D2F, 'V'),
(0x1D30, 'M', u'd'),
(0x1D31, 'M', u'e'),
(0x1D32, 'M', u'ǝ'),
(0x1D33, 'M', u'g'),
(0x1D34, 'M', u'h'),
(0x1D35, 'M', u'i'),
(0x1D36, 'M', u'j'),
(0x1D37, 'M', u'k'),
(0x1D38, 'M', u'l'),
(0x1D39, 'M', u'm'),
(0x1D3A, 'M', u'n'),
(0x1D3B, 'V'),
(0x1D3C, 'M', u'o'),
(0x1D3D, 'M', u'ȣ'),
(0x1D3E, 'M', u'p'),
(0x1D3F, 'M', u'r'),
(0x1D40, 'M', u't'),
(0x1D41, 'M', u'u'),
(0x1D42, 'M', u'w'),
(0x1D43, 'M', u'a'),
(0x1D44, 'M', u'ɐ'),
(0x1D45, 'M', u'ɑ'),
(0x1D46, 'M', u'ᴂ'),
(0x1D47, 'M', u'b'),
(0x1D48, 'M', u'd'),
(0x1D49, 'M', u'e'),
(0x1D4A, 'M', u'ə'),
(0x1D4B, 'M', u'ɛ'),
(0x1D4C, 'M', u'ɜ'),
(0x1D4D, 'M', u'g'),
(0x1D4E, 'V'),
(0x1D4F, 'M', u'k'),
(0x1D50, 'M', u'm'),
(0x1D51, 'M', u'ŋ'),
(0x1D52, 'M', u'o'),
(0x1D53, 'M', u'ɔ'),
(0x1D54, 'M', u'ᴖ'),
(0x1D55, 'M', u'ᴗ'),
(0x1D56, 'M', u'p'),
(0x1D57, 'M', u't'),
(0x1D58, 'M', u'u'),
(0x1D59, 'M', u'ᴝ'),
(0x1D5A, 'M', u'ɯ'),
(0x1D5B, 'M', u'v'),
(0x1D5C, 'M', u'ᴥ'),
(0x1D5D, 'M', u'β'),
(0x1D5E, 'M', u'γ'),
(0x1D5F, 'M', u'δ'),
(0x1D60, 'M', u'φ'),
(0x1D61, 'M', u'χ'),
(0x1D62, 'M', u'i'),
(0x1D63, 'M', u'r'),
(0x1D64, 'M', u'u'),
(0x1D65, 'M', u'v'),
(0x1D66, 'M', u'β'),
(0x1D67, 'M', u'γ'),
(0x1D68, 'M', u'ρ'),
(0x1D69, 'M', u'φ'),
(0x1D6A, 'M', u'χ'),
(0x1D6B, 'V'),
(0x1D78, 'M', u'н'),
(0x1D79, 'V'),
(0x1D9B, 'M', u'ɒ'),
(0x1D9C, 'M', u'c'),
(0x1D9D, 'M', u'ɕ'),
(0x1D9E, 'M', u'ð'),
]
def _seg_16():
return [
(0x1D9F, 'M', u'ɜ'),
(0x1DA0, 'M', u'f'),
(0x1DA1, 'M', u'ɟ'),
(0x1DA2, 'M', u'ɡ'),
(0x1DA3, 'M', u'ɥ'),
(0x1DA4, 'M', u'ɨ'),
(0x1DA5, 'M', u'ɩ'),
(0x1DA6, 'M', u'ɪ'),
(0x1DA7, 'M', u'ᵻ'),
(0x1DA8, 'M', u'ʝ'),
(0x1DA9, 'M', u'ɭ'),
(0x1DAA, 'M', u'ᶅ'),
(0x1DAB, 'M', u'ʟ'),
(0x1DAC, 'M', u'ɱ'),
(0x1DAD, 'M', u'ɰ'),
(0x1DAE, 'M', u'ɲ'),
(0x1DAF, 'M', u'ɳ'),
(0x1DB0, 'M', u'ɴ'),
(0x1DB1, 'M', u'ɵ'),
(0x1DB2, 'M', u'ɸ'),
(0x1DB3, 'M', u'ʂ'),
(0x1DB4, 'M', u'ʃ'),
(0x1DB5, 'M', u'ƫ'),
(0x1DB6, 'M', u'ʉ'),
(0x1DB7, 'M', u'ʊ'),
(0x1DB8, 'M', u'ᴜ'),
(0x1DB9, 'M', u'ʋ'),
(0x1DBA, 'M', u'ʌ'),
(0x1DBB, 'M', u'z'),
(0x1DBC, 'M', u'ʐ'),
(0x1DBD, 'M', u'ʑ'),
(0x1DBE, 'M', u'ʒ'),
(0x1DBF, 'M', u'θ'),
(0x1DC0, 'V'),
(0x1DFA, 'X'),
(0x1DFB, 'V'),
(0x1E00, 'M', u'ḁ'),
(0x1E01, 'V'),
(0x1E02, 'M', u'ḃ'),
(0x1E03, 'V'),
(0x1E04, 'M', u'ḅ'),
(0x1E05, 'V'),
(0x1E06, 'M', u'ḇ'),
(0x1E07, 'V'),
(0x1E08, 'M', u'ḉ'),
(0x1E09, 'V'),
(0x1E0A, 'M', u'ḋ'),
(0x1E0B, 'V'),
(0x1E0C, 'M', u'ḍ'),
(0x1E0D, 'V'),
(0x1E0E, 'M', u'ḏ'),
(0x1E0F, 'V'),
(0x1E10, 'M', u'ḑ'),
(0x1E11, 'V'),
(0x1E12, 'M', u'ḓ'),
(0x1E13, 'V'),
(0x1E14, 'M', u'ḕ'),
(0x1E15, 'V'),
(0x1E16, 'M', u'ḗ'),
(0x1E17, 'V'),
(0x1E18, 'M', u'ḙ'),
(0x1E19, 'V'),
(0x1E1A, 'M', u'ḛ'),
(0x1E1B, 'V'),
(0x1E1C, 'M', u'ḝ'),
(0x1E1D, 'V'),
(0x1E1E, 'M', u'ḟ'),
(0x1E1F, 'V'),
(0x1E20, 'M', u'ḡ'),
(0x1E21, 'V'),
(0x1E22, 'M', u'ḣ'),
(0x1E23, 'V'),
(0x1E24, 'M', u'ḥ'),
(0x1E25, 'V'),
(0x1E26, 'M', u'ḧ'),
(0x1E27, 'V'),
(0x1E28, 'M', u'ḩ'),
(0x1E29, 'V'),
(0x1E2A, 'M', u'ḫ'),
(0x1E2B, 'V'),
(0x1E2C, 'M', u'ḭ'),
(0x1E2D, 'V'),
(0x1E2E, 'M', u'ḯ'),
(0x1E2F, 'V'),
(0x1E30, 'M', u'ḱ'),
(0x1E31, 'V'),
(0x1E32, 'M', u'ḳ'),
(0x1E33, 'V'),
(0x1E34, 'M', u'ḵ'),
(0x1E35, 'V'),
(0x1E36, 'M', u'ḷ'),
(0x1E37, 'V'),
(0x1E38, 'M', u'ḹ'),
(0x1E39, 'V'),
(0x1E3A, 'M', u'ḻ'),
(0x1E3B, 'V'),
(0x1E3C, 'M', u'ḽ'),
(0x1E3D, 'V'),
(0x1E3E, 'M', u'ḿ'),
(0x1E3F, 'V'),
]
def _seg_17():
return [
(0x1E40, 'M', u'ṁ'),
(0x1E41, 'V'),
(0x1E42, 'M', u'ṃ'),
(0x1E43, 'V'),
(0x1E44, 'M', u'ṅ'),
(0x1E45, 'V'),
(0x1E46, 'M', u'ṇ'),
(0x1E47, 'V'),
(0x1E48, 'M', u'ṉ'),
(0x1E49, 'V'),
(0x1E4A, 'M', u'ṋ'),
(0x1E4B, 'V'),
(0x1E4C, 'M', u'ṍ'),
(0x1E4D, 'V'),
(0x1E4E, 'M', u'ṏ'),
(0x1E4F, 'V'),
(0x1E50, 'M', u'ṑ'),
(0x1E51, 'V'),
(0x1E52, 'M', u'ṓ'),
(0x1E53, 'V'),
(0x1E54, 'M', u'ṕ'),
(0x1E55, 'V'),
(0x1E56, 'M', u'ṗ'),
(0x1E57, 'V'),
(0x1E58, 'M', u'ṙ'),
(0x1E59, 'V'),
(0x1E5A, 'M', u'ṛ'),
(0x1E5B, 'V'),
(0x1E5C, 'M', u'ṝ'),
(0x1E5D, 'V'),
(0x1E5E, 'M', u'ṟ'),
(0x1E5F, 'V'),
(0x1E60, 'M', u'ṡ'),
(0x1E61, 'V'),
(0x1E62, 'M', u'ṣ'),
(0x1E63, 'V'),
(0x1E64, 'M', u'ṥ'),
(0x1E65, 'V'),
(0x1E66, 'M', u'ṧ'),
(0x1E67, 'V'),
(0x1E68, 'M', u'ṩ'),
(0x1E69, 'V'),
(0x1E6A, 'M', u'ṫ'),
(0x1E6B, 'V'),
(0x1E6C, 'M', u'ṭ'),
(0x1E6D, 'V'),
(0x1E6E, 'M', u'ṯ'),
(0x1E6F, 'V'),
(0x1E70, 'M', u'ṱ'),
(0x1E71, 'V'),
(0x1E72, 'M', u'ṳ'),
(0x1E73, 'V'),
(0x1E74, 'M', u'ṵ'),
(0x1E75, 'V'),
(0x1E76, 'M', u'ṷ'),
(0x1E77, 'V'),
(0x1E78, 'M', u'ṹ'),
(0x1E79, 'V'),
(0x1E7A, 'M', u'ṻ'),
(0x1E7B, 'V'),
(0x1E7C, 'M', u'ṽ'),
(0x1E7D, 'V'),
(0x1E7E, 'M', u'ṿ'),
(0x1E7F, 'V'),
(0x1E80, 'M', u'ẁ'),
(0x1E81, 'V'),
(0x1E82, 'M', u'ẃ'),
(0x1E83, 'V'),
(0x1E84, 'M', u'ẅ'),
(0x1E85, 'V'),
(0x1E86, 'M', u'ẇ'),
(0x1E87, 'V'),
(0x1E88, 'M', u'ẉ'),
(0x1E89, 'V'),
(0x1E8A, 'M', u'ẋ'),
(0x1E8B, 'V'),
(0x1E8C, 'M', u'ẍ'),
(0x1E8D, 'V'),
(0x1E8E, 'M', u'ẏ'),
(0x1E8F, 'V'),
(0x1E90, 'M', u'ẑ'),
(0x1E91, 'V'),
(0x1E92, 'M', u'ẓ'),
(0x1E93, 'V'),
(0x1E94, 'M', u'ẕ'),
(0x1E95, 'V'),
(0x1E9A, 'M', u'aʾ'),
(0x1E9B, 'M', u'ṡ'),
(0x1E9C, 'V'),
(0x1E9E, 'M', u'ss'),
(0x1E9F, 'V'),
(0x1EA0, 'M', u'ạ'),
(0x1EA1, 'V'),
(0x1EA2, 'M', u'ả'),
(0x1EA3, 'V'),
(0x1EA4, 'M', u'ấ'),
(0x1EA5, 'V'),
(0x1EA6, 'M', u'ầ'),
(0x1EA7, 'V'),
(0x1EA8, 'M', u'ẩ'),
]
def _seg_18():
return [
(0x1EA9, 'V'),
(0x1EAA, 'M', u'ẫ'),
(0x1EAB, 'V'),
(0x1EAC, 'M', u'ậ'),
(0x1EAD, 'V'),
(0x1EAE, 'M', u'ắ'),
(0x1EAF, 'V'),
(0x1EB0, 'M', u'ằ'),
(0x1EB1, 'V'),
(0x1EB2, 'M', u'ẳ'),
(0x1EB3, 'V'),
(0x1EB4, 'M', u'ẵ'),
(0x1EB5, 'V'),
(0x1EB6, 'M', u'ặ'),
(0x1EB7, 'V'),
(0x1EB8, 'M', u'ẹ'),
(0x1EB9, 'V'),
(0x1EBA, 'M', u'ẻ'),
(0x1EBB, 'V'),
(0x1EBC, 'M', u'ẽ'),
(0x1EBD, 'V'),
(0x1EBE, 'M', u'ế'),
(0x1EBF, 'V'),
(0x1EC0, 'M', u'ề'),
(0x1EC1, 'V'),
(0x1EC2, 'M', u'ể'),
(0x1EC3, 'V'),
(0x1EC4, 'M', u'ễ'),
(0x1EC5, 'V'),
(0x1EC6, 'M', u'ệ'),
(0x1EC7, 'V'),
(0x1EC8, 'M', u'ỉ'),
(0x1EC9, 'V'),
(0x1ECA, 'M', u'ị'),
(0x1ECB, 'V'),
(0x1ECC, 'M', u'ọ'),
(0x1ECD, 'V'),
(0x1ECE, 'M', u'ỏ'),
(0x1ECF, 'V'),
(0x1ED0, 'M', u'ố'),
(0x1ED1, 'V'),
(0x1ED2, 'M', u'ồ'),
(0x1ED3, 'V'),
(0x1ED4, 'M', u'ổ'),
(0x1ED5, 'V'),
(0x1ED6, 'M', u'ỗ'),
(0x1ED7, 'V'),
(0x1ED8, 'M', u'ộ'),
(0x1ED9, 'V'),
(0x1EDA, 'M', u'ớ'),
(0x1EDB, 'V'),
(0x1EDC, 'M', u'ờ'),
(0x1EDD, 'V'),
(0x1EDE, 'M', u'ở'),
(0x1EDF, 'V'),
(0x1EE0, 'M', u'ỡ'),
(0x1EE1, 'V'),
(0x1EE2, 'M', u'ợ'),
(0x1EE3, 'V'),
(0x1EE4, 'M', u'ụ'),
(0x1EE5, 'V'),
(0x1EE6, 'M', u'ủ'),
(0x1EE7, 'V'),
(0x1EE8, 'M', u'ứ'),
(0x1EE9, 'V'),
(0x1EEA, 'M', u'ừ'),
(0x1EEB, 'V'),
(0x1EEC, 'M', u'ử'),
(0x1EED, 'V'),
(0x1EEE, 'M', u'ữ'),
(0x1EEF, 'V'),
(0x1EF0, 'M', u'ự'),
(0x1EF1, 'V'),
(0x1EF2, 'M', u'ỳ'),
(0x1EF3, 'V'),
(0x1EF4, 'M', u'ỵ'),
(0x1EF5, 'V'),
(0x1EF6, 'M', u'ỷ'),
(0x1EF7, 'V'),
(0x1EF8, 'M', u'ỹ'),
(0x1EF9, 'V'),
(0x1EFA, 'M', u'ỻ'),
(0x1EFB, 'V'),
(0x1EFC, 'M', u'ỽ'),
(0x1EFD, 'V'),
(0x1EFE, 'M', u'ỿ'),
(0x1EFF, 'V'),
(0x1F08, 'M', u'ἀ'),
(0x1F09, 'M', u'ἁ'),
(0x1F0A, 'M', u'ἂ'),
(0x1F0B, 'M', u'ἃ'),
(0x1F0C, 'M', u'ἄ'),
(0x1F0D, 'M', u'ἅ'),
(0x1F0E, 'M', u'ἆ'),
(0x1F0F, 'M', u'ἇ'),
(0x1F10, 'V'),
(0x1F16, 'X'),
(0x1F18, 'M', u'ἐ'),
(0x1F19, 'M', u'ἑ'),
(0x1F1A, 'M', u'ἒ'),
]
def _seg_19():
return [
(0x1F1B, 'M', u'ἓ'),
(0x1F1C, 'M', u'ἔ'),
(0x1F1D, 'M', u'ἕ'),
(0x1F1E, 'X'),
(0x1F20, 'V'),
(0x1F28, 'M', u'ἠ'),
(0x1F29, 'M', u'ἡ'),
(0x1F2A, 'M', u'ἢ'),
(0x1F2B, 'M', u'ἣ'),
(0x1F2C, 'M', u'ἤ'),
(0x1F2D, 'M', u'ἥ'),
(0x1F2E, 'M', u'ἦ'),
(0x1F2F, 'M', u'ἧ'),
(0x1F30, 'V'),
(0x1F38, 'M', u'ἰ'),
(0x1F39, 'M', u'ἱ'),
(0x1F3A, 'M', u'ἲ'),
(0x1F3B, 'M', u'ἳ'),
(0x1F3C, 'M', u'ἴ'),
(0x1F3D, 'M', u'ἵ'),
(0x1F3E, 'M', u'ἶ'),
(0x1F3F, 'M', u'ἷ'),
(0x1F40, 'V'),
(0x1F46, 'X'),
(0x1F48, 'M', u'ὀ'),
(0x1F49, 'M', u'ὁ'),
(0x1F4A, 'M', u'ὂ'),
(0x1F4B, 'M', u'ὃ'),
(0x1F4C, 'M', u'ὄ'),
(0x1F4D, 'M', u'ὅ'),
(0x1F4E, 'X'),
(0x1F50, 'V'),
(0x1F58, 'X'),
(0x1F59, 'M', u'ὑ'),
(0x1F5A, 'X'),
(0x1F5B, 'M', u'ὓ'),
(0x1F5C, 'X'),
(0x1F5D, 'M', u'ὕ'),
(0x1F5E, 'X'),
(0x1F5F, 'M', u'ὗ'),
(0x1F60, 'V'),
(0x1F68, 'M', u'ὠ'),
(0x1F69, 'M', u'ὡ'),
(0x1F6A, 'M', u'ὢ'),
(0x1F6B, 'M', u'ὣ'),
(0x1F6C, 'M', u'ὤ'),
(0x1F6D, 'M', u'ὥ'),
(0x1F6E, 'M', u'ὦ'),
(0x1F6F, 'M', u'ὧ'),
(0x1F70, 'V'),
(0x1F71, 'M', u'ά'),
(0x1F72, 'V'),
(0x1F73, 'M', u'έ'),
(0x1F74, 'V'),
(0x1F75, 'M', u'ή'),
(0x1F76, 'V'),
(0x1F77, 'M', u'ί'),
(0x1F78, 'V'),
(0x1F79, 'M', u'ό'),
(0x1F7A, 'V'),
(0x1F7B, 'M', u'ύ'),
(0x1F7C, 'V'),
(0x1F7D, 'M', u'ώ'),
(0x1F7E, 'X'),
(0x1F80, 'M', u'ἀι'),
(0x1F81, 'M', u'ἁι'),
(0x1F82, 'M', u'ἂι'),
(0x1F83, 'M', u'ἃι'),
(0x1F84, 'M', u'ἄι'),
(0x1F85, 'M', u'ἅι'),
(0x1F86, 'M', u'ἆι'),
(0x1F87, 'M', u'ἇι'),
(0x1F88, 'M', u'ἀι'),
(0x1F89, 'M', u'ἁι'),
(0x1F8A, 'M', u'ἂι'),
(0x1F8B, 'M', u'ἃι'),
(0x1F8C, 'M', u'ἄι'),
(0x1F8D, 'M', u'ἅι'),
(0x1F8E, 'M', u'ἆι'),
(0x1F8F, 'M', u'ἇι'),
(0x1F90, 'M', u'ἠι'),
(0x1F91, 'M', u'ἡι'),
(0x1F92, 'M', u'ἢι'),
(0x1F93, 'M', u'ἣι'),
(0x1F94, 'M', u'ἤι'),
(0x1F95, 'M', u'ἥι'),
(0x1F96, 'M', u'ἦι'),
(0x1F97, 'M', u'ἧι'),
(0x1F98, 'M', u'ἠι'),
(0x1F99, 'M', u'ἡι'),
(0x1F9A, 'M', u'ἢι'),
(0x1F9B, 'M', u'ἣι'),
(0x1F9C, 'M', u'ἤι'),
(0x1F9D, 'M', u'ἥι'),
(0x1F9E, 'M', u'ἦι'),
(0x1F9F, 'M', u'ἧι'),
(0x1FA0, 'M', u'ὠι'),
(0x1FA1, 'M', u'ὡι'),
(0x1FA2, 'M', u'ὢι'),
(0x1FA3, 'M', u'ὣι'),
]
def _seg_20():
return [
(0x1FA4, 'M', u'ὤι'),
(0x1FA5, 'M', u'ὥι'),
(0x1FA6, 'M', u'ὦι'),
(0x1FA7, 'M', u'ὧι'),
(0x1FA8, 'M', u'ὠι'),
(0x1FA9, 'M', u'ὡι'),
(0x1FAA, 'M', u'ὢι'),
(0x1FAB, 'M', u'ὣι'),
(0x1FAC, 'M', u'ὤι'),
(0x1FAD, 'M', u'ὥι'),
(0x1FAE, 'M', u'ὦι'),
(0x1FAF, 'M', u'ὧι'),
(0x1FB0, 'V'),
(0x1FB2, 'M', u'ὰι'),
(0x1FB3, 'M', u'αι'),
(0x1FB4, 'M', u'άι'),
(0x1FB5, 'X'),
(0x1FB6, 'V'),
(0x1FB7, 'M', u'ᾶι'),
(0x1FB8, 'M', u'ᾰ'),
(0x1FB9, 'M', u'ᾱ'),
(0x1FBA, 'M', u'ὰ'),
(0x1FBB, 'M', u'ά'),
(0x1FBC, 'M', u'αι'),
(0x1FBD, '3', u' ̓'),
(0x1FBE, 'M', u'ι'),
(0x1FBF, '3', u' ̓'),
(0x1FC0, '3', u' ͂'),
(0x1FC1, '3', u' ̈͂'),
(0x1FC2, 'M', u'ὴι'),
(0x1FC3, 'M', u'ηι'),
(0x1FC4, 'M', u'ήι'),
(0x1FC5, 'X'),
(0x1FC6, 'V'),
(0x1FC7, 'M', u'ῆι'),
(0x1FC8, 'M', u'ὲ'),
(0x1FC9, 'M', u'έ'),
(0x1FCA, 'M', u'ὴ'),
(0x1FCB, 'M', u'ή'),
(0x1FCC, 'M', u'ηι'),
(0x1FCD, '3', u' ̓̀'),
(0x1FCE, '3', u' ̓́'),
(0x1FCF, '3', u' ̓͂'),
(0x1FD0, 'V'),
(0x1FD3, 'M', u'ΐ'),
(0x1FD4, 'X'),
(0x1FD6, 'V'),
(0x1FD8, 'M', u'ῐ'),
(0x1FD9, 'M', u'ῑ'),
(0x1FDA, 'M', u'ὶ'),
(0x1FDB, 'M', u'ί'),
(0x1FDC, 'X'),
(0x1FDD, '3', u' ̔̀'),
(0x1FDE, '3', u' ̔́'),
(0x1FDF, '3', u' ̔͂'),
(0x1FE0, 'V'),
(0x1FE3, 'M', u'ΰ'),
(0x1FE4, 'V'),
(0x1FE8, 'M', u'ῠ'),
(0x1FE9, 'M', u'ῡ'),
(0x1FEA, 'M', u'ὺ'),
(0x1FEB, 'M', u'ύ'),
(0x1FEC, 'M', u'ῥ'),
(0x1FED, '3', u' ̈̀'),
(0x1FEE, '3', u' ̈́'),
(0x1FEF, '3', u'`'),
(0x1FF0, 'X'),
(0x1FF2, 'M', u'ὼι'),
(0x1FF3, 'M', u'ωι'),
(0x1FF4, 'M', u'ώι'),
(0x1FF5, 'X'),
(0x1FF6, 'V'),
(0x1FF7, 'M', u'ῶι'),
(0x1FF8, 'M', u'ὸ'),
(0x1FF9, 'M', u'ό'),
(0x1FFA, 'M', u'ὼ'),
(0x1FFB, 'M', u'ώ'),
(0x1FFC, 'M', u'ωι'),
(0x1FFD, '3', u' ́'),
(0x1FFE, '3', u' ̔'),
(0x1FFF, 'X'),
(0x2000, '3', u' '),
(0x200B, 'I'),
(0x200C, 'D', u''),
(0x200E, 'X'),
(0x2010, 'V'),
(0x2011, 'M', u'‐'),
(0x2012, 'V'),
(0x2017, '3', u' ̳'),
(0x2018, 'V'),
(0x2024, 'X'),
(0x2027, 'V'),
(0x2028, 'X'),
(0x202F, '3', u' '),
(0x2030, 'V'),
(0x2033, 'M', u'′′'),
(0x2034, 'M', u'′′′'),
(0x2035, 'V'),
(0x2036, 'M', u'‵‵'),
(0x2037, 'M', u'‵‵‵'),
]
def _seg_21():
return [
(0x2038, 'V'),
(0x203C, '3', u'!!'),
(0x203D, 'V'),
(0x203E, '3', u' ̅'),
(0x203F, 'V'),
(0x2047, '3', u'??'),
(0x2048, '3', u'?!'),
(0x2049, '3', u'!?'),
(0x204A, 'V'),
(0x2057, 'M', u'′′′′'),
(0x2058, 'V'),
(0x205F, '3', u' '),
(0x2060, 'I'),
(0x2061, 'X'),
(0x2064, 'I'),
(0x2065, 'X'),
(0x2070, 'M', u'0'),
(0x2071, 'M', u'i'),
(0x2072, 'X'),
(0x2074, 'M', u'4'),
(0x2075, 'M', u'5'),
(0x2076, 'M', u'6'),
(0x2077, 'M', u'7'),
(0x2078, 'M', u'8'),
(0x2079, 'M', u'9'),
(0x207A, '3', u'+'),
(0x207B, 'M', u'−'),
(0x207C, '3', u'='),
(0x207D, '3', u'('),
(0x207E, '3', u')'),
(0x207F, 'M', u'n'),
(0x2080, 'M', u'0'),
(0x2081, 'M', u'1'),
(0x2082, 'M', u'2'),
(0x2083, 'M', u'3'),
(0x2084, 'M', u'4'),
(0x2085, 'M', u'5'),
(0x2086, 'M', u'6'),
(0x2087, 'M', u'7'),
(0x2088, 'M', u'8'),
(0x2089, 'M', u'9'),
(0x208A, '3', u'+'),
(0x208B, 'M', u'−'),
(0x208C, '3', u'='),
(0x208D, '3', u'('),
(0x208E, '3', u')'),
(0x208F, 'X'),
(0x2090, 'M', u'a'),
(0x2091, 'M', u'e'),
(0x2092, 'M', u'o'),
(0x2093, 'M', u'x'),
(0x2094, 'M', u'ə'),
(0x2095, 'M', u'h'),
(0x2096, 'M', u'k'),
(0x2097, 'M', u'l'),
(0x2098, 'M', u'm'),
(0x2099, 'M', u'n'),
(0x209A, 'M', u'p'),
(0x209B, 'M', u's'),
(0x209C, 'M', u't'),
(0x209D, 'X'),
(0x20A0, 'V'),
(0x20A8, 'M', u'rs'),
(0x20A9, 'V'),
(0x20C0, 'X'),
(0x20D0, 'V'),
(0x20F1, 'X'),
(0x2100, '3', u'a/c'),
(0x2101, '3', u'a/s'),
(0x2102, 'M', u'c'),
(0x2103, 'M', u'°c'),
(0x2104, 'V'),
(0x2105, '3', u'c/o'),
(0x2106, '3', u'c/u'),
(0x2107, 'M', u'ɛ'),
(0x2108, 'V'),
(0x2109, 'M', u'°f'),
(0x210A, 'M', u'g'),
(0x210B, 'M', u'h'),
(0x210F, 'M', u'ħ'),
(0x2110, 'M', u'i'),
(0x2112, 'M', u'l'),
(0x2114, 'V'),
(0x2115, 'M', u'n'),
(0x2116, 'M', u'no'),
(0x2117, 'V'),
(0x2119, 'M', u'p'),
(0x211A, 'M', u'q'),
(0x211B, 'M', u'r'),
(0x211E, 'V'),
(0x2120, 'M', u'sm'),
(0x2121, 'M', u'tel'),
(0x2122, 'M', u'tm'),
(0x2123, 'V'),
(0x2124, 'M', u'z'),
(0x2125, 'V'),
(0x2126, 'M', u'ω'),
(0x2127, 'V'),
(0x2128, 'M', u'z'),
(0x2129, 'V'),
]
def _seg_22():
return [
(0x212A, 'M', u'k'),
(0x212B, 'M', u'å'),
(0x212C, 'M', u'b'),
(0x212D, 'M', u'c'),
(0x212E, 'V'),
(0x212F, 'M', u'e'),
(0x2131, 'M', u'f'),
(0x2132, 'X'),
(0x2133, 'M', u'm'),
(0x2134, 'M', u'o'),
(0x2135, 'M', u'א'),
(0x2136, 'M', u'ב'),
(0x2137, 'M', u'ג'),
(0x2138, 'M', u'ד'),
(0x2139, 'M', u'i'),
(0x213A, 'V'),
(0x213B, 'M', u'fax'),
(0x213C, 'M', u'π'),
(0x213D, 'M', u'γ'),
(0x213F, 'M', u'π'),
(0x2140, 'M', u'∑'),
(0x2141, 'V'),
(0x2145, 'M', u'd'),
(0x2147, 'M', u'e'),
(0x2148, 'M', u'i'),
(0x2149, 'M', u'j'),
(0x214A, 'V'),
(0x2150, 'M', u'1⁄7'),
(0x2151, 'M', u'1⁄9'),
(0x2152, 'M', u'1⁄10'),
(0x2153, 'M', u'1⁄3'),
(0x2154, 'M', u'2⁄3'),
(0x2155, 'M', u'1⁄5'),
(0x2156, 'M', u'2⁄5'),
(0x2157, 'M', u'3⁄5'),
(0x2158, 'M', u'4⁄5'),
(0x2159, 'M', u'1⁄6'),
(0x215A, 'M', u'5⁄6'),
(0x215B, 'M', u'1⁄8'),
(0x215C, 'M', u'3⁄8'),
(0x215D, 'M', u'5⁄8'),
(0x215E, 'M', u'7⁄8'),
(0x215F, 'M', u'1⁄'),
(0x2160, 'M', u'i'),
(0x2161, 'M', u'ii'),
(0x2162, 'M', u'iii'),
(0x2163, 'M', u'iv'),
(0x2164, 'M', u'v'),
(0x2165, 'M', u'vi'),
(0x2166, 'M', u'vii'),
(0x2167, 'M', u'viii'),
(0x2168, 'M', u'ix'),
(0x2169, 'M', u'x'),
(0x216A, 'M', u'xi'),
(0x216B, 'M', u'xii'),
(0x216C, 'M', u'l'),
(0x216D, 'M', u'c'),
(0x216E, 'M', u'd'),
(0x216F, 'M', u'm'),
(0x2170, 'M', u'i'),
(0x2171, 'M', u'ii'),
(0x2172, 'M', u'iii'),
(0x2173, 'M', u'iv'),
(0x2174, 'M', u'v'),
(0x2175, 'M', u'vi'),
(0x2176, 'M', u'vii'),
(0x2177, 'M', u'viii'),
(0x2178, 'M', u'ix'),
(0x2179, 'M', u'x'),
(0x217A, 'M', u'xi'),
(0x217B, 'M', u'xii'),
(0x217C, 'M', u'l'),
(0x217D, 'M', u'c'),
(0x217E, 'M', u'd'),
(0x217F, 'M', u'm'),
(0x2180, 'V'),
(0x2183, 'X'),
(0x2184, 'V'),
(0x2189, 'M', u'0⁄3'),
(0x218A, 'V'),
(0x218C, 'X'),
(0x2190, 'V'),
(0x222C, 'M', u'∫∫'),
(0x222D, 'M', u'∫∫∫'),
(0x222E, 'V'),
(0x222F, 'M', u'∮∮'),
(0x2230, 'M', u'∮∮∮'),
(0x2231, 'V'),
(0x2260, '3'),
(0x2261, 'V'),
(0x226E, '3'),
(0x2270, 'V'),
(0x2329, 'M', u'〈'),
(0x232A, 'M', u'〉'),
(0x232B, 'V'),
(0x2427, 'X'),
(0x2440, 'V'),
(0x244B, 'X'),
(0x2460, 'M', u'1'),
(0x2461, 'M', u'2'),
]
def _seg_23():
return [
(0x2462, 'M', u'3'),
(0x2463, 'M', u'4'),
(0x2464, 'M', u'5'),
(0x2465, 'M', u'6'),
(0x2466, 'M', u'7'),
(0x2467, 'M', u'8'),
(0x2468, 'M', u'9'),
(0x2469, 'M', u'10'),
(0x246A, 'M', u'11'),
(0x246B, 'M', u'12'),
(0x246C, 'M', u'13'),
(0x246D, 'M', u'14'),
(0x246E, 'M', u'15'),
(0x246F, 'M', u'16'),
(0x2470, 'M', u'17'),
(0x2471, 'M', u'18'),
(0x2472, 'M', u'19'),
(0x2473, 'M', u'20'),
(0x2474, '3', u'(1)'),
(0x2475, '3', u'(2)'),
(0x2476, '3', u'(3)'),
(0x2477, '3', u'(4)'),
(0x2478, '3', u'(5)'),
(0x2479, '3', u'(6)'),
(0x247A, '3', u'(7)'),
(0x247B, '3', u'(8)'),
(0x247C, '3', u'(9)'),
(0x247D, '3', u'(10)'),
(0x247E, '3', u'(11)'),
(0x247F, '3', u'(12)'),
(0x2480, '3', u'(13)'),
(0x2481, '3', u'(14)'),
(0x2482, '3', u'(15)'),
(0x2483, '3', u'(16)'),
(0x2484, '3', u'(17)'),
(0x2485, '3', u'(18)'),
(0x2486, '3', u'(19)'),
(0x2487, '3', u'(20)'),
(0x2488, 'X'),
(0x249C, '3', u'(a)'),
(0x249D, '3', u'(b)'),
(0x249E, '3', u'(c)'),
(0x249F, '3', u'(d)'),
(0x24A0, '3', u'(e)'),
(0x24A1, '3', u'(f)'),
(0x24A2, '3', u'(g)'),
(0x24A3, '3', u'(h)'),
(0x24A4, '3', u'(i)'),
(0x24A5, '3', u'(j)'),
(0x24A6, '3', u'(k)'),
(0x24A7, '3', u'(l)'),
(0x24A8, '3', u'(m)'),
(0x24A9, '3', u'(n)'),
(0x24AA, '3', u'(o)'),
(0x24AB, '3', u'(p)'),
(0x24AC, '3', u'(q)'),
(0x24AD, '3', u'(r)'),
(0x24AE, '3', u'(s)'),
(0x24AF, '3', u'(t)'),
(0x24B0, '3', u'(u)'),
(0x24B1, '3', u'(v)'),
(0x24B2, '3', u'(w)'),
(0x24B3, '3', u'(x)'),
(0x24B4, '3', u'(y)'),
(0x24B5, '3', u'(z)'),
(0x24B6, 'M', u'a'),
(0x24B7, 'M', u'b'),
(0x24B8, 'M', u'c'),
(0x24B9, 'M', u'd'),
(0x24BA, 'M', u'e'),
(0x24BB, 'M', u'f'),
(0x24BC, 'M', u'g'),
(0x24BD, 'M', u'h'),
(0x24BE, 'M', u'i'),
(0x24BF, 'M', u'j'),
(0x24C0, 'M', u'k'),
(0x24C1, 'M', u'l'),
(0x24C2, 'M', u'm'),
(0x24C3, 'M', u'n'),
(0x24C4, 'M', u'o'),
(0x24C5, 'M', u'p'),
(0x24C6, 'M', u'q'),
(0x24C7, 'M', u'r'),
(0x24C8, 'M', u's'),
(0x24C9, 'M', u't'),
(0x24CA, 'M', u'u'),
(0x24CB, 'M', u'v'),
(0x24CC, 'M', u'w'),
(0x24CD, 'M', u'x'),
(0x24CE, 'M', u'y'),
(0x24CF, 'M', u'z'),
(0x24D0, 'M', u'a'),
(0x24D1, 'M', u'b'),
(0x24D2, 'M', u'c'),
(0x24D3, 'M', u'd'),
(0x24D4, 'M', u'e'),
(0x24D5, 'M', u'f'),
(0x24D6, 'M', u'g'),
(0x24D7, 'M', u'h'),
(0x24D8, 'M', u'i'),
]
def _seg_24():
return [
(0x24D9, 'M', u'j'),
(0x24DA, 'M', u'k'),
(0x24DB, 'M', u'l'),
(0x24DC, 'M', u'm'),
(0x24DD, 'M', u'n'),
(0x24DE, 'M', u'o'),
(0x24DF, 'M', u'p'),
(0x24E0, 'M', u'q'),
(0x24E1, 'M', u'r'),
(0x24E2, 'M', u's'),
(0x24E3, 'M', u't'),
(0x24E4, 'M', u'u'),
(0x24E5, 'M', u'v'),
(0x24E6, 'M', u'w'),
(0x24E7, 'M', u'x'),
(0x24E8, 'M', u'y'),
(0x24E9, 'M', u'z'),
(0x24EA, 'M', u'0'),
(0x24EB, 'V'),
(0x2A0C, 'M', u'∫∫∫∫'),
(0x2A0D, 'V'),
(0x2A74, '3', u'::='),
(0x2A75, '3', u'=='),
(0x2A76, '3', u'==='),
(0x2A77, 'V'),
(0x2ADC, 'M', u'⫝̸'),
(0x2ADD, 'V'),
(0x2B74, 'X'),
(0x2B76, 'V'),
(0x2B96, 'X'),
(0x2B98, 'V'),
(0x2BC9, 'X'),
(0x2BCA, 'V'),
(0x2BFF, 'X'),
(0x2C00, 'M', u'ⰰ'),
(0x2C01, 'M', u'ⰱ'),
(0x2C02, 'M', u'ⰲ'),
(0x2C03, 'M', u'ⰳ'),
(0x2C04, 'M', u'ⰴ'),
(0x2C05, 'M', u'ⰵ'),
(0x2C06, 'M', u'ⰶ'),
(0x2C07, 'M', u'ⰷ'),
(0x2C08, 'M', u'ⰸ'),
(0x2C09, 'M', u'ⰹ'),
(0x2C0A, 'M', u'ⰺ'),
(0x2C0B, 'M', u'ⰻ'),
(0x2C0C, 'M', u'ⰼ'),
(0x2C0D, 'M', u'ⰽ'),
(0x2C0E, 'M', u'ⰾ'),
(0x2C0F, 'M', u'ⰿ'),
(0x2C10, 'M', u'ⱀ'),
(0x2C11, 'M', u'ⱁ'),
(0x2C12, 'M', u'ⱂ'),
(0x2C13, 'M', u'ⱃ'),
(0x2C14, 'M', u'ⱄ'),
(0x2C15, 'M', u'ⱅ'),
(0x2C16, 'M', u'ⱆ'),
(0x2C17, 'M', u'ⱇ'),
(0x2C18, 'M', u'ⱈ'),
(0x2C19, 'M', u'ⱉ'),
(0x2C1A, 'M', u'ⱊ'),
(0x2C1B, 'M', u'ⱋ'),
(0x2C1C, 'M', u'ⱌ'),
(0x2C1D, 'M', u'ⱍ'),
(0x2C1E, 'M', u'ⱎ'),
(0x2C1F, 'M', u'ⱏ'),
(0x2C20, 'M', u'ⱐ'),
(0x2C21, 'M', u'ⱑ'),
(0x2C22, 'M', u'ⱒ'),
(0x2C23, 'M', u'ⱓ'),
(0x2C24, 'M', u'ⱔ'),
(0x2C25, 'M', u'ⱕ'),
(0x2C26, 'M', u'ⱖ'),
(0x2C27, 'M', u'ⱗ'),
(0x2C28, 'M', u'ⱘ'),
(0x2C29, 'M', u'ⱙ'),
(0x2C2A, 'M', u'ⱚ'),
(0x2C2B, 'M', u'ⱛ'),
(0x2C2C, 'M', u'ⱜ'),
(0x2C2D, 'M', u'ⱝ'),
(0x2C2E, 'M', u'ⱞ'),
(0x2C2F, 'X'),
(0x2C30, 'V'),
(0x2C5F, 'X'),
(0x2C60, 'M', u'ⱡ'),
(0x2C61, 'V'),
(0x2C62, 'M', u'ɫ'),
(0x2C63, 'M', u'ᵽ'),
(0x2C64, 'M', u'ɽ'),
(0x2C65, 'V'),
(0x2C67, 'M', u'ⱨ'),
(0x2C68, 'V'),
(0x2C69, 'M', u'ⱪ'),
(0x2C6A, 'V'),
(0x2C6B, 'M', u'ⱬ'),
(0x2C6C, 'V'),
(0x2C6D, 'M', u'ɑ'),
(0x2C6E, 'M', u'ɱ'),
(0x2C6F, 'M', u'ɐ'),
(0x2C70, 'M', u'ɒ'),
]
def _seg_25():
return [
(0x2C71, 'V'),
(0x2C72, 'M', u'ⱳ'),
(0x2C73, 'V'),
(0x2C75, 'M', u'ⱶ'),
(0x2C76, 'V'),
(0x2C7C, 'M', u'j'),
(0x2C7D, 'M', u'v'),
(0x2C7E, 'M', u'ȿ'),
(0x2C7F, 'M', u'ɀ'),
(0x2C80, 'M', u'ⲁ'),
(0x2C81, 'V'),
(0x2C82, 'M', u'ⲃ'),
(0x2C83, 'V'),
(0x2C84, 'M', u'ⲅ'),
(0x2C85, 'V'),
(0x2C86, 'M', u'ⲇ'),
(0x2C87, 'V'),
(0x2C88, 'M', u'ⲉ'),
(0x2C89, 'V'),
(0x2C8A, 'M', u'ⲋ'),
(0x2C8B, 'V'),
(0x2C8C, 'M', u'ⲍ'),
(0x2C8D, 'V'),
(0x2C8E, 'M', u'ⲏ'),
(0x2C8F, 'V'),
(0x2C90, 'M', u'ⲑ'),
(0x2C91, 'V'),
(0x2C92, 'M', u'ⲓ'),
(0x2C93, 'V'),
(0x2C94, 'M', u'ⲕ'),
(0x2C95, 'V'),
(0x2C96, 'M', u'ⲗ'),
(0x2C97, 'V'),
(0x2C98, 'M', u'ⲙ'),
(0x2C99, 'V'),
(0x2C9A, 'M', u'ⲛ'),
(0x2C9B, 'V'),
(0x2C9C, 'M', u'ⲝ'),
(0x2C9D, 'V'),
(0x2C9E, 'M', u'ⲟ'),
(0x2C9F, 'V'),
(0x2CA0, 'M', u'ⲡ'),
(0x2CA1, 'V'),
(0x2CA2, 'M', u'ⲣ'),
(0x2CA3, 'V'),
(0x2CA4, 'M', u'ⲥ'),
(0x2CA5, 'V'),
(0x2CA6, 'M', u'ⲧ'),
(0x2CA7, 'V'),
(0x2CA8, 'M', u'ⲩ'),
(0x2CA9, 'V'),
(0x2CAA, 'M', u'ⲫ'),
(0x2CAB, 'V'),
(0x2CAC, 'M', u'ⲭ'),
(0x2CAD, 'V'),
(0x2CAE, 'M', u'ⲯ'),
(0x2CAF, 'V'),
(0x2CB0, 'M', u'ⲱ'),
(0x2CB1, 'V'),
(0x2CB2, 'M', u'ⲳ'),
(0x2CB3, 'V'),
(0x2CB4, 'M', u'ⲵ'),
(0x2CB5, 'V'),
(0x2CB6, 'M', u'ⲷ'),
(0x2CB7, 'V'),
(0x2CB8, 'M', u'ⲹ'),
(0x2CB9, 'V'),
(0x2CBA, 'M', u'ⲻ'),
(0x2CBB, 'V'),
(0x2CBC, 'M', u'ⲽ'),
(0x2CBD, 'V'),
(0x2CBE, 'M', u'ⲿ'),
(0x2CBF, 'V'),
(0x2CC0, 'M', u'ⳁ'),
(0x2CC1, 'V'),
(0x2CC2, 'M', u'ⳃ'),
(0x2CC3, 'V'),
(0x2CC4, 'M', u'ⳅ'),
(0x2CC5, 'V'),
(0x2CC6, 'M', u'ⳇ'),
(0x2CC7, 'V'),
(0x2CC8, 'M', u'ⳉ'),
(0x2CC9, 'V'),
(0x2CCA, 'M', u'ⳋ'),
(0x2CCB, 'V'),
(0x2CCC, 'M', u'ⳍ'),
(0x2CCD, 'V'),
(0x2CCE, 'M', u'ⳏ'),
(0x2CCF, 'V'),
(0x2CD0, 'M', u'ⳑ'),
(0x2CD1, 'V'),
(0x2CD2, 'M', u'ⳓ'),
(0x2CD3, 'V'),
(0x2CD4, 'M', u'ⳕ'),
(0x2CD5, 'V'),
(0x2CD6, 'M', u'ⳗ'),
(0x2CD7, 'V'),
(0x2CD8, 'M', u'ⳙ'),
(0x2CD9, 'V'),
(0x2CDA, 'M', u'ⳛ'),
]
def _seg_26():
return [
(0x2CDB, 'V'),
(0x2CDC, 'M', u'ⳝ'),
(0x2CDD, 'V'),
(0x2CDE, 'M', u'ⳟ'),
(0x2CDF, 'V'),
(0x2CE0, 'M', u'ⳡ'),
(0x2CE1, 'V'),
(0x2CE2, 'M', u'ⳣ'),
(0x2CE3, 'V'),
(0x2CEB, 'M', u'ⳬ'),
(0x2CEC, 'V'),
(0x2CED, 'M', u'ⳮ'),
(0x2CEE, 'V'),
(0x2CF2, 'M', u'ⳳ'),
(0x2CF3, 'V'),
(0x2CF4, 'X'),
(0x2CF9, 'V'),
(0x2D26, 'X'),
(0x2D27, 'V'),
(0x2D28, 'X'),
(0x2D2D, 'V'),
(0x2D2E, 'X'),
(0x2D30, 'V'),
(0x2D68, 'X'),
(0x2D6F, 'M', u'ⵡ'),
(0x2D70, 'V'),
(0x2D71, 'X'),
(0x2D7F, 'V'),
(0x2D97, 'X'),
(0x2DA0, 'V'),
(0x2DA7, 'X'),
(0x2DA8, 'V'),
(0x2DAF, 'X'),
(0x2DB0, 'V'),
(0x2DB7, 'X'),
(0x2DB8, 'V'),
(0x2DBF, 'X'),
(0x2DC0, 'V'),
(0x2DC7, 'X'),
(0x2DC8, 'V'),
(0x2DCF, 'X'),
(0x2DD0, 'V'),
(0x2DD7, 'X'),
(0x2DD8, 'V'),
(0x2DDF, 'X'),
(0x2DE0, 'V'),
(0x2E4F, 'X'),
(0x2E80, 'V'),
(0x2E9A, 'X'),
(0x2E9B, 'V'),
(0x2E9F, 'M', u'母'),
(0x2EA0, 'V'),
(0x2EF3, 'M', u'龟'),
(0x2EF4, 'X'),
(0x2F00, 'M', u'一'),
(0x2F01, 'M', u'丨'),
(0x2F02, 'M', u'丶'),
(0x2F03, 'M', u'丿'),
(0x2F04, 'M', u'乙'),
(0x2F05, 'M', u'亅'),
(0x2F06, 'M', u'二'),
(0x2F07, 'M', u'亠'),
(0x2F08, 'M', u'人'),
(0x2F09, 'M', u'儿'),
(0x2F0A, 'M', u'入'),
(0x2F0B, 'M', u'八'),
(0x2F0C, 'M', u'冂'),
(0x2F0D, 'M', u'冖'),
(0x2F0E, 'M', u'冫'),
(0x2F0F, 'M', u'几'),
(0x2F10, 'M', u'凵'),
(0x2F11, 'M', u'刀'),
(0x2F12, 'M', u'力'),
(0x2F13, 'M', u'勹'),
(0x2F14, 'M', u'匕'),
(0x2F15, 'M', u'匚'),
(0x2F16, 'M', u'匸'),
(0x2F17, 'M', u'十'),
(0x2F18, 'M', u'卜'),
(0x2F19, 'M', u'卩'),
(0x2F1A, 'M', u'厂'),
(0x2F1B, 'M', u'厶'),
(0x2F1C, 'M', u'又'),
(0x2F1D, 'M', u'口'),
(0x2F1E, 'M', u'囗'),
(0x2F1F, 'M', u'土'),
(0x2F20, 'M', u'士'),
(0x2F21, 'M', u'夂'),
(0x2F22, 'M', u'夊'),
(0x2F23, 'M', u'夕'),
(0x2F24, 'M', u'大'),
(0x2F25, 'M', u'女'),
(0x2F26, 'M', u'子'),
(0x2F27, 'M', u'宀'),
(0x2F28, 'M', u'寸'),
(0x2F29, 'M', u'小'),
(0x2F2A, 'M', u'尢'),
(0x2F2B, 'M', u'尸'),
(0x2F2C, 'M', u'屮'),
(0x2F2D, 'M', u'山'),
]
def _seg_27():
return [
(0x2F2E, 'M', u'巛'),
(0x2F2F, 'M', u'工'),
(0x2F30, 'M', u'己'),
(0x2F31, 'M', u'巾'),
(0x2F32, 'M', u'干'),
(0x2F33, 'M', u'幺'),
(0x2F34, 'M', u'广'),
(0x2F35, 'M', u'廴'),
(0x2F36, 'M', u'廾'),
(0x2F37, 'M', u'弋'),
(0x2F38, 'M', u'弓'),
(0x2F39, 'M', u'彐'),
(0x2F3A, 'M', u'彡'),
(0x2F3B, 'M', u'彳'),
(0x2F3C, 'M', u'心'),
(0x2F3D, 'M', u'戈'),
(0x2F3E, 'M', u'戶'),
(0x2F3F, 'M', u'手'),
(0x2F40, 'M', u'支'),
(0x2F41, 'M', u'攴'),
(0x2F42, 'M', u'文'),
(0x2F43, 'M', u'斗'),
(0x2F44, 'M', u'斤'),
(0x2F45, 'M', u'方'),
(0x2F46, 'M', u'无'),
(0x2F47, 'M', u'日'),
(0x2F48, 'M', u'曰'),
(0x2F49, 'M', u'月'),
(0x2F4A, 'M', u'木'),
(0x2F4B, 'M', u'欠'),
(0x2F4C, 'M', u'止'),
(0x2F4D, 'M', u'歹'),
(0x2F4E, 'M', u'殳'),
(0x2F4F, 'M', u'毋'),
(0x2F50, 'M', u'比'),
(0x2F51, 'M', u'毛'),
(0x2F52, 'M', u'氏'),
(0x2F53, 'M', u'气'),
(0x2F54, 'M', u'水'),
(0x2F55, 'M', u'火'),
(0x2F56, 'M', u'爪'),
(0x2F57, 'M', u'父'),
(0x2F58, 'M', u'爻'),
(0x2F59, 'M', u'爿'),
(0x2F5A, 'M', u'片'),
(0x2F5B, 'M', u'牙'),
(0x2F5C, 'M', u'牛'),
(0x2F5D, 'M', u'犬'),
(0x2F5E, 'M', u'玄'),
(0x2F5F, 'M', u'玉'),
(0x2F60, 'M', u'瓜'),
(0x2F61, 'M', u'瓦'),
(0x2F62, 'M', u'甘'),
(0x2F63, 'M', u'生'),
(0x2F64, 'M', u'用'),
(0x2F65, 'M', u'田'),
(0x2F66, 'M', u'疋'),
(0x2F67, 'M', u'疒'),
(0x2F68, 'M', u'癶'),
(0x2F69, 'M', u'白'),
(0x2F6A, 'M', u'皮'),
(0x2F6B, 'M', u'皿'),
(0x2F6C, 'M', u'目'),
(0x2F6D, 'M', u'矛'),
(0x2F6E, 'M', u'矢'),
(0x2F6F, 'M', u'石'),
(0x2F70, 'M', u'示'),
(0x2F71, 'M', u'禸'),
(0x2F72, 'M', u'禾'),
(0x2F73, 'M', u'穴'),
(0x2F74, 'M', u'立'),
(0x2F75, 'M', u'竹'),
(0x2F76, 'M', u'米'),
(0x2F77, 'M', u'糸'),
(0x2F78, 'M', u'缶'),
(0x2F79, 'M', u'网'),
(0x2F7A, 'M', u'羊'),
(0x2F7B, 'M', u'羽'),
(0x2F7C, 'M', u'老'),
(0x2F7D, 'M', u'而'),
(0x2F7E, 'M', u'耒'),
(0x2F7F, 'M', u'耳'),
(0x2F80, 'M', u'聿'),
(0x2F81, 'M', u'肉'),
(0x2F82, 'M', u'臣'),
(0x2F83, 'M', u'自'),
(0x2F84, 'M', u'至'),
(0x2F85, 'M', u'臼'),
(0x2F86, 'M', u'舌'),
(0x2F87, 'M', u'舛'),
(0x2F88, 'M', u'舟'),
(0x2F89, 'M', u'艮'),
(0x2F8A, 'M', u'色'),
(0x2F8B, 'M', u'艸'),
(0x2F8C, 'M', u'虍'),
(0x2F8D, 'M', u'虫'),
(0x2F8E, 'M', u'血'),
(0x2F8F, 'M', u'行'),
(0x2F90, 'M', u'衣'),
(0x2F91, 'M', u'襾'),
]
def _seg_28():
return [
(0x2F92, 'M', u'見'),
(0x2F93, 'M', u'角'),
(0x2F94, 'M', u'言'),
(0x2F95, 'M', u'谷'),
(0x2F96, 'M', u'豆'),
(0x2F97, 'M', u'豕'),
(0x2F98, 'M', u'豸'),
(0x2F99, 'M', u'貝'),
(0x2F9A, 'M', u'赤'),
(0x2F9B, 'M', u'走'),
(0x2F9C, 'M', u'足'),
(0x2F9D, 'M', u'身'),
(0x2F9E, 'M', u'車'),
(0x2F9F, 'M', u'辛'),
(0x2FA0, 'M', u'辰'),
(0x2FA1, 'M', u'辵'),
(0x2FA2, 'M', u'邑'),
(0x2FA3, 'M', u'酉'),
(0x2FA4, 'M', u'釆'),
(0x2FA5, 'M', u'里'),
(0x2FA6, 'M', u'金'),
(0x2FA7, 'M', u'長'),
(0x2FA8, 'M', u'門'),
(0x2FA9, 'M', u'阜'),
(0x2FAA, 'M', u'隶'),
(0x2FAB, 'M', u'隹'),
(0x2FAC, 'M', u'雨'),
(0x2FAD, 'M', u'靑'),
(0x2FAE, 'M', u'非'),
(0x2FAF, 'M', u'面'),
(0x2FB0, 'M', u'革'),
(0x2FB1, 'M', u'韋'),
(0x2FB2, 'M', u'韭'),
(0x2FB3, 'M', u'音'),
(0x2FB4, 'M', u'頁'),
(0x2FB5, 'M', u'風'),
(0x2FB6, 'M', u'飛'),
(0x2FB7, 'M', u'食'),
(0x2FB8, 'M', u'首'),
(0x2FB9, 'M', u'香'),
(0x2FBA, 'M', u'馬'),
(0x2FBB, 'M', u'骨'),
(0x2FBC, 'M', u'高'),
(0x2FBD, 'M', u'髟'),
(0x2FBE, 'M', u'鬥'),
(0x2FBF, 'M', u'鬯'),
(0x2FC0, 'M', u'鬲'),
(0x2FC1, 'M', u'鬼'),
(0x2FC2, 'M', u'魚'),
(0x2FC3, 'M', u'鳥'),
(0x2FC4, 'M', u'鹵'),
(0x2FC5, 'M', u'鹿'),
(0x2FC6, 'M', u'麥'),
(0x2FC7, 'M', u'麻'),
(0x2FC8, 'M', u'黃'),
(0x2FC9, 'M', u'黍'),
(0x2FCA, 'M', u'黑'),
(0x2FCB, 'M', u'黹'),
(0x2FCC, 'M', u'黽'),
(0x2FCD, 'M', u'鼎'),
(0x2FCE, 'M', u'鼓'),
(0x2FCF, 'M', u'鼠'),
(0x2FD0, 'M', u'鼻'),
(0x2FD1, 'M', u'齊'),
(0x2FD2, 'M', u'齒'),
(0x2FD3, 'M', u'龍'),
(0x2FD4, 'M', u'龜'),
(0x2FD5, 'M', u'龠'),
(0x2FD6, 'X'),
(0x3000, '3', u' '),
(0x3001, 'V'),
(0x3002, 'M', u'.'),
(0x3003, 'V'),
(0x3036, 'M', u'〒'),
(0x3037, 'V'),
(0x3038, 'M', u'十'),
(0x3039, 'M', u'卄'),
(0x303A, 'M', u'卅'),
(0x303B, 'V'),
(0x3040, 'X'),
(0x3041, 'V'),
(0x3097, 'X'),
(0x3099, 'V'),
(0x309B, '3', u' ゙'),
(0x309C, '3', u' ゚'),
(0x309D, 'V'),
(0x309F, 'M', u'より'),
(0x30A0, 'V'),
(0x30FF, 'M', u'コト'),
(0x3100, 'X'),
(0x3105, 'V'),
(0x3130, 'X'),
(0x3131, 'M', u'ᄀ'),
(0x3132, 'M', u'ᄁ'),
(0x3133, 'M', u'ᆪ'),
(0x3134, 'M', u'ᄂ'),
(0x3135, 'M', u'ᆬ'),
(0x3136, 'M', u'ᆭ'),
(0x3137, 'M', u'ᄃ'),
(0x3138, 'M', u'ᄄ'),
]
def _seg_29():
return [
(0x3139, 'M', u'ᄅ'),
(0x313A, 'M', u'ᆰ'),
(0x313B, 'M', u'ᆱ'),
(0x313C, 'M', u'ᆲ'),
(0x313D, 'M', u'ᆳ'),
(0x313E, 'M', u'ᆴ'),
(0x313F, 'M', u'ᆵ'),
(0x3140, 'M', u'ᄚ'),
(0x3141, 'M', u'ᄆ'),
(0x3142, 'M', u'ᄇ'),
(0x3143, 'M', u'ᄈ'),
(0x3144, 'M', u'ᄡ'),
(0x3145, 'M', u'ᄉ'),
(0x3146, 'M', u'ᄊ'),
(0x3147, 'M', u'ᄋ'),
(0x3148, 'M', u'ᄌ'),
(0x3149, 'M', u'ᄍ'),
(0x314A, 'M', u'ᄎ'),
(0x314B, 'M', u'ᄏ'),
(0x314C, 'M', u'ᄐ'),
(0x314D, 'M', u'ᄑ'),
(0x314E, 'M', u'ᄒ'),
(0x314F, 'M', u'ᅡ'),
(0x3150, 'M', u'ᅢ'),
(0x3151, 'M', u'ᅣ'),
(0x3152, 'M', u'ᅤ'),
(0x3153, 'M', u'ᅥ'),
(0x3154, 'M', u'ᅦ'),
(0x3155, 'M', u'ᅧ'),
(0x3156, 'M', u'ᅨ'),
(0x3157, 'M', u'ᅩ'),
(0x3158, 'M', u'ᅪ'),
(0x3159, 'M', u'ᅫ'),
(0x315A, 'M', u'ᅬ'),
(0x315B, 'M', u'ᅭ'),
(0x315C, 'M', u'ᅮ'),
(0x315D, 'M', u'ᅯ'),
(0x315E, 'M', u'ᅰ'),
(0x315F, 'M', u'ᅱ'),
(0x3160, 'M', u'ᅲ'),
(0x3161, 'M', u'ᅳ'),
(0x3162, 'M', u'ᅴ'),
(0x3163, 'M', u'ᅵ'),
(0x3164, 'X'),
(0x3165, 'M', u'ᄔ'),
(0x3166, 'M', u'ᄕ'),
(0x3167, 'M', u'ᇇ'),
(0x3168, 'M', u'ᇈ'),
(0x3169, 'M', u'ᇌ'),
(0x316A, 'M', u'ᇎ'),
(0x316B, 'M', u'ᇓ'),
(0x316C, 'M', u'ᇗ'),
(0x316D, 'M', u'ᇙ'),
(0x316E, 'M', u'ᄜ'),
(0x316F, 'M', u'ᇝ'),
(0x3170, 'M', u'ᇟ'),
(0x3171, 'M', u'ᄝ'),
(0x3172, 'M', u'ᄞ'),
(0x3173, 'M', u'ᄠ'),
(0x3174, 'M', u'ᄢ'),
(0x3175, 'M', u'ᄣ'),
(0x3176, 'M', u'ᄧ'),
(0x3177, 'M', u'ᄩ'),
(0x3178, 'M', u'ᄫ'),
(0x3179, 'M', u'ᄬ'),
(0x317A, 'M', u'ᄭ'),
(0x317B, 'M', u'ᄮ'),
(0x317C, 'M', u'ᄯ'),
(0x317D, 'M', u'ᄲ'),
(0x317E, 'M', u'ᄶ'),
(0x317F, 'M', u'ᅀ'),
(0x3180, 'M', u'ᅇ'),
(0x3181, 'M', u'ᅌ'),
(0x3182, 'M', u'ᇱ'),
(0x3183, 'M', u'ᇲ'),
(0x3184, 'M', u'ᅗ'),
(0x3185, 'M', u'ᅘ'),
(0x3186, 'M', u'ᅙ'),
(0x3187, 'M', u'ᆄ'),
(0x3188, 'M', u'ᆅ'),
(0x3189, 'M', u'ᆈ'),
(0x318A, 'M', u'ᆑ'),
(0x318B, 'M', u'ᆒ'),
(0x318C, 'M', u'ᆔ'),
(0x318D, 'M', u'ᆞ'),
(0x318E, 'M', u'ᆡ'),
(0x318F, 'X'),
(0x3190, 'V'),
(0x3192, 'M', u'一'),
(0x3193, 'M', u'二'),
(0x3194, 'M', u'三'),
(0x3195, 'M', u'四'),
(0x3196, 'M', u'上'),
(0x3197, 'M', u'中'),
(0x3198, 'M', u'下'),
(0x3199, 'M', u'甲'),
(0x319A, 'M', u'乙'),
(0x319B, 'M', u'丙'),
(0x319C, 'M', u'丁'),
(0x319D, 'M', u'天'),
]
def _seg_30():
return [
(0x319E, 'M', u'地'),
(0x319F, 'M', u'人'),
(0x31A0, 'V'),
(0x31BB, 'X'),
(0x31C0, 'V'),
(0x31E4, 'X'),
(0x31F0, 'V'),
(0x3200, '3', u'(ᄀ)'),
(0x3201, '3', u'(ᄂ)'),
(0x3202, '3', u'(ᄃ)'),
(0x3203, '3', u'(ᄅ)'),
(0x3204, '3', u'(ᄆ)'),
(0x3205, '3', u'(ᄇ)'),
(0x3206, '3', u'(ᄉ)'),
(0x3207, '3', u'(ᄋ)'),
(0x3208, '3', u'(ᄌ)'),
(0x3209, '3', u'(ᄎ)'),
(0x320A, '3', u'(ᄏ)'),
(0x320B, '3', u'(ᄐ)'),
(0x320C, '3', u'(ᄑ)'),
(0x320D, '3', u'(ᄒ)'),
(0x320E, '3', u'(가)'),
(0x320F, '3', u'(나)'),
(0x3210, '3', u'(다)'),
(0x3211, '3', u'(라)'),
(0x3212, '3', u'(마)'),
(0x3213, '3', u'(바)'),
(0x3214, '3', u'(사)'),
(0x3215, '3', u'(아)'),
(0x3216, '3', u'(자)'),
(0x3217, '3', u'(차)'),
(0x3218, '3', u'(카)'),
(0x3219, '3', u'(타)'),
(0x321A, '3', u'(파)'),
(0x321B, '3', u'(하)'),
(0x321C, '3', u'(주)'),
(0x321D, '3', u'(오전)'),
(0x321E, '3', u'(오후)'),
(0x321F, 'X'),
(0x3220, '3', u'(一)'),
(0x3221, '3', u'(二)'),
(0x3222, '3', u'(三)'),
(0x3223, '3', u'(四)'),
(0x3224, '3', u'(五)'),
(0x3225, '3', u'(六)'),
(0x3226, '3', u'(七)'),
(0x3227, '3', u'(八)'),
(0x3228, '3', u'(九)'),
(0x3229, '3', u'(十)'),
(0x322A, '3', u'(月)'),
(0x322B, '3', u'(火)'),
(0x322C, '3', u'(水)'),
(0x322D, '3', u'(木)'),
(0x322E, '3', u'(金)'),
(0x322F, '3', u'(土)'),
(0x3230, '3', u'(日)'),
(0x3231, '3', u'(株)'),
(0x3232, '3', u'(有)'),
(0x3233, '3', u'(社)'),
(0x3234, '3', u'(名)'),
(0x3235, '3', u'(特)'),
(0x3236, '3', u'(財)'),
(0x3237, '3', u'(祝)'),
(0x3238, '3', u'(労)'),
(0x3239, '3', u'(代)'),
(0x323A, '3', u'(呼)'),
(0x323B, '3', u'(学)'),
(0x323C, '3', u'(監)'),
(0x323D, '3', u'(企)'),
(0x323E, '3', u'(資)'),
(0x323F, '3', u'(協)'),
(0x3240, '3', u'(祭)'),
(0x3241, '3', u'(休)'),
(0x3242, '3', u'(自)'),
(0x3243, '3', u'(至)'),
(0x3244, 'M', u'問'),
(0x3245, 'M', u'幼'),
(0x3246, 'M', u'文'),
(0x3247, 'M', u'箏'),
(0x3248, 'V'),
(0x3250, 'M', u'pte'),
(0x3251, 'M', u'21'),
(0x3252, 'M', u'22'),
(0x3253, 'M', u'23'),
(0x3254, 'M', u'24'),
(0x3255, 'M', u'25'),
(0x3256, 'M', u'26'),
(0x3257, 'M', u'27'),
(0x3258, 'M', u'28'),
(0x3259, 'M', u'29'),
(0x325A, 'M', u'30'),
(0x325B, 'M', u'31'),
(0x325C, 'M', u'32'),
(0x325D, 'M', u'33'),
(0x325E, 'M', u'34'),
(0x325F, 'M', u'35'),
(0x3260, 'M', u'ᄀ'),
(0x3261, 'M', u'ᄂ'),
(0x3262, 'M', u'ᄃ'),
(0x3263, 'M', u'ᄅ'),
]
def _seg_31():
return [
(0x3264, 'M', u'ᄆ'),
(0x3265, 'M', u'ᄇ'),
(0x3266, 'M', u'ᄉ'),
(0x3267, 'M', u'ᄋ'),
(0x3268, 'M', u'ᄌ'),
(0x3269, 'M', u'ᄎ'),
(0x326A, 'M', u'ᄏ'),
(0x326B, 'M', u'ᄐ'),
(0x326C, 'M', u'ᄑ'),
(0x326D, 'M', u'ᄒ'),
(0x326E, 'M', u'가'),
(0x326F, 'M', u'나'),
(0x3270, 'M', u'다'),
(0x3271, 'M', u'라'),
(0x3272, 'M', u'마'),
(0x3273, 'M', u'바'),
(0x3274, 'M', u'사'),
(0x3275, 'M', u'아'),
(0x3276, 'M', u'자'),
(0x3277, 'M', u'차'),
(0x3278, 'M', u'카'),
(0x3279, 'M', u'타'),
(0x327A, 'M', u'파'),
(0x327B, 'M', u'하'),
(0x327C, 'M', u'참고'),
(0x327D, 'M', u'주의'),
(0x327E, 'M', u'우'),
(0x327F, 'V'),
(0x3280, 'M', u'一'),
(0x3281, 'M', u'二'),
(0x3282, 'M', u'三'),
(0x3283, 'M', u'四'),
(0x3284, 'M', u'五'),
(0x3285, 'M', u'六'),
(0x3286, 'M', u'七'),
(0x3287, 'M', u'八'),
(0x3288, 'M', u'九'),
(0x3289, 'M', u'十'),
(0x328A, 'M', u'月'),
(0x328B, 'M', u'火'),
(0x328C, 'M', u'水'),
(0x328D, 'M', u'木'),
(0x328E, 'M', u'金'),
(0x328F, 'M', u'土'),
(0x3290, 'M', u'日'),
(0x3291, 'M', u'株'),
(0x3292, 'M', u'有'),
(0x3293, 'M', u'社'),
(0x3294, 'M', u'名'),
(0x3295, 'M', u'特'),
(0x3296, 'M', u'財'),
(0x3297, 'M', u'祝'),
(0x3298, 'M', u'労'),
(0x3299, 'M', u'秘'),
(0x329A, 'M', u'男'),
(0x329B, 'M', u'女'),
(0x329C, 'M', u'適'),
(0x329D, 'M', u'優'),
(0x329E, 'M', u'印'),
(0x329F, 'M', u'注'),
(0x32A0, 'M', u'項'),
(0x32A1, 'M', u'休'),
(0x32A2, 'M', u'写'),
(0x32A3, 'M', u'正'),
(0x32A4, 'M', u'上'),
(0x32A5, 'M', u'中'),
(0x32A6, 'M', u'下'),
(0x32A7, 'M', u'左'),
(0x32A8, 'M', u'右'),
(0x32A9, 'M', u'医'),
(0x32AA, 'M', u'宗'),
(0x32AB, 'M', u'学'),
(0x32AC, 'M', u'監'),
(0x32AD, 'M', u'企'),
(0x32AE, 'M', u'資'),
(0x32AF, 'M', u'協'),
(0x32B0, 'M', u'夜'),
(0x32B1, 'M', u'36'),
(0x32B2, 'M', u'37'),
(0x32B3, 'M', u'38'),
(0x32B4, 'M', u'39'),
(0x32B5, 'M', u'40'),
(0x32B6, 'M', u'41'),
(0x32B7, 'M', u'42'),
(0x32B8, 'M', u'43'),
(0x32B9, 'M', u'44'),
(0x32BA, 'M', u'45'),
(0x32BB, 'M', u'46'),
(0x32BC, 'M', u'47'),
(0x32BD, 'M', u'48'),
(0x32BE, 'M', u'49'),
(0x32BF, 'M', u'50'),
(0x32C0, 'M', u'1月'),
(0x32C1, 'M', u'2月'),
(0x32C2, 'M', u'3月'),
(0x32C3, 'M', u'4月'),
(0x32C4, 'M', u'5月'),
(0x32C5, 'M', u'6月'),
(0x32C6, 'M', u'7月'),
(0x32C7, 'M', u'8月'),
]
def _seg_32():
return [
(0x32C8, 'M', u'9月'),
(0x32C9, 'M', u'10月'),
(0x32CA, 'M', u'11月'),
(0x32CB, 'M', u'12月'),
(0x32CC, 'M', u'hg'),
(0x32CD, 'M', u'erg'),
(0x32CE, 'M', u'ev'),
(0x32CF, 'M', u'ltd'),
(0x32D0, 'M', u'ア'),
(0x32D1, 'M', u'イ'),
(0x32D2, 'M', u'ウ'),
(0x32D3, 'M', u'エ'),
(0x32D4, 'M', u'オ'),
(0x32D5, 'M', u'カ'),
(0x32D6, 'M', u'キ'),
(0x32D7, 'M', u'ク'),
(0x32D8, 'M', u'ケ'),
(0x32D9, 'M', u'コ'),
(0x32DA, 'M', u'サ'),
(0x32DB, 'M', u'シ'),
(0x32DC, 'M', u'ス'),
(0x32DD, 'M', u'セ'),
(0x32DE, 'M', u'ソ'),
(0x32DF, 'M', u'タ'),
(0x32E0, 'M', u'チ'),
(0x32E1, 'M', u'ツ'),
(0x32E2, 'M', u'テ'),
(0x32E3, 'M', u'ト'),
(0x32E4, 'M', u'ナ'),
(0x32E5, 'M', u'ニ'),
(0x32E6, 'M', u'ヌ'),
(0x32E7, 'M', u'ネ'),
(0x32E8, 'M', u'ノ'),
(0x32E9, 'M', u'ハ'),
(0x32EA, 'M', u'ヒ'),
(0x32EB, 'M', u'フ'),
(0x32EC, 'M', u'ヘ'),
(0x32ED, 'M', u'ホ'),
(0x32EE, 'M', u'マ'),
(0x32EF, 'M', u'ミ'),
(0x32F0, 'M', u'ム'),
(0x32F1, 'M', u'メ'),
(0x32F2, 'M', u'モ'),
(0x32F3, 'M', u'ヤ'),
(0x32F4, 'M', u'ユ'),
(0x32F5, 'M', u'ヨ'),
(0x32F6, 'M', u'ラ'),
(0x32F7, 'M', u'リ'),
(0x32F8, 'M', u'ル'),
(0x32F9, 'M', u'レ'),
(0x32FA, 'M', u'ロ'),
(0x32FB, 'M', u'ワ'),
(0x32FC, 'M', u'ヰ'),
(0x32FD, 'M', u'ヱ'),
(0x32FE, 'M', u'ヲ'),
(0x32FF, 'X'),
(0x3300, 'M', u'アパート'),
(0x3301, 'M', u'アルファ'),
(0x3302, 'M', u'アンペア'),
(0x3303, 'M', u'アール'),
(0x3304, 'M', u'イニング'),
(0x3305, 'M', u'インチ'),
(0x3306, 'M', u'ウォン'),
(0x3307, 'M', u'エスクード'),
(0x3308, 'M', u'エーカー'),
(0x3309, 'M', u'オンス'),
(0x330A, 'M', u'オーム'),
(0x330B, 'M', u'カイリ'),
(0x330C, 'M', u'カラット'),
(0x330D, 'M', u'カロリー'),
(0x330E, 'M', u'ガロン'),
(0x330F, 'M', u'ガンマ'),
(0x3310, 'M', u'ギガ'),
(0x3311, 'M', u'ギニー'),
(0x3312, 'M', u'キュリー'),
(0x3313, 'M', u'ギルダー'),
(0x3314, 'M', u'キロ'),
(0x3315, 'M', u'キログラム'),
(0x3316, 'M', u'キロメートル'),
(0x3317, 'M', u'キロワット'),
(0x3318, 'M', u'グラム'),
(0x3319, 'M', u'グラムトン'),
(0x331A, 'M', u'クルゼイロ'),
(0x331B, 'M', u'クローネ'),
(0x331C, 'M', u'ケース'),
(0x331D, 'M', u'コルナ'),
(0x331E, 'M', u'コーポ'),
(0x331F, 'M', u'サイクル'),
(0x3320, 'M', u'サンチーム'),
(0x3321, 'M', u'シリング'),
(0x3322, 'M', u'センチ'),
(0x3323, 'M', u'セント'),
(0x3324, 'M', u'ダース'),
(0x3325, 'M', u'デシ'),
(0x3326, 'M', u'ドル'),
(0x3327, 'M', u'トン'),
(0x3328, 'M', u'ナノ'),
(0x3329, 'M', u'ノット'),
(0x332A, 'M', u'ハイツ'),
(0x332B, 'M', u'パーセント'),
]
def _seg_33():
return [
(0x332C, 'M', u'パーツ'),
(0x332D, 'M', u'バーレル'),
(0x332E, 'M', u'ピアストル'),
(0x332F, 'M', u'ピクル'),
(0x3330, 'M', u'ピコ'),
(0x3331, 'M', u'ビル'),
(0x3332, 'M', u'ファラッド'),
(0x3333, 'M', u'フィート'),
(0x3334, 'M', u'ブッシェル'),
(0x3335, 'M', u'フラン'),
(0x3336, 'M', u'ヘクタール'),
(0x3337, 'M', u'ペソ'),
(0x3338, 'M', u'ペニヒ'),
(0x3339, 'M', u'ヘルツ'),
(0x333A, 'M', u'ペンス'),
(0x333B, 'M', u'ページ'),
(0x333C, 'M', u'ベータ'),
(0x333D, 'M', u'ポイント'),
(0x333E, 'M', u'ボルト'),
(0x333F, 'M', u'ホン'),
(0x3340, 'M', u'ポンド'),
(0x3341, 'M', u'ホール'),
(0x3342, 'M', u'ホーン'),
(0x3343, 'M', u'マイクロ'),
(0x3344, 'M', u'マイル'),
(0x3345, 'M', u'マッハ'),
(0x3346, 'M', u'マルク'),
(0x3347, 'M', u'マンション'),
(0x3348, 'M', u'ミクロン'),
(0x3349, 'M', u'ミリ'),
(0x334A, 'M', u'ミリバール'),
(0x334B, 'M', u'メガ'),
(0x334C, 'M', u'メガトン'),
(0x334D, 'M', u'メートル'),
(0x334E, 'M', u'ヤード'),
(0x334F, 'M', u'ヤール'),
(0x3350, 'M', u'ユアン'),
(0x3351, 'M', u'リットル'),
(0x3352, 'M', u'リラ'),
(0x3353, 'M', u'ルピー'),
(0x3354, 'M', u'ルーブル'),
(0x3355, 'M', u'レム'),
(0x3356, 'M', u'レントゲン'),
(0x3357, 'M', u'ワット'),
(0x3358, 'M', u'0点'),
(0x3359, 'M', u'1点'),
(0x335A, 'M', u'2点'),
(0x335B, 'M', u'3点'),
(0x335C, 'M', u'4点'),
(0x335D, 'M', u'5点'),
(0x335E, 'M', u'6点'),
(0x335F, 'M', u'7点'),
(0x3360, 'M', u'8点'),
(0x3361, 'M', u'9点'),
(0x3362, 'M', u'10点'),
(0x3363, 'M', u'11点'),
(0x3364, 'M', u'12点'),
(0x3365, 'M', u'13点'),
(0x3366, 'M', u'14点'),
(0x3367, 'M', u'15点'),
(0x3368, 'M', u'16点'),
(0x3369, 'M', u'17点'),
(0x336A, 'M', u'18点'),
(0x336B, 'M', u'19点'),
(0x336C, 'M', u'20点'),
(0x336D, 'M', u'21点'),
(0x336E, 'M', u'22点'),
(0x336F, 'M', u'23点'),
(0x3370, 'M', u'24点'),
(0x3371, 'M', u'hpa'),
(0x3372, 'M', u'da'),
(0x3373, 'M', u'au'),
(0x3374, 'M', u'bar'),
(0x3375, 'M', u'ov'),
(0x3376, 'M', u'pc'),
(0x3377, 'M', u'dm'),
(0x3378, 'M', u'dm2'),
(0x3379, 'M', u'dm3'),
(0x337A, 'M', u'iu'),
(0x337B, 'M', u'平成'),
(0x337C, 'M', u'昭和'),
(0x337D, 'M', u'大正'),
(0x337E, 'M', u'明治'),
(0x337F, 'M', u'株式会社'),
(0x3380, 'M', u'pa'),
(0x3381, 'M', u'na'),
(0x3382, 'M', u'μa'),
(0x3383, 'M', u'ma'),
(0x3384, 'M', u'ka'),
(0x3385, 'M', u'kb'),
(0x3386, 'M', u'mb'),
(0x3387, 'M', u'gb'),
(0x3388, 'M', u'cal'),
(0x3389, 'M', u'kcal'),
(0x338A, 'M', u'pf'),
(0x338B, 'M', u'nf'),
(0x338C, 'M', u'μf'),
(0x338D, 'M', u'μg'),
(0x338E, 'M', u'mg'),
(0x338F, 'M', u'kg'),
]
def _seg_34():
return [
(0x3390, 'M', u'hz'),
(0x3391, 'M', u'khz'),
(0x3392, 'M', u'mhz'),
(0x3393, 'M', u'ghz'),
(0x3394, 'M', u'thz'),
(0x3395, 'M', u'μl'),
(0x3396, 'M', u'ml'),
(0x3397, 'M', u'dl'),
(0x3398, 'M', u'kl'),
(0x3399, 'M', u'fm'),
(0x339A, 'M', u'nm'),
(0x339B, 'M', u'μm'),
(0x339C, 'M', u'mm'),
(0x339D, 'M', u'cm'),
(0x339E, 'M', u'km'),
(0x339F, 'M', u'mm2'),
(0x33A0, 'M', u'cm2'),
(0x33A1, 'M', u'm2'),
(0x33A2, 'M', u'km2'),
(0x33A3, 'M', u'mm3'),
(0x33A4, 'M', u'cm3'),
(0x33A5, 'M', u'm3'),
(0x33A6, 'M', u'km3'),
(0x33A7, 'M', u'm∕s'),
(0x33A8, 'M', u'm∕s2'),
(0x33A9, 'M', u'pa'),
(0x33AA, 'M', u'kpa'),
(0x33AB, 'M', u'mpa'),
(0x33AC, 'M', u'gpa'),
(0x33AD, 'M', u'rad'),
(0x33AE, 'M', u'rad∕s'),
(0x33AF, 'M', u'rad∕s2'),
(0x33B0, 'M', u'ps'),
(0x33B1, 'M', u'ns'),
(0x33B2, 'M', u'μs'),
(0x33B3, 'M', u'ms'),
(0x33B4, 'M', u'pv'),
(0x33B5, 'M', u'nv'),
(0x33B6, 'M', u'μv'),
(0x33B7, 'M', u'mv'),
(0x33B8, 'M', u'kv'),
(0x33B9, 'M', u'mv'),
(0x33BA, 'M', u'pw'),
(0x33BB, 'M', u'nw'),
(0x33BC, 'M', u'μw'),
(0x33BD, 'M', u'mw'),
(0x33BE, 'M', u'kw'),
(0x33BF, 'M', u'mw'),
(0x33C0, 'M', u'kω'),
(0x33C1, 'M', u'mω'),
(0x33C2, 'X'),
(0x33C3, 'M', u'bq'),
(0x33C4, 'M', u'cc'),
(0x33C5, 'M', u'cd'),
(0x33C6, 'M', u'c∕kg'),
(0x33C7, 'X'),
(0x33C8, 'M', u'db'),
(0x33C9, 'M', u'gy'),
(0x33CA, 'M', u'ha'),
(0x33CB, 'M', u'hp'),
(0x33CC, 'M', u'in'),
(0x33CD, 'M', u'kk'),
(0x33CE, 'M', u'km'),
(0x33CF, 'M', u'kt'),
(0x33D0, 'M', u'lm'),
(0x33D1, 'M', u'ln'),
(0x33D2, 'M', u'log'),
(0x33D3, 'M', u'lx'),
(0x33D4, 'M', u'mb'),
(0x33D5, 'M', u'mil'),
(0x33D6, 'M', u'mol'),
(0x33D7, 'M', u'ph'),
(0x33D8, 'X'),
(0x33D9, 'M', u'ppm'),
(0x33DA, 'M', u'pr'),
(0x33DB, 'M', u'sr'),
(0x33DC, 'M', u'sv'),
(0x33DD, 'M', u'wb'),
(0x33DE, 'M', u'v∕m'),
(0x33DF, 'M', u'a∕m'),
(0x33E0, 'M', u'1日'),
(0x33E1, 'M', u'2日'),
(0x33E2, 'M', u'3日'),
(0x33E3, 'M', u'4日'),
(0x33E4, 'M', u'5日'),
(0x33E5, 'M', u'6日'),
(0x33E6, 'M', u'7日'),
(0x33E7, 'M', u'8日'),
(0x33E8, 'M', u'9日'),
(0x33E9, 'M', u'10日'),
(0x33EA, 'M', u'11日'),
(0x33EB, 'M', u'12日'),
(0x33EC, 'M', u'13日'),
(0x33ED, 'M', u'14日'),
(0x33EE, 'M', u'15日'),
(0x33EF, 'M', u'16日'),
(0x33F0, 'M', u'17日'),
(0x33F1, 'M', u'18日'),
(0x33F2, 'M', u'19日'),
(0x33F3, 'M', u'20日'),
]
def _seg_35():
return [
(0x33F4, 'M', u'21日'),
(0x33F5, 'M', u'22日'),
(0x33F6, 'M', u'23日'),
(0x33F7, 'M', u'24日'),
(0x33F8, 'M', u'25日'),
(0x33F9, 'M', u'26日'),
(0x33FA, 'M', u'27日'),
(0x33FB, 'M', u'28日'),
(0x33FC, 'M', u'29日'),
(0x33FD, 'M', u'30日'),
(0x33FE, 'M', u'31日'),
(0x33FF, 'M', u'gal'),
(0x3400, 'V'),
(0x4DB6, 'X'),
(0x4DC0, 'V'),
(0x9FF0, 'X'),
(0xA000, 'V'),
(0xA48D, 'X'),
(0xA490, 'V'),
(0xA4C7, 'X'),
(0xA4D0, 'V'),
(0xA62C, 'X'),
(0xA640, 'M', u'ꙁ'),
(0xA641, 'V'),
(0xA642, 'M', u'ꙃ'),
(0xA643, 'V'),
(0xA644, 'M', u'ꙅ'),
(0xA645, 'V'),
(0xA646, 'M', u'ꙇ'),
(0xA647, 'V'),
(0xA648, 'M', u'ꙉ'),
(0xA649, 'V'),
(0xA64A, 'M', u'ꙋ'),
(0xA64B, 'V'),
(0xA64C, 'M', u'ꙍ'),
(0xA64D, 'V'),
(0xA64E, 'M', u'ꙏ'),
(0xA64F, 'V'),
(0xA650, 'M', u'ꙑ'),
(0xA651, 'V'),
(0xA652, 'M', u'ꙓ'),
(0xA653, 'V'),
(0xA654, 'M', u'ꙕ'),
(0xA655, 'V'),
(0xA656, 'M', u'ꙗ'),
(0xA657, 'V'),
(0xA658, 'M', u'ꙙ'),
(0xA659, 'V'),
(0xA65A, 'M', u'ꙛ'),
(0xA65B, 'V'),
(0xA65C, 'M', u'ꙝ'),
(0xA65D, 'V'),
(0xA65E, 'M', u'ꙟ'),
(0xA65F, 'V'),
(0xA660, 'M', u'ꙡ'),
(0xA661, 'V'),
(0xA662, 'M', u'ꙣ'),
(0xA663, 'V'),
(0xA664, 'M', u'ꙥ'),
(0xA665, 'V'),
(0xA666, 'M', u'ꙧ'),
(0xA667, 'V'),
(0xA668, 'M', u'ꙩ'),
(0xA669, 'V'),
(0xA66A, 'M', u'ꙫ'),
(0xA66B, 'V'),
(0xA66C, 'M', u'ꙭ'),
(0xA66D, 'V'),
(0xA680, 'M', u'ꚁ'),
(0xA681, 'V'),
(0xA682, 'M', u'ꚃ'),
(0xA683, 'V'),
(0xA684, 'M', u'ꚅ'),
(0xA685, 'V'),
(0xA686, 'M', u'ꚇ'),
(0xA687, 'V'),
(0xA688, 'M', u'ꚉ'),
(0xA689, 'V'),
(0xA68A, 'M', u'ꚋ'),
(0xA68B, 'V'),
(0xA68C, 'M', u'ꚍ'),
(0xA68D, 'V'),
(0xA68E, 'M', u'ꚏ'),
(0xA68F, 'V'),
(0xA690, 'M', u'ꚑ'),
(0xA691, 'V'),
(0xA692, 'M', u'ꚓ'),
(0xA693, 'V'),
(0xA694, 'M', u'ꚕ'),
(0xA695, 'V'),
(0xA696, 'M', u'ꚗ'),
(0xA697, 'V'),
(0xA698, 'M', u'ꚙ'),
(0xA699, 'V'),
(0xA69A, 'M', u'ꚛ'),
(0xA69B, 'V'),
(0xA69C, 'M', u'ъ'),
(0xA69D, 'M', u'ь'),
(0xA69E, 'V'),
(0xA6F8, 'X'),
]
def _seg_36():
return [
(0xA700, 'V'),
(0xA722, 'M', u'ꜣ'),
(0xA723, 'V'),
(0xA724, 'M', u'ꜥ'),
(0xA725, 'V'),
(0xA726, 'M', u'ꜧ'),
(0xA727, 'V'),
(0xA728, 'M', u'ꜩ'),
(0xA729, 'V'),
(0xA72A, 'M', u'ꜫ'),
(0xA72B, 'V'),
(0xA72C, 'M', u'ꜭ'),
(0xA72D, 'V'),
(0xA72E, 'M', u'ꜯ'),
(0xA72F, 'V'),
(0xA732, 'M', u'ꜳ'),
(0xA733, 'V'),
(0xA734, 'M', u'ꜵ'),
(0xA735, 'V'),
(0xA736, 'M', u'ꜷ'),
(0xA737, 'V'),
(0xA738, 'M', u'ꜹ'),
(0xA739, 'V'),
(0xA73A, 'M', u'ꜻ'),
(0xA73B, 'V'),
(0xA73C, 'M', u'ꜽ'),
(0xA73D, 'V'),
(0xA73E, 'M', u'ꜿ'),
(0xA73F, 'V'),
(0xA740, 'M', u'ꝁ'),
(0xA741, 'V'),
(0xA742, 'M', u'ꝃ'),
(0xA743, 'V'),
(0xA744, 'M', u'ꝅ'),
(0xA745, 'V'),
(0xA746, 'M', u'ꝇ'),
(0xA747, 'V'),
(0xA748, 'M', u'ꝉ'),
(0xA749, 'V'),
(0xA74A, 'M', u'ꝋ'),
(0xA74B, 'V'),
(0xA74C, 'M', u'ꝍ'),
(0xA74D, 'V'),
(0xA74E, 'M', u'ꝏ'),
(0xA74F, 'V'),
(0xA750, 'M', u'ꝑ'),
(0xA751, 'V'),
(0xA752, 'M', u'ꝓ'),
(0xA753, 'V'),
(0xA754, 'M', u'ꝕ'),
(0xA755, 'V'),
(0xA756, 'M', u'ꝗ'),
(0xA757, 'V'),
(0xA758, 'M', u'ꝙ'),
(0xA759, 'V'),
(0xA75A, 'M', u'ꝛ'),
(0xA75B, 'V'),
(0xA75C, 'M', u'ꝝ'),
(0xA75D, 'V'),
(0xA75E, 'M', u'ꝟ'),
(0xA75F, 'V'),
(0xA760, 'M', u'ꝡ'),
(0xA761, 'V'),
(0xA762, 'M', u'ꝣ'),
(0xA763, 'V'),
(0xA764, 'M', u'ꝥ'),
(0xA765, 'V'),
(0xA766, 'M', u'ꝧ'),
(0xA767, 'V'),
(0xA768, 'M', u'ꝩ'),
(0xA769, 'V'),
(0xA76A, 'M', u'ꝫ'),
(0xA76B, 'V'),
(0xA76C, 'M', u'ꝭ'),
(0xA76D, 'V'),
(0xA76E, 'M', u'ꝯ'),
(0xA76F, 'V'),
(0xA770, 'M', u'ꝯ'),
(0xA771, 'V'),
(0xA779, 'M', u'ꝺ'),
(0xA77A, 'V'),
(0xA77B, 'M', u'ꝼ'),
(0xA77C, 'V'),
(0xA77D, 'M', u'ᵹ'),
(0xA77E, 'M', u'ꝿ'),
(0xA77F, 'V'),
(0xA780, 'M', u'ꞁ'),
(0xA781, 'V'),
(0xA782, 'M', u'ꞃ'),
(0xA783, 'V'),
(0xA784, 'M', u'ꞅ'),
(0xA785, 'V'),
(0xA786, 'M', u'ꞇ'),
(0xA787, 'V'),
(0xA78B, 'M', u'ꞌ'),
(0xA78C, 'V'),
(0xA78D, 'M', u'ɥ'),
(0xA78E, 'V'),
(0xA790, 'M', u'ꞑ'),
(0xA791, 'V'),
]
def _seg_37():
return [
(0xA792, 'M', u'ꞓ'),
(0xA793, 'V'),
(0xA796, 'M', u'ꞗ'),
(0xA797, 'V'),
(0xA798, 'M', u'ꞙ'),
(0xA799, 'V'),
(0xA79A, 'M', u'ꞛ'),
(0xA79B, 'V'),
(0xA79C, 'M', u'ꞝ'),
(0xA79D, 'V'),
(0xA79E, 'M', u'ꞟ'),
(0xA79F, 'V'),
(0xA7A0, 'M', u'ꞡ'),
(0xA7A1, 'V'),
(0xA7A2, 'M', u'ꞣ'),
(0xA7A3, 'V'),
(0xA7A4, 'M', u'ꞥ'),
(0xA7A5, 'V'),
(0xA7A6, 'M', u'ꞧ'),
(0xA7A7, 'V'),
(0xA7A8, 'M', u'ꞩ'),
(0xA7A9, 'V'),
(0xA7AA, 'M', u'ɦ'),
(0xA7AB, 'M', u'ɜ'),
(0xA7AC, 'M', u'ɡ'),
(0xA7AD, 'M', u'ɬ'),
(0xA7AE, 'M', u'ɪ'),
(0xA7AF, 'V'),
(0xA7B0, 'M', u'ʞ'),
(0xA7B1, 'M', u'ʇ'),
(0xA7B2, 'M', u'ʝ'),
(0xA7B3, 'M', u'ꭓ'),
(0xA7B4, 'M', u'ꞵ'),
(0xA7B5, 'V'),
(0xA7B6, 'M', u'ꞷ'),
(0xA7B7, 'V'),
(0xA7B8, 'X'),
(0xA7B9, 'V'),
(0xA7BA, 'X'),
(0xA7F7, 'V'),
(0xA7F8, 'M', u'ħ'),
(0xA7F9, 'M', u'œ'),
(0xA7FA, 'V'),
(0xA82C, 'X'),
(0xA830, 'V'),
(0xA83A, 'X'),
(0xA840, 'V'),
(0xA878, 'X'),
(0xA880, 'V'),
(0xA8C6, 'X'),
(0xA8CE, 'V'),
(0xA8DA, 'X'),
(0xA8E0, 'V'),
(0xA954, 'X'),
(0xA95F, 'V'),
(0xA97D, 'X'),
(0xA980, 'V'),
(0xA9CE, 'X'),
(0xA9CF, 'V'),
(0xA9DA, 'X'),
(0xA9DE, 'V'),
(0xA9FF, 'X'),
(0xAA00, 'V'),
(0xAA37, 'X'),
(0xAA40, 'V'),
(0xAA4E, 'X'),
(0xAA50, 'V'),
(0xAA5A, 'X'),
(0xAA5C, 'V'),
(0xAAC3, 'X'),
(0xAADB, 'V'),
(0xAAF7, 'X'),
(0xAB01, 'V'),
(0xAB07, 'X'),
(0xAB09, 'V'),
(0xAB0F, 'X'),
(0xAB11, 'V'),
(0xAB17, 'X'),
(0xAB20, 'V'),
(0xAB27, 'X'),
(0xAB28, 'V'),
(0xAB2F, 'X'),
(0xAB30, 'V'),
(0xAB5C, 'M', u'ꜧ'),
(0xAB5D, 'M', u'ꬷ'),
(0xAB5E, 'M', u'ɫ'),
(0xAB5F, 'M', u'ꭒ'),
(0xAB60, 'V'),
(0xAB66, 'X'),
(0xAB70, 'M', u'Ꭰ'),
(0xAB71, 'M', u'Ꭱ'),
(0xAB72, 'M', u'Ꭲ'),
(0xAB73, 'M', u'Ꭳ'),
(0xAB74, 'M', u'Ꭴ'),
(0xAB75, 'M', u'Ꭵ'),
(0xAB76, 'M', u'Ꭶ'),
(0xAB77, 'M', u'Ꭷ'),
(0xAB78, 'M', u'Ꭸ'),
(0xAB79, 'M', u'Ꭹ'),
(0xAB7A, 'M', u'Ꭺ'),
]
def _seg_38():
return [
(0xAB7B, 'M', u'Ꭻ'),
(0xAB7C, 'M', u'Ꭼ'),
(0xAB7D, 'M', u'Ꭽ'),
(0xAB7E, 'M', u'Ꭾ'),
(0xAB7F, 'M', u'Ꭿ'),
(0xAB80, 'M', u'Ꮀ'),
(0xAB81, 'M', u'Ꮁ'),
(0xAB82, 'M', u'Ꮂ'),
(0xAB83, 'M', u'Ꮃ'),
(0xAB84, 'M', u'Ꮄ'),
(0xAB85, 'M', u'Ꮅ'),
(0xAB86, 'M', u'Ꮆ'),
(0xAB87, 'M', u'Ꮇ'),
(0xAB88, 'M', u'Ꮈ'),
(0xAB89, 'M', u'Ꮉ'),
(0xAB8A, 'M', u'Ꮊ'),
(0xAB8B, 'M', u'Ꮋ'),
(0xAB8C, 'M', u'Ꮌ'),
(0xAB8D, 'M', u'Ꮍ'),
(0xAB8E, 'M', u'Ꮎ'),
(0xAB8F, 'M', u'Ꮏ'),
(0xAB90, 'M', u'Ꮐ'),
(0xAB91, 'M', u'Ꮑ'),
(0xAB92, 'M', u'Ꮒ'),
(0xAB93, 'M', u'Ꮓ'),
(0xAB94, 'M', u'Ꮔ'),
(0xAB95, 'M', u'Ꮕ'),
(0xAB96, 'M', u'Ꮖ'),
(0xAB97, 'M', u'Ꮗ'),
(0xAB98, 'M', u'Ꮘ'),
(0xAB99, 'M', u'Ꮙ'),
(0xAB9A, 'M', u'Ꮚ'),
(0xAB9B, 'M', u'Ꮛ'),
(0xAB9C, 'M', u'Ꮜ'),
(0xAB9D, 'M', u'Ꮝ'),
(0xAB9E, 'M', u'Ꮞ'),
(0xAB9F, 'M', u'Ꮟ'),
(0xABA0, 'M', u'Ꮠ'),
(0xABA1, 'M', u'Ꮡ'),
(0xABA2, 'M', u'Ꮢ'),
(0xABA3, 'M', u'Ꮣ'),
(0xABA4, 'M', u'Ꮤ'),
(0xABA5, 'M', u'Ꮥ'),
(0xABA6, 'M', u'Ꮦ'),
(0xABA7, 'M', u'Ꮧ'),
(0xABA8, 'M', u'Ꮨ'),
(0xABA9, 'M', u'Ꮩ'),
(0xABAA, 'M', u'Ꮪ'),
(0xABAB, 'M', u'Ꮫ'),
(0xABAC, 'M', u'Ꮬ'),
(0xABAD, 'M', u'Ꮭ'),
(0xABAE, 'M', u'Ꮮ'),
(0xABAF, 'M', u'Ꮯ'),
(0xABB0, 'M', u'Ꮰ'),
(0xABB1, 'M', u'Ꮱ'),
(0xABB2, 'M', u'Ꮲ'),
(0xABB3, 'M', u'Ꮳ'),
(0xABB4, 'M', u'Ꮴ'),
(0xABB5, 'M', u'Ꮵ'),
(0xABB6, 'M', u'Ꮶ'),
(0xABB7, 'M', u'Ꮷ'),
(0xABB8, 'M', u'Ꮸ'),
(0xABB9, 'M', u'Ꮹ'),
(0xABBA, 'M', u'Ꮺ'),
(0xABBB, 'M', u'Ꮻ'),
(0xABBC, 'M', u'Ꮼ'),
(0xABBD, 'M', u'Ꮽ'),
(0xABBE, 'M', u'Ꮾ'),
(0xABBF, 'M', u'Ꮿ'),
(0xABC0, 'V'),
(0xABEE, 'X'),
(0xABF0, 'V'),
(0xABFA, 'X'),
(0xAC00, 'V'),
(0xD7A4, 'X'),
(0xD7B0, 'V'),
(0xD7C7, 'X'),
(0xD7CB, 'V'),
(0xD7FC, 'X'),
(0xF900, 'M', u'豈'),
(0xF901, 'M', u'更'),
(0xF902, 'M', u'車'),
(0xF903, 'M', u'賈'),
(0xF904, 'M', u'滑'),
(0xF905, 'M', u'串'),
(0xF906, 'M', u'句'),
(0xF907, 'M', u'龜'),
(0xF909, 'M', u'契'),
(0xF90A, 'M', u'金'),
(0xF90B, 'M', u'喇'),
(0xF90C, 'M', u'奈'),
(0xF90D, 'M', u'懶'),
(0xF90E, 'M', u'癩'),
(0xF90F, 'M', u'羅'),
(0xF910, 'M', u'蘿'),
(0xF911, 'M', u'螺'),
(0xF912, 'M', u'裸'),
(0xF913, 'M', u'邏'),
(0xF914, 'M', u'樂'),
(0xF915, 'M', u'洛'),
]
def _seg_39():
return [
(0xF916, 'M', u'烙'),
(0xF917, 'M', u'珞'),
(0xF918, 'M', u'落'),
(0xF919, 'M', u'酪'),
(0xF91A, 'M', u'駱'),
(0xF91B, 'M', u'亂'),
(0xF91C, 'M', u'卵'),
(0xF91D, 'M', u'欄'),
(0xF91E, 'M', u'爛'),
(0xF91F, 'M', u'蘭'),
(0xF920, 'M', u'鸞'),
(0xF921, 'M', u'嵐'),
(0xF922, 'M', u'濫'),
(0xF923, 'M', u'藍'),
(0xF924, 'M', u'襤'),
(0xF925, 'M', u'拉'),
(0xF926, 'M', u'臘'),
(0xF927, 'M', u'蠟'),
(0xF928, 'M', u'廊'),
(0xF929, 'M', u'朗'),
(0xF92A, 'M', u'浪'),
(0xF92B, 'M', u'狼'),
(0xF92C, 'M', u'郎'),
(0xF92D, 'M', u'來'),
(0xF92E, 'M', u'冷'),
(0xF92F, 'M', u'勞'),
(0xF930, 'M', u'擄'),
(0xF931, 'M', u'櫓'),
(0xF932, 'M', u'爐'),
(0xF933, 'M', u'盧'),
(0xF934, 'M', u'老'),
(0xF935, 'M', u'蘆'),
(0xF936, 'M', u'虜'),
(0xF937, 'M', u'路'),
(0xF938, 'M', u'露'),
(0xF939, 'M', u'魯'),
(0xF93A, 'M', u'鷺'),
(0xF93B, 'M', u'碌'),
(0xF93C, 'M', u'祿'),
(0xF93D, 'M', u'綠'),
(0xF93E, 'M', u'菉'),
(0xF93F, 'M', u'錄'),
(0xF940, 'M', u'鹿'),
(0xF941, 'M', u'論'),
(0xF942, 'M', u'壟'),
(0xF943, 'M', u'弄'),
(0xF944, 'M', u'籠'),
(0xF945, 'M', u'聾'),
(0xF946, 'M', u'牢'),
(0xF947, 'M', u'磊'),
(0xF948, 'M', u'賂'),
(0xF949, 'M', u'雷'),
(0xF94A, 'M', u'壘'),
(0xF94B, 'M', u'屢'),
(0xF94C, 'M', u'樓'),
(0xF94D, 'M', u'淚'),
(0xF94E, 'M', u'漏'),
(0xF94F, 'M', u'累'),
(0xF950, 'M', u'縷'),
(0xF951, 'M', u'陋'),
(0xF952, 'M', u'勒'),
(0xF953, 'M', u'肋'),
(0xF954, 'M', u'凜'),
(0xF955, 'M', u'凌'),
(0xF956, 'M', u'稜'),
(0xF957, 'M', u'綾'),
(0xF958, 'M', u'菱'),
(0xF959, 'M', u'陵'),
(0xF95A, 'M', u'讀'),
(0xF95B, 'M', u'拏'),
(0xF95C, 'M', u'樂'),
(0xF95D, 'M', u'諾'),
(0xF95E, 'M', u'丹'),
(0xF95F, 'M', u'寧'),
(0xF960, 'M', u'怒'),
(0xF961, 'M', u'率'),
(0xF962, 'M', u'異'),
(0xF963, 'M', u'北'),
(0xF964, 'M', u'磻'),
(0xF965, 'M', u'便'),
(0xF966, 'M', u'復'),
(0xF967, 'M', u'不'),
(0xF968, 'M', u'泌'),
(0xF969, 'M', u'數'),
(0xF96A, 'M', u'索'),
(0xF96B, 'M', u'參'),
(0xF96C, 'M', u'塞'),
(0xF96D, 'M', u'省'),
(0xF96E, 'M', u'葉'),
(0xF96F, 'M', u'說'),
(0xF970, 'M', u'殺'),
(0xF971, 'M', u'辰'),
(0xF972, 'M', u'沈'),
(0xF973, 'M', u'拾'),
(0xF974, 'M', u'若'),
(0xF975, 'M', u'掠'),
(0xF976, 'M', u'略'),
(0xF977, 'M', u'亮'),
(0xF978, 'M', u'兩'),
(0xF979, 'M', u'凉'),
]
def _seg_40():
return [
(0xF97A, 'M', u'梁'),
(0xF97B, 'M', u'糧'),
(0xF97C, 'M', u'良'),
(0xF97D, 'M', u'諒'),
(0xF97E, 'M', u'量'),
(0xF97F, 'M', u'勵'),
(0xF980, 'M', u'呂'),
(0xF981, 'M', u'女'),
(0xF982, 'M', u'廬'),
(0xF983, 'M', u'旅'),
(0xF984, 'M', u'濾'),
(0xF985, 'M', u'礪'),
(0xF986, 'M', u'閭'),
(0xF987, 'M', u'驪'),
(0xF988, 'M', u'麗'),
(0xF989, 'M', u'黎'),
(0xF98A, 'M', u'力'),
(0xF98B, 'M', u'曆'),
(0xF98C, 'M', u'歷'),
(0xF98D, 'M', u'轢'),
(0xF98E, 'M', u'年'),
(0xF98F, 'M', u'憐'),
(0xF990, 'M', u'戀'),
(0xF991, 'M', u'撚'),
(0xF992, 'M', u'漣'),
(0xF993, 'M', u'煉'),
(0xF994, 'M', u'璉'),
(0xF995, 'M', u'秊'),
(0xF996, 'M', u'練'),
(0xF997, 'M', u'聯'),
(0xF998, 'M', u'輦'),
(0xF999, 'M', u'蓮'),
(0xF99A, 'M', u'連'),
(0xF99B, 'M', u'鍊'),
(0xF99C, 'M', u'列'),
(0xF99D, 'M', u'劣'),
(0xF99E, 'M', u'咽'),
(0xF99F, 'M', u'烈'),
(0xF9A0, 'M', u'裂'),
(0xF9A1, 'M', u'說'),
(0xF9A2, 'M', u'廉'),
(0xF9A3, 'M', u'念'),
(0xF9A4, 'M', u'捻'),
(0xF9A5, 'M', u'殮'),
(0xF9A6, 'M', u'簾'),
(0xF9A7, 'M', u'獵'),
(0xF9A8, 'M', u'令'),
(0xF9A9, 'M', u'囹'),
(0xF9AA, 'M', u'寧'),
(0xF9AB, 'M', u'嶺'),
(0xF9AC, 'M', u'怜'),
(0xF9AD, 'M', u'玲'),
(0xF9AE, 'M', u'瑩'),
(0xF9AF, 'M', u'羚'),
(0xF9B0, 'M', u'聆'),
(0xF9B1, 'M', u'鈴'),
(0xF9B2, 'M', u'零'),
(0xF9B3, 'M', u'靈'),
(0xF9B4, 'M', u'領'),
(0xF9B5, 'M', u'例'),
(0xF9B6, 'M', u'禮'),
(0xF9B7, 'M', u'醴'),
(0xF9B8, 'M', u'隸'),
(0xF9B9, 'M', u'惡'),
(0xF9BA, 'M', u'了'),
(0xF9BB, 'M', u'僚'),
(0xF9BC, 'M', u'寮'),
(0xF9BD, 'M', u'尿'),
(0xF9BE, 'M', u'料'),
(0xF9BF, 'M', u'樂'),
(0xF9C0, 'M', u'燎'),
(0xF9C1, 'M', u'療'),
(0xF9C2, 'M', u'蓼'),
(0xF9C3, 'M', u'遼'),
(0xF9C4, 'M', u'龍'),
(0xF9C5, 'M', u'暈'),
(0xF9C6, 'M', u'阮'),
(0xF9C7, 'M', u'劉'),
(0xF9C8, 'M', u'杻'),
(0xF9C9, 'M', u'柳'),
(0xF9CA, 'M', u'流'),
(0xF9CB, 'M', u'溜'),
(0xF9CC, 'M', u'琉'),
(0xF9CD, 'M', u'留'),
(0xF9CE, 'M', u'硫'),
(0xF9CF, 'M', u'紐'),
(0xF9D0, 'M', u'類'),
(0xF9D1, 'M', u'六'),
(0xF9D2, 'M', u'戮'),
(0xF9D3, 'M', u'陸'),
(0xF9D4, 'M', u'倫'),
(0xF9D5, 'M', u'崙'),
(0xF9D6, 'M', u'淪'),
(0xF9D7, 'M', u'輪'),
(0xF9D8, 'M', u'律'),
(0xF9D9, 'M', u'慄'),
(0xF9DA, 'M', u'栗'),
(0xF9DB, 'M', u'率'),
(0xF9DC, 'M', u'隆'),
(0xF9DD, 'M', u'利'),
]
def _seg_41():
return [
(0xF9DE, 'M', u'吏'),
(0xF9DF, 'M', u'履'),
(0xF9E0, 'M', u'易'),
(0xF9E1, 'M', u'李'),
(0xF9E2, 'M', u'梨'),
(0xF9E3, 'M', u'泥'),
(0xF9E4, 'M', u'理'),
(0xF9E5, 'M', u'痢'),
(0xF9E6, 'M', u'罹'),
(0xF9E7, 'M', u'裏'),
(0xF9E8, 'M', u'裡'),
(0xF9E9, 'M', u'里'),
(0xF9EA, 'M', u'離'),
(0xF9EB, 'M', u'匿'),
(0xF9EC, 'M', u'溺'),
(0xF9ED, 'M', u'吝'),
(0xF9EE, 'M', u'燐'),
(0xF9EF, 'M', u'璘'),
(0xF9F0, 'M', u'藺'),
(0xF9F1, 'M', u'隣'),
(0xF9F2, 'M', u'鱗'),
(0xF9F3, 'M', u'麟'),
(0xF9F4, 'M', u'林'),
(0xF9F5, 'M', u'淋'),
(0xF9F6, 'M', u'臨'),
(0xF9F7, 'M', u'立'),
(0xF9F8, 'M', u'笠'),
(0xF9F9, 'M', u'粒'),
(0xF9FA, 'M', u'狀'),
(0xF9FB, 'M', u'炙'),
(0xF9FC, 'M', u'識'),
(0xF9FD, 'M', u'什'),
(0xF9FE, 'M', u'茶'),
(0xF9FF, 'M', u'刺'),
(0xFA00, 'M', u'切'),
(0xFA01, 'M', u'度'),
(0xFA02, 'M', u'拓'),
(0xFA03, 'M', u'糖'),
(0xFA04, 'M', u'宅'),
(0xFA05, 'M', u'洞'),
(0xFA06, 'M', u'暴'),
(0xFA07, 'M', u'輻'),
(0xFA08, 'M', u'行'),
(0xFA09, 'M', u'降'),
(0xFA0A, 'M', u'見'),
(0xFA0B, 'M', u'廓'),
(0xFA0C, 'M', u'兀'),
(0xFA0D, 'M', u'嗀'),
(0xFA0E, 'V'),
(0xFA10, 'M', u'塚'),
(0xFA11, 'V'),
(0xFA12, 'M', u'晴'),
(0xFA13, 'V'),
(0xFA15, 'M', u'凞'),
(0xFA16, 'M', u'猪'),
(0xFA17, 'M', u'益'),
(0xFA18, 'M', u'礼'),
(0xFA19, 'M', u'神'),
(0xFA1A, 'M', u'祥'),
(0xFA1B, 'M', u'福'),
(0xFA1C, 'M', u'靖'),
(0xFA1D, 'M', u'精'),
(0xFA1E, 'M', u'羽'),
(0xFA1F, 'V'),
(0xFA20, 'M', u'蘒'),
(0xFA21, 'V'),
(0xFA22, 'M', u'諸'),
(0xFA23, 'V'),
(0xFA25, 'M', u'逸'),
(0xFA26, 'M', u'都'),
(0xFA27, 'V'),
(0xFA2A, 'M', u'飯'),
(0xFA2B, 'M', u'飼'),
(0xFA2C, 'M', u'館'),
(0xFA2D, 'M', u'鶴'),
(0xFA2E, 'M', u'郞'),
(0xFA2F, 'M', u'隷'),
(0xFA30, 'M', u'侮'),
(0xFA31, 'M', u'僧'),
(0xFA32, 'M', u'免'),
(0xFA33, 'M', u'勉'),
(0xFA34, 'M', u'勤'),
(0xFA35, 'M', u'卑'),
(0xFA36, 'M', u'喝'),
(0xFA37, 'M', u'嘆'),
(0xFA38, 'M', u'器'),
(0xFA39, 'M', u'塀'),
(0xFA3A, 'M', u'墨'),
(0xFA3B, 'M', u'層'),
(0xFA3C, 'M', u'屮'),
(0xFA3D, 'M', u'悔'),
(0xFA3E, 'M', u'慨'),
(0xFA3F, 'M', u'憎'),
(0xFA40, 'M', u'懲'),
(0xFA41, 'M', u'敏'),
(0xFA42, 'M', u'既'),
(0xFA43, 'M', u'暑'),
(0xFA44, 'M', u'梅'),
(0xFA45, 'M', u'海'),
(0xFA46, 'M', u'渚'),
]
def _seg_42():
return [
(0xFA47, 'M', u'漢'),
(0xFA48, 'M', u'煮'),
(0xFA49, 'M', u'爫'),
(0xFA4A, 'M', u'琢'),
(0xFA4B, 'M', u'碑'),
(0xFA4C, 'M', u'社'),
(0xFA4D, 'M', u'祉'),
(0xFA4E, 'M', u'祈'),
(0xFA4F, 'M', u'祐'),
(0xFA50, 'M', u'祖'),
(0xFA51, 'M', u'祝'),
(0xFA52, 'M', u'禍'),
(0xFA53, 'M', u'禎'),
(0xFA54, 'M', u'穀'),
(0xFA55, 'M', u'突'),
(0xFA56, 'M', u'節'),
(0xFA57, 'M', u'練'),
(0xFA58, 'M', u'縉'),
(0xFA59, 'M', u'繁'),
(0xFA5A, 'M', u'署'),
(0xFA5B, 'M', u'者'),
(0xFA5C, 'M', u'臭'),
(0xFA5D, 'M', u'艹'),
(0xFA5F, 'M', u'著'),
(0xFA60, 'M', u'褐'),
(0xFA61, 'M', u'視'),
(0xFA62, 'M', u'謁'),
(0xFA63, 'M', u'謹'),
(0xFA64, 'M', u'賓'),
(0xFA65, 'M', u'贈'),
(0xFA66, 'M', u'辶'),
(0xFA67, 'M', u'逸'),
(0xFA68, 'M', u'難'),
(0xFA69, 'M', u'響'),
(0xFA6A, 'M', u'頻'),
(0xFA6B, 'M', u'恵'),
(0xFA6C, 'M', u'𤋮'),
(0xFA6D, 'M', u'舘'),
(0xFA6E, 'X'),
(0xFA70, 'M', u'並'),
(0xFA71, 'M', u'况'),
(0xFA72, 'M', u'全'),
(0xFA73, 'M', u'侀'),
(0xFA74, 'M', u'充'),
(0xFA75, 'M', u'冀'),
(0xFA76, 'M', u'勇'),
(0xFA77, 'M', u'勺'),
(0xFA78, 'M', u'喝'),
(0xFA79, 'M', u'啕'),
(0xFA7A, 'M', u'喙'),
(0xFA7B, 'M', u'嗢'),
(0xFA7C, 'M', u'塚'),
(0xFA7D, 'M', u'墳'),
(0xFA7E, 'M', u'奄'),
(0xFA7F, 'M', u'奔'),
(0xFA80, 'M', u'婢'),
(0xFA81, 'M', u'嬨'),
(0xFA82, 'M', u'廒'),
(0xFA83, 'M', u'廙'),
(0xFA84, 'M', u'彩'),
(0xFA85, 'M', u'徭'),
(0xFA86, 'M', u'惘'),
(0xFA87, 'M', u'慎'),
(0xFA88, 'M', u'愈'),
(0xFA89, 'M', u'憎'),
(0xFA8A, 'M', u'慠'),
(0xFA8B, 'M', u'懲'),
(0xFA8C, 'M', u'戴'),
(0xFA8D, 'M', u'揄'),
(0xFA8E, 'M', u'搜'),
(0xFA8F, 'M', u'摒'),
(0xFA90, 'M', u'敖'),
(0xFA91, 'M', u'晴'),
(0xFA92, 'M', u'朗'),
(0xFA93, 'M', u'望'),
(0xFA94, 'M', u'杖'),
(0xFA95, 'M', u'歹'),
(0xFA96, 'M', u'殺'),
(0xFA97, 'M', u'流'),
(0xFA98, 'M', u'滛'),
(0xFA99, 'M', u'滋'),
(0xFA9A, 'M', u'漢'),
(0xFA9B, 'M', u'瀞'),
(0xFA9C, 'M', u'煮'),
(0xFA9D, 'M', u'瞧'),
(0xFA9E, 'M', u'爵'),
(0xFA9F, 'M', u'犯'),
(0xFAA0, 'M', u'猪'),
(0xFAA1, 'M', u'瑱'),
(0xFAA2, 'M', u'甆'),
(0xFAA3, 'M', u'画'),
(0xFAA4, 'M', u'瘝'),
(0xFAA5, 'M', u'瘟'),
(0xFAA6, 'M', u'益'),
(0xFAA7, 'M', u'盛'),
(0xFAA8, 'M', u'直'),
(0xFAA9, 'M', u'睊'),
(0xFAAA, 'M', u'着'),
(0xFAAB, 'M', u'磌'),
(0xFAAC, 'M', u'窱'),
]
def _seg_43():
return [
(0xFAAD, 'M', u'節'),
(0xFAAE, 'M', u'类'),
(0xFAAF, 'M', u'絛'),
(0xFAB0, 'M', u'練'),
(0xFAB1, 'M', u'缾'),
(0xFAB2, 'M', u'者'),
(0xFAB3, 'M', u'荒'),
(0xFAB4, 'M', u'華'),
(0xFAB5, 'M', u'蝹'),
(0xFAB6, 'M', u'襁'),
(0xFAB7, 'M', u'覆'),
(0xFAB8, 'M', u'視'),
(0xFAB9, 'M', u'調'),
(0xFABA, 'M', u'諸'),
(0xFABB, 'M', u'請'),
(0xFABC, 'M', u'謁'),
(0xFABD, 'M', u'諾'),
(0xFABE, 'M', u'諭'),
(0xFABF, 'M', u'謹'),
(0xFAC0, 'M', u'變'),
(0xFAC1, 'M', u'贈'),
(0xFAC2, 'M', u'輸'),
(0xFAC3, 'M', u'遲'),
(0xFAC4, 'M', u'醙'),
(0xFAC5, 'M', u'鉶'),
(0xFAC6, 'M', u'陼'),
(0xFAC7, 'M', u'難'),
(0xFAC8, 'M', u'靖'),
(0xFAC9, 'M', u'韛'),
(0xFACA, 'M', u'響'),
(0xFACB, 'M', u'頋'),
(0xFACC, 'M', u'頻'),
(0xFACD, 'M', u'鬒'),
(0xFACE, 'M', u'龜'),
(0xFACF, 'M', u'𢡊'),
(0xFAD0, 'M', u'𢡄'),
(0xFAD1, 'M', u'𣏕'),
(0xFAD2, 'M', u'㮝'),
(0xFAD3, 'M', u'䀘'),
(0xFAD4, 'M', u'䀹'),
(0xFAD5, 'M', u'𥉉'),
(0xFAD6, 'M', u'𥳐'),
(0xFAD7, 'M', u'𧻓'),
(0xFAD8, 'M', u'齃'),
(0xFAD9, 'M', u'龎'),
(0xFADA, 'X'),
(0xFB00, 'M', u'ff'),
(0xFB01, 'M', u'fi'),
(0xFB02, 'M', u'fl'),
(0xFB03, 'M', u'ffi'),
(0xFB04, 'M', u'ffl'),
(0xFB05, 'M', u'st'),
(0xFB07, 'X'),
(0xFB13, 'M', u'մն'),
(0xFB14, 'M', u'մե'),
(0xFB15, 'M', u'մի'),
(0xFB16, 'M', u'վն'),
(0xFB17, 'M', u'մխ'),
(0xFB18, 'X'),
(0xFB1D, 'M', u'יִ'),
(0xFB1E, 'V'),
(0xFB1F, 'M', u'ײַ'),
(0xFB20, 'M', u'ע'),
(0xFB21, 'M', u'א'),
(0xFB22, 'M', u'ד'),
(0xFB23, 'M', u'ה'),
(0xFB24, 'M', u'כ'),
(0xFB25, 'M', u'ל'),
(0xFB26, 'M', u'ם'),
(0xFB27, 'M', u'ר'),
(0xFB28, 'M', u'ת'),
(0xFB29, '3', u'+'),
(0xFB2A, 'M', u'שׁ'),
(0xFB2B, 'M', u'שׂ'),
(0xFB2C, 'M', u'שּׁ'),
(0xFB2D, 'M', u'שּׂ'),
(0xFB2E, 'M', u'אַ'),
(0xFB2F, 'M', u'אָ'),
(0xFB30, 'M', u'אּ'),
(0xFB31, 'M', u'בּ'),
(0xFB32, 'M', u'גּ'),
(0xFB33, 'M', u'דּ'),
(0xFB34, 'M', u'הּ'),
(0xFB35, 'M', u'וּ'),
(0xFB36, 'M', u'זּ'),
(0xFB37, 'X'),
(0xFB38, 'M', u'טּ'),
(0xFB39, 'M', u'יּ'),
(0xFB3A, 'M', u'ךּ'),
(0xFB3B, 'M', u'כּ'),
(0xFB3C, 'M', u'לּ'),
(0xFB3D, 'X'),
(0xFB3E, 'M', u'מּ'),
(0xFB3F, 'X'),
(0xFB40, 'M', u'נּ'),
(0xFB41, 'M', u'סּ'),
(0xFB42, 'X'),
(0xFB43, 'M', u'ףּ'),
(0xFB44, 'M', u'פּ'),
(0xFB45, 'X'),
]
def _seg_44():
return [
(0xFB46, 'M', u'צּ'),
(0xFB47, 'M', u'קּ'),
(0xFB48, 'M', u'רּ'),
(0xFB49, 'M', u'שּ'),
(0xFB4A, 'M', u'תּ'),
(0xFB4B, 'M', u'וֹ'),
(0xFB4C, 'M', u'בֿ'),
(0xFB4D, 'M', u'כֿ'),
(0xFB4E, 'M', u'פֿ'),
(0xFB4F, 'M', u'אל'),
(0xFB50, 'M', u'ٱ'),
(0xFB52, 'M', u'ٻ'),
(0xFB56, 'M', u'پ'),
(0xFB5A, 'M', u'ڀ'),
(0xFB5E, 'M', u'ٺ'),
(0xFB62, 'M', u'ٿ'),
(0xFB66, 'M', u'ٹ'),
(0xFB6A, 'M', u'ڤ'),
(0xFB6E, 'M', u'ڦ'),
(0xFB72, 'M', u'ڄ'),
(0xFB76, 'M', u'ڃ'),
(0xFB7A, 'M', u'چ'),
(0xFB7E, 'M', u'ڇ'),
(0xFB82, 'M', u'ڍ'),
(0xFB84, 'M', u'ڌ'),
(0xFB86, 'M', u'ڎ'),
(0xFB88, 'M', u'ڈ'),
(0xFB8A, 'M', u'ژ'),
(0xFB8C, 'M', u'ڑ'),
(0xFB8E, 'M', u'ک'),
(0xFB92, 'M', u'گ'),
(0xFB96, 'M', u'ڳ'),
(0xFB9A, 'M', u'ڱ'),
(0xFB9E, 'M', u'ں'),
(0xFBA0, 'M', u'ڻ'),
(0xFBA4, 'M', u'ۀ'),
(0xFBA6, 'M', u'ہ'),
(0xFBAA, 'M', u'ھ'),
(0xFBAE, 'M', u'ے'),
(0xFBB0, 'M', u'ۓ'),
(0xFBB2, 'V'),
(0xFBC2, 'X'),
(0xFBD3, 'M', u'ڭ'),
(0xFBD7, 'M', u'ۇ'),
(0xFBD9, 'M', u'ۆ'),
(0xFBDB, 'M', u'ۈ'),
(0xFBDD, 'M', u'ۇٴ'),
(0xFBDE, 'M', u'ۋ'),
(0xFBE0, 'M', u'ۅ'),
(0xFBE2, 'M', u'ۉ'),
(0xFBE4, 'M', u'ې'),
(0xFBE8, 'M', u'ى'),
(0xFBEA, 'M', u'ئا'),
(0xFBEC, 'M', u'ئە'),
(0xFBEE, 'M', u'ئو'),
(0xFBF0, 'M', u'ئۇ'),
(0xFBF2, 'M', u'ئۆ'),
(0xFBF4, 'M', u'ئۈ'),
(0xFBF6, 'M', u'ئې'),
(0xFBF9, 'M', u'ئى'),
(0xFBFC, 'M', u'ی'),
(0xFC00, 'M', u'ئج'),
(0xFC01, 'M', u'ئح'),
(0xFC02, 'M', u'ئم'),
(0xFC03, 'M', u'ئى'),
(0xFC04, 'M', u'ئي'),
(0xFC05, 'M', u'بج'),
(0xFC06, 'M', u'بح'),
(0xFC07, 'M', u'بخ'),
(0xFC08, 'M', u'بم'),
(0xFC09, 'M', u'بى'),
(0xFC0A, 'M', u'بي'),
(0xFC0B, 'M', u'تج'),
(0xFC0C, 'M', u'تح'),
(0xFC0D, 'M', u'تخ'),
(0xFC0E, 'M', u'تم'),
(0xFC0F, 'M', u'تى'),
(0xFC10, 'M', u'تي'),
(0xFC11, 'M', u'ثج'),
(0xFC12, 'M', u'ثم'),
(0xFC13, 'M', u'ثى'),
(0xFC14, 'M', u'ثي'),
(0xFC15, 'M', u'جح'),
(0xFC16, 'M', u'جم'),
(0xFC17, 'M', u'حج'),
(0xFC18, 'M', u'حم'),
(0xFC19, 'M', u'خج'),
(0xFC1A, 'M', u'خح'),
(0xFC1B, 'M', u'خم'),
(0xFC1C, 'M', u'سج'),
(0xFC1D, 'M', u'سح'),
(0xFC1E, 'M', u'سخ'),
(0xFC1F, 'M', u'سم'),
(0xFC20, 'M', u'صح'),
(0xFC21, 'M', u'صم'),
(0xFC22, 'M', u'ضج'),
(0xFC23, 'M', u'ضح'),
(0xFC24, 'M', u'ضخ'),
(0xFC25, 'M', u'ضم'),
(0xFC26, 'M', u'طح'),
]
def _seg_45():
return [
(0xFC27, 'M', u'طم'),
(0xFC28, 'M', u'ظم'),
(0xFC29, 'M', u'عج'),
(0xFC2A, 'M', u'عم'),
(0xFC2B, 'M', u'غج'),
(0xFC2C, 'M', u'غم'),
(0xFC2D, 'M', u'فج'),
(0xFC2E, 'M', u'فح'),
(0xFC2F, 'M', u'فخ'),
(0xFC30, 'M', u'فم'),
(0xFC31, 'M', u'فى'),
(0xFC32, 'M', u'في'),
(0xFC33, 'M', u'قح'),
(0xFC34, 'M', u'قم'),
(0xFC35, 'M', u'قى'),
(0xFC36, 'M', u'قي'),
(0xFC37, 'M', u'كا'),
(0xFC38, 'M', u'كج'),
(0xFC39, 'M', u'كح'),
(0xFC3A, 'M', u'كخ'),
(0xFC3B, 'M', u'كل'),
(0xFC3C, 'M', u'كم'),
(0xFC3D, 'M', u'كى'),
(0xFC3E, 'M', u'كي'),
(0xFC3F, 'M', u'لج'),
(0xFC40, 'M', u'لح'),
(0xFC41, 'M', u'لخ'),
(0xFC42, 'M', u'لم'),
(0xFC43, 'M', u'لى'),
(0xFC44, 'M', u'لي'),
(0xFC45, 'M', u'مج'),
(0xFC46, 'M', u'مح'),
(0xFC47, 'M', u'مخ'),
(0xFC48, 'M', u'مم'),
(0xFC49, 'M', u'مى'),
(0xFC4A, 'M', u'مي'),
(0xFC4B, 'M', u'نج'),
(0xFC4C, 'M', u'نح'),
(0xFC4D, 'M', u'نخ'),
(0xFC4E, 'M', u'نم'),
(0xFC4F, 'M', u'نى'),
(0xFC50, 'M', u'ني'),
(0xFC51, 'M', u'هج'),
(0xFC52, 'M', u'هم'),
(0xFC53, 'M', u'هى'),
(0xFC54, 'M', u'هي'),
(0xFC55, 'M', u'يج'),
(0xFC56, 'M', u'يح'),
(0xFC57, 'M', u'يخ'),
(0xFC58, 'M', u'يم'),
(0xFC59, 'M', u'يى'),
(0xFC5A, 'M', u'يي'),
(0xFC5B, 'M', u'ذٰ'),
(0xFC5C, 'M', u'رٰ'),
(0xFC5D, 'M', u'ىٰ'),
(0xFC5E, '3', u' ٌّ'),
(0xFC5F, '3', u' ٍّ'),
(0xFC60, '3', u' َّ'),
(0xFC61, '3', u' ُّ'),
(0xFC62, '3', u' ِّ'),
(0xFC63, '3', u' ّٰ'),
(0xFC64, 'M', u'ئر'),
(0xFC65, 'M', u'ئز'),
(0xFC66, 'M', u'ئم'),
(0xFC67, 'M', u'ئن'),
(0xFC68, 'M', u'ئى'),
(0xFC69, 'M', u'ئي'),
(0xFC6A, 'M', u'بر'),
(0xFC6B, 'M', u'بز'),
(0xFC6C, 'M', u'بم'),
(0xFC6D, 'M', u'بن'),
(0xFC6E, 'M', u'بى'),
(0xFC6F, 'M', u'بي'),
(0xFC70, 'M', u'تر'),
(0xFC71, 'M', u'تز'),
(0xFC72, 'M', u'تم'),
(0xFC73, 'M', u'تن'),
(0xFC74, 'M', u'تى'),
(0xFC75, 'M', u'تي'),
(0xFC76, 'M', u'ثر'),
(0xFC77, 'M', u'ثز'),
(0xFC78, 'M', u'ثم'),
(0xFC79, 'M', u'ثن'),
(0xFC7A, 'M', u'ثى'),
(0xFC7B, 'M', u'ثي'),
(0xFC7C, 'M', u'فى'),
(0xFC7D, 'M', u'في'),
(0xFC7E, 'M', u'قى'),
(0xFC7F, 'M', u'قي'),
(0xFC80, 'M', u'كا'),
(0xFC81, 'M', u'كل'),
(0xFC82, 'M', u'كم'),
(0xFC83, 'M', u'كى'),
(0xFC84, 'M', u'كي'),
(0xFC85, 'M', u'لم'),
(0xFC86, 'M', u'لى'),
(0xFC87, 'M', u'لي'),
(0xFC88, 'M', u'ما'),
(0xFC89, 'M', u'مم'),
(0xFC8A, 'M', u'نر'),
]
def _seg_46():
return [
(0xFC8B, 'M', u'نز'),
(0xFC8C, 'M', u'نم'),
(0xFC8D, 'M', u'نن'),
(0xFC8E, 'M', u'نى'),
(0xFC8F, 'M', u'ني'),
(0xFC90, 'M', u'ىٰ'),
(0xFC91, 'M', u'ير'),
(0xFC92, 'M', u'يز'),
(0xFC93, 'M', u'يم'),
(0xFC94, 'M', u'ين'),
(0xFC95, 'M', u'يى'),
(0xFC96, 'M', u'يي'),
(0xFC97, 'M', u'ئج'),
(0xFC98, 'M', u'ئح'),
(0xFC99, 'M', u'ئخ'),
(0xFC9A, 'M', u'ئم'),
(0xFC9B, 'M', u'ئه'),
(0xFC9C, 'M', u'بج'),
(0xFC9D, 'M', u'بح'),
(0xFC9E, 'M', u'بخ'),
(0xFC9F, 'M', u'بم'),
(0xFCA0, 'M', u'به'),
(0xFCA1, 'M', u'تج'),
(0xFCA2, 'M', u'تح'),
(0xFCA3, 'M', u'تخ'),
(0xFCA4, 'M', u'تم'),
(0xFCA5, 'M', u'ته'),
(0xFCA6, 'M', u'ثم'),
(0xFCA7, 'M', u'جح'),
(0xFCA8, 'M', u'جم'),
(0xFCA9, 'M', u'حج'),
(0xFCAA, 'M', u'حم'),
(0xFCAB, 'M', u'خج'),
(0xFCAC, 'M', u'خم'),
(0xFCAD, 'M', u'سج'),
(0xFCAE, 'M', u'سح'),
(0xFCAF, 'M', u'سخ'),
(0xFCB0, 'M', u'سم'),
(0xFCB1, 'M', u'صح'),
(0xFCB2, 'M', u'صخ'),
(0xFCB3, 'M', u'صم'),
(0xFCB4, 'M', u'ضج'),
(0xFCB5, 'M', u'ضح'),
(0xFCB6, 'M', u'ضخ'),
(0xFCB7, 'M', u'ضم'),
(0xFCB8, 'M', u'طح'),
(0xFCB9, 'M', u'ظم'),
(0xFCBA, 'M', u'عج'),
(0xFCBB, 'M', u'عم'),
(0xFCBC, 'M', u'غج'),
(0xFCBD, 'M', u'غم'),
(0xFCBE, 'M', u'فج'),
(0xFCBF, 'M', u'فح'),
(0xFCC0, 'M', u'فخ'),
(0xFCC1, 'M', u'فم'),
(0xFCC2, 'M', u'قح'),
(0xFCC3, 'M', u'قم'),
(0xFCC4, 'M', u'كج'),
(0xFCC5, 'M', u'كح'),
(0xFCC6, 'M', u'كخ'),
(0xFCC7, 'M', u'كل'),
(0xFCC8, 'M', u'كم'),
(0xFCC9, 'M', u'لج'),
(0xFCCA, 'M', u'لح'),
(0xFCCB, 'M', u'لخ'),
(0xFCCC, 'M', u'لم'),
(0xFCCD, 'M', u'له'),
(0xFCCE, 'M', u'مج'),
(0xFCCF, 'M', u'مح'),
(0xFCD0, 'M', u'مخ'),
(0xFCD1, 'M', u'مم'),
(0xFCD2, 'M', u'نج'),
(0xFCD3, 'M', u'نح'),
(0xFCD4, 'M', u'نخ'),
(0xFCD5, 'M', u'نم'),
(0xFCD6, 'M', u'نه'),
(0xFCD7, 'M', u'هج'),
(0xFCD8, 'M', u'هم'),
(0xFCD9, 'M', u'هٰ'),
(0xFCDA, 'M', u'يج'),
(0xFCDB, 'M', u'يح'),
(0xFCDC, 'M', u'يخ'),
(0xFCDD, 'M', u'يم'),
(0xFCDE, 'M', u'يه'),
(0xFCDF, 'M', u'ئم'),
(0xFCE0, 'M', u'ئه'),
(0xFCE1, 'M', u'بم'),
(0xFCE2, 'M', u'به'),
(0xFCE3, 'M', u'تم'),
(0xFCE4, 'M', u'ته'),
(0xFCE5, 'M', u'ثم'),
(0xFCE6, 'M', u'ثه'),
(0xFCE7, 'M', u'سم'),
(0xFCE8, 'M', u'سه'),
(0xFCE9, 'M', u'شم'),
(0xFCEA, 'M', u'شه'),
(0xFCEB, 'M', u'كل'),
(0xFCEC, 'M', u'كم'),
(0xFCED, 'M', u'لم'),
(0xFCEE, 'M', u'نم'),
]
def _seg_47():
return [
(0xFCEF, 'M', u'نه'),
(0xFCF0, 'M', u'يم'),
(0xFCF1, 'M', u'يه'),
(0xFCF2, 'M', u'ـَّ'),
(0xFCF3, 'M', u'ـُّ'),
(0xFCF4, 'M', u'ـِّ'),
(0xFCF5, 'M', u'طى'),
(0xFCF6, 'M', u'طي'),
(0xFCF7, 'M', u'عى'),
(0xFCF8, 'M', u'عي'),
(0xFCF9, 'M', u'غى'),
(0xFCFA, 'M', u'غي'),
(0xFCFB, 'M', u'سى'),
(0xFCFC, 'M', u'سي'),
(0xFCFD, 'M', u'شى'),
(0xFCFE, 'M', u'شي'),
(0xFCFF, 'M', u'حى'),
(0xFD00, 'M', u'حي'),
(0xFD01, 'M', u'جى'),
(0xFD02, 'M', u'جي'),
(0xFD03, 'M', u'خى'),
(0xFD04, 'M', u'خي'),
(0xFD05, 'M', u'صى'),
(0xFD06, 'M', u'صي'),
(0xFD07, 'M', u'ضى'),
(0xFD08, 'M', u'ضي'),
(0xFD09, 'M', u'شج'),
(0xFD0A, 'M', u'شح'),
(0xFD0B, 'M', u'شخ'),
(0xFD0C, 'M', u'شم'),
(0xFD0D, 'M', u'شر'),
(0xFD0E, 'M', u'سر'),
(0xFD0F, 'M', u'صر'),
(0xFD10, 'M', u'ضر'),
(0xFD11, 'M', u'طى'),
(0xFD12, 'M', u'طي'),
(0xFD13, 'M', u'عى'),
(0xFD14, 'M', u'عي'),
(0xFD15, 'M', u'غى'),
(0xFD16, 'M', u'غي'),
(0xFD17, 'M', u'سى'),
(0xFD18, 'M', u'سي'),
(0xFD19, 'M', u'شى'),
(0xFD1A, 'M', u'شي'),
(0xFD1B, 'M', u'حى'),
(0xFD1C, 'M', u'حي'),
(0xFD1D, 'M', u'جى'),
(0xFD1E, 'M', u'جي'),
(0xFD1F, 'M', u'خى'),
(0xFD20, 'M', u'خي'),
(0xFD21, 'M', u'صى'),
(0xFD22, 'M', u'صي'),
(0xFD23, 'M', u'ضى'),
(0xFD24, 'M', u'ضي'),
(0xFD25, 'M', u'شج'),
(0xFD26, 'M', u'شح'),
(0xFD27, 'M', u'شخ'),
(0xFD28, 'M', u'شم'),
(0xFD29, 'M', u'شر'),
(0xFD2A, 'M', u'سر'),
(0xFD2B, 'M', u'صر'),
(0xFD2C, 'M', u'ضر'),
(0xFD2D, 'M', u'شج'),
(0xFD2E, 'M', u'شح'),
(0xFD2F, 'M', u'شخ'),
(0xFD30, 'M', u'شم'),
(0xFD31, 'M', u'سه'),
(0xFD32, 'M', u'شه'),
(0xFD33, 'M', u'طم'),
(0xFD34, 'M', u'سج'),
(0xFD35, 'M', u'سح'),
(0xFD36, 'M', u'سخ'),
(0xFD37, 'M', u'شج'),
(0xFD38, 'M', u'شح'),
(0xFD39, 'M', u'شخ'),
(0xFD3A, 'M', u'طم'),
(0xFD3B, 'M', u'ظم'),
(0xFD3C, 'M', u'اً'),
(0xFD3E, 'V'),
(0xFD40, 'X'),
(0xFD50, 'M', u'تجم'),
(0xFD51, 'M', u'تحج'),
(0xFD53, 'M', u'تحم'),
(0xFD54, 'M', u'تخم'),
(0xFD55, 'M', u'تمج'),
(0xFD56, 'M', u'تمح'),
(0xFD57, 'M', u'تمخ'),
(0xFD58, 'M', u'جمح'),
(0xFD5A, 'M', u'حمي'),
(0xFD5B, 'M', u'حمى'),
(0xFD5C, 'M', u'سحج'),
(0xFD5D, 'M', u'سجح'),
(0xFD5E, 'M', u'سجى'),
(0xFD5F, 'M', u'سمح'),
(0xFD61, 'M', u'سمج'),
(0xFD62, 'M', u'سمم'),
(0xFD64, 'M', u'صحح'),
(0xFD66, 'M', u'صمم'),
(0xFD67, 'M', u'شحم'),
(0xFD69, 'M', u'شجي'),
]
def _seg_48():
return [
(0xFD6A, 'M', u'شمخ'),
(0xFD6C, 'M', u'شمم'),
(0xFD6E, 'M', u'ضحى'),
(0xFD6F, 'M', u'ضخم'),
(0xFD71, 'M', u'طمح'),
(0xFD73, 'M', u'طمم'),
(0xFD74, 'M', u'طمي'),
(0xFD75, 'M', u'عجم'),
(0xFD76, 'M', u'عمم'),
(0xFD78, 'M', u'عمى'),
(0xFD79, 'M', u'غمم'),
(0xFD7A, 'M', u'غمي'),
(0xFD7B, 'M', u'غمى'),
(0xFD7C, 'M', u'فخم'),
(0xFD7E, 'M', u'قمح'),
(0xFD7F, 'M', u'قمم'),
(0xFD80, 'M', u'لحم'),
(0xFD81, 'M', u'لحي'),
(0xFD82, 'M', u'لحى'),
(0xFD83, 'M', u'لجج'),
(0xFD85, 'M', u'لخم'),
(0xFD87, 'M', u'لمح'),
(0xFD89, 'M', u'محج'),
(0xFD8A, 'M', u'محم'),
(0xFD8B, 'M', u'محي'),
(0xFD8C, 'M', u'مجح'),
(0xFD8D, 'M', u'مجم'),
(0xFD8E, 'M', u'مخج'),
(0xFD8F, 'M', u'مخم'),
(0xFD90, 'X'),
(0xFD92, 'M', u'مجخ'),
(0xFD93, 'M', u'همج'),
(0xFD94, 'M', u'همم'),
(0xFD95, 'M', u'نحم'),
(0xFD96, 'M', u'نحى'),
(0xFD97, 'M', u'نجم'),
(0xFD99, 'M', u'نجى'),
(0xFD9A, 'M', u'نمي'),
(0xFD9B, 'M', u'نمى'),
(0xFD9C, 'M', u'يمم'),
(0xFD9E, 'M', u'بخي'),
(0xFD9F, 'M', u'تجي'),
(0xFDA0, 'M', u'تجى'),
(0xFDA1, 'M', u'تخي'),
(0xFDA2, 'M', u'تخى'),
(0xFDA3, 'M', u'تمي'),
(0xFDA4, 'M', u'تمى'),
(0xFDA5, 'M', u'جمي'),
(0xFDA6, 'M', u'جحى'),
(0xFDA7, 'M', u'جمى'),
(0xFDA8, 'M', u'سخى'),
(0xFDA9, 'M', u'صحي'),
(0xFDAA, 'M', u'شحي'),
(0xFDAB, 'M', u'ضحي'),
(0xFDAC, 'M', u'لجي'),
(0xFDAD, 'M', u'لمي'),
(0xFDAE, 'M', u'يحي'),
(0xFDAF, 'M', u'يجي'),
(0xFDB0, 'M', u'يمي'),
(0xFDB1, 'M', u'ممي'),
(0xFDB2, 'M', u'قمي'),
(0xFDB3, 'M', u'نحي'),
(0xFDB4, 'M', u'قمح'),
(0xFDB5, 'M', u'لحم'),
(0xFDB6, 'M', u'عمي'),
(0xFDB7, 'M', u'كمي'),
(0xFDB8, 'M', u'نجح'),
(0xFDB9, 'M', u'مخي'),
(0xFDBA, 'M', u'لجم'),
(0xFDBB, 'M', u'كمم'),
(0xFDBC, 'M', u'لجم'),
(0xFDBD, 'M', u'نجح'),
(0xFDBE, 'M', u'جحي'),
(0xFDBF, 'M', u'حجي'),
(0xFDC0, 'M', u'مجي'),
(0xFDC1, 'M', u'فمي'),
(0xFDC2, 'M', u'بحي'),
(0xFDC3, 'M', u'كمم'),
(0xFDC4, 'M', u'عجم'),
(0xFDC5, 'M', u'صمم'),
(0xFDC6, 'M', u'سخي'),
(0xFDC7, 'M', u'نجي'),
(0xFDC8, 'X'),
(0xFDF0, 'M', u'صلے'),
(0xFDF1, 'M', u'قلے'),
(0xFDF2, 'M', u'الله'),
(0xFDF3, 'M', u'اكبر'),
(0xFDF4, 'M', u'محمد'),
(0xFDF5, 'M', u'صلعم'),
(0xFDF6, 'M', u'رسول'),
(0xFDF7, 'M', u'عليه'),
(0xFDF8, 'M', u'وسلم'),
(0xFDF9, 'M', u'صلى'),
(0xFDFA, '3', u'صلى الله عليه وسلم'),
(0xFDFB, '3', u'جل جلاله'),
(0xFDFC, 'M', u'ریال'),
(0xFDFD, 'V'),
(0xFDFE, 'X'),
(0xFE00, 'I'),
(0xFE10, '3', u','),
]
def _seg_49():
return [
(0xFE11, 'M', u'、'),
(0xFE12, 'X'),
(0xFE13, '3', u':'),
(0xFE14, '3', u';'),
(0xFE15, '3', u'!'),
(0xFE16, '3', u'?'),
(0xFE17, 'M', u'〖'),
(0xFE18, 'M', u'〗'),
(0xFE19, 'X'),
(0xFE20, 'V'),
(0xFE30, 'X'),
(0xFE31, 'M', u'—'),
(0xFE32, 'M', u'–'),
(0xFE33, '3', u'_'),
(0xFE35, '3', u'('),
(0xFE36, '3', u')'),
(0xFE37, '3', u'{'),
(0xFE38, '3', u'}'),
(0xFE39, 'M', u'〔'),
(0xFE3A, 'M', u'〕'),
(0xFE3B, 'M', u'【'),
(0xFE3C, 'M', u'】'),
(0xFE3D, 'M', u'《'),
(0xFE3E, 'M', u'》'),
(0xFE3F, 'M', u'〈'),
(0xFE40, 'M', u'〉'),
(0xFE41, 'M', u'「'),
(0xFE42, 'M', u'」'),
(0xFE43, 'M', u'『'),
(0xFE44, 'M', u'』'),
(0xFE45, 'V'),
(0xFE47, '3', u'['),
(0xFE48, '3', u']'),
(0xFE49, '3', u' ̅'),
(0xFE4D, '3', u'_'),
(0xFE50, '3', u','),
(0xFE51, 'M', u'、'),
(0xFE52, 'X'),
(0xFE54, '3', u';'),
(0xFE55, '3', u':'),
(0xFE56, '3', u'?'),
(0xFE57, '3', u'!'),
(0xFE58, 'M', u'—'),
(0xFE59, '3', u'('),
(0xFE5A, '3', u')'),
(0xFE5B, '3', u'{'),
(0xFE5C, '3', u'}'),
(0xFE5D, 'M', u'〔'),
(0xFE5E, 'M', u'〕'),
(0xFE5F, '3', u'#'),
(0xFE60, '3', u'&'),
(0xFE61, '3', u'*'),
(0xFE62, '3', u'+'),
(0xFE63, 'M', u'-'),
(0xFE64, '3', u'<'),
(0xFE65, '3', u'>'),
(0xFE66, '3', u'='),
(0xFE67, 'X'),
(0xFE68, '3', u'\\'),
(0xFE69, '3', u'$'),
(0xFE6A, '3', u'%'),
(0xFE6B, '3', u'@'),
(0xFE6C, 'X'),
(0xFE70, '3', u' ً'),
(0xFE71, 'M', u'ـً'),
(0xFE72, '3', u' ٌ'),
(0xFE73, 'V'),
(0xFE74, '3', u' ٍ'),
(0xFE75, 'X'),
(0xFE76, '3', u' َ'),
(0xFE77, 'M', u'ـَ'),
(0xFE78, '3', u' ُ'),
(0xFE79, 'M', u'ـُ'),
(0xFE7A, '3', u' ِ'),
(0xFE7B, 'M', u'ـِ'),
(0xFE7C, '3', u' ّ'),
(0xFE7D, 'M', u'ـّ'),
(0xFE7E, '3', u' ْ'),
(0xFE7F, 'M', u'ـْ'),
(0xFE80, 'M', u'ء'),
(0xFE81, 'M', u'آ'),
(0xFE83, 'M', u'أ'),
(0xFE85, 'M', u'ؤ'),
(0xFE87, 'M', u'إ'),
(0xFE89, 'M', u'ئ'),
(0xFE8D, 'M', u'ا'),
(0xFE8F, 'M', u'ب'),
(0xFE93, 'M', u'ة'),
(0xFE95, 'M', u'ت'),
(0xFE99, 'M', u'ث'),
(0xFE9D, 'M', u'ج'),
(0xFEA1, 'M', u'ح'),
(0xFEA5, 'M', u'خ'),
(0xFEA9, 'M', u'د'),
(0xFEAB, 'M', u'ذ'),
(0xFEAD, 'M', u'ر'),
(0xFEAF, 'M', u'ز'),
(0xFEB1, 'M', u'س'),
(0xFEB5, 'M', u'ش'),
(0xFEB9, 'M', u'ص'),
]
def _seg_50():
return [
(0xFEBD, 'M', u'ض'),
(0xFEC1, 'M', u'ط'),
(0xFEC5, 'M', u'ظ'),
(0xFEC9, 'M', u'ع'),
(0xFECD, 'M', u'غ'),
(0xFED1, 'M', u'ف'),
(0xFED5, 'M', u'ق'),
(0xFED9, 'M', u'ك'),
(0xFEDD, 'M', u'ل'),
(0xFEE1, 'M', u'م'),
(0xFEE5, 'M', u'ن'),
(0xFEE9, 'M', u'ه'),
(0xFEED, 'M', u'و'),
(0xFEEF, 'M', u'ى'),
(0xFEF1, 'M', u'ي'),
(0xFEF5, 'M', u'لآ'),
(0xFEF7, 'M', u'لأ'),
(0xFEF9, 'M', u'لإ'),
(0xFEFB, 'M', u'لا'),
(0xFEFD, 'X'),
(0xFEFF, 'I'),
(0xFF00, 'X'),
(0xFF01, '3', u'!'),
(0xFF02, '3', u'"'),
(0xFF03, '3', u'#'),
(0xFF04, '3', u'$'),
(0xFF05, '3', u'%'),
(0xFF06, '3', u'&'),
(0xFF07, '3', u'\''),
(0xFF08, '3', u'('),
(0xFF09, '3', u')'),
(0xFF0A, '3', u'*'),
(0xFF0B, '3', u'+'),
(0xFF0C, '3', u','),
(0xFF0D, 'M', u'-'),
(0xFF0E, 'M', u'.'),
(0xFF0F, '3', u'/'),
(0xFF10, 'M', u'0'),
(0xFF11, 'M', u'1'),
(0xFF12, 'M', u'2'),
(0xFF13, 'M', u'3'),
(0xFF14, 'M', u'4'),
(0xFF15, 'M', u'5'),
(0xFF16, 'M', u'6'),
(0xFF17, 'M', u'7'),
(0xFF18, 'M', u'8'),
(0xFF19, 'M', u'9'),
(0xFF1A, '3', u':'),
(0xFF1B, '3', u';'),
(0xFF1C, '3', u'<'),
(0xFF1D, '3', u'='),
(0xFF1E, '3', u'>'),
(0xFF1F, '3', u'?'),
(0xFF20, '3', u'@'),
(0xFF21, 'M', u'a'),
(0xFF22, 'M', u'b'),
(0xFF23, 'M', u'c'),
(0xFF24, 'M', u'd'),
(0xFF25, 'M', u'e'),
(0xFF26, 'M', u'f'),
(0xFF27, 'M', u'g'),
(0xFF28, 'M', u'h'),
(0xFF29, 'M', u'i'),
(0xFF2A, 'M', u'j'),
(0xFF2B, 'M', u'k'),
(0xFF2C, 'M', u'l'),
(0xFF2D, 'M', u'm'),
(0xFF2E, 'M', u'n'),
(0xFF2F, 'M', u'o'),
(0xFF30, 'M', u'p'),
(0xFF31, 'M', u'q'),
(0xFF32, 'M', u'r'),
(0xFF33, 'M', u's'),
(0xFF34, 'M', u't'),
(0xFF35, 'M', u'u'),
(0xFF36, 'M', u'v'),
(0xFF37, 'M', u'w'),
(0xFF38, 'M', u'x'),
(0xFF39, 'M', u'y'),
(0xFF3A, 'M', u'z'),
(0xFF3B, '3', u'['),
(0xFF3C, '3', u'\\'),
(0xFF3D, '3', u']'),
(0xFF3E, '3', u'^'),
(0xFF3F, '3', u'_'),
(0xFF40, '3', u'`'),
(0xFF41, 'M', u'a'),
(0xFF42, 'M', u'b'),
(0xFF43, 'M', u'c'),
(0xFF44, 'M', u'd'),
(0xFF45, 'M', u'e'),
(0xFF46, 'M', u'f'),
(0xFF47, 'M', u'g'),
(0xFF48, 'M', u'h'),
(0xFF49, 'M', u'i'),
(0xFF4A, 'M', u'j'),
(0xFF4B, 'M', u'k'),
(0xFF4C, 'M', u'l'),
(0xFF4D, 'M', u'm'),
(0xFF4E, 'M', u'n'),
]
def _seg_51():
return [
(0xFF4F, 'M', u'o'),
(0xFF50, 'M', u'p'),
(0xFF51, 'M', u'q'),
(0xFF52, 'M', u'r'),
(0xFF53, 'M', u's'),
(0xFF54, 'M', u't'),
(0xFF55, 'M', u'u'),
(0xFF56, 'M', u'v'),
(0xFF57, 'M', u'w'),
(0xFF58, 'M', u'x'),
(0xFF59, 'M', u'y'),
(0xFF5A, 'M', u'z'),
(0xFF5B, '3', u'{'),
(0xFF5C, '3', u'|'),
(0xFF5D, '3', u'}'),
(0xFF5E, '3', u'~'),
(0xFF5F, 'M', u'⦅'),
(0xFF60, 'M', u'⦆'),
(0xFF61, 'M', u'.'),
(0xFF62, 'M', u'「'),
(0xFF63, 'M', u'」'),
(0xFF64, 'M', u'、'),
(0xFF65, 'M', u'・'),
(0xFF66, 'M', u'ヲ'),
(0xFF67, 'M', u'ァ'),
(0xFF68, 'M', u'ィ'),
(0xFF69, 'M', u'ゥ'),
(0xFF6A, 'M', u'ェ'),
(0xFF6B, 'M', u'ォ'),
(0xFF6C, 'M', u'ャ'),
(0xFF6D, 'M', u'ュ'),
(0xFF6E, 'M', u'ョ'),
(0xFF6F, 'M', u'ッ'),
(0xFF70, 'M', u'ー'),
(0xFF71, 'M', u'ア'),
(0xFF72, 'M', u'イ'),
(0xFF73, 'M', u'ウ'),
(0xFF74, 'M', u'エ'),
(0xFF75, 'M', u'オ'),
(0xFF76, 'M', u'カ'),
(0xFF77, 'M', u'キ'),
(0xFF78, 'M', u'ク'),
(0xFF79, 'M', u'ケ'),
(0xFF7A, 'M', u'コ'),
(0xFF7B, 'M', u'サ'),
(0xFF7C, 'M', u'シ'),
(0xFF7D, 'M', u'ス'),
(0xFF7E, 'M', u'セ'),
(0xFF7F, 'M', u'ソ'),
(0xFF80, 'M', u'タ'),
(0xFF81, 'M', u'チ'),
(0xFF82, 'M', u'ツ'),
(0xFF83, 'M', u'テ'),
(0xFF84, 'M', u'ト'),
(0xFF85, 'M', u'ナ'),
(0xFF86, 'M', u'ニ'),
(0xFF87, 'M', u'ヌ'),
(0xFF88, 'M', u'ネ'),
(0xFF89, 'M', u'ノ'),
(0xFF8A, 'M', u'ハ'),
(0xFF8B, 'M', u'ヒ'),
(0xFF8C, 'M', u'フ'),
(0xFF8D, 'M', u'ヘ'),
(0xFF8E, 'M', u'ホ'),
(0xFF8F, 'M', u'マ'),
(0xFF90, 'M', u'ミ'),
(0xFF91, 'M', u'ム'),
(0xFF92, 'M', u'メ'),
(0xFF93, 'M', u'モ'),
(0xFF94, 'M', u'ヤ'),
(0xFF95, 'M', u'ユ'),
(0xFF96, 'M', u'ヨ'),
(0xFF97, 'M', u'ラ'),
(0xFF98, 'M', u'リ'),
(0xFF99, 'M', u'ル'),
(0xFF9A, 'M', u'レ'),
(0xFF9B, 'M', u'ロ'),
(0xFF9C, 'M', u'ワ'),
(0xFF9D, 'M', u'ン'),
(0xFF9E, 'M', u'゙'),
(0xFF9F, 'M', u'゚'),
(0xFFA0, 'X'),
(0xFFA1, 'M', u'ᄀ'),
(0xFFA2, 'M', u'ᄁ'),
(0xFFA3, 'M', u'ᆪ'),
(0xFFA4, 'M', u'ᄂ'),
(0xFFA5, 'M', u'ᆬ'),
(0xFFA6, 'M', u'ᆭ'),
(0xFFA7, 'M', u'ᄃ'),
(0xFFA8, 'M', u'ᄄ'),
(0xFFA9, 'M', u'ᄅ'),
(0xFFAA, 'M', u'ᆰ'),
(0xFFAB, 'M', u'ᆱ'),
(0xFFAC, 'M', u'ᆲ'),
(0xFFAD, 'M', u'ᆳ'),
(0xFFAE, 'M', u'ᆴ'),
(0xFFAF, 'M', u'ᆵ'),
(0xFFB0, 'M', u'ᄚ'),
(0xFFB1, 'M', u'ᄆ'),
(0xFFB2, 'M', u'ᄇ'),
]
def _seg_52():
return [
(0xFFB3, 'M', u'ᄈ'),
(0xFFB4, 'M', u'ᄡ'),
(0xFFB5, 'M', u'ᄉ'),
(0xFFB6, 'M', u'ᄊ'),
(0xFFB7, 'M', u'ᄋ'),
(0xFFB8, 'M', u'ᄌ'),
(0xFFB9, 'M', u'ᄍ'),
(0xFFBA, 'M', u'ᄎ'),
(0xFFBB, 'M', u'ᄏ'),
(0xFFBC, 'M', u'ᄐ'),
(0xFFBD, 'M', u'ᄑ'),
(0xFFBE, 'M', u'ᄒ'),
(0xFFBF, 'X'),
(0xFFC2, 'M', u'ᅡ'),
(0xFFC3, 'M', u'ᅢ'),
(0xFFC4, 'M', u'ᅣ'),
(0xFFC5, 'M', u'ᅤ'),
(0xFFC6, 'M', u'ᅥ'),
(0xFFC7, 'M', u'ᅦ'),
(0xFFC8, 'X'),
(0xFFCA, 'M', u'ᅧ'),
(0xFFCB, 'M', u'ᅨ'),
(0xFFCC, 'M', u'ᅩ'),
(0xFFCD, 'M', u'ᅪ'),
(0xFFCE, 'M', u'ᅫ'),
(0xFFCF, 'M', u'ᅬ'),
(0xFFD0, 'X'),
(0xFFD2, 'M', u'ᅭ'),
(0xFFD3, 'M', u'ᅮ'),
(0xFFD4, 'M', u'ᅯ'),
(0xFFD5, 'M', u'ᅰ'),
(0xFFD6, 'M', u'ᅱ'),
(0xFFD7, 'M', u'ᅲ'),
(0xFFD8, 'X'),
(0xFFDA, 'M', u'ᅳ'),
(0xFFDB, 'M', u'ᅴ'),
(0xFFDC, 'M', u'ᅵ'),
(0xFFDD, 'X'),
(0xFFE0, 'M', u'¢'),
(0xFFE1, 'M', u'£'),
(0xFFE2, 'M', u'¬'),
(0xFFE3, '3', u' ̄'),
(0xFFE4, 'M', u'¦'),
(0xFFE5, 'M', u'¥'),
(0xFFE6, 'M', u'₩'),
(0xFFE7, 'X'),
(0xFFE8, 'M', u'│'),
(0xFFE9, 'M', u'←'),
(0xFFEA, 'M', u'↑'),
(0xFFEB, 'M', u'→'),
(0xFFEC, 'M', u'↓'),
(0xFFED, 'M', u'■'),
(0xFFEE, 'M', u'○'),
(0xFFEF, 'X'),
(0x10000, 'V'),
(0x1000C, 'X'),
(0x1000D, 'V'),
(0x10027, 'X'),
(0x10028, 'V'),
(0x1003B, 'X'),
(0x1003C, 'V'),
(0x1003E, 'X'),
(0x1003F, 'V'),
(0x1004E, 'X'),
(0x10050, 'V'),
(0x1005E, 'X'),
(0x10080, 'V'),
(0x100FB, 'X'),
(0x10100, 'V'),
(0x10103, 'X'),
(0x10107, 'V'),
(0x10134, 'X'),
(0x10137, 'V'),
(0x1018F, 'X'),
(0x10190, 'V'),
(0x1019C, 'X'),
(0x101A0, 'V'),
(0x101A1, 'X'),
(0x101D0, 'V'),
(0x101FE, 'X'),
(0x10280, 'V'),
(0x1029D, 'X'),
(0x102A0, 'V'),
(0x102D1, 'X'),
(0x102E0, 'V'),
(0x102FC, 'X'),
(0x10300, 'V'),
(0x10324, 'X'),
(0x1032D, 'V'),
(0x1034B, 'X'),
(0x10350, 'V'),
(0x1037B, 'X'),
(0x10380, 'V'),
(0x1039E, 'X'),
(0x1039F, 'V'),
(0x103C4, 'X'),
(0x103C8, 'V'),
(0x103D6, 'X'),
(0x10400, 'M', u'𐐨'),
(0x10401, 'M', u'𐐩'),
]
def _seg_53():
return [
(0x10402, 'M', u'𐐪'),
(0x10403, 'M', u'𐐫'),
(0x10404, 'M', u'𐐬'),
(0x10405, 'M', u'𐐭'),
(0x10406, 'M', u'𐐮'),
(0x10407, 'M', u'𐐯'),
(0x10408, 'M', u'𐐰'),
(0x10409, 'M', u'𐐱'),
(0x1040A, 'M', u'𐐲'),
(0x1040B, 'M', u'𐐳'),
(0x1040C, 'M', u'𐐴'),
(0x1040D, 'M', u'𐐵'),
(0x1040E, 'M', u'𐐶'),
(0x1040F, 'M', u'𐐷'),
(0x10410, 'M', u'𐐸'),
(0x10411, 'M', u'𐐹'),
(0x10412, 'M', u'𐐺'),
(0x10413, 'M', u'𐐻'),
(0x10414, 'M', u'𐐼'),
(0x10415, 'M', u'𐐽'),
(0x10416, 'M', u'𐐾'),
(0x10417, 'M', u'𐐿'),
(0x10418, 'M', u'𐑀'),
(0x10419, 'M', u'𐑁'),
(0x1041A, 'M', u'𐑂'),
(0x1041B, 'M', u'𐑃'),
(0x1041C, 'M', u'𐑄'),
(0x1041D, 'M', u'𐑅'),
(0x1041E, 'M', u'𐑆'),
(0x1041F, 'M', u'𐑇'),
(0x10420, 'M', u'𐑈'),
(0x10421, 'M', u'𐑉'),
(0x10422, 'M', u'𐑊'),
(0x10423, 'M', u'𐑋'),
(0x10424, 'M', u'𐑌'),
(0x10425, 'M', u'𐑍'),
(0x10426, 'M', u'𐑎'),
(0x10427, 'M', u'𐑏'),
(0x10428, 'V'),
(0x1049E, 'X'),
(0x104A0, 'V'),
(0x104AA, 'X'),
(0x104B0, 'M', u'𐓘'),
(0x104B1, 'M', u'𐓙'),
(0x104B2, 'M', u'𐓚'),
(0x104B3, 'M', u'𐓛'),
(0x104B4, 'M', u'𐓜'),
(0x104B5, 'M', u'𐓝'),
(0x104B6, 'M', u'𐓞'),
(0x104B7, 'M', u'𐓟'),
(0x104B8, 'M', u'𐓠'),
(0x104B9, 'M', u'𐓡'),
(0x104BA, 'M', u'𐓢'),
(0x104BB, 'M', u'𐓣'),
(0x104BC, 'M', u'𐓤'),
(0x104BD, 'M', u'𐓥'),
(0x104BE, 'M', u'𐓦'),
(0x104BF, 'M', u'𐓧'),
(0x104C0, 'M', u'𐓨'),
(0x104C1, 'M', u'𐓩'),
(0x104C2, 'M', u'𐓪'),
(0x104C3, 'M', u'𐓫'),
(0x104C4, 'M', u'𐓬'),
(0x104C5, 'M', u'𐓭'),
(0x104C6, 'M', u'𐓮'),
(0x104C7, 'M', u'𐓯'),
(0x104C8, 'M', u'𐓰'),
(0x104C9, 'M', u'𐓱'),
(0x104CA, 'M', u'𐓲'),
(0x104CB, 'M', u'𐓳'),
(0x104CC, 'M', u'𐓴'),
(0x104CD, 'M', u'𐓵'),
(0x104CE, 'M', u'𐓶'),
(0x104CF, 'M', u'𐓷'),
(0x104D0, 'M', u'𐓸'),
(0x104D1, 'M', u'𐓹'),
(0x104D2, 'M', u'𐓺'),
(0x104D3, 'M', u'𐓻'),
(0x104D4, 'X'),
(0x104D8, 'V'),
(0x104FC, 'X'),
(0x10500, 'V'),
(0x10528, 'X'),
(0x10530, 'V'),
(0x10564, 'X'),
(0x1056F, 'V'),
(0x10570, 'X'),
(0x10600, 'V'),
(0x10737, 'X'),
(0x10740, 'V'),
(0x10756, 'X'),
(0x10760, 'V'),
(0x10768, 'X'),
(0x10800, 'V'),
(0x10806, 'X'),
(0x10808, 'V'),
(0x10809, 'X'),
(0x1080A, 'V'),
(0x10836, 'X'),
(0x10837, 'V'),
]
def _seg_54():
return [
(0x10839, 'X'),
(0x1083C, 'V'),
(0x1083D, 'X'),
(0x1083F, 'V'),
(0x10856, 'X'),
(0x10857, 'V'),
(0x1089F, 'X'),
(0x108A7, 'V'),
(0x108B0, 'X'),
(0x108E0, 'V'),
(0x108F3, 'X'),
(0x108F4, 'V'),
(0x108F6, 'X'),
(0x108FB, 'V'),
(0x1091C, 'X'),
(0x1091F, 'V'),
(0x1093A, 'X'),
(0x1093F, 'V'),
(0x10940, 'X'),
(0x10980, 'V'),
(0x109B8, 'X'),
(0x109BC, 'V'),
(0x109D0, 'X'),
(0x109D2, 'V'),
(0x10A04, 'X'),
(0x10A05, 'V'),
(0x10A07, 'X'),
(0x10A0C, 'V'),
(0x10A14, 'X'),
(0x10A15, 'V'),
(0x10A18, 'X'),
(0x10A19, 'V'),
(0x10A36, 'X'),
(0x10A38, 'V'),
(0x10A3B, 'X'),
(0x10A3F, 'V'),
(0x10A49, 'X'),
(0x10A50, 'V'),
(0x10A59, 'X'),
(0x10A60, 'V'),
(0x10AA0, 'X'),
(0x10AC0, 'V'),
(0x10AE7, 'X'),
(0x10AEB, 'V'),
(0x10AF7, 'X'),
(0x10B00, 'V'),
(0x10B36, 'X'),
(0x10B39, 'V'),
(0x10B56, 'X'),
(0x10B58, 'V'),
(0x10B73, 'X'),
(0x10B78, 'V'),
(0x10B92, 'X'),
(0x10B99, 'V'),
(0x10B9D, 'X'),
(0x10BA9, 'V'),
(0x10BB0, 'X'),
(0x10C00, 'V'),
(0x10C49, 'X'),
(0x10C80, 'M', u'𐳀'),
(0x10C81, 'M', u'𐳁'),
(0x10C82, 'M', u'𐳂'),
(0x10C83, 'M', u'𐳃'),
(0x10C84, 'M', u'𐳄'),
(0x10C85, 'M', u'𐳅'),
(0x10C86, 'M', u'𐳆'),
(0x10C87, 'M', u'𐳇'),
(0x10C88, 'M', u'𐳈'),
(0x10C89, 'M', u'𐳉'),
(0x10C8A, 'M', u'𐳊'),
(0x10C8B, 'M', u'𐳋'),
(0x10C8C, 'M', u'𐳌'),
(0x10C8D, 'M', u'𐳍'),
(0x10C8E, 'M', u'𐳎'),
(0x10C8F, 'M', u'𐳏'),
(0x10C90, 'M', u'𐳐'),
(0x10C91, 'M', u'𐳑'),
(0x10C92, 'M', u'𐳒'),
(0x10C93, 'M', u'𐳓'),
(0x10C94, 'M', u'𐳔'),
(0x10C95, 'M', u'𐳕'),
(0x10C96, 'M', u'𐳖'),
(0x10C97, 'M', u'𐳗'),
(0x10C98, 'M', u'𐳘'),
(0x10C99, 'M', u'𐳙'),
(0x10C9A, 'M', u'𐳚'),
(0x10C9B, 'M', u'𐳛'),
(0x10C9C, 'M', u'𐳜'),
(0x10C9D, 'M', u'𐳝'),
(0x10C9E, 'M', u'𐳞'),
(0x10C9F, 'M', u'𐳟'),
(0x10CA0, 'M', u'𐳠'),
(0x10CA1, 'M', u'𐳡'),
(0x10CA2, 'M', u'𐳢'),
(0x10CA3, 'M', u'𐳣'),
(0x10CA4, 'M', u'𐳤'),
(0x10CA5, 'M', u'𐳥'),
(0x10CA6, 'M', u'𐳦'),
(0x10CA7, 'M', u'𐳧'),
(0x10CA8, 'M', u'𐳨'),
]
def _seg_55():
return [
(0x10CA9, 'M', u'𐳩'),
(0x10CAA, 'M', u'𐳪'),
(0x10CAB, 'M', u'𐳫'),
(0x10CAC, 'M', u'𐳬'),
(0x10CAD, 'M', u'𐳭'),
(0x10CAE, 'M', u'𐳮'),
(0x10CAF, 'M', u'𐳯'),
(0x10CB0, 'M', u'𐳰'),
(0x10CB1, 'M', u'𐳱'),
(0x10CB2, 'M', u'𐳲'),
(0x10CB3, 'X'),
(0x10CC0, 'V'),
(0x10CF3, 'X'),
(0x10CFA, 'V'),
(0x10D28, 'X'),
(0x10D30, 'V'),
(0x10D3A, 'X'),
(0x10E60, 'V'),
(0x10E7F, 'X'),
(0x10F00, 'V'),
(0x10F28, 'X'),
(0x10F30, 'V'),
(0x10F5A, 'X'),
(0x11000, 'V'),
(0x1104E, 'X'),
(0x11052, 'V'),
(0x11070, 'X'),
(0x1107F, 'V'),
(0x110BD, 'X'),
(0x110BE, 'V'),
(0x110C2, 'X'),
(0x110D0, 'V'),
(0x110E9, 'X'),
(0x110F0, 'V'),
(0x110FA, 'X'),
(0x11100, 'V'),
(0x11135, 'X'),
(0x11136, 'V'),
(0x11147, 'X'),
(0x11150, 'V'),
(0x11177, 'X'),
(0x11180, 'V'),
(0x111CE, 'X'),
(0x111D0, 'V'),
(0x111E0, 'X'),
(0x111E1, 'V'),
(0x111F5, 'X'),
(0x11200, 'V'),
(0x11212, 'X'),
(0x11213, 'V'),
(0x1123F, 'X'),
(0x11280, 'V'),
(0x11287, 'X'),
(0x11288, 'V'),
(0x11289, 'X'),
(0x1128A, 'V'),
(0x1128E, 'X'),
(0x1128F, 'V'),
(0x1129E, 'X'),
(0x1129F, 'V'),
(0x112AA, 'X'),
(0x112B0, 'V'),
(0x112EB, 'X'),
(0x112F0, 'V'),
(0x112FA, 'X'),
(0x11300, 'V'),
(0x11304, 'X'),
(0x11305, 'V'),
(0x1130D, 'X'),
(0x1130F, 'V'),
(0x11311, 'X'),
(0x11313, 'V'),
(0x11329, 'X'),
(0x1132A, 'V'),
(0x11331, 'X'),
(0x11332, 'V'),
(0x11334, 'X'),
(0x11335, 'V'),
(0x1133A, 'X'),
(0x1133B, 'V'),
(0x11345, 'X'),
(0x11347, 'V'),
(0x11349, 'X'),
(0x1134B, 'V'),
(0x1134E, 'X'),
(0x11350, 'V'),
(0x11351, 'X'),
(0x11357, 'V'),
(0x11358, 'X'),
(0x1135D, 'V'),
(0x11364, 'X'),
(0x11366, 'V'),
(0x1136D, 'X'),
(0x11370, 'V'),
(0x11375, 'X'),
(0x11400, 'V'),
(0x1145A, 'X'),
(0x1145B, 'V'),
(0x1145C, 'X'),
(0x1145D, 'V'),
]
def _seg_56():
return [
(0x1145F, 'X'),
(0x11480, 'V'),
(0x114C8, 'X'),
(0x114D0, 'V'),
(0x114DA, 'X'),
(0x11580, 'V'),
(0x115B6, 'X'),
(0x115B8, 'V'),
(0x115DE, 'X'),
(0x11600, 'V'),
(0x11645, 'X'),
(0x11650, 'V'),
(0x1165A, 'X'),
(0x11660, 'V'),
(0x1166D, 'X'),
(0x11680, 'V'),
(0x116B8, 'X'),
(0x116C0, 'V'),
(0x116CA, 'X'),
(0x11700, 'V'),
(0x1171B, 'X'),
(0x1171D, 'V'),
(0x1172C, 'X'),
(0x11730, 'V'),
(0x11740, 'X'),
(0x11800, 'V'),
(0x1183C, 'X'),
(0x118A0, 'M', u'𑣀'),
(0x118A1, 'M', u'𑣁'),
(0x118A2, 'M', u'𑣂'),
(0x118A3, 'M', u'𑣃'),
(0x118A4, 'M', u'𑣄'),
(0x118A5, 'M', u'𑣅'),
(0x118A6, 'M', u'𑣆'),
(0x118A7, 'M', u'𑣇'),
(0x118A8, 'M', u'𑣈'),
(0x118A9, 'M', u'𑣉'),
(0x118AA, 'M', u'𑣊'),
(0x118AB, 'M', u'𑣋'),
(0x118AC, 'M', u'𑣌'),
(0x118AD, 'M', u'𑣍'),
(0x118AE, 'M', u'𑣎'),
(0x118AF, 'M', u'𑣏'),
(0x118B0, 'M', u'𑣐'),
(0x118B1, 'M', u'𑣑'),
(0x118B2, 'M', u'𑣒'),
(0x118B3, 'M', u'𑣓'),
(0x118B4, 'M', u'𑣔'),
(0x118B5, 'M', u'𑣕'),
(0x118B6, 'M', u'𑣖'),
(0x118B7, 'M', u'𑣗'),
(0x118B8, 'M', u'𑣘'),
(0x118B9, 'M', u'𑣙'),
(0x118BA, 'M', u'𑣚'),
(0x118BB, 'M', u'𑣛'),
(0x118BC, 'M', u'𑣜'),
(0x118BD, 'M', u'𑣝'),
(0x118BE, 'M', u'𑣞'),
(0x118BF, 'M', u'𑣟'),
(0x118C0, 'V'),
(0x118F3, 'X'),
(0x118FF, 'V'),
(0x11900, 'X'),
(0x11A00, 'V'),
(0x11A48, 'X'),
(0x11A50, 'V'),
(0x11A84, 'X'),
(0x11A86, 'V'),
(0x11AA3, 'X'),
(0x11AC0, 'V'),
(0x11AF9, 'X'),
(0x11C00, 'V'),
(0x11C09, 'X'),
(0x11C0A, 'V'),
(0x11C37, 'X'),
(0x11C38, 'V'),
(0x11C46, 'X'),
(0x11C50, 'V'),
(0x11C6D, 'X'),
(0x11C70, 'V'),
(0x11C90, 'X'),
(0x11C92, 'V'),
(0x11CA8, 'X'),
(0x11CA9, 'V'),
(0x11CB7, 'X'),
(0x11D00, 'V'),
(0x11D07, 'X'),
(0x11D08, 'V'),
(0x11D0A, 'X'),
(0x11D0B, 'V'),
(0x11D37, 'X'),
(0x11D3A, 'V'),
(0x11D3B, 'X'),
(0x11D3C, 'V'),
(0x11D3E, 'X'),
(0x11D3F, 'V'),
(0x11D48, 'X'),
(0x11D50, 'V'),
(0x11D5A, 'X'),
(0x11D60, 'V'),
]
def _seg_57():
return [
(0x11D66, 'X'),
(0x11D67, 'V'),
(0x11D69, 'X'),
(0x11D6A, 'V'),
(0x11D8F, 'X'),
(0x11D90, 'V'),
(0x11D92, 'X'),
(0x11D93, 'V'),
(0x11D99, 'X'),
(0x11DA0, 'V'),
(0x11DAA, 'X'),
(0x11EE0, 'V'),
(0x11EF9, 'X'),
(0x12000, 'V'),
(0x1239A, 'X'),
(0x12400, 'V'),
(0x1246F, 'X'),
(0x12470, 'V'),
(0x12475, 'X'),
(0x12480, 'V'),
(0x12544, 'X'),
(0x13000, 'V'),
(0x1342F, 'X'),
(0x14400, 'V'),
(0x14647, 'X'),
(0x16800, 'V'),
(0x16A39, 'X'),
(0x16A40, 'V'),
(0x16A5F, 'X'),
(0x16A60, 'V'),
(0x16A6A, 'X'),
(0x16A6E, 'V'),
(0x16A70, 'X'),
(0x16AD0, 'V'),
(0x16AEE, 'X'),
(0x16AF0, 'V'),
(0x16AF6, 'X'),
(0x16B00, 'V'),
(0x16B46, 'X'),
(0x16B50, 'V'),
(0x16B5A, 'X'),
(0x16B5B, 'V'),
(0x16B62, 'X'),
(0x16B63, 'V'),
(0x16B78, 'X'),
(0x16B7D, 'V'),
(0x16B90, 'X'),
(0x16E60, 'V'),
(0x16E9B, 'X'),
(0x16F00, 'V'),
(0x16F45, 'X'),
(0x16F50, 'V'),
(0x16F7F, 'X'),
(0x16F8F, 'V'),
(0x16FA0, 'X'),
(0x16FE0, 'V'),
(0x16FE2, 'X'),
(0x17000, 'V'),
(0x187F2, 'X'),
(0x18800, 'V'),
(0x18AF3, 'X'),
(0x1B000, 'V'),
(0x1B11F, 'X'),
(0x1B170, 'V'),
(0x1B2FC, 'X'),
(0x1BC00, 'V'),
(0x1BC6B, 'X'),
(0x1BC70, 'V'),
(0x1BC7D, 'X'),
(0x1BC80, 'V'),
(0x1BC89, 'X'),
(0x1BC90, 'V'),
(0x1BC9A, 'X'),
(0x1BC9C, 'V'),
(0x1BCA0, 'I'),
(0x1BCA4, 'X'),
(0x1D000, 'V'),
(0x1D0F6, 'X'),
(0x1D100, 'V'),
(0x1D127, 'X'),
(0x1D129, 'V'),
(0x1D15E, 'M', u'𝅗𝅥'),
(0x1D15F, 'M', u'𝅘𝅥'),
(0x1D160, 'M', u'𝅘𝅥𝅮'),
(0x1D161, 'M', u'𝅘𝅥𝅯'),
(0x1D162, 'M', u'𝅘𝅥𝅰'),
(0x1D163, 'M', u'𝅘𝅥𝅱'),
(0x1D164, 'M', u'𝅘𝅥𝅲'),
(0x1D165, 'V'),
(0x1D173, 'X'),
(0x1D17B, 'V'),
(0x1D1BB, 'M', u'𝆹𝅥'),
(0x1D1BC, 'M', u'𝆺𝅥'),
(0x1D1BD, 'M', u'𝆹𝅥𝅮'),
(0x1D1BE, 'M', u'𝆺𝅥𝅮'),
(0x1D1BF, 'M', u'𝆹𝅥𝅯'),
(0x1D1C0, 'M', u'𝆺𝅥𝅯'),
(0x1D1C1, 'V'),
(0x1D1E9, 'X'),
(0x1D200, 'V'),
]
def _seg_58():
return [
(0x1D246, 'X'),
(0x1D2E0, 'V'),
(0x1D2F4, 'X'),
(0x1D300, 'V'),
(0x1D357, 'X'),
(0x1D360, 'V'),
(0x1D379, 'X'),
(0x1D400, 'M', u'a'),
(0x1D401, 'M', u'b'),
(0x1D402, 'M', u'c'),
(0x1D403, 'M', u'd'),
(0x1D404, 'M', u'e'),
(0x1D405, 'M', u'f'),
(0x1D406, 'M', u'g'),
(0x1D407, 'M', u'h'),
(0x1D408, 'M', u'i'),
(0x1D409, 'M', u'j'),
(0x1D40A, 'M', u'k'),
(0x1D40B, 'M', u'l'),
(0x1D40C, 'M', u'm'),
(0x1D40D, 'M', u'n'),
(0x1D40E, 'M', u'o'),
(0x1D40F, 'M', u'p'),
(0x1D410, 'M', u'q'),
(0x1D411, 'M', u'r'),
(0x1D412, 'M', u's'),
(0x1D413, 'M', u't'),
(0x1D414, 'M', u'u'),
(0x1D415, 'M', u'v'),
(0x1D416, 'M', u'w'),
(0x1D417, 'M', u'x'),
(0x1D418, 'M', u'y'),
(0x1D419, 'M', u'z'),
(0x1D41A, 'M', u'a'),
(0x1D41B, 'M', u'b'),
(0x1D41C, 'M', u'c'),
(0x1D41D, 'M', u'd'),
(0x1D41E, 'M', u'e'),
(0x1D41F, 'M', u'f'),
(0x1D420, 'M', u'g'),
(0x1D421, 'M', u'h'),
(0x1D422, 'M', u'i'),
(0x1D423, 'M', u'j'),
(0x1D424, 'M', u'k'),
(0x1D425, 'M', u'l'),
(0x1D426, 'M', u'm'),
(0x1D427, 'M', u'n'),
(0x1D428, 'M', u'o'),
(0x1D429, 'M', u'p'),
(0x1D42A, 'M', u'q'),
(0x1D42B, 'M', u'r'),
(0x1D42C, 'M', u's'),
(0x1D42D, 'M', u't'),
(0x1D42E, 'M', u'u'),
(0x1D42F, 'M', u'v'),
(0x1D430, 'M', u'w'),
(0x1D431, 'M', u'x'),
(0x1D432, 'M', u'y'),
(0x1D433, 'M', u'z'),
(0x1D434, 'M', u'a'),
(0x1D435, 'M', u'b'),
(0x1D436, 'M', u'c'),
(0x1D437, 'M', u'd'),
(0x1D438, 'M', u'e'),
(0x1D439, 'M', u'f'),
(0x1D43A, 'M', u'g'),
(0x1D43B, 'M', u'h'),
(0x1D43C, 'M', u'i'),
(0x1D43D, 'M', u'j'),
(0x1D43E, 'M', u'k'),
(0x1D43F, 'M', u'l'),
(0x1D440, 'M', u'm'),
(0x1D441, 'M', u'n'),
(0x1D442, 'M', u'o'),
(0x1D443, 'M', u'p'),
(0x1D444, 'M', u'q'),
(0x1D445, 'M', u'r'),
(0x1D446, 'M', u's'),
(0x1D447, 'M', u't'),
(0x1D448, 'M', u'u'),
(0x1D449, 'M', u'v'),
(0x1D44A, 'M', u'w'),
(0x1D44B, 'M', u'x'),
(0x1D44C, 'M', u'y'),
(0x1D44D, 'M', u'z'),
(0x1D44E, 'M', u'a'),
(0x1D44F, 'M', u'b'),
(0x1D450, 'M', u'c'),
(0x1D451, 'M', u'd'),
(0x1D452, 'M', u'e'),
(0x1D453, 'M', u'f'),
(0x1D454, 'M', u'g'),
(0x1D455, 'X'),
(0x1D456, 'M', u'i'),
(0x1D457, 'M', u'j'),
(0x1D458, 'M', u'k'),
(0x1D459, 'M', u'l'),
(0x1D45A, 'M', u'm'),
(0x1D45B, 'M', u'n'),
(0x1D45C, 'M', u'o'),
]
def _seg_59():
return [
(0x1D45D, 'M', u'p'),
(0x1D45E, 'M', u'q'),
(0x1D45F, 'M', u'r'),
(0x1D460, 'M', u's'),
(0x1D461, 'M', u't'),
(0x1D462, 'M', u'u'),
(0x1D463, 'M', u'v'),
(0x1D464, 'M', u'w'),
(0x1D465, 'M', u'x'),
(0x1D466, 'M', u'y'),
(0x1D467, 'M', u'z'),
(0x1D468, 'M', u'a'),
(0x1D469, 'M', u'b'),
(0x1D46A, 'M', u'c'),
(0x1D46B, 'M', u'd'),
(0x1D46C, 'M', u'e'),
(0x1D46D, 'M', u'f'),
(0x1D46E, 'M', u'g'),
(0x1D46F, 'M', u'h'),
(0x1D470, 'M', u'i'),
(0x1D471, 'M', u'j'),
(0x1D472, 'M', u'k'),
(0x1D473, 'M', u'l'),
(0x1D474, 'M', u'm'),
(0x1D475, 'M', u'n'),
(0x1D476, 'M', u'o'),
(0x1D477, 'M', u'p'),
(0x1D478, 'M', u'q'),
(0x1D479, 'M', u'r'),
(0x1D47A, 'M', u's'),
(0x1D47B, 'M', u't'),
(0x1D47C, 'M', u'u'),
(0x1D47D, 'M', u'v'),
(0x1D47E, 'M', u'w'),
(0x1D47F, 'M', u'x'),
(0x1D480, 'M', u'y'),
(0x1D481, 'M', u'z'),
(0x1D482, 'M', u'a'),
(0x1D483, 'M', u'b'),
(0x1D484, 'M', u'c'),
(0x1D485, 'M', u'd'),
(0x1D486, 'M', u'e'),
(0x1D487, 'M', u'f'),
(0x1D488, 'M', u'g'),
(0x1D489, 'M', u'h'),
(0x1D48A, 'M', u'i'),
(0x1D48B, 'M', u'j'),
(0x1D48C, 'M', u'k'),
(0x1D48D, 'M', u'l'),
(0x1D48E, 'M', u'm'),
(0x1D48F, 'M', u'n'),
(0x1D490, 'M', u'o'),
(0x1D491, 'M', u'p'),
(0x1D492, 'M', u'q'),
(0x1D493, 'M', u'r'),
(0x1D494, 'M', u's'),
(0x1D495, 'M', u't'),
(0x1D496, 'M', u'u'),
(0x1D497, 'M', u'v'),
(0x1D498, 'M', u'w'),
(0x1D499, 'M', u'x'),
(0x1D49A, 'M', u'y'),
(0x1D49B, 'M', u'z'),
(0x1D49C, 'M', u'a'),
(0x1D49D, 'X'),
(0x1D49E, 'M', u'c'),
(0x1D49F, 'M', u'd'),
(0x1D4A0, 'X'),
(0x1D4A2, 'M', u'g'),
(0x1D4A3, 'X'),
(0x1D4A5, 'M', u'j'),
(0x1D4A6, 'M', u'k'),
(0x1D4A7, 'X'),
(0x1D4A9, 'M', u'n'),
(0x1D4AA, 'M', u'o'),
(0x1D4AB, 'M', u'p'),
(0x1D4AC, 'M', u'q'),
(0x1D4AD, 'X'),
(0x1D4AE, 'M', u's'),
(0x1D4AF, 'M', u't'),
(0x1D4B0, 'M', u'u'),
(0x1D4B1, 'M', u'v'),
(0x1D4B2, 'M', u'w'),
(0x1D4B3, 'M', u'x'),
(0x1D4B4, 'M', u'y'),
(0x1D4B5, 'M', u'z'),
(0x1D4B6, 'M', u'a'),
(0x1D4B7, 'M', u'b'),
(0x1D4B8, 'M', u'c'),
(0x1D4B9, 'M', u'd'),
(0x1D4BA, 'X'),
(0x1D4BB, 'M', u'f'),
(0x1D4BC, 'X'),
(0x1D4BD, 'M', u'h'),
(0x1D4BE, 'M', u'i'),
(0x1D4BF, 'M', u'j'),
(0x1D4C0, 'M', u'k'),
(0x1D4C1, 'M', u'l'),
(0x1D4C2, 'M', u'm'),
(0x1D4C3, 'M', u'n'),
]
def _seg_60():
return [
(0x1D4C4, 'X'),
(0x1D4C5, 'M', u'p'),
(0x1D4C6, 'M', u'q'),
(0x1D4C7, 'M', u'r'),
(0x1D4C8, 'M', u's'),
(0x1D4C9, 'M', u't'),
(0x1D4CA, 'M', u'u'),
(0x1D4CB, 'M', u'v'),
(0x1D4CC, 'M', u'w'),
(0x1D4CD, 'M', u'x'),
(0x1D4CE, 'M', u'y'),
(0x1D4CF, 'M', u'z'),
(0x1D4D0, 'M', u'a'),
(0x1D4D1, 'M', u'b'),
(0x1D4D2, 'M', u'c'),
(0x1D4D3, 'M', u'd'),
(0x1D4D4, 'M', u'e'),
(0x1D4D5, 'M', u'f'),
(0x1D4D6, 'M', u'g'),
(0x1D4D7, 'M', u'h'),
(0x1D4D8, 'M', u'i'),
(0x1D4D9, 'M', u'j'),
(0x1D4DA, 'M', u'k'),
(0x1D4DB, 'M', u'l'),
(0x1D4DC, 'M', u'm'),
(0x1D4DD, 'M', u'n'),
(0x1D4DE, 'M', u'o'),
(0x1D4DF, 'M', u'p'),
(0x1D4E0, 'M', u'q'),
(0x1D4E1, 'M', u'r'),
(0x1D4E2, 'M', u's'),
(0x1D4E3, 'M', u't'),
(0x1D4E4, 'M', u'u'),
(0x1D4E5, 'M', u'v'),
(0x1D4E6, 'M', u'w'),
(0x1D4E7, 'M', u'x'),
(0x1D4E8, 'M', u'y'),
(0x1D4E9, 'M', u'z'),
(0x1D4EA, 'M', u'a'),
(0x1D4EB, 'M', u'b'),
(0x1D4EC, 'M', u'c'),
(0x1D4ED, 'M', u'd'),
(0x1D4EE, 'M', u'e'),
(0x1D4EF, 'M', u'f'),
(0x1D4F0, 'M', u'g'),
(0x1D4F1, 'M', u'h'),
(0x1D4F2, 'M', u'i'),
(0x1D4F3, 'M', u'j'),
(0x1D4F4, 'M', u'k'),
(0x1D4F5, 'M', u'l'),
(0x1D4F6, 'M', u'm'),
(0x1D4F7, 'M', u'n'),
(0x1D4F8, 'M', u'o'),
(0x1D4F9, 'M', u'p'),
(0x1D4FA, 'M', u'q'),
(0x1D4FB, 'M', u'r'),
(0x1D4FC, 'M', u's'),
(0x1D4FD, 'M', u't'),
(0x1D4FE, 'M', u'u'),
(0x1D4FF, 'M', u'v'),
(0x1D500, 'M', u'w'),
(0x1D501, 'M', u'x'),
(0x1D502, 'M', u'y'),
(0x1D503, 'M', u'z'),
(0x1D504, 'M', u'a'),
(0x1D505, 'M', u'b'),
(0x1D506, 'X'),
(0x1D507, 'M', u'd'),
(0x1D508, 'M', u'e'),
(0x1D509, 'M', u'f'),
(0x1D50A, 'M', u'g'),
(0x1D50B, 'X'),
(0x1D50D, 'M', u'j'),
(0x1D50E, 'M', u'k'),
(0x1D50F, 'M', u'l'),
(0x1D510, 'M', u'm'),
(0x1D511, 'M', u'n'),
(0x1D512, 'M', u'o'),
(0x1D513, 'M', u'p'),
(0x1D514, 'M', u'q'),
(0x1D515, 'X'),
(0x1D516, 'M', u's'),
(0x1D517, 'M', u't'),
(0x1D518, 'M', u'u'),
(0x1D519, 'M', u'v'),
(0x1D51A, 'M', u'w'),
(0x1D51B, 'M', u'x'),
(0x1D51C, 'M', u'y'),
(0x1D51D, 'X'),
(0x1D51E, 'M', u'a'),
(0x1D51F, 'M', u'b'),
(0x1D520, 'M', u'c'),
(0x1D521, 'M', u'd'),
(0x1D522, 'M', u'e'),
(0x1D523, 'M', u'f'),
(0x1D524, 'M', u'g'),
(0x1D525, 'M', u'h'),
(0x1D526, 'M', u'i'),
(0x1D527, 'M', u'j'),
(0x1D528, 'M', u'k'),
]
def _seg_61():
return [
(0x1D529, 'M', u'l'),
(0x1D52A, 'M', u'm'),
(0x1D52B, 'M', u'n'),
(0x1D52C, 'M', u'o'),
(0x1D52D, 'M', u'p'),
(0x1D52E, 'M', u'q'),
(0x1D52F, 'M', u'r'),
(0x1D530, 'M', u's'),
(0x1D531, 'M', u't'),
(0x1D532, 'M', u'u'),
(0x1D533, 'M', u'v'),
(0x1D534, 'M', u'w'),
(0x1D535, 'M', u'x'),
(0x1D536, 'M', u'y'),
(0x1D537, 'M', u'z'),
(0x1D538, 'M', u'a'),
(0x1D539, 'M', u'b'),
(0x1D53A, 'X'),
(0x1D53B, 'M', u'd'),
(0x1D53C, 'M', u'e'),
(0x1D53D, 'M', u'f'),
(0x1D53E, 'M', u'g'),
(0x1D53F, 'X'),
(0x1D540, 'M', u'i'),
(0x1D541, 'M', u'j'),
(0x1D542, 'M', u'k'),
(0x1D543, 'M', u'l'),
(0x1D544, 'M', u'm'),
(0x1D545, 'X'),
(0x1D546, 'M', u'o'),
(0x1D547, 'X'),
(0x1D54A, 'M', u's'),
(0x1D54B, 'M', u't'),
(0x1D54C, 'M', u'u'),
(0x1D54D, 'M', u'v'),
(0x1D54E, 'M', u'w'),
(0x1D54F, 'M', u'x'),
(0x1D550, 'M', u'y'),
(0x1D551, 'X'),
(0x1D552, 'M', u'a'),
(0x1D553, 'M', u'b'),
(0x1D554, 'M', u'c'),
(0x1D555, 'M', u'd'),
(0x1D556, 'M', u'e'),
(0x1D557, 'M', u'f'),
(0x1D558, 'M', u'g'),
(0x1D559, 'M', u'h'),
(0x1D55A, 'M', u'i'),
(0x1D55B, 'M', u'j'),
(0x1D55C, 'M', u'k'),
(0x1D55D, 'M', u'l'),
(0x1D55E, 'M', u'm'),
(0x1D55F, 'M', u'n'),
(0x1D560, 'M', u'o'),
(0x1D561, 'M', u'p'),
(0x1D562, 'M', u'q'),
(0x1D563, 'M', u'r'),
(0x1D564, 'M', u's'),
(0x1D565, 'M', u't'),
(0x1D566, 'M', u'u'),
(0x1D567, 'M', u'v'),
(0x1D568, 'M', u'w'),
(0x1D569, 'M', u'x'),
(0x1D56A, 'M', u'y'),
(0x1D56B, 'M', u'z'),
(0x1D56C, 'M', u'a'),
(0x1D56D, 'M', u'b'),
(0x1D56E, 'M', u'c'),
(0x1D56F, 'M', u'd'),
(0x1D570, 'M', u'e'),
(0x1D571, 'M', u'f'),
(0x1D572, 'M', u'g'),
(0x1D573, 'M', u'h'),
(0x1D574, 'M', u'i'),
(0x1D575, 'M', u'j'),
(0x1D576, 'M', u'k'),
(0x1D577, 'M', u'l'),
(0x1D578, 'M', u'm'),
(0x1D579, 'M', u'n'),
(0x1D57A, 'M', u'o'),
(0x1D57B, 'M', u'p'),
(0x1D57C, 'M', u'q'),
(0x1D57D, 'M', u'r'),
(0x1D57E, 'M', u's'),
(0x1D57F, 'M', u't'),
(0x1D580, 'M', u'u'),
(0x1D581, 'M', u'v'),
(0x1D582, 'M', u'w'),
(0x1D583, 'M', u'x'),
(0x1D584, 'M', u'y'),
(0x1D585, 'M', u'z'),
(0x1D586, 'M', u'a'),
(0x1D587, 'M', u'b'),
(0x1D588, 'M', u'c'),
(0x1D589, 'M', u'd'),
(0x1D58A, 'M', u'e'),
(0x1D58B, 'M', u'f'),
(0x1D58C, 'M', u'g'),
(0x1D58D, 'M', u'h'),
(0x1D58E, 'M', u'i'),
]
def _seg_62():
return [
(0x1D58F, 'M', u'j'),
(0x1D590, 'M', u'k'),
(0x1D591, 'M', u'l'),
(0x1D592, 'M', u'm'),
(0x1D593, 'M', u'n'),
(0x1D594, 'M', u'o'),
(0x1D595, 'M', u'p'),
(0x1D596, 'M', u'q'),
(0x1D597, 'M', u'r'),
(0x1D598, 'M', u's'),
(0x1D599, 'M', u't'),
(0x1D59A, 'M', u'u'),
(0x1D59B, 'M', u'v'),
(0x1D59C, 'M', u'w'),
(0x1D59D, 'M', u'x'),
(0x1D59E, 'M', u'y'),
(0x1D59F, 'M', u'z'),
(0x1D5A0, 'M', u'a'),
(0x1D5A1, 'M', u'b'),
(0x1D5A2, 'M', u'c'),
(0x1D5A3, 'M', u'd'),
(0x1D5A4, 'M', u'e'),
(0x1D5A5, 'M', u'f'),
(0x1D5A6, 'M', u'g'),
(0x1D5A7, 'M', u'h'),
(0x1D5A8, 'M', u'i'),
(0x1D5A9, 'M', u'j'),
(0x1D5AA, 'M', u'k'),
(0x1D5AB, 'M', u'l'),
(0x1D5AC, 'M', u'm'),
(0x1D5AD, 'M', u'n'),
(0x1D5AE, 'M', u'o'),
(0x1D5AF, 'M', u'p'),
(0x1D5B0, 'M', u'q'),
(0x1D5B1, 'M', u'r'),
(0x1D5B2, 'M', u's'),
(0x1D5B3, 'M', u't'),
(0x1D5B4, 'M', u'u'),
(0x1D5B5, 'M', u'v'),
(0x1D5B6, 'M', u'w'),
(0x1D5B7, 'M', u'x'),
(0x1D5B8, 'M', u'y'),
(0x1D5B9, 'M', u'z'),
(0x1D5BA, 'M', u'a'),
(0x1D5BB, 'M', u'b'),
(0x1D5BC, 'M', u'c'),
(0x1D5BD, 'M', u'd'),
(0x1D5BE, 'M', u'e'),
(0x1D5BF, 'M', u'f'),
(0x1D5C0, 'M', u'g'),
(0x1D5C1, 'M', u'h'),
(0x1D5C2, 'M', u'i'),
(0x1D5C3, 'M', u'j'),
(0x1D5C4, 'M', u'k'),
(0x1D5C5, 'M', u'l'),
(0x1D5C6, 'M', u'm'),
(0x1D5C7, 'M', u'n'),
(0x1D5C8, 'M', u'o'),
(0x1D5C9, 'M', u'p'),
(0x1D5CA, 'M', u'q'),
(0x1D5CB, 'M', u'r'),
(0x1D5CC, 'M', u's'),
(0x1D5CD, 'M', u't'),
(0x1D5CE, 'M', u'u'),
(0x1D5CF, 'M', u'v'),
(0x1D5D0, 'M', u'w'),
(0x1D5D1, 'M', u'x'),
(0x1D5D2, 'M', u'y'),
(0x1D5D3, 'M', u'z'),
(0x1D5D4, 'M', u'a'),
(0x1D5D5, 'M', u'b'),
(0x1D5D6, 'M', u'c'),
(0x1D5D7, 'M', u'd'),
(0x1D5D8, 'M', u'e'),
(0x1D5D9, 'M', u'f'),
(0x1D5DA, 'M', u'g'),
(0x1D5DB, 'M', u'h'),
(0x1D5DC, 'M', u'i'),
(0x1D5DD, 'M', u'j'),
(0x1D5DE, 'M', u'k'),
(0x1D5DF, 'M', u'l'),
(0x1D5E0, 'M', u'm'),
(0x1D5E1, 'M', u'n'),
(0x1D5E2, 'M', u'o'),
(0x1D5E3, 'M', u'p'),
(0x1D5E4, 'M', u'q'),
(0x1D5E5, 'M', u'r'),
(0x1D5E6, 'M', u's'),
(0x1D5E7, 'M', u't'),
(0x1D5E8, 'M', u'u'),
(0x1D5E9, 'M', u'v'),
(0x1D5EA, 'M', u'w'),
(0x1D5EB, 'M', u'x'),
(0x1D5EC, 'M', u'y'),
(0x1D5ED, 'M', u'z'),
(0x1D5EE, 'M', u'a'),
(0x1D5EF, 'M', u'b'),
(0x1D5F0, 'M', u'c'),
(0x1D5F1, 'M', u'd'),
(0x1D5F2, 'M', u'e'),
]
def _seg_63():
return [
(0x1D5F3, 'M', u'f'),
(0x1D5F4, 'M', u'g'),
(0x1D5F5, 'M', u'h'),
(0x1D5F6, 'M', u'i'),
(0x1D5F7, 'M', u'j'),
(0x1D5F8, 'M', u'k'),
(0x1D5F9, 'M', u'l'),
(0x1D5FA, 'M', u'm'),
(0x1D5FB, 'M', u'n'),
(0x1D5FC, 'M', u'o'),
(0x1D5FD, 'M', u'p'),
(0x1D5FE, 'M', u'q'),
(0x1D5FF, 'M', u'r'),
(0x1D600, 'M', u's'),
(0x1D601, 'M', u't'),
(0x1D602, 'M', u'u'),
(0x1D603, 'M', u'v'),
(0x1D604, 'M', u'w'),
(0x1D605, 'M', u'x'),
(0x1D606, 'M', u'y'),
(0x1D607, 'M', u'z'),
(0x1D608, 'M', u'a'),
(0x1D609, 'M', u'b'),
(0x1D60A, 'M', u'c'),
(0x1D60B, 'M', u'd'),
(0x1D60C, 'M', u'e'),
(0x1D60D, 'M', u'f'),
(0x1D60E, 'M', u'g'),
(0x1D60F, 'M', u'h'),
(0x1D610, 'M', u'i'),
(0x1D611, 'M', u'j'),
(0x1D612, 'M', u'k'),
(0x1D613, 'M', u'l'),
(0x1D614, 'M', u'm'),
(0x1D615, 'M', u'n'),
(0x1D616, 'M', u'o'),
(0x1D617, 'M', u'p'),
(0x1D618, 'M', u'q'),
(0x1D619, 'M', u'r'),
(0x1D61A, 'M', u's'),
(0x1D61B, 'M', u't'),
(0x1D61C, 'M', u'u'),
(0x1D61D, 'M', u'v'),
(0x1D61E, 'M', u'w'),
(0x1D61F, 'M', u'x'),
(0x1D620, 'M', u'y'),
(0x1D621, 'M', u'z'),
(0x1D622, 'M', u'a'),
(0x1D623, 'M', u'b'),
(0x1D624, 'M', u'c'),
(0x1D625, 'M', u'd'),
(0x1D626, 'M', u'e'),
(0x1D627, 'M', u'f'),
(0x1D628, 'M', u'g'),
(0x1D629, 'M', u'h'),
(0x1D62A, 'M', u'i'),
(0x1D62B, 'M', u'j'),
(0x1D62C, 'M', u'k'),
(0x1D62D, 'M', u'l'),
(0x1D62E, 'M', u'm'),
(0x1D62F, 'M', u'n'),
(0x1D630, 'M', u'o'),
(0x1D631, 'M', u'p'),
(0x1D632, 'M', u'q'),
(0x1D633, 'M', u'r'),
(0x1D634, 'M', u's'),
(0x1D635, 'M', u't'),
(0x1D636, 'M', u'u'),
(0x1D637, 'M', u'v'),
(0x1D638, 'M', u'w'),
(0x1D639, 'M', u'x'),
(0x1D63A, 'M', u'y'),
(0x1D63B, 'M', u'z'),
(0x1D63C, 'M', u'a'),
(0x1D63D, 'M', u'b'),
(0x1D63E, 'M', u'c'),
(0x1D63F, 'M', u'd'),
(0x1D640, 'M', u'e'),
(0x1D641, 'M', u'f'),
(0x1D642, 'M', u'g'),
(0x1D643, 'M', u'h'),
(0x1D644, 'M', u'i'),
(0x1D645, 'M', u'j'),
(0x1D646, 'M', u'k'),
(0x1D647, 'M', u'l'),
(0x1D648, 'M', u'm'),
(0x1D649, 'M', u'n'),
(0x1D64A, 'M', u'o'),
(0x1D64B, 'M', u'p'),
(0x1D64C, 'M', u'q'),
(0x1D64D, 'M', u'r'),
(0x1D64E, 'M', u's'),
(0x1D64F, 'M', u't'),
(0x1D650, 'M', u'u'),
(0x1D651, 'M', u'v'),
(0x1D652, 'M', u'w'),
(0x1D653, 'M', u'x'),
(0x1D654, 'M', u'y'),
(0x1D655, 'M', u'z'),
(0x1D656, 'M', u'a'),
]
def _seg_64():
return [
(0x1D657, 'M', u'b'),
(0x1D658, 'M', u'c'),
(0x1D659, 'M', u'd'),
(0x1D65A, 'M', u'e'),
(0x1D65B, 'M', u'f'),
(0x1D65C, 'M', u'g'),
(0x1D65D, 'M', u'h'),
(0x1D65E, 'M', u'i'),
(0x1D65F, 'M', u'j'),
(0x1D660, 'M', u'k'),
(0x1D661, 'M', u'l'),
(0x1D662, 'M', u'm'),
(0x1D663, 'M', u'n'),
(0x1D664, 'M', u'o'),
(0x1D665, 'M', u'p'),
(0x1D666, 'M', u'q'),
(0x1D667, 'M', u'r'),
(0x1D668, 'M', u's'),
(0x1D669, 'M', u't'),
(0x1D66A, 'M', u'u'),
(0x1D66B, 'M', u'v'),
(0x1D66C, 'M', u'w'),
(0x1D66D, 'M', u'x'),
(0x1D66E, 'M', u'y'),
(0x1D66F, 'M', u'z'),
(0x1D670, 'M', u'a'),
(0x1D671, 'M', u'b'),
(0x1D672, 'M', u'c'),
(0x1D673, 'M', u'd'),
(0x1D674, 'M', u'e'),
(0x1D675, 'M', u'f'),
(0x1D676, 'M', u'g'),
(0x1D677, 'M', u'h'),
(0x1D678, 'M', u'i'),
(0x1D679, 'M', u'j'),
(0x1D67A, 'M', u'k'),
(0x1D67B, 'M', u'l'),
(0x1D67C, 'M', u'm'),
(0x1D67D, 'M', u'n'),
(0x1D67E, 'M', u'o'),
(0x1D67F, 'M', u'p'),
(0x1D680, 'M', u'q'),
(0x1D681, 'M', u'r'),
(0x1D682, 'M', u's'),
(0x1D683, 'M', u't'),
(0x1D684, 'M', u'u'),
(0x1D685, 'M', u'v'),
(0x1D686, 'M', u'w'),
(0x1D687, 'M', u'x'),
(0x1D688, 'M', u'y'),
(0x1D689, 'M', u'z'),
(0x1D68A, 'M', u'a'),
(0x1D68B, 'M', u'b'),
(0x1D68C, 'M', u'c'),
(0x1D68D, 'M', u'd'),
(0x1D68E, 'M', u'e'),
(0x1D68F, 'M', u'f'),
(0x1D690, 'M', u'g'),
(0x1D691, 'M', u'h'),
(0x1D692, 'M', u'i'),
(0x1D693, 'M', u'j'),
(0x1D694, 'M', u'k'),
(0x1D695, 'M', u'l'),
(0x1D696, 'M', u'm'),
(0x1D697, 'M', u'n'),
(0x1D698, 'M', u'o'),
(0x1D699, 'M', u'p'),
(0x1D69A, 'M', u'q'),
(0x1D69B, 'M', u'r'),
(0x1D69C, 'M', u's'),
(0x1D69D, 'M', u't'),
(0x1D69E, 'M', u'u'),
(0x1D69F, 'M', u'v'),
(0x1D6A0, 'M', u'w'),
(0x1D6A1, 'M', u'x'),
(0x1D6A2, 'M', u'y'),
(0x1D6A3, 'M', u'z'),
(0x1D6A4, 'M', u'ı'),
(0x1D6A5, 'M', u'ȷ'),
(0x1D6A6, 'X'),
(0x1D6A8, 'M', u'α'),
(0x1D6A9, 'M', u'β'),
(0x1D6AA, 'M', u'γ'),
(0x1D6AB, 'M', u'δ'),
(0x1D6AC, 'M', u'ε'),
(0x1D6AD, 'M', u'ζ'),
(0x1D6AE, 'M', u'η'),
(0x1D6AF, 'M', u'θ'),
(0x1D6B0, 'M', u'ι'),
(0x1D6B1, 'M', u'κ'),
(0x1D6B2, 'M', u'λ'),
(0x1D6B3, 'M', u'μ'),
(0x1D6B4, 'M', u'ν'),
(0x1D6B5, 'M', u'ξ'),
(0x1D6B6, 'M', u'ο'),
(0x1D6B7, 'M', u'π'),
(0x1D6B8, 'M', u'ρ'),
(0x1D6B9, 'M', u'θ'),
(0x1D6BA, 'M', u'σ'),
(0x1D6BB, 'M', u'τ'),
]
def _seg_65():
return [
(0x1D6BC, 'M', u'υ'),
(0x1D6BD, 'M', u'φ'),
(0x1D6BE, 'M', u'χ'),
(0x1D6BF, 'M', u'ψ'),
(0x1D6C0, 'M', u'ω'),
(0x1D6C1, 'M', u'∇'),
(0x1D6C2, 'M', u'α'),
(0x1D6C3, 'M', u'β'),
(0x1D6C4, 'M', u'γ'),
(0x1D6C5, 'M', u'δ'),
(0x1D6C6, 'M', u'ε'),
(0x1D6C7, 'M', u'ζ'),
(0x1D6C8, 'M', u'η'),
(0x1D6C9, 'M', u'θ'),
(0x1D6CA, 'M', u'ι'),
(0x1D6CB, 'M', u'κ'),
(0x1D6CC, 'M', u'λ'),
(0x1D6CD, 'M', u'μ'),
(0x1D6CE, 'M', u'ν'),
(0x1D6CF, 'M', u'ξ'),
(0x1D6D0, 'M', u'ο'),
(0x1D6D1, 'M', u'π'),
(0x1D6D2, 'M', u'ρ'),
(0x1D6D3, 'M', u'σ'),
(0x1D6D5, 'M', u'τ'),
(0x1D6D6, 'M', u'υ'),
(0x1D6D7, 'M', u'φ'),
(0x1D6D8, 'M', u'χ'),
(0x1D6D9, 'M', u'ψ'),
(0x1D6DA, 'M', u'ω'),
(0x1D6DB, 'M', u'∂'),
(0x1D6DC, 'M', u'ε'),
(0x1D6DD, 'M', u'θ'),
(0x1D6DE, 'M', u'κ'),
(0x1D6DF, 'M', u'φ'),
(0x1D6E0, 'M', u'ρ'),
(0x1D6E1, 'M', u'π'),
(0x1D6E2, 'M', u'α'),
(0x1D6E3, 'M', u'β'),
(0x1D6E4, 'M', u'γ'),
(0x1D6E5, 'M', u'δ'),
(0x1D6E6, 'M', u'ε'),
(0x1D6E7, 'M', u'ζ'),
(0x1D6E8, 'M', u'η'),
(0x1D6E9, 'M', u'θ'),
(0x1D6EA, 'M', u'ι'),
(0x1D6EB, 'M', u'κ'),
(0x1D6EC, 'M', u'λ'),
(0x1D6ED, 'M', u'μ'),
(0x1D6EE, 'M', u'ν'),
(0x1D6EF, 'M', u'ξ'),
(0x1D6F0, 'M', u'ο'),
(0x1D6F1, 'M', u'π'),
(0x1D6F2, 'M', u'ρ'),
(0x1D6F3, 'M', u'θ'),
(0x1D6F4, 'M', u'σ'),
(0x1D6F5, 'M', u'τ'),
(0x1D6F6, 'M', u'υ'),
(0x1D6F7, 'M', u'φ'),
(0x1D6F8, 'M', u'χ'),
(0x1D6F9, 'M', u'ψ'),
(0x1D6FA, 'M', u'ω'),
(0x1D6FB, 'M', u'∇'),
(0x1D6FC, 'M', u'α'),
(0x1D6FD, 'M', u'β'),
(0x1D6FE, 'M', u'γ'),
(0x1D6FF, 'M', u'δ'),
(0x1D700, 'M', u'ε'),
(0x1D701, 'M', u'ζ'),
(0x1D702, 'M', u'η'),
(0x1D703, 'M', u'θ'),
(0x1D704, 'M', u'ι'),
(0x1D705, 'M', u'κ'),
(0x1D706, 'M', u'λ'),
(0x1D707, 'M', u'μ'),
(0x1D708, 'M', u'ν'),
(0x1D709, 'M', u'ξ'),
(0x1D70A, 'M', u'ο'),
(0x1D70B, 'M', u'π'),
(0x1D70C, 'M', u'ρ'),
(0x1D70D, 'M', u'σ'),
(0x1D70F, 'M', u'τ'),
(0x1D710, 'M', u'υ'),
(0x1D711, 'M', u'φ'),
(0x1D712, 'M', u'χ'),
(0x1D713, 'M', u'ψ'),
(0x1D714, 'M', u'ω'),
(0x1D715, 'M', u'∂'),
(0x1D716, 'M', u'ε'),
(0x1D717, 'M', u'θ'),
(0x1D718, 'M', u'κ'),
(0x1D719, 'M', u'φ'),
(0x1D71A, 'M', u'ρ'),
(0x1D71B, 'M', u'π'),
(0x1D71C, 'M', u'α'),
(0x1D71D, 'M', u'β'),
(0x1D71E, 'M', u'γ'),
(0x1D71F, 'M', u'δ'),
(0x1D720, 'M', u'ε'),
(0x1D721, 'M', u'ζ'),
]
def _seg_66():
return [
(0x1D722, 'M', u'η'),
(0x1D723, 'M', u'θ'),
(0x1D724, 'M', u'ι'),
(0x1D725, 'M', u'κ'),
(0x1D726, 'M', u'λ'),
(0x1D727, 'M', u'μ'),
(0x1D728, 'M', u'ν'),
(0x1D729, 'M', u'ξ'),
(0x1D72A, 'M', u'ο'),
(0x1D72B, 'M', u'π'),
(0x1D72C, 'M', u'ρ'),
(0x1D72D, 'M', u'θ'),
(0x1D72E, 'M', u'σ'),
(0x1D72F, 'M', u'τ'),
(0x1D730, 'M', u'υ'),
(0x1D731, 'M', u'φ'),
(0x1D732, 'M', u'χ'),
(0x1D733, 'M', u'ψ'),
(0x1D734, 'M', u'ω'),
(0x1D735, 'M', u'∇'),
(0x1D736, 'M', u'α'),
(0x1D737, 'M', u'β'),
(0x1D738, 'M', u'γ'),
(0x1D739, 'M', u'δ'),
(0x1D73A, 'M', u'ε'),
(0x1D73B, 'M', u'ζ'),
(0x1D73C, 'M', u'η'),
(0x1D73D, 'M', u'θ'),
(0x1D73E, 'M', u'ι'),
(0x1D73F, 'M', u'κ'),
(0x1D740, 'M', u'λ'),
(0x1D741, 'M', u'μ'),
(0x1D742, 'M', u'ν'),
(0x1D743, 'M', u'ξ'),
(0x1D744, 'M', u'ο'),
(0x1D745, 'M', u'π'),
(0x1D746, 'M', u'ρ'),
(0x1D747, 'M', u'σ'),
(0x1D749, 'M', u'τ'),
(0x1D74A, 'M', u'υ'),
(0x1D74B, 'M', u'φ'),
(0x1D74C, 'M', u'χ'),
(0x1D74D, 'M', u'ψ'),
(0x1D74E, 'M', u'ω'),
(0x1D74F, 'M', u'∂'),
(0x1D750, 'M', u'ε'),
(0x1D751, 'M', u'θ'),
(0x1D752, 'M', u'κ'),
(0x1D753, 'M', u'φ'),
(0x1D754, 'M', u'ρ'),
(0x1D755, 'M', u'π'),
(0x1D756, 'M', u'α'),
(0x1D757, 'M', u'β'),
(0x1D758, 'M', u'γ'),
(0x1D759, 'M', u'δ'),
(0x1D75A, 'M', u'ε'),
(0x1D75B, 'M', u'ζ'),
(0x1D75C, 'M', u'η'),
(0x1D75D, 'M', u'θ'),
(0x1D75E, 'M', u'ι'),
(0x1D75F, 'M', u'κ'),
(0x1D760, 'M', u'λ'),
(0x1D761, 'M', u'μ'),
(0x1D762, 'M', u'ν'),
(0x1D763, 'M', u'ξ'),
(0x1D764, 'M', u'ο'),
(0x1D765, 'M', u'π'),
(0x1D766, 'M', u'ρ'),
(0x1D767, 'M', u'θ'),
(0x1D768, 'M', u'σ'),
(0x1D769, 'M', u'τ'),
(0x1D76A, 'M', u'υ'),
(0x1D76B, 'M', u'φ'),
(0x1D76C, 'M', u'χ'),
(0x1D76D, 'M', u'ψ'),
(0x1D76E, 'M', u'ω'),
(0x1D76F, 'M', u'∇'),
(0x1D770, 'M', u'α'),
(0x1D771, 'M', u'β'),
(0x1D772, 'M', u'γ'),
(0x1D773, 'M', u'δ'),
(0x1D774, 'M', u'ε'),
(0x1D775, 'M', u'ζ'),
(0x1D776, 'M', u'η'),
(0x1D777, 'M', u'θ'),
(0x1D778, 'M', u'ι'),
(0x1D779, 'M', u'κ'),
(0x1D77A, 'M', u'λ'),
(0x1D77B, 'M', u'μ'),
(0x1D77C, 'M', u'ν'),
(0x1D77D, 'M', u'ξ'),
(0x1D77E, 'M', u'ο'),
(0x1D77F, 'M', u'π'),
(0x1D780, 'M', u'ρ'),
(0x1D781, 'M', u'σ'),
(0x1D783, 'M', u'τ'),
(0x1D784, 'M', u'υ'),
(0x1D785, 'M', u'φ'),
(0x1D786, 'M', u'χ'),
(0x1D787, 'M', u'ψ'),
]
def _seg_67():
return [
(0x1D788, 'M', u'ω'),
(0x1D789, 'M', u'∂'),
(0x1D78A, 'M', u'ε'),
(0x1D78B, 'M', u'θ'),
(0x1D78C, 'M', u'κ'),
(0x1D78D, 'M', u'φ'),
(0x1D78E, 'M', u'ρ'),
(0x1D78F, 'M', u'π'),
(0x1D790, 'M', u'α'),
(0x1D791, 'M', u'β'),
(0x1D792, 'M', u'γ'),
(0x1D793, 'M', u'δ'),
(0x1D794, 'M', u'ε'),
(0x1D795, 'M', u'ζ'),
(0x1D796, 'M', u'η'),
(0x1D797, 'M', u'θ'),
(0x1D798, 'M', u'ι'),
(0x1D799, 'M', u'κ'),
(0x1D79A, 'M', u'λ'),
(0x1D79B, 'M', u'μ'),
(0x1D79C, 'M', u'ν'),
(0x1D79D, 'M', u'ξ'),
(0x1D79E, 'M', u'ο'),
(0x1D79F, 'M', u'π'),
(0x1D7A0, 'M', u'ρ'),
(0x1D7A1, 'M', u'θ'),
(0x1D7A2, 'M', u'σ'),
(0x1D7A3, 'M', u'τ'),
(0x1D7A4, 'M', u'υ'),
(0x1D7A5, 'M', u'φ'),
(0x1D7A6, 'M', u'χ'),
(0x1D7A7, 'M', u'ψ'),
(0x1D7A8, 'M', u'ω'),
(0x1D7A9, 'M', u'∇'),
(0x1D7AA, 'M', u'α'),
(0x1D7AB, 'M', u'β'),
(0x1D7AC, 'M', u'γ'),
(0x1D7AD, 'M', u'δ'),
(0x1D7AE, 'M', u'ε'),
(0x1D7AF, 'M', u'ζ'),
(0x1D7B0, 'M', u'η'),
(0x1D7B1, 'M', u'θ'),
(0x1D7B2, 'M', u'ι'),
(0x1D7B3, 'M', u'κ'),
(0x1D7B4, 'M', u'λ'),
(0x1D7B5, 'M', u'μ'),
(0x1D7B6, 'M', u'ν'),
(0x1D7B7, 'M', u'ξ'),
(0x1D7B8, 'M', u'ο'),
(0x1D7B9, 'M', u'π'),
(0x1D7BA, 'M', u'ρ'),
(0x1D7BB, 'M', u'σ'),
(0x1D7BD, 'M', u'τ'),
(0x1D7BE, 'M', u'υ'),
(0x1D7BF, 'M', u'φ'),
(0x1D7C0, 'M', u'χ'),
(0x1D7C1, 'M', u'ψ'),
(0x1D7C2, 'M', u'ω'),
(0x1D7C3, 'M', u'∂'),
(0x1D7C4, 'M', u'ε'),
(0x1D7C5, 'M', u'θ'),
(0x1D7C6, 'M', u'κ'),
(0x1D7C7, 'M', u'φ'),
(0x1D7C8, 'M', u'ρ'),
(0x1D7C9, 'M', u'π'),
(0x1D7CA, 'M', u'ϝ'),
(0x1D7CC, 'X'),
(0x1D7CE, 'M', u'0'),
(0x1D7CF, 'M', u'1'),
(0x1D7D0, 'M', u'2'),
(0x1D7D1, 'M', u'3'),
(0x1D7D2, 'M', u'4'),
(0x1D7D3, 'M', u'5'),
(0x1D7D4, 'M', u'6'),
(0x1D7D5, 'M', u'7'),
(0x1D7D6, 'M', u'8'),
(0x1D7D7, 'M', u'9'),
(0x1D7D8, 'M', u'0'),
(0x1D7D9, 'M', u'1'),
(0x1D7DA, 'M', u'2'),
(0x1D7DB, 'M', u'3'),
(0x1D7DC, 'M', u'4'),
(0x1D7DD, 'M', u'5'),
(0x1D7DE, 'M', u'6'),
(0x1D7DF, 'M', u'7'),
(0x1D7E0, 'M', u'8'),
(0x1D7E1, 'M', u'9'),
(0x1D7E2, 'M', u'0'),
(0x1D7E3, 'M', u'1'),
(0x1D7E4, 'M', u'2'),
(0x1D7E5, 'M', u'3'),
(0x1D7E6, 'M', u'4'),
(0x1D7E7, 'M', u'5'),
(0x1D7E8, 'M', u'6'),
(0x1D7E9, 'M', u'7'),
(0x1D7EA, 'M', u'8'),
(0x1D7EB, 'M', u'9'),
(0x1D7EC, 'M', u'0'),
(0x1D7ED, 'M', u'1'),
(0x1D7EE, 'M', u'2'),
]
def _seg_68():
return [
(0x1D7EF, 'M', u'3'),
(0x1D7F0, 'M', u'4'),
(0x1D7F1, 'M', u'5'),
(0x1D7F2, 'M', u'6'),
(0x1D7F3, 'M', u'7'),
(0x1D7F4, 'M', u'8'),
(0x1D7F5, 'M', u'9'),
(0x1D7F6, 'M', u'0'),
(0x1D7F7, 'M', u'1'),
(0x1D7F8, 'M', u'2'),
(0x1D7F9, 'M', u'3'),
(0x1D7FA, 'M', u'4'),
(0x1D7FB, 'M', u'5'),
(0x1D7FC, 'M', u'6'),
(0x1D7FD, 'M', u'7'),
(0x1D7FE, 'M', u'8'),
(0x1D7FF, 'M', u'9'),
(0x1D800, 'V'),
(0x1DA8C, 'X'),
(0x1DA9B, 'V'),
(0x1DAA0, 'X'),
(0x1DAA1, 'V'),
(0x1DAB0, 'X'),
(0x1E000, 'V'),
(0x1E007, 'X'),
(0x1E008, 'V'),
(0x1E019, 'X'),
(0x1E01B, 'V'),
(0x1E022, 'X'),
(0x1E023, 'V'),
(0x1E025, 'X'),
(0x1E026, 'V'),
(0x1E02B, 'X'),
(0x1E800, 'V'),
(0x1E8C5, 'X'),
(0x1E8C7, 'V'),
(0x1E8D7, 'X'),
(0x1E900, 'M', u'𞤢'),
(0x1E901, 'M', u'𞤣'),
(0x1E902, 'M', u'𞤤'),
(0x1E903, 'M', u'𞤥'),
(0x1E904, 'M', u'𞤦'),
(0x1E905, 'M', u'𞤧'),
(0x1E906, 'M', u'𞤨'),
(0x1E907, 'M', u'𞤩'),
(0x1E908, 'M', u'𞤪'),
(0x1E909, 'M', u'𞤫'),
(0x1E90A, 'M', u'𞤬'),
(0x1E90B, 'M', u'𞤭'),
(0x1E90C, 'M', u'𞤮'),
(0x1E90D, 'M', u'𞤯'),
(0x1E90E, 'M', u'𞤰'),
(0x1E90F, 'M', u'𞤱'),
(0x1E910, 'M', u'𞤲'),
(0x1E911, 'M', u'𞤳'),
(0x1E912, 'M', u'𞤴'),
(0x1E913, 'M', u'𞤵'),
(0x1E914, 'M', u'𞤶'),
(0x1E915, 'M', u'𞤷'),
(0x1E916, 'M', u'𞤸'),
(0x1E917, 'M', u'𞤹'),
(0x1E918, 'M', u'𞤺'),
(0x1E919, 'M', u'𞤻'),
(0x1E91A, 'M', u'𞤼'),
(0x1E91B, 'M', u'𞤽'),
(0x1E91C, 'M', u'𞤾'),
(0x1E91D, 'M', u'𞤿'),
(0x1E91E, 'M', u'𞥀'),
(0x1E91F, 'M', u'𞥁'),
(0x1E920, 'M', u'𞥂'),
(0x1E921, 'M', u'𞥃'),
(0x1E922, 'V'),
(0x1E94B, 'X'),
(0x1E950, 'V'),
(0x1E95A, 'X'),
(0x1E95E, 'V'),
(0x1E960, 'X'),
(0x1EC71, 'V'),
(0x1ECB5, 'X'),
(0x1EE00, 'M', u'ا'),
(0x1EE01, 'M', u'ب'),
(0x1EE02, 'M', u'ج'),
(0x1EE03, 'M', u'د'),
(0x1EE04, 'X'),
(0x1EE05, 'M', u'و'),
(0x1EE06, 'M', u'ز'),
(0x1EE07, 'M', u'ح'),
(0x1EE08, 'M', u'ط'),
(0x1EE09, 'M', u'ي'),
(0x1EE0A, 'M', u'ك'),
(0x1EE0B, 'M', u'ل'),
(0x1EE0C, 'M', u'م'),
(0x1EE0D, 'M', u'ن'),
(0x1EE0E, 'M', u'س'),
(0x1EE0F, 'M', u'ع'),
(0x1EE10, 'M', u'ف'),
(0x1EE11, 'M', u'ص'),
(0x1EE12, 'M', u'ق'),
(0x1EE13, 'M', u'ر'),
(0x1EE14, 'M', u'ش'),
]
def _seg_69():
return [
(0x1EE15, 'M', u'ت'),
(0x1EE16, 'M', u'ث'),
(0x1EE17, 'M', u'خ'),
(0x1EE18, 'M', u'ذ'),
(0x1EE19, 'M', u'ض'),
(0x1EE1A, 'M', u'ظ'),
(0x1EE1B, 'M', u'غ'),
(0x1EE1C, 'M', u'ٮ'),
(0x1EE1D, 'M', u'ں'),
(0x1EE1E, 'M', u'ڡ'),
(0x1EE1F, 'M', u'ٯ'),
(0x1EE20, 'X'),
(0x1EE21, 'M', u'ب'),
(0x1EE22, 'M', u'ج'),
(0x1EE23, 'X'),
(0x1EE24, 'M', u'ه'),
(0x1EE25, 'X'),
(0x1EE27, 'M', u'ح'),
(0x1EE28, 'X'),
(0x1EE29, 'M', u'ي'),
(0x1EE2A, 'M', u'ك'),
(0x1EE2B, 'M', u'ل'),
(0x1EE2C, 'M', u'م'),
(0x1EE2D, 'M', u'ن'),
(0x1EE2E, 'M', u'س'),
(0x1EE2F, 'M', u'ع'),
(0x1EE30, 'M', u'ف'),
(0x1EE31, 'M', u'ص'),
(0x1EE32, 'M', u'ق'),
(0x1EE33, 'X'),
(0x1EE34, 'M', u'ش'),
(0x1EE35, 'M', u'ت'),
(0x1EE36, 'M', u'ث'),
(0x1EE37, 'M', u'خ'),
(0x1EE38, 'X'),
(0x1EE39, 'M', u'ض'),
(0x1EE3A, 'X'),
(0x1EE3B, 'M', u'غ'),
(0x1EE3C, 'X'),
(0x1EE42, 'M', u'ج'),
(0x1EE43, 'X'),
(0x1EE47, 'M', u'ح'),
(0x1EE48, 'X'),
(0x1EE49, 'M', u'ي'),
(0x1EE4A, 'X'),
(0x1EE4B, 'M', u'ل'),
(0x1EE4C, 'X'),
(0x1EE4D, 'M', u'ن'),
(0x1EE4E, 'M', u'س'),
(0x1EE4F, 'M', u'ع'),
(0x1EE50, 'X'),
(0x1EE51, 'M', u'ص'),
(0x1EE52, 'M', u'ق'),
(0x1EE53, 'X'),
(0x1EE54, 'M', u'ش'),
(0x1EE55, 'X'),
(0x1EE57, 'M', u'خ'),
(0x1EE58, 'X'),
(0x1EE59, 'M', u'ض'),
(0x1EE5A, 'X'),
(0x1EE5B, 'M', u'غ'),
(0x1EE5C, 'X'),
(0x1EE5D, 'M', u'ں'),
(0x1EE5E, 'X'),
(0x1EE5F, 'M', u'ٯ'),
(0x1EE60, 'X'),
(0x1EE61, 'M', u'ب'),
(0x1EE62, 'M', u'ج'),
(0x1EE63, 'X'),
(0x1EE64, 'M', u'ه'),
(0x1EE65, 'X'),
(0x1EE67, 'M', u'ح'),
(0x1EE68, 'M', u'ط'),
(0x1EE69, 'M', u'ي'),
(0x1EE6A, 'M', u'ك'),
(0x1EE6B, 'X'),
(0x1EE6C, 'M', u'م'),
(0x1EE6D, 'M', u'ن'),
(0x1EE6E, 'M', u'س'),
(0x1EE6F, 'M', u'ع'),
(0x1EE70, 'M', u'ف'),
(0x1EE71, 'M', u'ص'),
(0x1EE72, 'M', u'ق'),
(0x1EE73, 'X'),
(0x1EE74, 'M', u'ش'),
(0x1EE75, 'M', u'ت'),
(0x1EE76, 'M', u'ث'),
(0x1EE77, 'M', u'خ'),
(0x1EE78, 'X'),
(0x1EE79, 'M', u'ض'),
(0x1EE7A, 'M', u'ظ'),
(0x1EE7B, 'M', u'غ'),
(0x1EE7C, 'M', u'ٮ'),
(0x1EE7D, 'X'),
(0x1EE7E, 'M', u'ڡ'),
(0x1EE7F, 'X'),
(0x1EE80, 'M', u'ا'),
(0x1EE81, 'M', u'ب'),
(0x1EE82, 'M', u'ج'),
(0x1EE83, 'M', u'د'),
]
def _seg_70():
return [
(0x1EE84, 'M', u'ه'),
(0x1EE85, 'M', u'و'),
(0x1EE86, 'M', u'ز'),
(0x1EE87, 'M', u'ح'),
(0x1EE88, 'M', u'ط'),
(0x1EE89, 'M', u'ي'),
(0x1EE8A, 'X'),
(0x1EE8B, 'M', u'ل'),
(0x1EE8C, 'M', u'م'),
(0x1EE8D, 'M', u'ن'),
(0x1EE8E, 'M', u'س'),
(0x1EE8F, 'M', u'ع'),
(0x1EE90, 'M', u'ف'),
(0x1EE91, 'M', u'ص'),
(0x1EE92, 'M', u'ق'),
(0x1EE93, 'M', u'ر'),
(0x1EE94, 'M', u'ش'),
(0x1EE95, 'M', u'ت'),
(0x1EE96, 'M', u'ث'),
(0x1EE97, 'M', u'خ'),
(0x1EE98, 'M', u'ذ'),
(0x1EE99, 'M', u'ض'),
(0x1EE9A, 'M', u'ظ'),
(0x1EE9B, 'M', u'غ'),
(0x1EE9C, 'X'),
(0x1EEA1, 'M', u'ب'),
(0x1EEA2, 'M', u'ج'),
(0x1EEA3, 'M', u'د'),
(0x1EEA4, 'X'),
(0x1EEA5, 'M', u'و'),
(0x1EEA6, 'M', u'ز'),
(0x1EEA7, 'M', u'ح'),
(0x1EEA8, 'M', u'ط'),
(0x1EEA9, 'M', u'ي'),
(0x1EEAA, 'X'),
(0x1EEAB, 'M', u'ل'),
(0x1EEAC, 'M', u'م'),
(0x1EEAD, 'M', u'ن'),
(0x1EEAE, 'M', u'س'),
(0x1EEAF, 'M', u'ع'),
(0x1EEB0, 'M', u'ف'),
(0x1EEB1, 'M', u'ص'),
(0x1EEB2, 'M', u'ق'),
(0x1EEB3, 'M', u'ر'),
(0x1EEB4, 'M', u'ش'),
(0x1EEB5, 'M', u'ت'),
(0x1EEB6, 'M', u'ث'),
(0x1EEB7, 'M', u'خ'),
(0x1EEB8, 'M', u'ذ'),
(0x1EEB9, 'M', u'ض'),
(0x1EEBA, 'M', u'ظ'),
(0x1EEBB, 'M', u'غ'),
(0x1EEBC, 'X'),
(0x1EEF0, 'V'),
(0x1EEF2, 'X'),
(0x1F000, 'V'),
(0x1F02C, 'X'),
(0x1F030, 'V'),
(0x1F094, 'X'),
(0x1F0A0, 'V'),
(0x1F0AF, 'X'),
(0x1F0B1, 'V'),
(0x1F0C0, 'X'),
(0x1F0C1, 'V'),
(0x1F0D0, 'X'),
(0x1F0D1, 'V'),
(0x1F0F6, 'X'),
(0x1F101, '3', u'0,'),
(0x1F102, '3', u'1,'),
(0x1F103, '3', u'2,'),
(0x1F104, '3', u'3,'),
(0x1F105, '3', u'4,'),
(0x1F106, '3', u'5,'),
(0x1F107, '3', u'6,'),
(0x1F108, '3', u'7,'),
(0x1F109, '3', u'8,'),
(0x1F10A, '3', u'9,'),
(0x1F10B, 'V'),
(0x1F10D, 'X'),
(0x1F110, '3', u'(a)'),
(0x1F111, '3', u'(b)'),
(0x1F112, '3', u'(c)'),
(0x1F113, '3', u'(d)'),
(0x1F114, '3', u'(e)'),
(0x1F115, '3', u'(f)'),
(0x1F116, '3', u'(g)'),
(0x1F117, '3', u'(h)'),
(0x1F118, '3', u'(i)'),
(0x1F119, '3', u'(j)'),
(0x1F11A, '3', u'(k)'),
(0x1F11B, '3', u'(l)'),
(0x1F11C, '3', u'(m)'),
(0x1F11D, '3', u'(n)'),
(0x1F11E, '3', u'(o)'),
(0x1F11F, '3', u'(p)'),
(0x1F120, '3', u'(q)'),
(0x1F121, '3', u'(r)'),
(0x1F122, '3', u'(s)'),
(0x1F123, '3', u'(t)'),
(0x1F124, '3', u'(u)'),
]
def _seg_71():
return [
(0x1F125, '3', u'(v)'),
(0x1F126, '3', u'(w)'),
(0x1F127, '3', u'(x)'),
(0x1F128, '3', u'(y)'),
(0x1F129, '3', u'(z)'),
(0x1F12A, 'M', u'〔s〕'),
(0x1F12B, 'M', u'c'),
(0x1F12C, 'M', u'r'),
(0x1F12D, 'M', u'cd'),
(0x1F12E, 'M', u'wz'),
(0x1F12F, 'V'),
(0x1F130, 'M', u'a'),
(0x1F131, 'M', u'b'),
(0x1F132, 'M', u'c'),
(0x1F133, 'M', u'd'),
(0x1F134, 'M', u'e'),
(0x1F135, 'M', u'f'),
(0x1F136, 'M', u'g'),
(0x1F137, 'M', u'h'),
(0x1F138, 'M', u'i'),
(0x1F139, 'M', u'j'),
(0x1F13A, 'M', u'k'),
(0x1F13B, 'M', u'l'),
(0x1F13C, 'M', u'm'),
(0x1F13D, 'M', u'n'),
(0x1F13E, 'M', u'o'),
(0x1F13F, 'M', u'p'),
(0x1F140, 'M', u'q'),
(0x1F141, 'M', u'r'),
(0x1F142, 'M', u's'),
(0x1F143, 'M', u't'),
(0x1F144, 'M', u'u'),
(0x1F145, 'M', u'v'),
(0x1F146, 'M', u'w'),
(0x1F147, 'M', u'x'),
(0x1F148, 'M', u'y'),
(0x1F149, 'M', u'z'),
(0x1F14A, 'M', u'hv'),
(0x1F14B, 'M', u'mv'),
(0x1F14C, 'M', u'sd'),
(0x1F14D, 'M', u'ss'),
(0x1F14E, 'M', u'ppv'),
(0x1F14F, 'M', u'wc'),
(0x1F150, 'V'),
(0x1F16A, 'M', u'mc'),
(0x1F16B, 'M', u'md'),
(0x1F16C, 'X'),
(0x1F170, 'V'),
(0x1F190, 'M', u'dj'),
(0x1F191, 'V'),
(0x1F1AD, 'X'),
(0x1F1E6, 'V'),
(0x1F200, 'M', u'ほか'),
(0x1F201, 'M', u'ココ'),
(0x1F202, 'M', u'サ'),
(0x1F203, 'X'),
(0x1F210, 'M', u'手'),
(0x1F211, 'M', u'字'),
(0x1F212, 'M', u'双'),
(0x1F213, 'M', u'デ'),
(0x1F214, 'M', u'二'),
(0x1F215, 'M', u'多'),
(0x1F216, 'M', u'解'),
(0x1F217, 'M', u'天'),
(0x1F218, 'M', u'交'),
(0x1F219, 'M', u'映'),
(0x1F21A, 'M', u'無'),
(0x1F21B, 'M', u'料'),
(0x1F21C, 'M', u'前'),
(0x1F21D, 'M', u'後'),
(0x1F21E, 'M', u'再'),
(0x1F21F, 'M', u'新'),
(0x1F220, 'M', u'初'),
(0x1F221, 'M', u'終'),
(0x1F222, 'M', u'生'),
(0x1F223, 'M', u'販'),
(0x1F224, 'M', u'声'),
(0x1F225, 'M', u'吹'),
(0x1F226, 'M', u'演'),
(0x1F227, 'M', u'投'),
(0x1F228, 'M', u'捕'),
(0x1F229, 'M', u'一'),
(0x1F22A, 'M', u'三'),
(0x1F22B, 'M', u'遊'),
(0x1F22C, 'M', u'左'),
(0x1F22D, 'M', u'中'),
(0x1F22E, 'M', u'右'),
(0x1F22F, 'M', u'指'),
(0x1F230, 'M', u'走'),
(0x1F231, 'M', u'打'),
(0x1F232, 'M', u'禁'),
(0x1F233, 'M', u'空'),
(0x1F234, 'M', u'合'),
(0x1F235, 'M', u'満'),
(0x1F236, 'M', u'有'),
(0x1F237, 'M', u'月'),
(0x1F238, 'M', u'申'),
(0x1F239, 'M', u'割'),
(0x1F23A, 'M', u'営'),
(0x1F23B, 'M', u'配'),
]
def _seg_72():
return [
(0x1F23C, 'X'),
(0x1F240, 'M', u'〔本〕'),
(0x1F241, 'M', u'〔三〕'),
(0x1F242, 'M', u'〔二〕'),
(0x1F243, 'M', u'〔安〕'),
(0x1F244, 'M', u'〔点〕'),
(0x1F245, 'M', u'〔打〕'),
(0x1F246, 'M', u'〔盗〕'),
(0x1F247, 'M', u'〔勝〕'),
(0x1F248, 'M', u'〔敗〕'),
(0x1F249, 'X'),
(0x1F250, 'M', u'得'),
(0x1F251, 'M', u'可'),
(0x1F252, 'X'),
(0x1F260, 'V'),
(0x1F266, 'X'),
(0x1F300, 'V'),
(0x1F6D5, 'X'),
(0x1F6E0, 'V'),
(0x1F6ED, 'X'),
(0x1F6F0, 'V'),
(0x1F6FA, 'X'),
(0x1F700, 'V'),
(0x1F774, 'X'),
(0x1F780, 'V'),
(0x1F7D9, 'X'),
(0x1F800, 'V'),
(0x1F80C, 'X'),
(0x1F810, 'V'),
(0x1F848, 'X'),
(0x1F850, 'V'),
(0x1F85A, 'X'),
(0x1F860, 'V'),
(0x1F888, 'X'),
(0x1F890, 'V'),
(0x1F8AE, 'X'),
(0x1F900, 'V'),
(0x1F90C, 'X'),
(0x1F910, 'V'),
(0x1F93F, 'X'),
(0x1F940, 'V'),
(0x1F971, 'X'),
(0x1F973, 'V'),
(0x1F977, 'X'),
(0x1F97A, 'V'),
(0x1F97B, 'X'),
(0x1F97C, 'V'),
(0x1F9A3, 'X'),
(0x1F9B0, 'V'),
(0x1F9BA, 'X'),
(0x1F9C0, 'V'),
(0x1F9C3, 'X'),
(0x1F9D0, 'V'),
(0x1FA00, 'X'),
(0x1FA60, 'V'),
(0x1FA6E, 'X'),
(0x20000, 'V'),
(0x2A6D7, 'X'),
(0x2A700, 'V'),
(0x2B735, 'X'),
(0x2B740, 'V'),
(0x2B81E, 'X'),
(0x2B820, 'V'),
(0x2CEA2, 'X'),
(0x2CEB0, 'V'),
(0x2EBE1, 'X'),
(0x2F800, 'M', u'丽'),
(0x2F801, 'M', u'丸'),
(0x2F802, 'M', u'乁'),
(0x2F803, 'M', u'𠄢'),
(0x2F804, 'M', u'你'),
(0x2F805, 'M', u'侮'),
(0x2F806, 'M', u'侻'),
(0x2F807, 'M', u'倂'),
(0x2F808, 'M', u'偺'),
(0x2F809, 'M', u'備'),
(0x2F80A, 'M', u'僧'),
(0x2F80B, 'M', u'像'),
(0x2F80C, 'M', u'㒞'),
(0x2F80D, 'M', u'𠘺'),
(0x2F80E, 'M', u'免'),
(0x2F80F, 'M', u'兔'),
(0x2F810, 'M', u'兤'),
(0x2F811, 'M', u'具'),
(0x2F812, 'M', u'𠔜'),
(0x2F813, 'M', u'㒹'),
(0x2F814, 'M', u'內'),
(0x2F815, 'M', u'再'),
(0x2F816, 'M', u'𠕋'),
(0x2F817, 'M', u'冗'),
(0x2F818, 'M', u'冤'),
(0x2F819, 'M', u'仌'),
(0x2F81A, 'M', u'冬'),
(0x2F81B, 'M', u'况'),
(0x2F81C, 'M', u'𩇟'),
(0x2F81D, 'M', u'凵'),
(0x2F81E, 'M', u'刃'),
(0x2F81F, 'M', u'㓟'),
(0x2F820, 'M', u'刻'),
(0x2F821, 'M', u'剆'),
]
def _seg_73():
return [
(0x2F822, 'M', u'割'),
(0x2F823, 'M', u'剷'),
(0x2F824, 'M', u'㔕'),
(0x2F825, 'M', u'勇'),
(0x2F826, 'M', u'勉'),
(0x2F827, 'M', u'勤'),
(0x2F828, 'M', u'勺'),
(0x2F829, 'M', u'包'),
(0x2F82A, 'M', u'匆'),
(0x2F82B, 'M', u'北'),
(0x2F82C, 'M', u'卉'),
(0x2F82D, 'M', u'卑'),
(0x2F82E, 'M', u'博'),
(0x2F82F, 'M', u'即'),
(0x2F830, 'M', u'卽'),
(0x2F831, 'M', u'卿'),
(0x2F834, 'M', u'𠨬'),
(0x2F835, 'M', u'灰'),
(0x2F836, 'M', u'及'),
(0x2F837, 'M', u'叟'),
(0x2F838, 'M', u'𠭣'),
(0x2F839, 'M', u'叫'),
(0x2F83A, 'M', u'叱'),
(0x2F83B, 'M', u'吆'),
(0x2F83C, 'M', u'咞'),
(0x2F83D, 'M', u'吸'),
(0x2F83E, 'M', u'呈'),
(0x2F83F, 'M', u'周'),
(0x2F840, 'M', u'咢'),
(0x2F841, 'M', u'哶'),
(0x2F842, 'M', u'唐'),
(0x2F843, 'M', u'啓'),
(0x2F844, 'M', u'啣'),
(0x2F845, 'M', u'善'),
(0x2F847, 'M', u'喙'),
(0x2F848, 'M', u'喫'),
(0x2F849, 'M', u'喳'),
(0x2F84A, 'M', u'嗂'),
(0x2F84B, 'M', u'圖'),
(0x2F84C, 'M', u'嘆'),
(0x2F84D, 'M', u'圗'),
(0x2F84E, 'M', u'噑'),
(0x2F84F, 'M', u'噴'),
(0x2F850, 'M', u'切'),
(0x2F851, 'M', u'壮'),
(0x2F852, 'M', u'城'),
(0x2F853, 'M', u'埴'),
(0x2F854, 'M', u'堍'),
(0x2F855, 'M', u'型'),
(0x2F856, 'M', u'堲'),
(0x2F857, 'M', u'報'),
(0x2F858, 'M', u'墬'),
(0x2F859, 'M', u'𡓤'),
(0x2F85A, 'M', u'売'),
(0x2F85B, 'M', u'壷'),
(0x2F85C, 'M', u'夆'),
(0x2F85D, 'M', u'多'),
(0x2F85E, 'M', u'夢'),
(0x2F85F, 'M', u'奢'),
(0x2F860, 'M', u'𡚨'),
(0x2F861, 'M', u'𡛪'),
(0x2F862, 'M', u'姬'),
(0x2F863, 'M', u'娛'),
(0x2F864, 'M', u'娧'),
(0x2F865, 'M', u'姘'),
(0x2F866, 'M', u'婦'),
(0x2F867, 'M', u'㛮'),
(0x2F868, 'X'),
(0x2F869, 'M', u'嬈'),
(0x2F86A, 'M', u'嬾'),
(0x2F86C, 'M', u'𡧈'),
(0x2F86D, 'M', u'寃'),
(0x2F86E, 'M', u'寘'),
(0x2F86F, 'M', u'寧'),
(0x2F870, 'M', u'寳'),
(0x2F871, 'M', u'𡬘'),
(0x2F872, 'M', u'寿'),
(0x2F873, 'M', u'将'),
(0x2F874, 'X'),
(0x2F875, 'M', u'尢'),
(0x2F876, 'M', u'㞁'),
(0x2F877, 'M', u'屠'),
(0x2F878, 'M', u'屮'),
(0x2F879, 'M', u'峀'),
(0x2F87A, 'M', u'岍'),
(0x2F87B, 'M', u'𡷤'),
(0x2F87C, 'M', u'嵃'),
(0x2F87D, 'M', u'𡷦'),
(0x2F87E, 'M', u'嵮'),
(0x2F87F, 'M', u'嵫'),
(0x2F880, 'M', u'嵼'),
(0x2F881, 'M', u'巡'),
(0x2F882, 'M', u'巢'),
(0x2F883, 'M', u'㠯'),
(0x2F884, 'M', u'巽'),
(0x2F885, 'M', u'帨'),
(0x2F886, 'M', u'帽'),
(0x2F887, 'M', u'幩'),
(0x2F888, 'M', u'㡢'),
(0x2F889, 'M', u'𢆃'),
]
def _seg_74():
return [
(0x2F88A, 'M', u'㡼'),
(0x2F88B, 'M', u'庰'),
(0x2F88C, 'M', u'庳'),
(0x2F88D, 'M', u'庶'),
(0x2F88E, 'M', u'廊'),
(0x2F88F, 'M', u'𪎒'),
(0x2F890, 'M', u'廾'),
(0x2F891, 'M', u'𢌱'),
(0x2F893, 'M', u'舁'),
(0x2F894, 'M', u'弢'),
(0x2F896, 'M', u'㣇'),
(0x2F897, 'M', u'𣊸'),
(0x2F898, 'M', u'𦇚'),
(0x2F899, 'M', u'形'),
(0x2F89A, 'M', u'彫'),
(0x2F89B, 'M', u'㣣'),
(0x2F89C, 'M', u'徚'),
(0x2F89D, 'M', u'忍'),
(0x2F89E, 'M', u'志'),
(0x2F89F, 'M', u'忹'),
(0x2F8A0, 'M', u'悁'),
(0x2F8A1, 'M', u'㤺'),
(0x2F8A2, 'M', u'㤜'),
(0x2F8A3, 'M', u'悔'),
(0x2F8A4, 'M', u'𢛔'),
(0x2F8A5, 'M', u'惇'),
(0x2F8A6, 'M', u'慈'),
(0x2F8A7, 'M', u'慌'),
(0x2F8A8, 'M', u'慎'),
(0x2F8A9, 'M', u'慌'),
(0x2F8AA, 'M', u'慺'),
(0x2F8AB, 'M', u'憎'),
(0x2F8AC, 'M', u'憲'),
(0x2F8AD, 'M', u'憤'),
(0x2F8AE, 'M', u'憯'),
(0x2F8AF, 'M', u'懞'),
(0x2F8B0, 'M', u'懲'),
(0x2F8B1, 'M', u'懶'),
(0x2F8B2, 'M', u'成'),
(0x2F8B3, 'M', u'戛'),
(0x2F8B4, 'M', u'扝'),
(0x2F8B5, 'M', u'抱'),
(0x2F8B6, 'M', u'拔'),
(0x2F8B7, 'M', u'捐'),
(0x2F8B8, 'M', u'𢬌'),
(0x2F8B9, 'M', u'挽'),
(0x2F8BA, 'M', u'拼'),
(0x2F8BB, 'M', u'捨'),
(0x2F8BC, 'M', u'掃'),
(0x2F8BD, 'M', u'揤'),
(0x2F8BE, 'M', u'𢯱'),
(0x2F8BF, 'M', u'搢'),
(0x2F8C0, 'M', u'揅'),
(0x2F8C1, 'M', u'掩'),
(0x2F8C2, 'M', u'㨮'),
(0x2F8C3, 'M', u'摩'),
(0x2F8C4, 'M', u'摾'),
(0x2F8C5, 'M', u'撝'),
(0x2F8C6, 'M', u'摷'),
(0x2F8C7, 'M', u'㩬'),
(0x2F8C8, 'M', u'敏'),
(0x2F8C9, 'M', u'敬'),
(0x2F8CA, 'M', u'𣀊'),
(0x2F8CB, 'M', u'旣'),
(0x2F8CC, 'M', u'書'),
(0x2F8CD, 'M', u'晉'),
(0x2F8CE, 'M', u'㬙'),
(0x2F8CF, 'M', u'暑'),
(0x2F8D0, 'M', u'㬈'),
(0x2F8D1, 'M', u'㫤'),
(0x2F8D2, 'M', u'冒'),
(0x2F8D3, 'M', u'冕'),
(0x2F8D4, 'M', u'最'),
(0x2F8D5, 'M', u'暜'),
(0x2F8D6, 'M', u'肭'),
(0x2F8D7, 'M', u'䏙'),
(0x2F8D8, 'M', u'朗'),
(0x2F8D9, 'M', u'望'),
(0x2F8DA, 'M', u'朡'),
(0x2F8DB, 'M', u'杞'),
(0x2F8DC, 'M', u'杓'),
(0x2F8DD, 'M', u'𣏃'),
(0x2F8DE, 'M', u'㭉'),
(0x2F8DF, 'M', u'柺'),
(0x2F8E0, 'M', u'枅'),
(0x2F8E1, 'M', u'桒'),
(0x2F8E2, 'M', u'梅'),
(0x2F8E3, 'M', u'𣑭'),
(0x2F8E4, 'M', u'梎'),
(0x2F8E5, 'M', u'栟'),
(0x2F8E6, 'M', u'椔'),
(0x2F8E7, 'M', u'㮝'),
(0x2F8E8, 'M', u'楂'),
(0x2F8E9, 'M', u'榣'),
(0x2F8EA, 'M', u'槪'),
(0x2F8EB, 'M', u'檨'),
(0x2F8EC, 'M', u'𣚣'),
(0x2F8ED, 'M', u'櫛'),
(0x2F8EE, 'M', u'㰘'),
(0x2F8EF, 'M', u'次'),
]
def _seg_75():
return [
(0x2F8F0, 'M', u'𣢧'),
(0x2F8F1, 'M', u'歔'),
(0x2F8F2, 'M', u'㱎'),
(0x2F8F3, 'M', u'歲'),
(0x2F8F4, 'M', u'殟'),
(0x2F8F5, 'M', u'殺'),
(0x2F8F6, 'M', u'殻'),
(0x2F8F7, 'M', u'𣪍'),
(0x2F8F8, 'M', u'𡴋'),
(0x2F8F9, 'M', u'𣫺'),
(0x2F8FA, 'M', u'汎'),
(0x2F8FB, 'M', u'𣲼'),
(0x2F8FC, 'M', u'沿'),
(0x2F8FD, 'M', u'泍'),
(0x2F8FE, 'M', u'汧'),
(0x2F8FF, 'M', u'洖'),
(0x2F900, 'M', u'派'),
(0x2F901, 'M', u'海'),
(0x2F902, 'M', u'流'),
(0x2F903, 'M', u'浩'),
(0x2F904, 'M', u'浸'),
(0x2F905, 'M', u'涅'),
(0x2F906, 'M', u'𣴞'),
(0x2F907, 'M', u'洴'),
(0x2F908, 'M', u'港'),
(0x2F909, 'M', u'湮'),
(0x2F90A, 'M', u'㴳'),
(0x2F90B, 'M', u'滋'),
(0x2F90C, 'M', u'滇'),
(0x2F90D, 'M', u'𣻑'),
(0x2F90E, 'M', u'淹'),
(0x2F90F, 'M', u'潮'),
(0x2F910, 'M', u'𣽞'),
(0x2F911, 'M', u'𣾎'),
(0x2F912, 'M', u'濆'),
(0x2F913, 'M', u'瀹'),
(0x2F914, 'M', u'瀞'),
(0x2F915, 'M', u'瀛'),
(0x2F916, 'M', u'㶖'),
(0x2F917, 'M', u'灊'),
(0x2F918, 'M', u'災'),
(0x2F919, 'M', u'灷'),
(0x2F91A, 'M', u'炭'),
(0x2F91B, 'M', u'𠔥'),
(0x2F91C, 'M', u'煅'),
(0x2F91D, 'M', u'𤉣'),
(0x2F91E, 'M', u'熜'),
(0x2F91F, 'X'),
(0x2F920, 'M', u'爨'),
(0x2F921, 'M', u'爵'),
(0x2F922, 'M', u'牐'),
(0x2F923, 'M', u'𤘈'),
(0x2F924, 'M', u'犀'),
(0x2F925, 'M', u'犕'),
(0x2F926, 'M', u'𤜵'),
(0x2F927, 'M', u'𤠔'),
(0x2F928, 'M', u'獺'),
(0x2F929, 'M', u'王'),
(0x2F92A, 'M', u'㺬'),
(0x2F92B, 'M', u'玥'),
(0x2F92C, 'M', u'㺸'),
(0x2F92E, 'M', u'瑇'),
(0x2F92F, 'M', u'瑜'),
(0x2F930, 'M', u'瑱'),
(0x2F931, 'M', u'璅'),
(0x2F932, 'M', u'瓊'),
(0x2F933, 'M', u'㼛'),
(0x2F934, 'M', u'甤'),
(0x2F935, 'M', u'𤰶'),
(0x2F936, 'M', u'甾'),
(0x2F937, 'M', u'𤲒'),
(0x2F938, 'M', u'異'),
(0x2F939, 'M', u'𢆟'),
(0x2F93A, 'M', u'瘐'),
(0x2F93B, 'M', u'𤾡'),
(0x2F93C, 'M', u'𤾸'),
(0x2F93D, 'M', u'𥁄'),
(0x2F93E, 'M', u'㿼'),
(0x2F93F, 'M', u'䀈'),
(0x2F940, 'M', u'直'),
(0x2F941, 'M', u'𥃳'),
(0x2F942, 'M', u'𥃲'),
(0x2F943, 'M', u'𥄙'),
(0x2F944, 'M', u'𥄳'),
(0x2F945, 'M', u'眞'),
(0x2F946, 'M', u'真'),
(0x2F948, 'M', u'睊'),
(0x2F949, 'M', u'䀹'),
(0x2F94A, 'M', u'瞋'),
(0x2F94B, 'M', u'䁆'),
(0x2F94C, 'M', u'䂖'),
(0x2F94D, 'M', u'𥐝'),
(0x2F94E, 'M', u'硎'),
(0x2F94F, 'M', u'碌'),
(0x2F950, 'M', u'磌'),
(0x2F951, 'M', u'䃣'),
(0x2F952, 'M', u'𥘦'),
(0x2F953, 'M', u'祖'),
(0x2F954, 'M', u'𥚚'),
(0x2F955, 'M', u'𥛅'),
]
def _seg_76():
return [
(0x2F956, 'M', u'福'),
(0x2F957, 'M', u'秫'),
(0x2F958, 'M', u'䄯'),
(0x2F959, 'M', u'穀'),
(0x2F95A, 'M', u'穊'),
(0x2F95B, 'M', u'穏'),
(0x2F95C, 'M', u'𥥼'),
(0x2F95D, 'M', u'𥪧'),
(0x2F95F, 'X'),
(0x2F960, 'M', u'䈂'),
(0x2F961, 'M', u'𥮫'),
(0x2F962, 'M', u'篆'),
(0x2F963, 'M', u'築'),
(0x2F964, 'M', u'䈧'),
(0x2F965, 'M', u'𥲀'),
(0x2F966, 'M', u'糒'),
(0x2F967, 'M', u'䊠'),
(0x2F968, 'M', u'糨'),
(0x2F969, 'M', u'糣'),
(0x2F96A, 'M', u'紀'),
(0x2F96B, 'M', u'𥾆'),
(0x2F96C, 'M', u'絣'),
(0x2F96D, 'M', u'䌁'),
(0x2F96E, 'M', u'緇'),
(0x2F96F, 'M', u'縂'),
(0x2F970, 'M', u'繅'),
(0x2F971, 'M', u'䌴'),
(0x2F972, 'M', u'𦈨'),
(0x2F973, 'M', u'𦉇'),
(0x2F974, 'M', u'䍙'),
(0x2F975, 'M', u'𦋙'),
(0x2F976, 'M', u'罺'),
(0x2F977, 'M', u'𦌾'),
(0x2F978, 'M', u'羕'),
(0x2F979, 'M', u'翺'),
(0x2F97A, 'M', u'者'),
(0x2F97B, 'M', u'𦓚'),
(0x2F97C, 'M', u'𦔣'),
(0x2F97D, 'M', u'聠'),
(0x2F97E, 'M', u'𦖨'),
(0x2F97F, 'M', u'聰'),
(0x2F980, 'M', u'𣍟'),
(0x2F981, 'M', u'䏕'),
(0x2F982, 'M', u'育'),
(0x2F983, 'M', u'脃'),
(0x2F984, 'M', u'䐋'),
(0x2F985, 'M', u'脾'),
(0x2F986, 'M', u'媵'),
(0x2F987, 'M', u'𦞧'),
(0x2F988, 'M', u'𦞵'),
(0x2F989, 'M', u'𣎓'),
(0x2F98A, 'M', u'𣎜'),
(0x2F98B, 'M', u'舁'),
(0x2F98C, 'M', u'舄'),
(0x2F98D, 'M', u'辞'),
(0x2F98E, 'M', u'䑫'),
(0x2F98F, 'M', u'芑'),
(0x2F990, 'M', u'芋'),
(0x2F991, 'M', u'芝'),
(0x2F992, 'M', u'劳'),
(0x2F993, 'M', u'花'),
(0x2F994, 'M', u'芳'),
(0x2F995, 'M', u'芽'),
(0x2F996, 'M', u'苦'),
(0x2F997, 'M', u'𦬼'),
(0x2F998, 'M', u'若'),
(0x2F999, 'M', u'茝'),
(0x2F99A, 'M', u'荣'),
(0x2F99B, 'M', u'莭'),
(0x2F99C, 'M', u'茣'),
(0x2F99D, 'M', u'莽'),
(0x2F99E, 'M', u'菧'),
(0x2F99F, 'M', u'著'),
(0x2F9A0, 'M', u'荓'),
(0x2F9A1, 'M', u'菊'),
(0x2F9A2, 'M', u'菌'),
(0x2F9A3, 'M', u'菜'),
(0x2F9A4, 'M', u'𦰶'),
(0x2F9A5, 'M', u'𦵫'),
(0x2F9A6, 'M', u'𦳕'),
(0x2F9A7, 'M', u'䔫'),
(0x2F9A8, 'M', u'蓱'),
(0x2F9A9, 'M', u'蓳'),
(0x2F9AA, 'M', u'蔖'),
(0x2F9AB, 'M', u'𧏊'),
(0x2F9AC, 'M', u'蕤'),
(0x2F9AD, 'M', u'𦼬'),
(0x2F9AE, 'M', u'䕝'),
(0x2F9AF, 'M', u'䕡'),
(0x2F9B0, 'M', u'𦾱'),
(0x2F9B1, 'M', u'𧃒'),
(0x2F9B2, 'M', u'䕫'),
(0x2F9B3, 'M', u'虐'),
(0x2F9B4, 'M', u'虜'),
(0x2F9B5, 'M', u'虧'),
(0x2F9B6, 'M', u'虩'),
(0x2F9B7, 'M', u'蚩'),
(0x2F9B8, 'M', u'蚈'),
(0x2F9B9, 'M', u'蜎'),
(0x2F9BA, 'M', u'蛢'),
]
def _seg_77():
return [
(0x2F9BB, 'M', u'蝹'),
(0x2F9BC, 'M', u'蜨'),
(0x2F9BD, 'M', u'蝫'),
(0x2F9BE, 'M', u'螆'),
(0x2F9BF, 'X'),
(0x2F9C0, 'M', u'蟡'),
(0x2F9C1, 'M', u'蠁'),
(0x2F9C2, 'M', u'䗹'),
(0x2F9C3, 'M', u'衠'),
(0x2F9C4, 'M', u'衣'),
(0x2F9C5, 'M', u'𧙧'),
(0x2F9C6, 'M', u'裗'),
(0x2F9C7, 'M', u'裞'),
(0x2F9C8, 'M', u'䘵'),
(0x2F9C9, 'M', u'裺'),
(0x2F9CA, 'M', u'㒻'),
(0x2F9CB, 'M', u'𧢮'),
(0x2F9CC, 'M', u'𧥦'),
(0x2F9CD, 'M', u'䚾'),
(0x2F9CE, 'M', u'䛇'),
(0x2F9CF, 'M', u'誠'),
(0x2F9D0, 'M', u'諭'),
(0x2F9D1, 'M', u'變'),
(0x2F9D2, 'M', u'豕'),
(0x2F9D3, 'M', u'𧲨'),
(0x2F9D4, 'M', u'貫'),
(0x2F9D5, 'M', u'賁'),
(0x2F9D6, 'M', u'贛'),
(0x2F9D7, 'M', u'起'),
(0x2F9D8, 'M', u'𧼯'),
(0x2F9D9, 'M', u'𠠄'),
(0x2F9DA, 'M', u'跋'),
(0x2F9DB, 'M', u'趼'),
(0x2F9DC, 'M', u'跰'),
(0x2F9DD, 'M', u'𠣞'),
(0x2F9DE, 'M', u'軔'),
(0x2F9DF, 'M', u'輸'),
(0x2F9E0, 'M', u'𨗒'),
(0x2F9E1, 'M', u'𨗭'),
(0x2F9E2, 'M', u'邔'),
(0x2F9E3, 'M', u'郱'),
(0x2F9E4, 'M', u'鄑'),
(0x2F9E5, 'M', u'𨜮'),
(0x2F9E6, 'M', u'鄛'),
(0x2F9E7, 'M', u'鈸'),
(0x2F9E8, 'M', u'鋗'),
(0x2F9E9, 'M', u'鋘'),
(0x2F9EA, 'M', u'鉼'),
(0x2F9EB, 'M', u'鏹'),
(0x2F9EC, 'M', u'鐕'),
(0x2F9ED, 'M', u'𨯺'),
(0x2F9EE, 'M', u'開'),
(0x2F9EF, 'M', u'䦕'),
(0x2F9F0, 'M', u'閷'),
(0x2F9F1, 'M', u'𨵷'),
(0x2F9F2, 'M', u'䧦'),
(0x2F9F3, 'M', u'雃'),
(0x2F9F4, 'M', u'嶲'),
(0x2F9F5, 'M', u'霣'),
(0x2F9F6, 'M', u'𩅅'),
(0x2F9F7, 'M', u'𩈚'),
(0x2F9F8, 'M', u'䩮'),
(0x2F9F9, 'M', u'䩶'),
(0x2F9FA, 'M', u'韠'),
(0x2F9FB, 'M', u'𩐊'),
(0x2F9FC, 'M', u'䪲'),
(0x2F9FD, 'M', u'𩒖'),
(0x2F9FE, 'M', u'頋'),
(0x2FA00, 'M', u'頩'),
(0x2FA01, 'M', u'𩖶'),
(0x2FA02, 'M', u'飢'),
(0x2FA03, 'M', u'䬳'),
(0x2FA04, 'M', u'餩'),
(0x2FA05, 'M', u'馧'),
(0x2FA06, 'M', u'駂'),
(0x2FA07, 'M', u'駾'),
(0x2FA08, 'M', u'䯎'),
(0x2FA09, 'M', u'𩬰'),
(0x2FA0A, 'M', u'鬒'),
(0x2FA0B, 'M', u'鱀'),
(0x2FA0C, 'M', u'鳽'),
(0x2FA0D, 'M', u'䳎'),
(0x2FA0E, 'M', u'䳭'),
(0x2FA0F, 'M', u'鵧'),
(0x2FA10, 'M', u'𪃎'),
(0x2FA11, 'M', u'䳸'),
(0x2FA12, 'M', u'𪄅'),
(0x2FA13, 'M', u'𪈎'),
(0x2FA14, 'M', u'𪊑'),
(0x2FA15, 'M', u'麻'),
(0x2FA16, 'M', u'䵖'),
(0x2FA17, 'M', u'黹'),
(0x2FA18, 'M', u'黾'),
(0x2FA19, 'M', u'鼅'),
(0x2FA1A, 'M', u'鼏'),
(0x2FA1B, 'M', u'鼖'),
(0x2FA1C, 'M', u'鼻'),
(0x2FA1D, 'M', u'𪘀'),
(0x2FA1E, 'X'),
(0xE0100, 'I'),
]
def _seg_78():
return [
(0xE01F0, 'X'),
]
uts46data = tuple(
_seg_0()
+ _seg_1()
+ _seg_2()
+ _seg_3()
+ _seg_4()
+ _seg_5()
+ _seg_6()
+ _seg_7()
+ _seg_8()
+ _seg_9()
+ _seg_10()
+ _seg_11()
+ _seg_12()
+ _seg_13()
+ _seg_14()
+ _seg_15()
+ _seg_16()
+ _seg_17()
+ _seg_18()
+ _seg_19()
+ _seg_20()
+ _seg_21()
+ _seg_22()
+ _seg_23()
+ _seg_24()
+ _seg_25()
+ _seg_26()
+ _seg_27()
+ _seg_28()
+ _seg_29()
+ _seg_30()
+ _seg_31()
+ _seg_32()
+ _seg_33()
+ _seg_34()
+ _seg_35()
+ _seg_36()
+ _seg_37()
+ _seg_38()
+ _seg_39()
+ _seg_40()
+ _seg_41()
+ _seg_42()
+ _seg_43()
+ _seg_44()
+ _seg_45()
+ _seg_46()
+ _seg_47()
+ _seg_48()
+ _seg_49()
+ _seg_50()
+ _seg_51()
+ _seg_52()
+ _seg_53()
+ _seg_54()
+ _seg_55()
+ _seg_56()
+ _seg_57()
+ _seg_58()
+ _seg_59()
+ _seg_60()
+ _seg_61()
+ _seg_62()
+ _seg_63()
+ _seg_64()
+ _seg_65()
+ _seg_66()
+ _seg_67()
+ _seg_68()
+ _seg_69()
+ _seg_70()
+ _seg_71()
+ _seg_72()
+ _seg_73()
+ _seg_74()
+ _seg_75()
+ _seg_76()
+ _seg_77()
+ _seg_78()
)
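# --- Illustrative lookup sketch (added for clarity; not part of the
# generated file) ----------------------------------------------------------
# The _seg_N() helpers above exist only to keep each code object small;
# consumers search the concatenated, codepoint-sorted `uts46data` tuple.
# A hypothetical bisect-based lookup, assuming each row has the shape
# (start_codepoint, status, *mapping):
import bisect

_uts46_starts = [row[0] for row in uts46data]  # first codepoint of each row

def _lookup_uts46(cp):
    # The last row whose starting codepoint is <= cp governs cp: its status
    # ('V' valid, 'M' mapped, 'X' disallowed, 'I' ignored, '3' STD3-restricted)
    # applies to every codepoint up to the next row's start.
    return uts46data[bisect.bisect_right(_uts46_starts, cp) - 1]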
| en | 0.812792 | # This file is automatically generated by tools/idna-data # vim: set fileencoding=utf-8 : IDNA Mapping Table from UTS46. | 1.400651 | 1 |
tests/kbcr/smart/test_smart.py | alex4321/ctp | 0 | 7520 | # -*- coding: utf-8 -*-
import numpy as np
import torch
from torch import nn
from kbcr.kernels import GaussianKernel
from kbcr.smart import NeuralKB
import pytest
@pytest.mark.light
def test_smart_v1():
embedding_size = 50
rs = np.random.RandomState(0)
for _ in range(32):
with torch.no_grad():
triples = [
('a', 'p', 'b'),
('c', 'q', 'd'),
('e', 'q', 'f'),
('g', 'q', 'h'),
('i', 'q', 'l'),
('m', 'q', 'n'),
('o', 'q', 'p'),
('q', 'q', 'r'),
('s', 'q', 't'),
('u', 'q', 'v')
]
entity_lst = sorted({s for (s, _, _) in triples} | {o for (_, _, o) in triples})
predicate_lst = sorted({p for (_, p, _) in triples})
nb_entities, nb_predicates = len(entity_lst), len(predicate_lst)
entity_to_index = {e: i for i, e in enumerate(entity_lst)}
predicate_to_index = {p: i for i, p in enumerate(predicate_lst)}
kernel = GaussianKernel()
entity_embeddings = nn.Embedding(nb_entities, embedding_size * 2, sparse=True)
predicate_embeddings = nn.Embedding(nb_predicates, embedding_size * 2, sparse=True)
fact_rel = torch.LongTensor(np.array([predicate_to_index[p] for (_, p, _) in triples]))
fact_arg1 = torch.LongTensor(np.array([entity_to_index[s] for (s, _, _) in triples]))
fact_arg2 = torch.LongTensor(np.array([entity_to_index[o] for (_, _, o) in triples]))
facts = [fact_rel, fact_arg1, fact_arg2]
model = NeuralKB(entity_embeddings=entity_embeddings, predicate_embeddings=predicate_embeddings,
kernel=kernel, facts=facts)
xs_np = rs.randint(nb_entities, size=32)
xp_np = rs.randint(nb_predicates, size=32)
xo_np = rs.randint(nb_entities, size=32)
xs_np[0] = 0
xp_np[0] = 0
xo_np[0] = 1
xs_np[1] = 2
xp_np[1] = 1
xo_np[1] = 3
xs = torch.LongTensor(xs_np)
xp = torch.LongTensor(xp_np)
xo = torch.LongTensor(xo_np)
xs_emb = entity_embeddings(xs)
xp_emb = predicate_embeddings(xp)
xo_emb = entity_embeddings(xo)
print('xp_emb', xp_emb.shape)
res_sp, res_po = model.forward(xp_emb, xs_emb, xo_emb)
inf = model.score(xp_emb, xs_emb, xo_emb)
assert inf[0] > 0.9
assert inf[1] > 0.9
scores_sp, emb_sp = res_sp
scores_po, emb_po = res_po
print(scores_sp.shape, emb_sp.shape)
print(scores_po.shape, emb_po.shape)
inf = inf.cpu().numpy()
scores_sp = scores_sp.cpu().numpy()
scores_po = scores_po.cpu().numpy()
print('AAA', inf)
print('BBB', scores_sp)
if __name__ == '__main__':
pytest.main([__file__])
# test_smart_v1()
| # -*- coding: utf-8 -*-
import numpy as np
import torch
from torch import nn
from kbcr.kernels import GaussianKernel
from kbcr.smart import NeuralKB
import pytest
@pytest.mark.light
def test_smart_v1():
embedding_size = 50
rs = np.random.RandomState(0)
for _ in range(32):
with torch.no_grad():
triples = [
('a', 'p', 'b'),
('c', 'q', 'd'),
('e', 'q', 'f'),
('g', 'q', 'h'),
('i', 'q', 'l'),
('m', 'q', 'n'),
('o', 'q', 'p'),
('q', 'q', 'r'),
('s', 'q', 't'),
('u', 'q', 'v')
]
entity_lst = sorted({s for (s, _, _) in triples} | {o for (_, _, o) in triples})
predicate_lst = sorted({p for (_, p, _) in triples})
nb_entities, nb_predicates = len(entity_lst), len(predicate_lst)
entity_to_index = {e: i for i, e in enumerate(entity_lst)}
predicate_to_index = {p: i for i, p in enumerate(predicate_lst)}
kernel = GaussianKernel()
entity_embeddings = nn.Embedding(nb_entities, embedding_size * 2, sparse=True)
predicate_embeddings = nn.Embedding(nb_predicates, embedding_size * 2, sparse=True)
fact_rel = torch.LongTensor(np.array([predicate_to_index[p] for (_, p, _) in triples]))
fact_arg1 = torch.LongTensor(np.array([entity_to_index[s] for (s, _, _) in triples]))
fact_arg2 = torch.LongTensor(np.array([entity_to_index[o] for (_, _, o) in triples]))
facts = [fact_rel, fact_arg1, fact_arg2]
model = NeuralKB(entity_embeddings=entity_embeddings, predicate_embeddings=predicate_embeddings,
kernel=kernel, facts=facts)
xs_np = rs.randint(nb_entities, size=32)
xp_np = rs.randint(nb_predicates, size=32)
xo_np = rs.randint(nb_entities, size=32)
xs_np[0] = 0
xp_np[0] = 0
xo_np[0] = 1
xs_np[1] = 2
xp_np[1] = 1
xo_np[1] = 3
xs = torch.LongTensor(xs_np)
xp = torch.LongTensor(xp_np)
xo = torch.LongTensor(xo_np)
xs_emb = entity_embeddings(xs)
xp_emb = predicate_embeddings(xp)
xo_emb = entity_embeddings(xo)
print('xp_emb', xp_emb.shape)
res_sp, res_po = model.forward(xp_emb, xs_emb, xo_emb)
inf = model.score(xp_emb, xs_emb, xo_emb)
assert inf[0] > 0.9
assert inf[1] > 0.9
scores_sp, emb_sp = res_sp
scores_po, emb_po = res_po
print(scores_sp.shape, emb_sp.shape)
print(scores_po.shape, emb_po.shape)
inf = inf.cpu().numpy()
scores_sp = scores_sp.cpu().numpy()
scores_po = scores_po.cpu().numpy()
print('AAA', inf)
print('BBB', scores_sp)
if __name__ == '__main__':
pytest.main([__file__])
# test_smart_v1()
| en | 0.673068 | # -*- coding: utf-8 -*- # test_smart_v1() | 1.980975 | 2 |
test.py | eseJiHeaLim/find_child | 0 | 7521 | import tkinter
window=tkinter.Tk()
window.title("YUN DAE HEE")
window.geometry("640x400+100+100")
window.resizable(True, True)
image=tkinter.PhotoImage(file="opencv_frame_0.png")
label=tkinter.Label(window, image=image)
label.pack()
window.mainloop() | import tkinter
window=tkinter.Tk()
window.title("YUN DAE HEE")
window.geometry("640x400+100+100")
window.resizable(True, True)
image=tkinter.PhotoImage(file="opencv_frame_0.png")
label=tkinter.Label(window, image=image)
label.pack()
window.mainloop() | none | 1 | 3.150241 | 3 |
|
UMSLHackRestAPI/api/urls.py | trujivan/climate-impact-changes | 1 | 7522 | from django.urls import path, include
from .views import main_view, PredictionView
#router = routers.DefaultRouter(trailing_slash=False)
#router.register('years', YearView, basename='years')
#router.register('predict', PredictionView, basename='predict')
urlpatterns = [
#path('api/', get_dummy_data),
#path('pollution/predict', get_prediction, name='test_predict'),
#path('myform/', api_form_view, name='year_form'),
#path('api/', include(router.urls)),
path(r'', main_view, name="main"),
path(r'api/v1/predict', PredictionView.as_view(), name='predict')
] | from django.urls import path, include
from .views import main_view, PredictionView
#router = routers.DefaultRouter(trailing_slash=False)
#router.register('years', YearView, basename='years')
#router.register('predict', PredictionView, basename='predict')
urlpatterns = [
#path('api/', get_dummy_data),
#path('pollution/predict', get_prediction, name='test_predict'),
#path('myform/', api_form_view, name='year_form'),
#path('api/', include(router.urls)),
path(r'', main_view, name="main"),
path(r'api/v1/predict', PredictionView.as_view(), name='predict')
] | en | 0.214141 | #router = routers.DefaultRouter(trailing_slash=False) #router.register('years', YearView, basename='years') #router.register('predict', PredictionView, basename='predict') #path('api/', get_dummy_data), #path('pollution/predict', get_prediction, name='test_predict'), #path('myform/', api_form_view, name='year_form'), #path('api/', include(router.urls)), | 1.973978 | 2 |
ievv_opensource/utils/ievv_colorize.py | appressoas/ievv_opensource | 0 | 7523 | from django.conf import settings
from termcolor import colored
#: Red color constant for :func:`.ievv_colorize`.
COLOR_RED = 'red'
#: Blue color constant for :func:`.ievv_colorize`.
COLOR_BLUE = 'blue'
#: Yellow color constant for :func:`.ievv_colorize`.
COLOR_YELLOW = 'yellow'
#: Grey color constant for :func:`.ievv_colorize`.
COLOR_GREY = 'grey'
#: Green color constant for :func:`.ievv_colorize`.
COLOR_GREEN = 'green'
def colorize(text, color, bold=False):
"""
Colorize a string for stdout/stderr.
Colors are only applied if :setting:`IEVV_COLORIZE_USE_COLORS` is
``True`` or not defined (so it defaults to ``True``).
Examples:
Print blue text::
from ievv_opensource.utils import ievv_colorize
print(ievv_colorize('Test', color=ievv_colorize.COLOR_BLUE))
Print bold red text::
print(ievv_colorize('Test', color=ievv_colorize.COLOR_RED, bold=True))
Args:
text: The text (string) to colorize.
color: The color to use.
Should be one of:
- :obj:`.COLOR_RED`
- :obj:`.COLOR_BLUE`
- :obj:`.COLOR_YELLOW`
- :obj:`.COLOR_GREY`
- :obj:`.COLOR_GREEN`
- ``None`` (no color)
bold: Set this to ``True`` to use bold font.
"""
if getattr(settings, 'IEVV_COLORIZE_USE_COLORS', True) and color:
attrs = []
if bold:
attrs.append('bold')
return colored(text, color=color, attrs=attrs)
else:
return text
| from django.conf import settings
from termcolor import colored
#: Red color constant for :func:`.ievv_colorize`.
COLOR_RED = 'red'
#: Blue color constant for :func:`.ievv_colorize`.
COLOR_BLUE = 'blue'
#: Yellow color constant for :func:`.ievv_colorize`.
COLOR_YELLOW = 'yellow'
#: Grey color constant for :func:`.ievv_colorize`.
COLOR_GREY = 'grey'
#: Green color constant for :func:`.ievv_colorize`.
COLOR_GREEN = 'green'
def colorize(text, color, bold=False):
"""
Colorize a string for stdout/stderr.
Colors are only applied if :setting:`IEVV_COLORIZE_USE_COLORS` is
``True`` or not defined (so it defaults to ``True``).
Examples:
Print blue text::
from ievv_opensource.utils import ievv_colorize
print(ievv_colorize('Test', color=ievv_colorize.COLOR_BLUE))
Print bold red text::
print(ievv_colorize('Test', color=ievv_colorize.COLOR_RED, bold=True))
Args:
text: The text (string) to colorize.
color: The color to use.
Should be one of:
- :obj:`.COLOR_RED`
- :obj:`.COLOR_BLUE`
- :obj:`.COLOR_YELLOW`
- :obj:`.COLOR_GREY`
- :obj:`.COLOR_GREEN`
- ``None`` (no color)
bold: Set this to ``True`` to use bold font.
"""
if getattr(settings, 'IEVV_COLORIZE_USE_COLORS', True) and color:
attrs = []
if bold:
attrs.append('bold')
return colored(text, color=color, attrs=attrs)
else:
return text | en | 0.449517 | #: Red color constant for :func:`.ievv_colorize`. #: Blue color constant for :func:`.ievv_colorize`. #: Yellow color constant for :func:`.ievv_colorize`. #: Grey color constant for :func:`.ievv_colorize`. #: Green color constant for :func:`.ievv_colorize`. Colorize a string for stdout/stderr. Colors are only applied if :setting:`IEVV_COLORIZE_USE_COLORS` is ``True`` or not defined (so it defaults to ``True``). Examples: Print blue text:: from ievv_opensource.utils import ievv_colorize print(ievv_colorize('Test', color=ievv_colorize.COLOR_BLUE)) Print bold red text:: print(ievv_colorize('Test', color=ievv_colorize.COLOR_RED, bold=True)) Args: text: The text (string) to colorize. color: The color to use. Should be one of: - :obj:`.COLOR_RED` - :obj:`.COLOR_BLUE` - :obj:`.COLOR_YELLOW` - :obj:`.COLOR_GREY` - :obj:`.COLOR_GREEN` - ``None`` (no color) bold: Set this to ``True`` to use bold font. | 2.665969 | 3 |
RSICompute.py | bluefin1986/tinyspark | 3 | 7524 |
# coding: utf-8
# In[1]:
import baostock as bs
import pandas as pd
import numpy as np
import talib as ta
import matplotlib.pyplot as plt
import KlineService
import BaoStockUtil
import math
import datetime
from scipy import integrate
from RSI import DayRSI, WeekRSI, MonthRSI, FiveMinRSI, FiftyMinRSI, ThirtyMinRSI, SixtyMinRSI  # assumes the RSI module also defines the minute-level classes used in writeRSIToDB
from concurrent.futures import ThreadPoolExecutor, as_completed
from Stock import Stock
import dbutil
from IPython.core.debugger import set_trace
# Number of data points used in the integral calculation
INTEGRATE_CALC_RANGE = 4
RSI_OVER_BUY = 80
RSI_OVER_SELL = 20
RSI_OVER_BUY_12 = 75
RSI_OVER_SELL_12 = 25
RSI_OVER_BUY_24 = 70
RSI_OVER_SELL_24 = 30
RSI_MIDDLE = 50
# Integral threshold for the daily oversold region
RSI_INTE_OVERSELL_THRESHOLD_DAY = 50
# In[3]:
def findLatestRSIDate(period):
mydb = dbutil.connectDB()
collection = mydb[chooseRSICollection(period)]
cursor = collection.find().sort("date",-1).limit(1)
df = pd.DataFrame(list(cursor))
if df.empty:
return "1970-01-01"
return df["date"][0]
def clearRSI(period):
mydb = dbutil.connectDB()
collection = mydb[chooseRSICollection(period)]
collection.delete_many({})
indexes = collection.index_information()
if "code_1_date_1" in indexes.keys():
collection.drop_index( "code_1_date_1" )
def createIndex(period):
mydb = dbutil.connectDB()
collection = mydb[chooseRSICollection(period)]
collection.create_index( [("code", 1), ("date",1)])
def integrateValues(valuesArray):
return integrate.trapz(valuesArray, x=None, dx=1.0, axis=-1)
##
# Read RSI data for the given date range from the database
#
#
def readRSI(period, stockCode, startDate, endDate):
mydb = dbutil.connectDB()
collection = mydb[chooseRSICollection(period)]
if type(startDate) == str:
startDate = datetime.datetime.strptime(startDate + "T00:00:00.000Z", "%Y-%m-%dT%H:%M:%S.000Z")
endDate = datetime.datetime.strptime(endDate + "T23:59:59.000Z", "%Y-%m-%dT%H:%M:%S.000Z")
cursor = collection.find({"code":stockCode,"date":{"$gte":startDate,"$lte":endDate}})
df = pd.DataFrame(list(cursor))
return df
##
# Write RSI records to the database
#
#
def writeRSIToDB(period, stockCode, stockName, rsi_df):
dataList = []
for index,rsi in rsi_df.iterrows():
rsiDate = rsi['date']
if period == "day":
rsiObj = DayRSI(stockCode, stockName)
elif period == "week":
rsiObj = WeekRSI(stockCode, stockName)
elif period == "month":
rsiObj = MonthRSI(stockCode, stockName)
elif period == "5m":
rsiObj = FiveMinRSI(stockCode, stockName)
elif period == "15m":
rsiObj = FiftyMinRSI(stockCode, stockName)
elif period == "30m":
rsiObj = ThirtyMinRSI(stockCode, stockName)
elif period == "60m":
rsiObj = SixtyMinRSI(stockCode, stockName)
rsiObj.date = rsiDate
rsiObj.rsi_6 = rsi['rsi_6']
rsiObj.rsi_12 = rsi['rsi_12']
rsiObj.rsi_24 = rsi['rsi_24']
rsiObj.overBuy = rsi['overBuyFlag']
rsiObj.overSell = rsi['overSellFlag']
dataList.append(rsiObj.__dict__)
mydb = dbutil.connectDB()
collection = mydb[chooseRSICollection(period)]
if len(dataList) > 0:
collection.insert_many(dataList)
else:
raise RuntimeError("RSI数据为空")
def computeStockRSI(period, stockCode, stockName, startDate, endDate):
try:
# compute1 = datetime.datetime.now().timestamp()
df = KlineService.readStockKline(stockCode, period, startDate, endDate)
# compute2 = datetime.datetime.now().timestamp()
# print("read stockLine:", compute2 - compute1)
if df.empty:
return False
if period == "day":
            # Drop suspended-trading rows from the daily data
df = df[df['tradeStatus'] == '1']
rsi_df = computeRSI(df)
# compute3 = datetime.datetime.now().timestamp()
# print("compute rsi:", compute3 - compute2)
writeRSIToDB(period, stockCode, stockName, rsi_df)
# compute4 = datetime.datetime.now().timestamp()
# print("write to db:", compute4 - compute3)
return True
except BaseException as e:
print ("download " + stockCode + " error:" + str(e))
return False
##
# Choose the RSI collection for the given period
#
def chooseRSICollection(period):
periodRSICollection = {
"day" : "RSI_Day",
"week" : "RSI_Week",
"month" : "RSI_Month",
"5m" : "RSI_5m",
"15m" : "RSI_15m",
"30m" : "RSI_30m",
"60m" : "RSI_60m"
}
return periodRSICollection.get(period)
def computeRSI(klineDataFrame):
rsi_12days = ta.RSI(klineDataFrame['closePrice'],timeperiod=12)
rsi_6days = ta.RSI(klineDataFrame['closePrice'],timeperiod=6)
rsi_24days = ta.RSI(klineDataFrame['closePrice'],timeperiod=24)
rsiFrame = pd.DataFrame(klineDataFrame, columns=["date"])
rsiFrame['rsi_6'] = rsi_6days
rsiFrame['rsi_12'] = rsi_12days
rsiFrame['rsi_24'] = rsi_24days
    ## Add reference-line levels
rsiFrame['overBuy'] = RSI_OVER_BUY
rsiFrame['overSell'] = RSI_OVER_SELL
rsiFrame['middle'] = RSI_MIDDLE
    # Flag RSI overbought / oversold crossings
rsi_buy_position = rsiFrame['rsi_12'] > RSI_OVER_BUY_12
rsi_sell_position = rsiFrame['rsi_12'] < RSI_OVER_SELL_12
rsiFrame.loc[rsi_buy_position[(rsi_buy_position == True) & (rsi_buy_position.shift() == False)].index, 'overBuyFlag'] = 'Yes'
rsiFrame.loc[rsi_sell_position[(rsi_sell_position == True) & (rsi_sell_position.shift() == False)].index, 'overSellFlag'] = 'Yes'
return rsiFrame
##
# Compute RSI for every stock from the start date onwards
#
#
def computeAllRSIDataOfPeriod(period, startDate):
# currtime = datetime.datetime.now().timestamp()
print("begin clear RSI period:", period)
clearRSI(period)
print("cleared RSI period:", period)
# time1 = datetime.datetime.now().timestamp()
# print("clear finished:",time1 - currtime)
stockDict = KlineService.allStocks()
# time2 = datetime.datetime.now().timestamp()
# print("read stocks finished:",time2 - time1)
endDate = str(datetime.date.today())
jobStart = datetime.datetime.now().timestamp()
processCount = 0
failCount = 0
jobTotal = len(stockDict)
'''
    # Run this with a thread pool because single-threaded was too slow (it turned out this didn't help at all)
executor = ThreadPoolExecutor(max_workers=1)
funcVars = []
for key,stock in stockDict.items():
        # Indexes have no minute bars, so skip minute-level RSI for indexes
if period.endswith("m") and (key.startswith("sh.000") or key.startswith("sz.399")):
continue
funcVars.append([period, key, stock["name"], startDate, endDate])
all_task = [executor.submit(computeStockRSI, funcVar[0], funcVar[1], funcVar[2], funcVar[3], funcVar[4])
for funcVar in funcVars]
for future in as_completed(all_task):
processCount = processCount + 1
if not future.result():
failCount = failCount + 1
if processCount % 100 == 0 and processCount > 0:
print ("rsi process:", processCount, " of ", jobTotal ," failed:", failCount)
'''
for key,stock in stockDict.items():
processCount = processCount + 1
        # Indexes have no minute bars, so skip minute-level RSI for indexes
if period.endswith("m") and (key.startswith("sh.000") or key.startswith("sz.399")):
continue
result = computeStockRSI(period, key, stock["name"], startDate, endDate)
if not result:
failCount = failCount + 1
if processCount % 100 == 0 and processCount > 0:
print ("rsi process:", processCount, " of ", jobTotal ," failed:", failCount)
jobFinished = datetime.datetime.now().timestamp()
createIndex(period)
print("write all stock RSI to db finished, cost:", jobFinished - jobStart)
return True
##
# Compute the RSI integral for the specified date
#
#
def computeAllRSIDataIntegrate(period, specifiedDateStr, includeST):
BaoStockUtil.customLogin()
specifiedDate = datetime.datetime.strptime(specifiedDateStr, "%Y-%m-%d")
today = datetime.date.today()
    # If the date is set in the future, clamp it to today
if specifiedDate > datetime.datetime.today():
specifiedDate = datetime.date.today()
    # Start a full year back to avoid year-boundary issues
startDate = specifiedDate - datetime.timedelta(days = 365)
    # Fetch the trading-day list, used to count periods backwards
rs = bs.query_trade_dates(start_date=datetime.datetime.strftime(startDate, "%Y-%m-%d"), end_date = specifiedDate)
BaoStockUtil.customLogout()
if rs.error_code != '0':
raise RuntimeError("交易日api调用失败了:" + rs.error_code)
tradeDates = []
    while (rs.error_code == '0') and rs.next():
row = rs.get_row_data()
if row[1] == "1":
tradeDates.append(row[0])
if len(tradeDates) == 0:
raise RuntimeError("取不到最新的交易日")
    # If the requested date is later than the newest RSI date in the DB, the data is incomplete and needs backfilling
rsiLatestDate = findLatestRSIDate(period)
rsiLatestDateStr = datetime.datetime.strftime(rsiLatestDate, "%Y-%m-%d")
if rsiLatestDate < specifiedDate:
raise RuntimeError(specifiedDateStr + " 的 " + period + " RSI的数据不存在,待补齐数据")
    # Locate the specified date's position in the trading-day list.
    # list.index raises ValueError instead of returning -1, so check membership first.
    if specifiedDateStr not in tradeDates:
        raise RuntimeError(specifiedDateStr + " 可能不是交易日")
    specifiedDateIndex = tradeDates.index(specifiedDateStr)
daysBefore = computeRSIDataStartTradeDateRange(period, specifiedDateStr)
startDateIndex = specifiedDateIndex - daysBefore
    # A negative start index means there is not enough RSI history
if startDateIndex < 0:
raise RuntimeError(period + " rsi数据不够")
startDateStr = tradeDates[startDateIndex]
print("compute rsi tradeDates from ", startDateStr, "to", specifiedDateStr)
processCount = 0
failCount = 0
startDateIndex = -1
dictStocks = KlineService.allStocks()
klineDataFrame = KlineService.readAllStockKline(period, specifiedDateStr, specifiedDateStr)
klineDataFrame = klineDataFrame.set_index("code")
klineDict = klineDataFrame.to_dict('index')
jobTotal = len(dictStocks)
rsiValueArrs = []
for i in range(0, 6):
rsiValueArrs.append([])
for key,stock in dictStocks.items():
processCount = processCount + 1
        # Indexes have no minute bars, so skip minute-level RSI for indexes
        if period.endswith("m") and stock["stockType"] != 1:
continue
        # Skip ST stocks when they are excluded
if not includeST and stock["isST"]:
continue
        # Skip delisted stocks
if "退" in stock["name"]:
continue
        # Skip STAR Market stocks (the entry threshold makes them untradeable here)
if key.startswith("sh.68"):
continue
try:
rsiDF = readRSI(period, key, startDateStr, specifiedDateStr)
rsiCount = len(rsiDF)
if rsiCount < INTEGRATE_CALC_RANGE:
raise RuntimeError("积分计算节点不够")
rsiValueArrs[0].append(key)
rsiValueArrs[1].append(stock["name"])
rsiValueArrs[2].append(klineDict[key]["closePrice"])
            # Take the most recent values for the integral calculation
rsiValueArrs[3].append(rsiDF["rsi_6"][rsiCount - INTEGRATE_CALC_RANGE : rsiCount])
rsiValueArrs[4].append(rsiDF["rsi_12"][rsiCount - INTEGRATE_CALC_RANGE : rsiCount])
rsiValueArrs[5].append(rsiDF["rsi_24"][rsiCount - INTEGRATE_CALC_RANGE : rsiCount])
except BaseException as e:
failCount = failCount + 1
print ("compute rsi integrate " + key + " error:" + str(e))
if processCount % 100 == 0 and processCount > 0:
print ("compute rsi integrate process:", processCount, " of ", jobTotal ," failed:", failCount)
rsi6Arr = np.array(rsiValueArrs[3]).reshape(-1, INTEGRATE_CALC_RANGE)
rsi6InteArr = integrateValues(rsi6Arr)
rsi12Arr = np.array(rsiValueArrs[4]).reshape(-1, INTEGRATE_CALC_RANGE)
rsi12InteArr = integrateValues(rsi12Arr)
rsi24Arr = np.array(rsiValueArrs[5]).reshape(-1, INTEGRATE_CALC_RANGE)
rsi24InteArr = integrateValues(rsi24Arr)
rsiInteDF = pd.DataFrame()
rsiInteDF["code"] = rsiValueArrs[0]
rsiInteDF["name"] = rsiValueArrs[1]
rsiInteDF["closePrice"] = rsiValueArrs[2]
rsiInteDF["rsi_inte_6"] = rsi6InteArr
rsiInteDF["rsi_inte_12"] = rsi12InteArr
rsiInteDF["rsi_inte_24"] = rsi24InteArr
return rsiInteDF
# Work out the starting trade date needed for this period's calculation.
# A trading day is 4 hours long, so a "day" here means 4 hours rather than 24.
# Each calculation needs at least INTEGRATE_CALC_RANGE (4) points; for minute bars divide by 4*60=240 minutes to get the days of data required, minimum one day.
# Daily bars need no minute conversion.
## TODO: weekly handling is still undecided, let alone monthly.
def computeRSIDataStartTradeDateRange(period, specifiedDate):
daysBefore = 0
if period.endswith("m"):
daysBefore = math.ceil(INTEGRATE_CALC_RANGE * (int(period.replace("m", "")) + 1) / (60 * 4))
elif period == "day":
daysBefore = INTEGRATE_CALC_RANGE
else:
raise RuntimeError("周期有误")
return daysBefore
| # coding: utf-8
# In[1]:
import baostock as bs
import pandas as pd
import numpy as np
import talib as ta
import matplotlib.pyplot as plt
import KlineService
import BaoStockUtil
import math
import datetime
from scipy import integrate
from RSI import DayRSI, WeekRSI, MonthRSI, FiveMinRSI, FiftyMinRSI, ThirtyMinRSI, SixtyMinRSI  # assumes the RSI module also defines the minute-level classes used in writeRSIToDB
from concurrent.futures import ThreadPoolExecutor, as_completed
from Stock import Stock
import dbutil
from IPython.core.debugger import set_trace
# Number of data points used in the integral calculation
INTEGRATE_CALC_RANGE = 4
RSI_OVER_BUY = 80
RSI_OVER_SELL = 20
RSI_OVER_BUY_12 = 75
RSI_OVER_SELL_12 = 25
RSI_OVER_BUY_24 = 70
RSI_OVER_SELL_24 = 30
RSI_MIDDLE = 50
# Integral threshold for the daily oversold region
RSI_INTE_OVERSELL_THRESHOLD_DAY = 50
# In[3]:
def findLatestRSIDate(period):
mydb = dbutil.connectDB()
collection = mydb[chooseRSICollection(period)]
cursor = collection.find().sort("date",-1).limit(1)
df = pd.DataFrame(list(cursor))
if df.empty:
return "1970-01-01"
return df["date"][0]
def clearRSI(period):
mydb = dbutil.connectDB()
collection = mydb[chooseRSICollection(period)]
collection.delete_many({})
indexes = collection.index_information()
if "code_1_date_1" in indexes.keys():
collection.drop_index( "code_1_date_1" )
def createIndex(period):
mydb = dbutil.connectDB()
collection = mydb[chooseRSICollection(period)]
collection.create_index( [("code", 1), ("date",1)])
def integrateValues(valuesArray):
return integrate.trapz(valuesArray, x=None, dx=1.0, axis=-1)
##
# Read RSI data for the given date range from the database
#
#
def readRSI(period, stockCode, startDate, endDate):
mydb = dbutil.connectDB()
collection = mydb[chooseRSICollection(period)]
if type(startDate) == str:
startDate = datetime.datetime.strptime(startDate + "T00:00:00.000Z", "%Y-%m-%dT%H:%M:%S.000Z")
endDate = datetime.datetime.strptime(endDate + "T23:59:59.000Z", "%Y-%m-%dT%H:%M:%S.000Z")
cursor = collection.find({"code":stockCode,"date":{"$gte":startDate,"$lte":endDate}})
df = pd.DataFrame(list(cursor))
return df
##
# Write RSI records to the database
#
#
def writeRSIToDB(period, stockCode, stockName, rsi_df):
dataList = []
for index,rsi in rsi_df.iterrows():
rsiDate = rsi['date']
if period == "day":
rsiObj = DayRSI(stockCode, stockName)
elif period == "week":
rsiObj = WeekRSI(stockCode, stockName)
elif period == "month":
rsiObj = MonthRSI(stockCode, stockName)
elif period == "5m":
rsiObj = FiveMinRSI(stockCode, stockName)
elif period == "15m":
rsiObj = FiftyMinRSI(stockCode, stockName)
elif period == "30m":
rsiObj = ThirtyMinRSI(stockCode, stockName)
elif period == "60m":
rsiObj = SixtyMinRSI(stockCode, stockName)
rsiObj.date = rsiDate
rsiObj.rsi_6 = rsi['rsi_6']
rsiObj.rsi_12 = rsi['rsi_12']
rsiObj.rsi_24 = rsi['rsi_24']
rsiObj.overBuy = rsi['overBuyFlag']
rsiObj.overSell = rsi['overSellFlag']
dataList.append(rsiObj.__dict__)
mydb = dbutil.connectDB()
collection = mydb[chooseRSICollection(period)]
if len(dataList) > 0:
collection.insert_many(dataList)
else:
raise RuntimeError("RSI数据为空")
def computeStockRSI(period, stockCode, stockName, startDate, endDate):
try:
# compute1 = datetime.datetime.now().timestamp()
df = KlineService.readStockKline(stockCode, period, startDate, endDate)
# compute2 = datetime.datetime.now().timestamp()
# print("read stockLine:", compute2 - compute1)
if df.empty:
return False
if period == "day":
            # Drop suspended-trading rows from the daily data
df = df[df['tradeStatus'] == '1']
rsi_df = computeRSI(df)
# compute3 = datetime.datetime.now().timestamp()
# print("compute rsi:", compute3 - compute2)
writeRSIToDB(period, stockCode, stockName, rsi_df)
# compute4 = datetime.datetime.now().timestamp()
# print("write to db:", compute4 - compute3)
return True
except BaseException as e:
print ("download " + stockCode + " error:" + str(e))
return False
##
# Choose the RSI collection for the given period
#
def chooseRSICollection(period):
periodRSICollection = {
"day" : "RSI_Day",
"week" : "RSI_Week",
"month" : "RSI_Month",
"5m" : "RSI_5m",
"15m" : "RSI_15m",
"30m" : "RSI_30m",
"60m" : "RSI_60m"
}
return periodRSICollection.get(period)
def computeRSI(klineDataFrame):
rsi_12days = ta.RSI(klineDataFrame['closePrice'],timeperiod=12)
rsi_6days = ta.RSI(klineDataFrame['closePrice'],timeperiod=6)
rsi_24days = ta.RSI(klineDataFrame['closePrice'],timeperiod=24)
rsiFrame = pd.DataFrame(klineDataFrame, columns=["date"])
rsiFrame['rsi_6'] = rsi_6days
rsiFrame['rsi_12'] = rsi_12days
rsiFrame['rsi_24'] = rsi_24days
    ## Add reference-line levels
rsiFrame['overBuy'] = RSI_OVER_BUY
rsiFrame['overSell'] = RSI_OVER_SELL
rsiFrame['middle'] = RSI_MIDDLE
    # Flag RSI overbought / oversold crossings
rsi_buy_position = rsiFrame['rsi_12'] > RSI_OVER_BUY_12
rsi_sell_position = rsiFrame['rsi_12'] < RSI_OVER_SELL_12
rsiFrame.loc[rsi_buy_position[(rsi_buy_position == True) & (rsi_buy_position.shift() == False)].index, 'overBuyFlag'] = 'Yes'
rsiFrame.loc[rsi_sell_position[(rsi_sell_position == True) & (rsi_sell_position.shift() == False)].index, 'overSellFlag'] = 'Yes'
return rsiFrame
##
# Compute RSI for every stock from the start date onwards
#
#
def computeAllRSIDataOfPeriod(period, startDate):
# currtime = datetime.datetime.now().timestamp()
print("begin clear RSI period:", period)
clearRSI(period)
print("cleared RSI period:", period)
# time1 = datetime.datetime.now().timestamp()
# print("clear finished:",time1 - currtime)
stockDict = KlineService.allStocks()
# time2 = datetime.datetime.now().timestamp()
# print("read stocks finished:",time2 - time1)
endDate = str(datetime.date.today())
jobStart = datetime.datetime.now().timestamp()
processCount = 0
failCount = 0
jobTotal = len(stockDict)
'''
    # Run this with a thread pool because single-threaded was too slow (it turned out this didn't help at all)
executor = ThreadPoolExecutor(max_workers=1)
funcVars = []
for key,stock in stockDict.items():
        # Indexes have no minute bars, so skip minute-level RSI for indexes
if period.endswith("m") and (key.startswith("sh.000") or key.startswith("sz.399")):
continue
funcVars.append([period, key, stock["name"], startDate, endDate])
all_task = [executor.submit(computeStockRSI, funcVar[0], funcVar[1], funcVar[2], funcVar[3], funcVar[4])
for funcVar in funcVars]
for future in as_completed(all_task):
processCount = processCount + 1
if not future.result():
failCount = failCount + 1
if processCount % 100 == 0 and processCount > 0:
print ("rsi process:", processCount, " of ", jobTotal ," failed:", failCount)
'''
for key,stock in stockDict.items():
processCount = processCount + 1
        # Indexes have no minute bars, so skip minute-level RSI for indexes
if period.endswith("m") and (key.startswith("sh.000") or key.startswith("sz.399")):
continue
result = computeStockRSI(period, key, stock["name"], startDate, endDate)
if not result:
failCount = failCount + 1
if processCount % 100 == 0 and processCount > 0:
print ("rsi process:", processCount, " of ", jobTotal ," failed:", failCount)
jobFinished = datetime.datetime.now().timestamp()
createIndex(period)
print("write all stock RSI to db finished, cost:", jobFinished - jobStart)
return True
##
# Compute the RSI integral for the specified date
#
#
def computeAllRSIDataIntegrate(period, specifiedDateStr, includeST):
BaoStockUtil.customLogin()
specifiedDate = datetime.datetime.strptime(specifiedDateStr, "%Y-%m-%d")
today = datetime.date.today()
    # If the date is set in the future, clamp it to today
if specifiedDate > datetime.datetime.today():
specifiedDate = datetime.date.today()
    # Start a full year back to avoid year-boundary issues
startDate = specifiedDate - datetime.timedelta(days = 365)
    # Fetch the trading-day list, used to count periods backwards
rs = bs.query_trade_dates(start_date=datetime.datetime.strftime(startDate, "%Y-%m-%d"), end_date = specifiedDate)
BaoStockUtil.customLogout()
if rs.error_code != '0':
raise RuntimeError("交易日api调用失败了:" + rs.error_code)
tradeDates = []
    while (rs.error_code == '0') and rs.next():
row = rs.get_row_data()
if row[1] == "1":
tradeDates.append(row[0])
if len(tradeDates) == 0:
raise RuntimeError("取不到最新的交易日")
    # If the requested date is later than the newest RSI date in the DB, the data is incomplete and needs backfilling
rsiLatestDate = findLatestRSIDate(period)
rsiLatestDateStr = datetime.datetime.strftime(rsiLatestDate, "%Y-%m-%d")
if rsiLatestDate < specifiedDate:
raise RuntimeError(specifiedDateStr + " 的 " + period + " RSI的数据不存在,待补齐数据")
    # Locate the specified date's position in the trading-day list.
    # list.index raises ValueError instead of returning -1, so check membership first.
    if specifiedDateStr not in tradeDates:
        raise RuntimeError(specifiedDateStr + " 可能不是交易日")
    specifiedDateIndex = tradeDates.index(specifiedDateStr)
daysBefore = computeRSIDataStartTradeDateRange(period, specifiedDateStr)
startDateIndex = specifiedDateIndex - daysBefore
    # A negative start index means there is not enough RSI history
if startDateIndex < 0:
raise RuntimeError(period + " rsi数据不够")
startDateStr = tradeDates[startDateIndex]
print("compute rsi tradeDates from ", startDateStr, "to", specifiedDateStr)
processCount = 0
failCount = 0
startDateIndex = -1
dictStocks = KlineService.allStocks()
klineDataFrame = KlineService.readAllStockKline(period, specifiedDateStr, specifiedDateStr)
klineDataFrame = klineDataFrame.set_index("code")
klineDict = klineDataFrame.to_dict('index')
jobTotal = len(dictStocks)
rsiValueArrs = []
for i in range(0, 6):
rsiValueArrs.append([])
for key,stock in dictStocks.items():
processCount = processCount + 1
        # Indexes have no minute bars, so skip minute-level RSI for indexes
        if period.endswith("m") and stock["stockType"] != 1:
continue
        # Skip ST stocks when they are excluded
if not includeST and stock["isST"]:
continue
        # Skip delisted stocks
if "退" in stock["name"]:
continue
        # Skip STAR Market stocks (the entry threshold makes them untradeable here)
if key.startswith("sh.68"):
continue
try:
rsiDF = readRSI(period, key, startDateStr, specifiedDateStr)
rsiCount = len(rsiDF)
if rsiCount < INTEGRATE_CALC_RANGE:
raise RuntimeError("积分计算节点不够")
rsiValueArrs[0].append(key)
rsiValueArrs[1].append(stock["name"])
rsiValueArrs[2].append(klineDict[key]["closePrice"])
            # Take the most recent values for the integral calculation
rsiValueArrs[3].append(rsiDF["rsi_6"][rsiCount - INTEGRATE_CALC_RANGE : rsiCount])
rsiValueArrs[4].append(rsiDF["rsi_12"][rsiCount - INTEGRATE_CALC_RANGE : rsiCount])
rsiValueArrs[5].append(rsiDF["rsi_24"][rsiCount - INTEGRATE_CALC_RANGE : rsiCount])
except BaseException as e:
failCount = failCount + 1
print ("compute rsi integrate " + key + " error:" + str(e))
if processCount % 100 == 0 and processCount > 0:
print ("compute rsi integrate process:", processCount, " of ", jobTotal ," failed:", failCount)
rsi6Arr = np.array(rsiValueArrs[3]).reshape(-1, INTEGRATE_CALC_RANGE)
rsi6InteArr = integrateValues(rsi6Arr)
rsi12Arr = np.array(rsiValueArrs[4]).reshape(-1, INTEGRATE_CALC_RANGE)
rsi12InteArr = integrateValues(rsi12Arr)
rsi24Arr = np.array(rsiValueArrs[5]).reshape(-1, INTEGRATE_CALC_RANGE)
rsi24InteArr = integrateValues(rsi24Arr)
rsiInteDF = pd.DataFrame()
rsiInteDF["code"] = rsiValueArrs[0]
rsiInteDF["name"] = rsiValueArrs[1]
rsiInteDF["closePrice"] = rsiValueArrs[2]
rsiInteDF["rsi_inte_6"] = rsi6InteArr
rsiInteDF["rsi_inte_12"] = rsi12InteArr
rsiInteDF["rsi_inte_24"] = rsi24InteArr
return rsiInteDF
# Work out the starting trade date needed for this period's calculation.
# A trading day is 4 hours long, so a "day" here means 4 hours rather than 24.
# Each calculation needs at least INTEGRATE_CALC_RANGE (4) points; for minute bars divide by 4*60=240 minutes to get the days of data required, minimum one day.
# Daily bars need no minute conversion.
## TODO: weekly handling is still undecided, let alone monthly.
def computeRSIDataStartTradeDateRange(period, specifiedDate):
daysBefore = 0
if period.endswith("m"):
daysBefore = math.ceil(INTEGRATE_CALC_RANGE * (int(period.replace("m", "")) + 1) / (60 * 4))
elif period == "day":
daysBefore = INTEGRATE_CALC_RANGE
else:
raise RuntimeError("周期有误")
return daysBefore | zh | 0.332508 | # coding: utf-8 # In[1]: #算积分用的节点数 #日线超卖区域积分阈值 # In[3]: ## # 从数据库读指定日期RSI数据 # # ## # 写RSI数据库 # # # compute1 = datetime.datetime.now().timestamp() # compute2 = datetime.datetime.now().timestamp() # print("read stockLine:", compute2 - compute1) # 剔除日线停盘数据 # compute3 = datetime.datetime.now().timestamp() # print("compute rsi:", compute3 - compute2) # compute4 = datetime.datetime.now().timestamp() # print("write to db:", compute4 - compute3) ## # 选择不同的Kline Collection # ##添加参考线位置 # RSI超卖和超买 ## # 计算自起始日期起的RSI # # # currtime = datetime.datetime.now().timestamp() # time1 = datetime.datetime.now().timestamp() # print("clear finished:",time1 - currtime) # time2 = datetime.datetime.now().timestamp() # print("read stocks finished:",time2 - time1) #起线程池来跑,单线程太慢了, 事实证明慢个鬼 executor = ThreadPoolExecutor(max_workers=1) funcVars = [] for key,stock in stockDict.items(): #指数没有分钟线,调过指数的RSI分钟线计算 if period.endswith("m") and (key.startswith("sh.000") or key.startswith("sz.399")): continue funcVars.append([period, key, stock["name"], startDate, endDate]) all_task = [executor.submit(computeStockRSI, funcVar[0], funcVar[1], funcVar[2], funcVar[3], funcVar[4]) for funcVar in funcVars] for future in as_completed(all_task): processCount = processCount + 1 if not future.result(): failCount = failCount + 1 if processCount % 100 == 0 and processCount > 0: print ("rsi process:", processCount, " of ", jobTotal ," failed:", failCount) #指数没有分钟线,调过指数的RSI分钟线计算 ## # 计算指定日期的RSI积分 # # #如果把时间设成未来,自动调成今天 #避免跨年问题,直接从去年开始取 #取交易日列表,用作倒推周期使用 #若期望计算的日期比库里RSI最新日期还晚,数据不全待补齐 #找到指定日期以及rsi存量数据最近日期在交易日周期的序号 #起始日期index负数,说明rsi数据不够 #指数没有分钟线,跳过指数的RSI分钟线计算 #如果不计算ST,跳过 #退市股就不要算了 #科创板不达门槛没法买,不看 #取最近的数据用于计算积分 #算出计算本周期下指定数据需要的起始交易日 #每个交易日一共4小时,所以取4小时为一天,而不是24小时 #每个计算周期一共至少需要4个节点,分钟线RSI统一除以4*60=240分钟算出所需计算数据天数,最少为一天 #日线不用除分钟 ## TODO 周线没想好怎么算,更别说月线了。 | 2.300597 | 2 |
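The heart of the row above is TA-Lib RSI plus a trapezoidal integral over the last few values. Below is a self-contained sketch of just that technique; the random walk stands in for real close prices and the 200-bar length is arbitrary.

```python
# Standalone sketch of the RSI-integral technique used above.
import numpy as np
import talib as ta
from scipy import integrate

INTEGRATE_CALC_RANGE = 4  # same node count as in the module above

close = 100 + np.random.randn(200).cumsum()   # synthetic close prices
rsi_12 = ta.RSI(close, timeperiod=12)

tail = rsi_12[-INTEGRATE_CALC_RANGE:]         # most recent RSI values
area = integrate.trapz(tail, dx=1.0)          # larger area = RSI stayed higher
print(np.round(tail, 2), "->", round(area, 2))
```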
osnoise/conf/base.py | abousselmi/OSNoise | 4 | 7525 |
# Copyright 2016 Orange
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
base_options = [
cfg.StrOpt(
'log_file_name',
default='osnoise.log',
help='Osnoise file name.'),
cfg.StrOpt(
'log_dir',
default='/var/log/osnoise/',
help='Osnoise log directory.'),
cfg.StrOpt(
'log_level',
default='info',
help='Log level.'),
cfg.StrOpt(
'log_file',
default='/var/log/osnoise/osnoise.log',
help='Log file'),
cfg.IntOpt(
'log_maxBytes',
default=1000000,
min=1000,
        help='Maximum log file size in bytes before rollover.'),
cfg.IntOpt(
'log_backupCount',
default=5,
min=1,
        help='Number of rotated log files to keep.'),
cfg.BoolOpt('log_config_append',
default=False,
deprecated_group='DEFAULT',
help='To append logs to existent log file or not.'),
]
def register_opts(conf):
conf.register_opts(base_options)
def list_opts():
return {'DEFAULT' : base_options} | # Copyright 2016 Orange
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
base_options = [
cfg.StrOpt(
'log_file_name',
default='osnoise.log',
help='Osnoise file name.'),
cfg.StrOpt(
'log_dir',
default='/var/log/osnoise/',
help='Osnoise log directory.'),
cfg.StrOpt(
'log_level',
default='info',
help='Log level.'),
cfg.StrOpt(
'log_file',
default='/var/log/osnoise/osnoise.log',
help='Log file'),
cfg.IntOpt(
'log_maxBytes',
default=1000000,
min=1000,
        help='Maximum log file size in bytes before rollover.'),
cfg.IntOpt(
'log_backupCount',
default=5,
min=1,
        help='Number of rotated log files to keep.'),
cfg.BoolOpt('log_config_append',
default=False,
deprecated_group='DEFAULT',
help='To append logs to existent log file or not.'),
]
def register_opts(conf):
conf.register_opts(base_options)
def list_opts():
return {'DEFAULT' : base_options} | en | 0.853211 | # Copyright 2016 Orange # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. | 1.696526 | 2 |
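For context, a short sketch of how these options would be consumed with `oslo_config`. Registering on a private `ConfigOpts` instance avoids touching the global `cfg.CONF`; the `osnoise.conf.base` import path is assumed from the file location above.

```python
# Hypothetical consumer of the options module above.
from oslo_config import cfg
from osnoise.conf import base  # import path assumed from the file location

conf = cfg.ConfigOpts()
base.register_opts(conf)
conf([])  # parse an empty argv, keeping the declared defaults

print(conf.log_file_name)  # -> osnoise.log
print(conf.log_maxBytes)   # -> 1000000
```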
src/orionsensor/gui/sensors/proximitysensor.py | Ginkooo/ORION-sensor-visualizer | 0 | 7526 |
from kivy.properties import NumericProperty
from gui.sensors.sensor import Sensor
import config
class ProximitySensor(Sensor):
"""Proximity sensor view"""
# maximum possible reading
max = NumericProperty(config.ProximitySensor.max)
# minimum possible reading
min = NumericProperty(config.ProximitySensor.min)
 |
from kivy.properties import NumericProperty
from gui.sensors.sensor import Sensor
import config
class ProximitySensor(Sensor):
"""Proximity sensor view"""
# maximum possible reading
max = NumericProperty(config.ProximitySensor.max)
# minimum possible reading
min = NumericProperty(config.ProximitySensor.min)
| en | 0.713309 | Proximity sensor view # maximum possible reading # minimum possible reading | 2.190569 | 2 |
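The view binds its bounds to a `config` module that is not shown. The sketch below fakes a minimal stand-in (the values are invented placeholders) and shows the clamping you would typically apply before feeding a raw reading to such a gauge.

```python
# Invented stand-in for the config module referenced above.
class ProximitySensor:
    max = 1023  # placeholder ADC ceiling
    min = 0

def clamp(reading, lo=ProximitySensor.min, hi=ProximitySensor.max):
    """Clamp a raw reading into the sensor's displayable range."""
    return max(lo, min(hi, reading))

print(clamp(2000))  # -> 1023
print(clamp(-5))    # -> 0
```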
Cogs/HelpCommand.py | gudtldn/DiscordStockBot | 1 | 7527 |
# Help command
import discord
from discord.ext import commands
from discord.ext.commands import Context
from define import *
class HelpCommand_Context(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name="도움말", aliases=["명령어", "?"])
@CommandExecutionTime
async def _HelpCommand(self, ctx: Context, command: str=None):
logger.info(f"[{type(ctx)}] {ctx.author.name}: {ctx.invoked_with} {command}")
if ctx.guild is None:
logger.info("Guild is None")
return
if command is not None:
if command.startswith("."):
command = command.replace(".", "", 1)
if command is None:
embed = discord.Embed(title="도움말", description="[] <-- 필수 입력항목 | <> <-- 선택 입력항목", color=RandomEmbedColor())
embed.add_field(name=".사용자등록", value="데이터 베이스에 사용자를 등록합니다.", inline=False)
embed.add_field(name=".자산정보", value="현재 자신의 자산정보를 확인합니다.", inline=False)
embed.add_field(name=".주가", value="현재 주가를 검색합니다.", inline=False)
embed.add_field(name=".매수", value="입력한 기업의 주식을 매수합니다.", inline=False)
embed.add_field(name=".매도", value="입력한 기업의 주식을 매도합니다.", inline=False)
embed.add_field(name=".지원금", value="1만원 ~ 10만원 사이의 돈을 랜덤으로 지급합니다.", inline=False)
embed.add_field(name=".초기화", value="자신의 자산정보를 초기화 합니다.", inline=False)
embed.add_field(name=".탈퇴", value="이 봇에 저장되어있는 사용자의 정보를 삭제합니다.", inline=False)
embed.add_field(name=".개인설정", value="개인설정을 확인 또는 수정합니다.", inline=False)
embed.add_field(name=".단축어설정", value="단축어목록을 확인하거나, 추가 또는 제거합니다.", inline=False)
embed.add_field(name=".관심종목", value="관심종목에 추가된 주식의 가격을 확인하거나, 추가 또는 제거합니다.", inline=False)
embed.set_footer(text="명령어를 자세히 보려면 「.도움말 <명령어 이름>」 을 써 주세요.")
await ctx.reply(embed=embed)
return
elif command in ("도움말", "명령어", "?"):
command_list = ["도움말", "명령어", "?"]
command_list.remove(command)
embed = discord.Embed(title="도움말", description="등록되어있는 명령어들을 출력합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("사용자등록", "등록"):
command_list = ["사용자등록", "등록"]
command_list.remove(command)
embed = discord.Embed(title="사용자등록", description="데이터 베이스에 사용자를 등록합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("자산정보", "자산조회"):
command_list = ["자산정보", "자산조회"]
command_list.remove(command)
embed = discord.Embed(title="자산정보", description="자신의 자산정보를 확인합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".자산정보 <@유저>", value="@유저의 자산정보를 확인합니다.", inline=False)
embed.add_field(name=".자산정보 <랭킹 | 순위>", value="이 서버에 있는 유저의 자산랭킹을 나열합니다.", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("주가", "시세"):
command_list = ["주가", "시세"]
command_list.remove(command)
embed = discord.Embed(title="주가", description="입력한 기업의 현재 주가를 확인합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".주가 [기업이름 | 기업번호]", value="기업이름 또는 기업번호로 검색합니다.", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("매수", "구매", "주식구매", "주식매수"):
command_list = ["매수", "구매", "주식구매", "주식매수"]
command_list.remove(command)
embed = discord.Embed(title="매수", description="입력한 기업의 주식을 매수합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".매수 [기업이름 | 기업번호] [매수 할 주식 개수]", value="입력한 기업의 주식을, 주식 개수만큼 매수합니다.", inline=False)
embed.add_field(name=".매수 [기업이름 | 기업번호] [풀매수 | 모두]", value="입력한 기업의 주식을 최대까지 매수합니다.", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("매도", "판매", "주식판매", "주식매도"):
command_list = ["매도", "판매", "주식판매", "주식매도"]
command_list.remove(command)
embed = discord.Embed(title="매도", description="입력한 기업의 주식을 매도합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".매도 [기업이름 | 기업번호] [매도 할 주식 개수]", value="입력한 기업의 주식을, 주식 개수만큼 매도합니다.", inline=False)
embed.add_field(name=".매도 [기업이름 | 기업번호] [반매도]", value="입력한 기업의 주식의 절반을 매도합니다.", inline=False)
embed.add_field(name=".매도 [기업이름 | 기업번호] [풀매도 | 모두]", value="입력한 기업의 주식을 모두 매도합니다.", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("지원금", "돈받기"):
command_list = ["지원금", "돈받기"]
command_list.remove(command)
embed = discord.Embed(title="지원금", description="1만원 ~ 10만원 사이의 돈을 랜덤으로 지급합니다. (쿨타임: 4시간)", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
await ctx.reply(embed=embed)
return
elif command == "초기화":
embed = discord.Embed(title="초기화", description="「초기화확인」를 입력해 자신의 자산정보를 초기화 합니다.", color=RandomEmbedColor())
embed.add_field(name=".초기화 [확인문구]", value="확인문구에는 「초기화확인」를 입력해 주세요.")
await ctx.reply(embed=embed)
return
elif command in ("탈퇴", "회원탈퇴"):
command_list = ["탈퇴", "회원탈퇴"]
command_list.remove(command)
embed = discord.Embed(title="탈퇴", description="「탈퇴확인」를 입력해 데이터 베이스에서 자신의 자산정보를 삭제합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".탈퇴 [확인문구]", value="확인문구에는 「탈퇴확인」를 입력해 주세요.")
await ctx.reply(embed=embed)
return
elif command in ("개인설정", "설정"):
command_list = ["개인설정", "설정"]
command_list.remove(command)
embed = discord.Embed(title="개인설정", description="개인설정을 확인 또는 수정합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".개인설정 설정정보", value="설정할 수 있는 목록을 확인합니다.", inline=False)
embed.add_field(name=".개인설정 자산정보 [true | false]", value="자산정보 공개여부를 설정합니다.", inline=False)
embed.add_field(name=".개인설정 지원금표시 [true | false]", value="지원금으로 얻은 돈 표시여부를 설정합니다.", inline=False)
embed.add_field(name=".개인설정 차트표시 [true | false]", value="`주가` 명령어에 차트를 표시합니다.", inline=False)
embed.add_field(name=".개인설정 쿨타임표시 [true | false]", value="`지원금` 명령어에 쿨타임을 바로 표시합니다.", inline=False)
embed.add_field(name=".개인설정 어제대비가격 [true | false]", value="`자산정보` 명령어에 현재 주가 대신, 어제 대비 가격을 표시합니다.", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("단축어설정", "단축어"):
command_list = ["단축어설정", "단축어"]
command_list.remove(command)
embed = discord.Embed(title="단축어설정", description="단축어목록을 확인하거나, 추가 또는 제거합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".단축어설정 목록", value="자신의 단축어 목록을 확인합니다.", inline=False)
embed.add_field(name=".단축어설정 추가 -이름 [기업이름] -번호 [기업번호]", value="단축어 목록에 단축어를 새로 추가합니다.\n\
사용 예: `.단축어 추가 -이름 삼전 -번호 005930`", inline=False)
embed.add_field(name=".단축어설정 추가 -번호 [기업번호]", value="단축어 목록에 단축어를 새로 추가합니다.(이름은 기업이름으로 설정됩니다)\n\
사용 예: `.단축어 추가 -번호 005930`", inline=False)
embed.add_field(name=".단축어설정 제거 -이름 [기업이름]", value="단축어 목록에 있는 단축어를 제거합니다.\n\
사용 예: `.단축어 제거 -이름 삼전`", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("관심종목", "관심"):
command_list = ["관심종목", "관심"]
command_list.remove(command)
embed = discord.Embed(title="관심종목", description="관심종목에 추가된 주식의 가격을 확인하거나, 추가 또는 제거합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".관심종목 주가", value="관심종목에 추가된 주식의 주가를 나열합니다.", inline=False)
embed.add_field(name=".관심종목 추가", value="관심종목에 주식을 추가합니다.", inline=False)
embed.add_field(name=".관심종목 제거", value="관심종목에서 주식을 제거합니다.", inline=False)
await ctx.reply(embed=embed)
return
else:
await ctx.reply("알 수 없는 명령어 입니다.")
return
def setup(bot: commands.Bot):
    bot.add_cog(HelpCommand_Context(bot)) | # Help command
import discord
from discord.ext import commands
from discord.ext.commands import Context
from define import *
class HelpCommand_Context(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name="도움말", aliases=["명령어", "?"])
@CommandExecutionTime
async def _HelpCommand(self, ctx: Context, command: str=None):
logger.info(f"[{type(ctx)}] {ctx.author.name}: {ctx.invoked_with} {command}")
if ctx.guild is None:
logger.info("Guild is None")
return
if command is not None:
if command.startswith("."):
command = command.replace(".", "", 1)
if command is None:
embed = discord.Embed(title="도움말", description="[] <-- 필수 입력항목 | <> <-- 선택 입력항목", color=RandomEmbedColor())
embed.add_field(name=".사용자등록", value="데이터 베이스에 사용자를 등록합니다.", inline=False)
embed.add_field(name=".자산정보", value="현재 자신의 자산정보를 확인합니다.", inline=False)
embed.add_field(name=".주가", value="현재 주가를 검색합니다.", inline=False)
embed.add_field(name=".매수", value="입력한 기업의 주식을 매수합니다.", inline=False)
embed.add_field(name=".매도", value="입력한 기업의 주식을 매도합니다.", inline=False)
embed.add_field(name=".지원금", value="1만원 ~ 10만원 사이의 돈을 랜덤으로 지급합니다.", inline=False)
embed.add_field(name=".초기화", value="자신의 자산정보를 초기화 합니다.", inline=False)
embed.add_field(name=".탈퇴", value="이 봇에 저장되어있는 사용자의 정보를 삭제합니다.", inline=False)
embed.add_field(name=".개인설정", value="개인설정을 확인 또는 수정합니다.", inline=False)
embed.add_field(name=".단축어설정", value="단축어목록을 확인하거나, 추가 또는 제거합니다.", inline=False)
embed.add_field(name=".관심종목", value="관심종목에 추가된 주식의 가격을 확인하거나, 추가 또는 제거합니다.", inline=False)
embed.set_footer(text="명령어를 자세히 보려면 「.도움말 <명령어 이름>」 을 써 주세요.")
await ctx.reply(embed=embed)
return
elif command in ("도움말", "명령어", "?"):
command_list = ["도움말", "명령어", "?"]
command_list.remove(command)
embed = discord.Embed(title="도움말", description="등록되어있는 명령어들을 출력합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("사용자등록", "등록"):
command_list = ["사용자등록", "등록"]
command_list.remove(command)
embed = discord.Embed(title="사용자등록", description="데이터 베이스에 사용자를 등록합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("자산정보", "자산조회"):
command_list = ["자산정보", "자산조회"]
command_list.remove(command)
embed = discord.Embed(title="자산정보", description="자신의 자산정보를 확인합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".자산정보 <@유저>", value="@유저의 자산정보를 확인합니다.", inline=False)
embed.add_field(name=".자산정보 <랭킹 | 순위>", value="이 서버에 있는 유저의 자산랭킹을 나열합니다.", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("주가", "시세"):
command_list = ["주가", "시세"]
command_list.remove(command)
embed = discord.Embed(title="주가", description="입력한 기업의 현재 주가를 확인합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".주가 [기업이름 | 기업번호]", value="기업이름 또는 기업번호로 검색합니다.", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("매수", "구매", "주식구매", "주식매수"):
command_list = ["매수", "구매", "주식구매", "주식매수"]
command_list.remove(command)
embed = discord.Embed(title="매수", description="입력한 기업의 주식을 매수합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".매수 [기업이름 | 기업번호] [매수 할 주식 개수]", value="입력한 기업의 주식을, 주식 개수만큼 매수합니다.", inline=False)
embed.add_field(name=".매수 [기업이름 | 기업번호] [풀매수 | 모두]", value="입력한 기업의 주식을 최대까지 매수합니다.", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("매도", "판매", "주식판매", "주식매도"):
command_list = ["매도", "판매", "주식판매", "주식매도"]
command_list.remove(command)
embed = discord.Embed(title="매도", description="입력한 기업의 주식을 매도합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".매도 [기업이름 | 기업번호] [매도 할 주식 개수]", value="입력한 기업의 주식을, 주식 개수만큼 매도합니다.", inline=False)
embed.add_field(name=".매도 [기업이름 | 기업번호] [반매도]", value="입력한 기업의 주식의 절반을 매도합니다.", inline=False)
embed.add_field(name=".매도 [기업이름 | 기업번호] [풀매도 | 모두]", value="입력한 기업의 주식을 모두 매도합니다.", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("지원금", "돈받기"):
command_list = ["지원금", "돈받기"]
command_list.remove(command)
embed = discord.Embed(title="지원금", description="1만원 ~ 10만원 사이의 돈을 랜덤으로 지급합니다. (쿨타임: 4시간)", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
await ctx.reply(embed=embed)
return
elif command == "초기화":
embed = discord.Embed(title="초기화", description="「초기화확인」를 입력해 자신의 자산정보를 초기화 합니다.", color=RandomEmbedColor())
embed.add_field(name=".초기화 [확인문구]", value="확인문구에는 「초기화확인」를 입력해 주세요.")
await ctx.reply(embed=embed)
return
elif command in ("탈퇴", "회원탈퇴"):
command_list = ["탈퇴", "회원탈퇴"]
command_list.remove(command)
embed = discord.Embed(title="탈퇴", description="「탈퇴확인」를 입력해 데이터 베이스에서 자신의 자산정보를 삭제합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".탈퇴 [확인문구]", value="확인문구에는 「탈퇴확인」를 입력해 주세요.")
await ctx.reply(embed=embed)
return
elif command in ("개인설정", "설정"):
command_list = ["개인설정", "설정"]
command_list.remove(command)
embed = discord.Embed(title="개인설정", description="개인설정을 확인 또는 수정합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".개인설정 설정정보", value="설정할 수 있는 목록을 확인합니다.", inline=False)
embed.add_field(name=".개인설정 자산정보 [true | false]", value="자산정보 공개여부를 설정합니다.", inline=False)
embed.add_field(name=".개인설정 지원금표시 [true | false]", value="지원금으로 얻은 돈 표시여부를 설정합니다.", inline=False)
embed.add_field(name=".개인설정 차트표시 [true | false]", value="`주가` 명령어에 차트를 표시합니다.", inline=False)
embed.add_field(name=".개인설정 쿨타임표시 [true | false]", value="`지원금` 명령어에 쿨타임을 바로 표시합니다.", inline=False)
embed.add_field(name=".개인설정 어제대비가격 [true | false]", value="`자산정보` 명령어에 현재 주가 대신, 어제 대비 가격을 표시합니다.", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("단축어설정", "단축어"):
command_list = ["단축어설정", "단축어"]
command_list.remove(command)
embed = discord.Embed(title="단축어설정", description="단축어목록을 확인하거나, 추가 또는 제거합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".단축어설정 목록", value="자신의 단축어 목록을 확인합니다.", inline=False)
embed.add_field(name=".단축어설정 추가 -이름 [기업이름] -번호 [기업번호]", value="단축어 목록에 단축어를 새로 추가합니다.\n\
사용 예: `.단축어 추가 -이름 삼전 -번호 005930`", inline=False)
embed.add_field(name=".단축어설정 추가 -번호 [기업번호]", value="단축어 목록에 단축어를 새로 추가합니다.(이름은 기업이름으로 설정됩니다)\n\
사용 예: `.단축어 추가 -번호 005930`", inline=False)
embed.add_field(name=".단축어설정 제거 -이름 [기업이름]", value="단축어 목록에 있는 단축어를 제거합니다.\n\
사용 예: `.단축어 제거 -이름 삼전`", inline=False)
await ctx.reply(embed=embed)
return
elif command in ("관심종목", "관심"):
command_list = ["관심종목", "관심"]
command_list.remove(command)
embed = discord.Embed(title="관심종목", description="관심종목에 추가된 주식의 가격을 확인하거나, 추가 또는 제거합니다.", color=RandomEmbedColor())
embed.add_field(name="다른이름", value=f"{', '.join(command_list)}", inline=False)
embed.add_field(name=".관심종목 주가", value="관심종목에 추가된 주식의 주가를 나열합니다.", inline=False)
embed.add_field(name=".관심종목 추가", value="관심종목에 주식을 추가합니다.", inline=False)
embed.add_field(name=".관심종목 제거", value="관심종목에서 주식을 제거합니다.", inline=False)
await ctx.reply(embed=embed)
return
else:
await ctx.reply("알 수 없는 명령어 입니다.")
return
def setup(bot: commands.Bot):
bot.add_cog(HelpCommand_Context(bot)) | none | 1 | 2.481777 | 2 |
|
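The long `if/elif` chain above maps several aliases to one help entry per command. A table-driven sketch of the same dispatch, trimmed to two entries with descriptions copied from the embeds above, shows how the chain could collapse into a dict lookup:

```python
from typing import Optional

# Alias tuples map to one shared description each; entries trimmed for brevity.
HELP_ENTRIES = {
    ("도움말", "명령어", "?"): "등록되어있는 명령어들을 출력합니다.",
    ("매수", "구매", "주식구매", "주식매수"): "입력한 기업의 주식을 매수합니다.",
}

def find_help(command: str) -> Optional[str]:
    for aliases, description in HELP_ENTRIES.items():
        if command in aliases:
            return description
    return None

print(find_help("구매"))  # -> 입력한 기업의 주식을 매수합니다.
```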
test/test_model/cprofile_test.py | SupermeLC/PyNeval | 12 | 7528 |
import cProfile
import pstats
import os
# Profiling decorator definition
def do_cprofile(filename):
"""
Decorator for function profiling.
"""
def wrapper(func):
def profiled_func(*args, **kwargs):
# Flag for do profiling or not.
DO_PROF = False
if DO_PROF:
profile = cProfile.Profile()
profile.enable()
result = func(*args, **kwargs)
profile.disable()
# Sort stat by internal time.
sortby = "tottime"
ps = pstats.Stats(profile).sort_stats(sortby)
ps.dump_stats(filename)
else:
result = func(*args, **kwargs)
return result
return profiled_func
    return wrapper |
import cProfile
import pstats
import os
# Profiling decorator definition
def do_cprofile(filename):
"""
Decorator for function profiling.
"""
def wrapper(func):
def profiled_func(*args, **kwargs):
# Flag for do profiling or not.
DO_PROF = False
if DO_PROF:
profile = cProfile.Profile()
profile.enable()
result = func(*args, **kwargs)
profile.disable()
# Sort stat by internal time.
sortby = "tottime"
ps = pstats.Stats(profile).sort_stats(sortby)
ps.dump_stats(filename)
else:
result = func(*args, **kwargs)
return result
return profiled_func
return wrapper | en | 0.710747 | # 性能分析装饰器定义 Decorator for function profiling. # Flag for do profiling or not. # Sort stat by internal time. | 2.565689 | 3 |
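A usage sketch for `do_cprofile`, assuming the hardcoded `DO_PROF` flag has been flipped to `True` so a stats file is actually written (with the flag left `False`, no dump happens and the `pstats` read would fail):

```python
# Assumes DO_PROF = True inside do_cprofile; otherwise no file is dumped.
import pstats

@do_cprofile("slow_sum.prof")
def slow_sum(n):
    return sum(i * i for i in range(n))

slow_sum(1_000_000)
stats = pstats.Stats("slow_sum.prof")
stats.sort_stats("tottime").print_stats(5)  # top five functions by own time
```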
renku/core/commands/providers/api.py | cyberhck/renku-python | 0 | 7529 |
# Copyright 2019 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API for providers."""
import abc
class ProviderApi(abc.ABC):
"""Interface defining provider methods."""
@abc.abstractmethod
def find_record(self, uri):
"""Find record by uri."""
pass
@abc.abstractmethod
def get_exporter(self, dataset, secret):
"""Get export manager."""
pass
@staticmethod
@abc.abstractmethod
def supports(uri):
"""Whether or not this provider supports a given uri."""
pass
class ExporterApi(abc.ABC):
"""Interface defining exporter methods."""
@abc.abstractmethod
def set_access_token(self, access_token):
"""Set access token."""
pass
@abc.abstractmethod
def access_token_url(self):
"""Endpoint for creation of access token."""
pass
@abc.abstractmethod
def export(self, publish):
"""Execute export process."""
pass
 |
# Copyright 2019 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API for providers."""
import abc
class ProviderApi(abc.ABC):
"""Interface defining provider methods."""
@abc.abstractmethod
def find_record(self, uri):
"""Find record by uri."""
pass
@abc.abstractmethod
def get_exporter(self, dataset, secret):
"""Get export manager."""
pass
@staticmethod
@abc.abstractmethod
def supports(uri):
"""Whether or not this provider supports a given uri."""
pass
class ExporterApi(abc.ABC):
"""Interface defining exporter methods."""
@abc.abstractmethod
def set_access_token(self, access_token):
"""Set access token."""
pass
@abc.abstractmethod
def access_token_url(self):
"""Endpoint for creation of access token."""
pass
@abc.abstractmethod
def export(self, publish):
"""Execute export process."""
pass
| en | 0.790732 | # Copyright 2019 - Swiss Data Science Center (SDSC) # A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. API for providers. Interface defining provider methods. Find record by uri. Get export manager. Whether or not this provider supports a given uri. Interface defining exporter methods. Set access token. Endpoint for creation of access token. Execute export process. | 1.978974 | 2 |
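A hypothetical concrete provider against the interface above; the `example.org` scheme check and the returned record are invented for illustration only.

```python
class ExampleProvider(ProviderApi):
    """Toy provider: only the URI check and a fake record are implemented."""

    def find_record(self, uri):
        return {"uri": uri, "title": "demo record"}

    def get_exporter(self, dataset, secret):
        raise NotImplementedError("export not supported by this toy provider")

    @staticmethod
    def supports(uri):
        return uri.startswith("https://example.org/")

provider = ExampleProvider()
assert provider.supports("https://example.org/datasets/1")
print(provider.find_record("https://example.org/datasets/1"))
```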
cattr/__init__.py | bluetech/cattrs | 1 | 7530 |
# -*- coding: utf-8 -*-
from .converters import Converter, UnstructureStrategy
__all__ = ('global_converter', 'unstructure', 'structure',
'structure_attrs_fromtuple', 'structure_attrs_fromdict',
'UnstructureStrategy')
__author__ = '<NAME>'
__email__ = '<EMAIL>'
global_converter = Converter()
unstructure = global_converter.unstructure
structure = global_converter.structure
structure_attrs_fromtuple = global_converter.structure_attrs_fromtuple
structure_attrs_fromdict = global_converter.structure_attrs_fromdict
register_structure_hook = global_converter.register_structure_hook
register_structure_hook_func = global_converter.register_structure_hook_func
register_unstructure_hook = global_converter.register_unstructure_hook
register_unstructure_hook_func = \
global_converter.register_unstructure_hook_func
 |
# -*- coding: utf-8 -*-
from .converters import Converter, UnstructureStrategy
__all__ = ('global_converter', 'unstructure', 'structure',
'structure_attrs_fromtuple', 'structure_attrs_fromdict',
'UnstructureStrategy')
__author__ = '<NAME>'
__email__ = '<EMAIL>'
global_converter = Converter()
unstructure = global_converter.unstructure
structure = global_converter.structure
structure_attrs_fromtuple = global_converter.structure_attrs_fromtuple
structure_attrs_fromdict = global_converter.structure_attrs_fromdict
register_structure_hook = global_converter.register_structure_hook
register_structure_hook_func = global_converter.register_structure_hook_func
register_unstructure_hook = global_converter.register_unstructure_hook
register_unstructure_hook_func = \
global_converter.register_unstructure_hook_func
| en | 0.769321 | # -*- coding: utf-8 -*- | 1.920058 | 2 |
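A minimal round trip through the re-exported API above: an `attrs` class is unstructured to a plain dict and structured back.

```python
import attr
import cattr

@attr.s(auto_attribs=True)
class Point:
    x: int
    y: int

d = cattr.unstructure(Point(1, 2))
print(d)                          # {'x': 1, 'y': 2}
print(cattr.structure(d, Point))  # Point(x=1, y=2)
```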
vega/security/run_dask.py | zjzh/vega | 0 | 7531 | # -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run dask scheduler and worker."""
import os
import subprocess
import shutil
import logging
import socket
import random
from distributed import Client
from distributed.security import Security
from .conf import get_config
from .verify_cert import verify_cert
sec_cfg = get_config('server')
def get_client_security(address):
"""Get client."""
address = address.replace("tcp", "tls")
if not verify_cert(sec_cfg.ca_cert, sec_cfg.client_cert_dask):
logging.error(f"The cert {sec_cfg.ca_cert} and {sec_cfg.client_cert_dask} are invalid, please check.")
sec = Security(tls_ca_file=sec_cfg.ca_cert,
tls_client_cert=sec_cfg.client_cert_dask,
tls_client_key=sec_cfg.client_secret_key_dask,
require_encryption=True)
return Client(address, security=sec)
def get_address_security(master_host, master_port):
"""Get address."""
return "tls://{}:{}".format(master_host, master_port)
def run_scheduler_security(ip, port, tmp_file):
"""Run scheduler."""
if not verify_cert(sec_cfg.ca_cert, sec_cfg.server_cert_dask):
logging.error(f"The cert {sec_cfg.ca_cert} and {sec_cfg.server_cert_dask} are invalid, please check.")
return subprocess.Popen(
[
"dask-scheduler",
"--no-dashboard",
"--no-show",
f"--tls-ca-file={sec_cfg.ca_cert}",
f"--tls-cert={sec_cfg.server_cert_dask}",
f"--tls-key={sec_cfg.server_secret_key_dask}",
f"--host={ip}",
"--protocol=tls",
f"--port={port}",
f"--scheduler-file={tmp_file}",
f"--local-directory={os.path.dirname(tmp_file)}",
],
env=os.environ
)
def _available_port(min_port, max_port) -> int:
_sock = socket.socket()
while True:
port = random.randint(min_port, max_port)
try:
_sock.bind(('', port))
_sock.close()
return port
except Exception:
logging.debug('Failed to get available port, continue.')
continue
def run_local_worker_security(slave_ip, address, local_dir):
"""Run dask-worker on local node."""
address = address.replace("tcp", "tls")
nanny_port = _available_port(30000, 30999)
worker_port = _available_port(29000, 29999)
pid = subprocess.Popen(
[
"dask-worker",
address,
'--nthreads=1',
'--nprocs=1',
'--memory-limit=0',
f"--local-directory={local_dir}",
f"--tls-ca-file={sec_cfg.ca_cert}",
f"--tls-cert={sec_cfg.client_cert_dask}",
f"--tls-key={sec_cfg.client_secret_key_dask}",
"--no-dashboard",
f"--host={slave_ip}",
"--protocol=tls",
f"--nanny-port={nanny_port}",
f"--worker-port={worker_port}",
],
env=os.environ
)
return pid
def run_remote_worker_security(slave_ip, address, local_dir):
"""Run dask-worker on remote node."""
address = address.replace("tcp", "tls")
nanny_port = _available_port(30000, 30999)
worker_port = _available_port(29000, 29999)
pid = subprocess.Popen(
[
"ssh",
slave_ip,
shutil.which("dask-worker"),
address,
'--nthreads=1',
'--nprocs=1',
'--memory-limit=0',
f"--local-directory={local_dir}",
f"--tls-ca-file={sec_cfg.ca_cert}",
f"--tls-cert={sec_cfg.client_cert_dask}",
f"--tls-key={sec_cfg.client_secret_key_dask}",
"--no-dashboard",
f"--host={slave_ip}",
"--protocol=tls",
f"--nanny-port={nanny_port}",
f"--worker-port={worker_port}",
],
env=os.environ
)
return pid
| # -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run dask scheduler and worker."""
import os
import subprocess
import shutil
import logging
import socket
import random
from distributed import Client
from distributed.security import Security
from .conf import get_config
from .verify_cert import verify_cert
sec_cfg = get_config('server')
def get_client_security(address):
"""Get client."""
address = address.replace("tcp", "tls")
if not verify_cert(sec_cfg.ca_cert, sec_cfg.client_cert_dask):
logging.error(f"The cert {sec_cfg.ca_cert} and {sec_cfg.client_cert_dask} are invalid, please check.")
sec = Security(tls_ca_file=sec_cfg.ca_cert,
tls_client_cert=sec_cfg.client_cert_dask,
tls_client_key=sec_cfg.client_secret_key_dask,
require_encryption=True)
return Client(address, security=sec)
def get_address_security(master_host, master_port):
"""Get address."""
return "tls://{}:{}".format(master_host, master_port)
def run_scheduler_security(ip, port, tmp_file):
"""Run scheduler."""
if not verify_cert(sec_cfg.ca_cert, sec_cfg.server_cert_dask):
logging.error(f"The cert {sec_cfg.ca_cert} and {sec_cfg.server_cert_dask} are invalid, please check.")
return subprocess.Popen(
[
"dask-scheduler",
"--no-dashboard",
"--no-show",
f"--tls-ca-file={sec_cfg.ca_cert}",
f"--tls-cert={sec_cfg.server_cert_dask}",
f"--tls-key={sec_cfg.server_secret_key_dask}",
f"--host={ip}",
"--protocol=tls",
f"--port={port}",
f"--scheduler-file={tmp_file}",
f"--local-directory={os.path.dirname(tmp_file)}",
],
env=os.environ
)
def _available_port(min_port, max_port) -> int:
_sock = socket.socket()
while True:
port = random.randint(min_port, max_port)
try:
_sock.bind(('', port))
_sock.close()
return port
except Exception:
logging.debug('Failed to get available port, continue.')
continue
def run_local_worker_security(slave_ip, address, local_dir):
"""Run dask-worker on local node."""
address = address.replace("tcp", "tls")
nanny_port = _available_port(30000, 30999)
worker_port = _available_port(29000, 29999)
pid = subprocess.Popen(
[
"dask-worker",
address,
'--nthreads=1',
'--nprocs=1',
'--memory-limit=0',
f"--local-directory={local_dir}",
f"--tls-ca-file={sec_cfg.ca_cert}",
f"--tls-cert={sec_cfg.client_cert_dask}",
f"--tls-key={sec_cfg.client_secret_key_dask}",
"--no-dashboard",
f"--host={slave_ip}",
"--protocol=tls",
f"--nanny-port={nanny_port}",
f"--worker-port={worker_port}",
],
env=os.environ
)
return pid
def run_remote_worker_security(slave_ip, address, local_dir):
"""Run dask-worker on remote node."""
address = address.replace("tcp", "tls")
nanny_port = _available_port(30000, 30999)
worker_port = _available_port(29000, 29999)
pid = subprocess.Popen(
[
"ssh",
slave_ip,
shutil.which("dask-worker"),
address,
'--nthreads=1',
'--nprocs=1',
'--memory-limit=0',
f"--local-directory={local_dir}",
f"--tls-ca-file={sec_cfg.ca_cert}",
f"--tls-cert={sec_cfg.client_cert_dask}",
f"--tls-key={sec_cfg.client_secret_key_dask}",
"--no-dashboard",
f"--host={slave_ip}",
"--protocol=tls",
f"--nanny-port={nanny_port}",
f"--worker-port={worker_port}",
],
env=os.environ
)
return pid
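# Illustrative usage sketch (not part of the original module): wiring the
# helpers above together on one machine. Host, port and directory values
# below are assumptions chosen for the example only.
def _demo_secure_cluster(host='127.0.0.1', port=8786):
    scheduler = run_scheduler_security(host, port, '/tmp/scheduler.json')
    worker = run_local_worker_security(host, get_address_security(host, port), '/tmp/dask-local')
    client = get_client_security(get_address_security(host, port))
    return scheduler, worker, client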
| en | 0.835108 | # -*- coding: utf-8 -*- # Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Run dask scheduler and worker. Get client. Get address. Run scheduler. Run dask-worker on local node. Run dask-worker on remote node. | 2.02716 | 2 |
MISSGANvsStarGAN/core/solver.py | NoaBrazilay/DeepLearningProject | 2 | 7532 | """
StarGAN v2
Copyright (c) 2020-present NAVER Corp.
This work is licensed under the Creative Commons Attribution-NonCommercial
4.0 International License. To view a copy of this license, visit
http://creativecommons.org/licenses/by-nc/4.0/ or send a letter to
Creative Commons, PO Box 1866, Mountain View, CA 94042, USA.
"""
import os
from os.path import join as ospj
import time
import datetime
from munch import Munch
import torch
import torch.nn as nn
import torch.nn.functional as F
from core.model import build_model
from core.checkpoint import CheckpointIO
from core.data_loader import InputFetcher
import core.utils as utils
from metrics.eval import calculate_metrics
class Solver(nn.Module):
def __init__(self, args):
super().__init__()
self.args = args
self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
self.nets, self.nets_ema, self.vgg, self.VggExtract = build_model(args)
self.instancenorm = nn.InstanceNorm2d(512, affine=False)
self.L1Loss = nn.L1Loss()
# below setattrs are to make networks be children of Solver, e.g., for self.to(self.device)
for name, module in self.nets.items():
utils.print_network(module, name)
setattr(self, name, module)
for name, module in self.nets_ema.items():
setattr(self, name + '_ema', module)
if args.mode == 'train':
self.optims = Munch()
for net in self.nets.keys():
if net == 'fan':
continue
self.optims[net] = torch.optim.Adam(
params=self.nets[net].parameters(),
lr=args.f_lr if net == 'mapping_network' else args.lr,
betas=[args.beta1, args.beta2],
weight_decay=args.weight_decay)
self.ckptios = [CheckpointIO(ospj(args.checkpoint_dir, '100000_nets.ckpt'), **self.nets),
CheckpointIO(ospj(args.checkpoint_dir, '100000_nets_ema.ckpt'), **self.nets_ema),
CheckpointIO(ospj(args.checkpoint_dir, '100000_optims.ckpt'), **self.optims)]
else:
self.ckptios = [CheckpointIO(ospj(args.checkpoint_dir, '100000_nets_ema.ckpt'), **self.nets_ema)]
self.to(self.device)
for name, network in self.named_children():
# Do not initialize the FAN parameters
if ('ema' not in name) and ('fan' not in name):
print('Initializing %s...' % name)
network.apply(utils.he_init)
def _save_checkpoint(self, step):
for ckptio in self.ckptios:
ckptio.save(step)
def _load_checkpoint(self, step):
for ckptio in self.ckptios:
ckptio.load(step)
def _reset_grad(self):
for optim in self.optims.values():
optim.zero_grad()
def train(self, loaders):
args = self.args
nets = self.nets
nets_ema = self.nets_ema
optims = self.optims
# fetch random validation images for debugging
fetcher = InputFetcher(loaders.src, loaders.ref, args.latent_dim, 'train')
fetcher_val = InputFetcher(loaders.val, None, args.latent_dim, 'val')
inputs_val = next(fetcher_val)
# resume training if necessary
if args.resume_iter > 0:
self._load_checkpoint(args.resume_iter)
# remember the initial value of ds weight
initial_lambda_ds = args.lambda_ds
print('Start training...')
start_time = time.time()
for i in range(args.resume_iter, args.total_iters):
# fetch images and labels
inputs = next(fetcher)
x_real, y_org = inputs.x_src, inputs.y_src
x_ref, x_ref2, y_trg = inputs.x_ref, inputs.x_ref2, inputs.y_ref
z_trg, z_trg2 = inputs.z_trg, inputs.z_trg2
masks = nets.fan.get_heatmap(x_real) if args.w_hpf > 0 else None
# train the discriminator
d_loss, d_losses_latent = compute_d_loss(
nets, args, x_real, y_org, y_trg, z_trg=z_trg, masks=masks)
self._reset_grad()
d_loss.backward()
optims.discriminator.step()
d_loss, d_losses_ref = compute_d_loss(
nets, args, x_real, y_org, y_trg, x_ref=x_ref, masks=masks)
self._reset_grad()
d_loss.backward()
optims.discriminator.step()
# train the generator
g_loss, g_losses_latent = compute_g_loss(
                nets, args, x_real, y_org, y_trg, z_trgs=[z_trg, z_trg2], masks=masks, VggExtract=self.VggExtract, IN=self.instancenorm, L1Loss=self.L1Loss)
self._reset_grad()
g_loss.backward()
optims.generator.step()
optims.mapping_network.step()
optims.style_encoder.step()
g_loss, g_losses_ref = compute_g_loss(
                nets, args, x_real, y_org, y_trg, x_refs=[x_ref, x_ref2], masks=masks, VggExtract=self.VggExtract, IN=self.instancenorm, L1Loss=self.L1Loss)
self._reset_grad()
g_loss.backward()
optims.generator.step()
# compute moving average of network parameters
moving_average(nets.generator, nets_ema.generator, beta=0.999)
moving_average(nets.mapping_network, nets_ema.mapping_network, beta=0.999)
moving_average(nets.style_encoder, nets_ema.style_encoder, beta=0.999)
# decay weight for diversity sensitive loss
if args.lambda_ds > 0:
args.lambda_ds -= (initial_lambda_ds / args.ds_iter)
# print out log info
if (i+1) % args.print_every == 0:
elapsed = time.time() - start_time
elapsed = str(datetime.timedelta(seconds=elapsed))[:-7]
log = "Elapsed time [%s], Iteration [%i/%i], " % (elapsed, i+1, args.total_iters)
all_losses = dict()
for loss, prefix in zip([d_losses_latent, d_losses_ref, g_losses_latent, g_losses_ref],
['D/latent_', 'D/ref_', 'G/latent_', 'G/ref_']):
for key, value in loss.items():
all_losses[prefix + key] = value
all_losses['G/lambda_ds'] = args.lambda_ds
log += ' '.join(['%s: [%.4f]' % (key, value) for key, value in all_losses.items()])
print(log)
# generate images for debugging
if (i+1) % args.sample_every == 0:
os.makedirs(args.sample_dir, exist_ok=True)
utils.debug_image(nets_ema, args, inputs=inputs_val, step=i+1)
# save model checkpoints
if (i+1) % args.save_every == 0:
self._save_checkpoint(step=i+1)
# compute FID and LPIPS if necessary
if (i+1) % args.eval_every == 0:
calculate_metrics(nets_ema, args, i+1, mode='latent')
calculate_metrics(nets_ema, args, i+1, mode='reference')
@torch.no_grad()
def sample(self, loaders):
args = self.args
nets_ema = self.nets_ema
os.makedirs(args.result_dir, exist_ok=True)
self._load_checkpoint(args.resume_iter)
src = next(InputFetcher(loaders.src, None, args.latent_dim, 'test'))
ref = next(InputFetcher(loaders.ref, None, args.latent_dim, 'test'))
fname = ospj(args.result_dir, 'reference.jpg')
print('Working on {}...'.format(fname))
utils.translate_using_reference(nets_ema, args, src.x, ref.x, ref.y, fname)
# fname = ospj(args.result_dir, 'video_ref.mp4')
# print('Working on {}...'.format(fname))
# utils.video_ref(nets_ema, args, src.x, ref.x, ref.y, fname)
@torch.no_grad()
def evaluate(self):
args = self.args
nets_ema = self.nets_ema
resume_iter = args.resume_iter
self._load_checkpoint(args.resume_iter)
calculate_metrics(nets_ema, args, step=resume_iter, mode='latent')
calculate_metrics(nets_ema, args, step=resume_iter, mode='reference')
def compute_d_loss(nets, args, x_real, y_org, y_trg, z_trg=None, x_ref=None, masks=None):
assert (z_trg is None) != (x_ref is None)
# with real images
x_real.requires_grad_()
out = nets.discriminator(x_real, y_org)
loss_real = adv_loss(out, 1)
loss_reg = r1_reg(out, x_real)
# with fake images
with torch.no_grad():
if z_trg is not None:
s_trg = nets.mapping_network(z_trg, y_trg)
else: # x_ref is not None
s_trg = nets.style_encoder(x_ref, y_trg)
        x_fake, _ = nets.generator(x_real, s_trg, masks=masks)
out = nets.discriminator(x_fake, y_trg)
loss_fake = adv_loss(out, 0)
loss = loss_real + loss_fake + args.lambda_reg * loss_reg
return loss, Munch(real=loss_real.item(),
fake=loss_fake.item(),
reg=loss_reg.item())
def compute_g_loss(nets, args, x_real, y_org, y_trg, z_trgs=None, x_refs=None, masks=None, VggExtract=None, IN=None, L1Loss=None):
assert (z_trgs is None) != (x_refs is None)
if z_trgs is not None:
z_trg, z_trg2 = z_trgs
if x_refs is not None:
x_ref, x_ref2 = x_refs
# adversarial loss
if z_trgs is not None:
s_trg = nets.mapping_network(z_trg, y_trg)
else:
s_trg = nets.style_encoder(x_ref, y_trg)
x_fake, content_latent_real = nets.generator(x_real, s_trg, masks=masks)
out = nets.discriminator(x_fake, y_trg)
loss_adv = adv_loss(out, 1)
# style reconstruction loss
s_pred = nets.style_encoder(x_fake, y_trg)
loss_sty = torch.mean(torch.abs(s_pred - s_trg))
# diversity sensitive loss
if z_trgs is not None:
s_trg2 = nets.mapping_network(z_trg2, y_trg)
else:
s_trg2 = nets.style_encoder(x_ref2, y_trg)
x_fake2, content_latent_real2 = nets.generator(x_real, s_trg2, masks=masks)
x_fake2 = x_fake2.detach()
loss_ds = torch.mean(torch.abs(x_fake - x_fake2))
# cycle-consistency loss
masks = nets.fan.get_heatmap(x_fake) if args.w_hpf > 0 else None
s_org = nets.style_encoder(x_real, y_org)
x_rec, content_latent_reco = nets.generator(x_fake, s_org, masks=masks)
loss_cyc = torch.mean(torch.abs(x_rec - x_real))
loss_vgg = compute_vgg_loss(x_fake, x_real, VggExtract, IN, L1Loss) if args.vgg_w > 0 else 0
loss_sacl = utils.abs_criterion(content_latent_real, content_latent_reco) if args.loss_sacl > 0 else 0 # Loss style aware content loss
loss_sacl2 = utils.abs_criterion(content_latent_real2, content_latent_reco) if args.loss_sacl > 0 else 0 # Loss style aware content loss
    loss = loss_adv + args.lambda_sty * loss_sty \
           - args.lambda_ds * loss_ds + args.lambda_cyc * loss_cyc \
           + args.lambda_vgg * loss_vgg \
           + args.lambda_loss_sacl * (loss_sacl + loss_sacl2)
return loss, Munch(adv=loss_adv.item(),
sty=loss_sty.item(),
ds=loss_ds.item(),
cyc=loss_cyc.item())
def moving_average(model, model_test, beta=0.999):
for param, param_test in zip(model.parameters(), model_test.parameters()):
param_test.data = torch.lerp(param.data, param_test.data, beta)
def adv_loss(logits, target):
assert target in [1, 0]
targets = torch.full_like(logits, fill_value=target)
loss = F.binary_cross_entropy_with_logits(logits, targets)
return loss
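# Editor's sketch (not in the original source): a quick sanity check of
# adv_loss. With zero logits, sigmoid(0) = 0.5, so the binary cross-entropy
# equals log(2) for either target value.
def _adv_loss_sanity_check():
    logits = torch.zeros(4, 1)
    expected = torch.log(torch.tensor(2.0))
    assert torch.isclose(adv_loss(logits, 1), expected)
    assert torch.isclose(adv_loss(logits, 0), expected)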
def compute_vgg_loss(img, target, VggExtract, IN, L1Loss):
# img_vgg = utils.vgg_preprocess(img)
# target_vgg = utils.vgg_preprocess(target)
# img_fea = vgg(img_vgg)
# target_fea = vgg(target_vgg)
img_fea_dict = VggExtract(img)
target_fea_dict = VggExtract(target)
# loss = torch.mean((img_fea_dict['relu3_3'] - target_fea_dict['relu3_3']) ** 2)
# loss = torch.mean(torch.abs(img_fea_dict['relu3_3'] - target_fea_dict['relu3_3']))
loss = L1Loss(img_fea_dict['relu2_2'] , target_fea_dict['relu2_2'])
return loss
def r1_reg(d_out, x_in):
# zero-centered gradient penalty for real images
batch_size = x_in.size(0)
grad_dout = torch.autograd.grad(
outputs=d_out.sum(), inputs=x_in,
create_graph=True, retain_graph=True, only_inputs=True
)[0]
grad_dout2 = grad_dout.pow(2)
assert(grad_dout2.size() == x_in.size())
reg = 0.5 * grad_dout2.view(batch_size, -1).sum(1).mean(0)
return reg | """
StarGAN v2
Copyright (c) 2020-present NAVER Corp.
This work is licensed under the Creative Commons Attribution-NonCommercial
4.0 International License. To view a copy of this license, visit
http://creativecommons.org/licenses/by-nc/4.0/ or send a letter to
Creative Commons, PO Box 1866, Mountain View, CA 94042, USA.
"""
import os
from os.path import join as ospj
import time
import datetime
from munch import Munch
import torch
import torch.nn as nn
import torch.nn.functional as F
from core.model import build_model
from core.checkpoint import CheckpointIO
from core.data_loader import InputFetcher
import core.utils as utils
from metrics.eval import calculate_metrics
class Solver(nn.Module):
def __init__(self, args):
super().__init__()
self.args = args
self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
self.nets, self.nets_ema, self.vgg, self.VggExtract = build_model(args)
self.instancenorm = nn.InstanceNorm2d(512, affine=False)
self.L1Loss = nn.L1Loss()
# below setattrs are to make networks be children of Solver, e.g., for self.to(self.device)
for name, module in self.nets.items():
utils.print_network(module, name)
setattr(self, name, module)
for name, module in self.nets_ema.items():
setattr(self, name + '_ema', module)
if args.mode == 'train':
self.optims = Munch()
for net in self.nets.keys():
if net == 'fan':
continue
self.optims[net] = torch.optim.Adam(
params=self.nets[net].parameters(),
lr=args.f_lr if net == 'mapping_network' else args.lr,
betas=[args.beta1, args.beta2],
weight_decay=args.weight_decay)
self.ckptios = [CheckpointIO(ospj(args.checkpoint_dir, '100000_nets.ckpt'), **self.nets),
CheckpointIO(ospj(args.checkpoint_dir, '100000_nets_ema.ckpt'), **self.nets_ema),
CheckpointIO(ospj(args.checkpoint_dir, '100000_optims.ckpt'), **self.optims)]
else:
self.ckptios = [CheckpointIO(ospj(args.checkpoint_dir, '100000_nets_ema.ckpt'), **self.nets_ema)]
self.to(self.device)
for name, network in self.named_children():
# Do not initialize the FAN parameters
if ('ema' not in name) and ('fan' not in name):
print('Initializing %s...' % name)
network.apply(utils.he_init)
def _save_checkpoint(self, step):
for ckptio in self.ckptios:
ckptio.save(step)
def _load_checkpoint(self, step):
for ckptio in self.ckptios:
ckptio.load(step)
def _reset_grad(self):
for optim in self.optims.values():
optim.zero_grad()
def train(self, loaders):
args = self.args
nets = self.nets
nets_ema = self.nets_ema
optims = self.optims
# fetch random validation images for debugging
fetcher = InputFetcher(loaders.src, loaders.ref, args.latent_dim, 'train')
fetcher_val = InputFetcher(loaders.val, None, args.latent_dim, 'val')
inputs_val = next(fetcher_val)
# resume training if necessary
if args.resume_iter > 0:
self._load_checkpoint(args.resume_iter)
# remember the initial value of ds weight
initial_lambda_ds = args.lambda_ds
print('Start training...')
start_time = time.time()
for i in range(args.resume_iter, args.total_iters):
# fetch images and labels
inputs = next(fetcher)
x_real, y_org = inputs.x_src, inputs.y_src
x_ref, x_ref2, y_trg = inputs.x_ref, inputs.x_ref2, inputs.y_ref
z_trg, z_trg2 = inputs.z_trg, inputs.z_trg2
masks = nets.fan.get_heatmap(x_real) if args.w_hpf > 0 else None
# train the discriminator
d_loss, d_losses_latent = compute_d_loss(
nets, args, x_real, y_org, y_trg, z_trg=z_trg, masks=masks)
self._reset_grad()
d_loss.backward()
optims.discriminator.step()
d_loss, d_losses_ref = compute_d_loss(
nets, args, x_real, y_org, y_trg, x_ref=x_ref, masks=masks)
self._reset_grad()
d_loss.backward()
optims.discriminator.step()
# train the generator
g_loss, g_losses_latent = compute_g_loss(
                nets, args, x_real, y_org, y_trg, z_trgs=[z_trg, z_trg2], masks=masks, VggExtract=self.VggExtract, IN=self.instancenorm, L1Loss=self.L1Loss)
self._reset_grad()
g_loss.backward()
optims.generator.step()
optims.mapping_network.step()
optims.style_encoder.step()
g_loss, g_losses_ref = compute_g_loss(
                nets, args, x_real, y_org, y_trg, x_refs=[x_ref, x_ref2], masks=masks, VggExtract=self.VggExtract, IN=self.instancenorm, L1Loss=self.L1Loss)
self._reset_grad()
g_loss.backward()
optims.generator.step()
# compute moving average of network parameters
moving_average(nets.generator, nets_ema.generator, beta=0.999)
moving_average(nets.mapping_network, nets_ema.mapping_network, beta=0.999)
moving_average(nets.style_encoder, nets_ema.style_encoder, beta=0.999)
# decay weight for diversity sensitive loss
if args.lambda_ds > 0:
args.lambda_ds -= (initial_lambda_ds / args.ds_iter)
# print out log info
if (i+1) % args.print_every == 0:
elapsed = time.time() - start_time
elapsed = str(datetime.timedelta(seconds=elapsed))[:-7]
log = "Elapsed time [%s], Iteration [%i/%i], " % (elapsed, i+1, args.total_iters)
all_losses = dict()
for loss, prefix in zip([d_losses_latent, d_losses_ref, g_losses_latent, g_losses_ref],
['D/latent_', 'D/ref_', 'G/latent_', 'G/ref_']):
for key, value in loss.items():
all_losses[prefix + key] = value
all_losses['G/lambda_ds'] = args.lambda_ds
log += ' '.join(['%s: [%.4f]' % (key, value) for key, value in all_losses.items()])
print(log)
# generate images for debugging
if (i+1) % args.sample_every == 0:
os.makedirs(args.sample_dir, exist_ok=True)
utils.debug_image(nets_ema, args, inputs=inputs_val, step=i+1)
# save model checkpoints
if (i+1) % args.save_every == 0:
self._save_checkpoint(step=i+1)
# compute FID and LPIPS if necessary
if (i+1) % args.eval_every == 0:
calculate_metrics(nets_ema, args, i+1, mode='latent')
calculate_metrics(nets_ema, args, i+1, mode='reference')
@torch.no_grad()
def sample(self, loaders):
args = self.args
nets_ema = self.nets_ema
os.makedirs(args.result_dir, exist_ok=True)
self._load_checkpoint(args.resume_iter)
src = next(InputFetcher(loaders.src, None, args.latent_dim, 'test'))
ref = next(InputFetcher(loaders.ref, None, args.latent_dim, 'test'))
fname = ospj(args.result_dir, 'reference.jpg')
print('Working on {}...'.format(fname))
utils.translate_using_reference(nets_ema, args, src.x, ref.x, ref.y, fname)
# fname = ospj(args.result_dir, 'video_ref.mp4')
# print('Working on {}...'.format(fname))
# utils.video_ref(nets_ema, args, src.x, ref.x, ref.y, fname)
@torch.no_grad()
def evaluate(self):
args = self.args
nets_ema = self.nets_ema
resume_iter = args.resume_iter
self._load_checkpoint(args.resume_iter)
calculate_metrics(nets_ema, args, step=resume_iter, mode='latent')
calculate_metrics(nets_ema, args, step=resume_iter, mode='reference')
def compute_d_loss(nets, args, x_real, y_org, y_trg, z_trg=None, x_ref=None, masks=None):
assert (z_trg is None) != (x_ref is None)
# with real images
x_real.requires_grad_()
out = nets.discriminator(x_real, y_org)
loss_real = adv_loss(out, 1)
loss_reg = r1_reg(out, x_real)
# with fake images
with torch.no_grad():
if z_trg is not None:
s_trg = nets.mapping_network(z_trg, y_trg)
else: # x_ref is not None
s_trg = nets.style_encoder(x_ref, y_trg)
        x_fake, _ = nets.generator(x_real, s_trg, masks=masks)
out = nets.discriminator(x_fake, y_trg)
loss_fake = adv_loss(out, 0)
loss = loss_real + loss_fake + args.lambda_reg * loss_reg
return loss, Munch(real=loss_real.item(),
fake=loss_fake.item(),
reg=loss_reg.item())
def compute_g_loss(nets, args, x_real, y_org, y_trg, z_trgs=None, x_refs=None, masks=None, VggExtract=None, IN=None, L1Loss=None):
assert (z_trgs is None) != (x_refs is None)
if z_trgs is not None:
z_trg, z_trg2 = z_trgs
if x_refs is not None:
x_ref, x_ref2 = x_refs
# adversarial loss
if z_trgs is not None:
s_trg = nets.mapping_network(z_trg, y_trg)
else:
s_trg = nets.style_encoder(x_ref, y_trg)
x_fake, content_latent_real = nets.generator(x_real, s_trg, masks=masks)
out = nets.discriminator(x_fake, y_trg)
loss_adv = adv_loss(out, 1)
# style reconstruction loss
s_pred = nets.style_encoder(x_fake, y_trg)
loss_sty = torch.mean(torch.abs(s_pred - s_trg))
# diversity sensitive loss
if z_trgs is not None:
s_trg2 = nets.mapping_network(z_trg2, y_trg)
else:
s_trg2 = nets.style_encoder(x_ref2, y_trg)
x_fake2, content_latent_real2 = nets.generator(x_real, s_trg2, masks=masks)
x_fake2 = x_fake2.detach()
loss_ds = torch.mean(torch.abs(x_fake - x_fake2))
# cycle-consistency loss
masks = nets.fan.get_heatmap(x_fake) if args.w_hpf > 0 else None
s_org = nets.style_encoder(x_real, y_org)
x_rec, content_latent_reco = nets.generator(x_fake, s_org, masks=masks)
loss_cyc = torch.mean(torch.abs(x_rec - x_real))
loss_vgg = compute_vgg_loss(x_fake, x_real, VggExtract, IN, L1Loss) if args.vgg_w > 0 else 0
loss_sacl = utils.abs_criterion(content_latent_real, content_latent_reco) if args.loss_sacl > 0 else 0 # Loss style aware content loss
loss_sacl2 = utils.abs_criterion(content_latent_real2, content_latent_reco) if args.loss_sacl > 0 else 0 # Loss style aware content loss
    loss = loss_adv + args.lambda_sty * loss_sty \
           - args.lambda_ds * loss_ds + args.lambda_cyc * loss_cyc \
           + args.lambda_vgg * loss_vgg \
           + args.lambda_loss_sacl * (loss_sacl + loss_sacl2)
return loss, Munch(adv=loss_adv.item(),
sty=loss_sty.item(),
ds=loss_ds.item(),
cyc=loss_cyc.item())
def moving_average(model, model_test, beta=0.999):
for param, param_test in zip(model.parameters(), model_test.parameters()):
param_test.data = torch.lerp(param.data, param_test.data, beta)
def adv_loss(logits, target):
assert target in [1, 0]
targets = torch.full_like(logits, fill_value=target)
loss = F.binary_cross_entropy_with_logits(logits, targets)
return loss
def compute_vgg_loss(img, target, VggExtract, IN, L1Loss):
# img_vgg = utils.vgg_preprocess(img)
# target_vgg = utils.vgg_preprocess(target)
# img_fea = vgg(img_vgg)
# target_fea = vgg(target_vgg)
img_fea_dict = VggExtract(img)
target_fea_dict = VggExtract(target)
# loss = torch.mean((img_fea_dict['relu3_3'] - target_fea_dict['relu3_3']) ** 2)
# loss = torch.mean(torch.abs(img_fea_dict['relu3_3'] - target_fea_dict['relu3_3']))
loss = L1Loss(img_fea_dict['relu2_2'] , target_fea_dict['relu2_2'])
return loss
def r1_reg(d_out, x_in):
# zero-centered gradient penalty for real images
batch_size = x_in.size(0)
grad_dout = torch.autograd.grad(
outputs=d_out.sum(), inputs=x_in,
create_graph=True, retain_graph=True, only_inputs=True
)[0]
grad_dout2 = grad_dout.pow(2)
assert(grad_dout2.size() == x_in.size())
reg = 0.5 * grad_dout2.view(batch_size, -1).sum(1).mean(0)
return reg | en | 0.617109 | StarGAN v2 Copyright (c) 2020-present NAVER Corp. This work is licensed under the Creative Commons Attribution-NonCommercial 4.0 International License. To view a copy of this license, visit http://creativecommons.org/licenses/by-nc/4.0/ or send a letter to Creative Commons, PO Box 1866, Mountain View, CA 94042, USA. # below setattrs are to make networks be children of Solver, e.g., for self.to(self.device) # Do not initialize the FAN parameters # fetch random validation images for debugging # resume training if necessary # remember the initial value of ds weight # fetch images and labels # train the discriminator # train the generator # compute moving average of network parameters # decay weight for diversity sensitive loss # print out log info # generate images for debugging # save model checkpoints # compute FID and LPIPS if necessary # fname = ospj(args.result_dir, 'video_ref.mp4') # print('Working on {}...'.format(fname)) # utils.video_ref(nets_ema, args, src.x, ref.x, ref.y, fname) # with real images # with fake images # x_ref is not None # adversarial loss # style reconstruction loss # diversity sensitive loss # cycle-consistency loss # Loss style aware content loss # Loss style aware content loss # img_vgg = utils.vgg_preprocess(img) # target_vgg = utils.vgg_preprocess(target) # img_fea = vgg(img_vgg) # target_fea = vgg(target_vgg) # loss = torch.mean((img_fea_dict['relu3_3'] - target_fea_dict['relu3_3']) ** 2) # loss = torch.mean(torch.abs(img_fea_dict['relu3_3'] - target_fea_dict['relu3_3'])) # zero-centered gradient penalty for real images | 1.868991 | 2 |
1. Algorithmic Toolbox/week2_algorithmic_warmup/4_lcm.py | vishweshwartyagi/Data-Structures-and-Algorithms-UCSD | 0 | 7533 | <reponame>vishweshwartyagi/Data-Structures-and-Algorithms-UCSD<filename>1. Algorithmic Toolbox/week2_algorithmic_warmup/4_lcm.py
# Uses python3
import sys
def lcm_naive(a, b):
for l in range(1, a*b + 1):
if l % a == 0 and l % b == 0:
return l
return a*b
def gcd(a, b):
    # Euclid's algorithm: recurse on the remainder until one value divides the other.
    if a % b == 0:
        return b
    elif b % a == 0:
        return a
    if a > b:
        return gcd(a % b, b)
    else:
        return gcd(b % a, a)
def lcm(a, b):
return int((a*b) / gcd(a, b))
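# Editor's sketch (added for illustration): an iterative Euclid and a check
# of the identity lcm(a, b) * gcd(a, b) == a * b over a small range.
def gcd_iterative(a, b):
    while b:
        a, b = b, a % b
    return a
def _check_lcm():
    assert all(lcm(a, b) * gcd_iterative(a, b) == a * b
               for a in range(1, 20) for b in range(1, 20))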
if __name__ == '__main__':
# input = sys.stdin.read()
a, b = map(int, input().split())
# print(lcm_naive(a, b))
print(lcm(a, b))
| Algorithmic Toolbox/week2_algorithmic_warmup/4_lcm.py
# Uses python3
import sys
def lcm_naive(a, b):
for l in range(1, a*b + 1):
if l % a == 0 and l % b == 0:
return l
return a*b
def gcd(a, b):
    # Euclid's algorithm: recurse on the remainder until one value divides the other.
    if a % b == 0:
        return b
    elif b % a == 0:
        return a
    if a > b:
        return gcd(a % b, b)
    else:
        return gcd(b % a, a)
def lcm(a, b):
return int((a*b) / gcd(a, b))
if __name__ == '__main__':
# input = sys.stdin.read()
a, b = map(int, input().split())
# print(lcm_naive(a, b))
print(lcm(a, b)) | en | 0.199824 | # Uses python3 # input = sys.stdin.read() # print(lcm_naive(a, b)) | 3.598127 | 4 |
guessing_game.py | JoviCastillo/TH-Project-1-guessing-game- | 0 | 7534 | import random
highscore = []
def not_in_range(guess_it):
"""This is to check that the numbers inputted by the user are in range,
and will let the user know. If the numbers are in range then it passes.
"""
if guess_it < 1:
print('I am not thinking of negative numbers!')
elif guess_it > 10:
print('That number is way bigger than 10!')
else:
pass
def new_game(tries):
"""After the user has guessed the number correctly, the game
will ask the player if they would like to play again. Yes will start
the game again. No will exit the game. Highscore will be displayed
by the lowest amount of tries recorded.
"""
play_again = input('Would you like to play again? (Yes/No) ')
if play_again.upper() == 'YES':
highscore.append(tries)
        highscore.sort()
print('The highscore is {}.'.format(highscore[0]))
start_game()
elif play_again.upper() == 'NO':
exit()
    else:
        print('Please let me know by typing yes or no.')
        new_game(tries)
def start_game(): # title screen of the game
"""This is the start of the game which include the title screen and
is the main function that runs all the other functions as well.
"""
print('-' * 40)
print('Welcome to the Number Guessing Game!!!')
print('-' * 40)
print('I am thinking of a number between 1-10.')
random_number = random.randint(1, 10)
tries = 0
while True:
try:
guess_it = int(input('Can you guess it?: '))
except ValueError:
print('I said number, not gibberish!')
else:
while guess_it != random_number:
not_in_range(guess_it)
tries += 1
if guess_it > random_number:
print('That is too high!')
elif guess_it < random_number:
                    print('That is too low!')
break
else:
print('You guessed it right! Your number was {}.'.format(random_number))
print('It took you {} tries.'.format(tries))
break
new_game(tries)
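# Editor's note (illustration only): the inner while/else in start_game is
# Python's loop-else — the else clause runs only when the condition is false
# on entry (a correct guess), because any wrong guess reaches `break`.
def _while_else_demo(guess, target):
    while guess != target:
        result = 'wrong'
        break
    else:
        result = 'right'
    return result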
if __name__ == '__main__':
# Kick off the program by calling the start_game function.
start_game()
| import random
highscore = []
def not_in_range(guess_it):
"""This is to check that the numbers inputted by the user are in range,
and will let the user know. If the numbers are in range then it passes.
"""
if guess_it < 1:
print('I am not thinking of negative numbers!')
elif guess_it > 10:
print('That number is way bigger than 10!')
else:
pass
def new_game(tries):
"""After the user has guessed the number correctly, the game
will ask the player if they would like to play again. Yes will start
the game again. No will exit the game. Highscore will be displayed
by the lowest amount of tries recorded.
"""
play_again = input('Would you like to play again? (Yes/No) ')
if play_again.upper() == 'YES':
highscore.append(tries)
        highscore.sort()
print('The highscore is {}.'.format(highscore[0]))
start_game()
elif play_again.upper() == 'NO':
exit()
    else:
        print('Please let me know by typing yes or no.')
        new_game(tries)
def start_game(): # title screen of the game
"""This is the start of the game which include the title screen and
is the main function that runs all the other functions as well.
"""
print('-' * 40)
print('Welcome to the Number Guessing Game!!!')
print('-' * 40)
print('I am thinking of a number between 1-10.')
random_number = random.randint(1, 10)
tries = 0
while True:
try:
guess_it = int(input('Can you guess it?: '))
except ValueError:
print('I said number, not gibberish!')
else:
while guess_it != random_number:
not_in_range(guess_it)
tries += 1
if guess_it > random_number:
print('That is too high!')
elif guess_it < random_number:
                    print('That is too low!')
break
else:
print('You guessed it right! Your number was {}.'.format(random_number))
print('It took you {} tries.'.format(tries))
break
new_game(tries)
if __name__ == '__main__':
# Kick off the program by calling the start_game function.
start_game()
| en | 0.973428 | This is to check that the numbers inputted by the user are in range, and will let the user know. If the numbers are in range then it passes. After the user has guessed the number correctly, the game will ask the player if they would like to play again. Yes will start the game again. No will exit the game. Highscore will be displayed by the lowest amount of tries recorded. # title screen of the game This is the start of the game which include the title screen and is the main function that runs all the other functions as well. # Kick off the program by calling the start_game function. | 4.248314 | 4 |
MAIL_SERVER.py | dastacy/gve_devnet_unity_unread_voicemail_notifier | 0 | 7535 | <reponame>dastacy/gve_devnet_unity_unread_voicemail_notifier
#!/usr/bin/env python3
USER = r'server\user'
PASSWORD = '<PASSWORD>'
HOSTNAME = 'hostname.goes.here.com'
DOMAIN = 'domain.goes.here.com'
FROM_ADDR = '<EMAIL>'
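# Editor's sketch (hypothetical consumer; not part of the original file):
# how a notifier might use these constants with the standard library.
import smtplib
def send_notification(to_addr, body):
    """Send a plain-text mail through the configured server (illustrative)."""
    message = 'From: {}\r\nTo: {}\r\nSubject: Unread voicemail\r\n\r\n{}'.format(
        FROM_ADDR, to_addr, body)
    with smtplib.SMTP(HOSTNAME) as smtp:
        smtp.login(USER, PASSWORD)
        smtp.sendmail(FROM_ADDR, [to_addr], message)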
| #!/usr/bin/env python3
USER = r'server\user'
PASSWORD = '<PASSWORD>'
HOSTNAME = 'hostname.goes.here.com'
DOMAIN = 'domain.goes.here.com'
FROM_ADDR = '<EMAIL>' | fr | 0.221828 | #!/usr/bin/env python3 | 1.182449 | 1 |
src/testrsscast/rss/ytconverter_example.py | anetczuk/rsscast | 0 | 7536 | #!/usr/bin/python3
#
# MIT License
#
# Copyright (c) 2021 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
try:
## following import success only when file is directly executed from command line
## otherwise will throw exception when executing as parameter for "python -m"
# pylint: disable=W0611
import __init__
except ImportError as error:
## when import fails then it means that the script was executed indirectly
## in this case __init__ is already loaded
pass
import sys
import argparse
import rsscast.logger as logger
from rsscast.rss.ytconverter import convert_yt
if __name__ != '__main__':
sys.exit(0)
parser = argparse.ArgumentParser(description='YouTube convert example')
args = parser.parse_args()
logger.configure_console()
converted = convert_yt("https://www.youtube.com/watch?v=BLRUiVXeZKU", "/tmp/yt_example.mp3")
print("converted:", converted)
| #!/usr/bin/python3
#
# MIT License
#
# Copyright (c) 2021 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
try:
## following import success only when file is directly executed from command line
## otherwise will throw exception when executing as parameter for "python -m"
# pylint: disable=W0611
import __init__
except ImportError as error:
## when import fails then it means that the script was executed indirectly
## in this case __init__ is already loaded
pass
import sys
import argparse
import rsscast.logger as logger
from rsscast.rss.ytconverter import convert_yt
if __name__ != '__main__':
sys.exit(0)
parser = argparse.ArgumentParser(description='YouTube convert example')
args = parser.parse_args()
logger.configure_console()
converted = convert_yt("https://www.youtube.com/watch?v=BLRUiVXeZKU", "/tmp/yt_example.mp3")
print("converted:", converted)
| en | 0.780447 | #!/usr/bin/python3 # # MIT License # # Copyright (c) 2021 <NAME> <<EMAIL>> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # ## following import success only when file is directly executed from command line ## otherwise will throw exception when executing as parameter for "python -m" # pylint: disable=W0611 ## when import fails then it means that the script was executed indirectly ## in this case __init__ is already loaded | 1.997403 | 2 |
cli.py | palazzem/elmo-server | 0 | 7537 | <gh_stars>0
import click
APP_YAML_TEMPLATE = """runtime: python37
env_variables:
ELMO_BASE_URL: '{BASE_URL}'
ELMO_VENDOR: '{VENDOR}'
handlers:
- url: /.*
script: auto
secure: always
redirect_http_response_code: 301
"""
@click.command()
@click.argument("base_url")
@click.argument("vendor")
def generate_app_yaml(base_url, vendor):
"""Use APP_YAML_TEMPLATE to generate app.yaml for AppEngine deployments.
Args:
base_url: defines ELMO_BASE_URL env variable in AppEngine config.
vendor: defines ELMO_VENDOR env variable in AppEngine config.
Returns:
Writes `app.yaml` file in the current folder.
"""
print("Writing the following deployment config to disk:")
app_yaml = APP_YAML_TEMPLATE.format(BASE_URL=base_url, VENDOR=vendor)
print(app_yaml)
with open("app.yaml", "w") as f:
f.write(app_yaml)
print("Done! You can deploy the service with `gcloud app deploy`")
if __name__ == "__main__":
generate_app_yaml()
| import click
APP_YAML_TEMPLATE = """runtime: python37
env_variables:
ELMO_BASE_URL: '{BASE_URL}'
ELMO_VENDOR: '{VENDOR}'
handlers:
- url: /.*
script: auto
secure: always
redirect_http_response_code: 301
"""
@click.command()
@click.argument("base_url")
@click.argument("vendor")
def generate_app_yaml(base_url, vendor):
"""Use APP_YAML_TEMPLATE to generate app.yaml for AppEngine deployments.
Args:
base_url: defines ELMO_BASE_URL env variable in AppEngine config.
vendor: defines ELMO_VENDOR env variable in AppEngine config.
Returns:
Writes `app.yaml` file in the current folder.
"""
print("Writing the following deployment config to disk:")
app_yaml = APP_YAML_TEMPLATE.format(BASE_URL=base_url, VENDOR=vendor)
print(app_yaml)
with open("app.yaml", "w") as f:
f.write(app_yaml)
print("Done! You can deploy the service with `gcloud app deploy`")
if __name__ == "__main__":
generate_app_yaml() | en | 0.536975 | runtime: python37 env_variables: ELMO_BASE_URL: '{BASE_URL}' ELMO_VENDOR: '{VENDOR}' handlers: - url: /.* script: auto secure: always redirect_http_response_code: 301 Use APP_YAML_TEMPLATE to generate app.yaml for AppEngine deployments. Args: base_url: defines ELMO_BASE_URL env variable in AppEngine config. vendor: defines ELMO_VENDOR env variable in AppEngine config. Returns: Writes `app.yaml` file in the current folder. | 3.029544 | 3 |
utils/data/dataset_catalog.py | rs9899/Parsing-R-CNN | 289 | 7538 | <gh_stars>100-1000
import os.path as osp
# Root directory of project
ROOT_DIR = osp.abspath(osp.join(osp.dirname(__file__), '..', '..'))
# Path to data dir
_DATA_DIR = osp.abspath(osp.join(ROOT_DIR, 'data'))
# Required dataset entry keys
_IM_DIR = 'image_directory'
_ANN_FN = 'annotation_file'
# Available datasets
COMMON_DATASETS = {
'coco_2017_train': {
_IM_DIR:
_DATA_DIR + '/coco/images/train2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/instances_train2017.json',
},
'coco_2017_val': {
_IM_DIR:
_DATA_DIR + '/coco/images/val2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/instances_val2017.json',
},
'coco_2017_test': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test2017.json',
},
'coco_2017_test-dev': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test-dev2017.json',
},
'keypoints_coco_2017_train': {
_IM_DIR:
_DATA_DIR + '/coco/images/train2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/person_keypoints_train2017.json'
},
'keypoints_coco_2017_val': {
_IM_DIR:
_DATA_DIR + '/coco/images/val2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/person_keypoints_val2017.json'
},
'keypoints_coco_2017_test': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test2017.json'
},
'keypoints_coco_2017_test-dev': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test-dev2017.json',
},
'dense_coco_2017_train': {
_IM_DIR:
_DATA_DIR + '/coco/images/train2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_train2017.json',
},
'dense_coco_2017_val': {
_IM_DIR:
_DATA_DIR + '/coco/images/val2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_val2017.json',
},
'dense_coco_2017_test': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_test.json',
},
'CIHP_train': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/CIHP/train_img',
_ANN_FN:
_DATA_DIR + '/CIHP/annotations/CIHP_train.json',
},
'CIHP_val': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/CIHP/val_img',
_ANN_FN:
_DATA_DIR + '/CIHP/annotations/CIHP_val.json',
},
'CIHP_test': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/CIHP/test_img',
_ANN_FN:
_DATA_DIR + '/CIHP/annotations/CIHP_test.json',
},
'MHP-v2_train': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/train_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_train.json',
},
'MHP-v2_val': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/val_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_val.json',
},
'MHP-v2_test': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/test_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_all.json',
},
'MHP-v2_test_inter_top10': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/test_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_inter_top10.json',
},
'MHP-v2_test_inter_top20': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/test_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_inter_top20.json',
},
'PASCAL-Person-Part_train': { # new addition by soeaver
_IM_DIR:
_DATA_DIR + '/PASCAL-Person-Part/train_img',
_ANN_FN:
_DATA_DIR + '/PASCAL-Person-Part/annotations/pascal_person_part_train.json',
},
'PASCAL-Person-Part_test': { # new addition by soeaver
_IM_DIR:
_DATA_DIR + '/PASCAL-Person-Part/test_img',
_ANN_FN:
_DATA_DIR + '/PASCAL-Person-Part/annotations/pascal_person_part_test.json',
}
}
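# Editor's sketch (helper not present in the original module): resolving a
# catalog entry to its image directory and annotation file.
def get_dataset_paths(name):
    entry = COMMON_DATASETS[name]
    return entry[_IM_DIR], entry[_ANN_FN]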
| import os.path as osp
# Root directory of project
ROOT_DIR = osp.abspath(osp.join(osp.dirname(__file__), '..', '..'))
# Path to data dir
_DATA_DIR = osp.abspath(osp.join(ROOT_DIR, 'data'))
# Required dataset entry keys
_IM_DIR = 'image_directory'
_ANN_FN = 'annotation_file'
# Available datasets
COMMON_DATASETS = {
'coco_2017_train': {
_IM_DIR:
_DATA_DIR + '/coco/images/train2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/instances_train2017.json',
},
'coco_2017_val': {
_IM_DIR:
_DATA_DIR + '/coco/images/val2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/instances_val2017.json',
},
'coco_2017_test': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test2017.json',
},
'coco_2017_test-dev': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test-dev2017.json',
},
'keypoints_coco_2017_train': {
_IM_DIR:
_DATA_DIR + '/coco/images/train2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/person_keypoints_train2017.json'
},
'keypoints_coco_2017_val': {
_IM_DIR:
_DATA_DIR + '/coco/images/val2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/person_keypoints_val2017.json'
},
'keypoints_coco_2017_test': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test2017.json'
},
'keypoints_coco_2017_test-dev': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/image_info_test-dev2017.json',
},
'dense_coco_2017_train': {
_IM_DIR:
_DATA_DIR + '/coco/images/train2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_train2017.json',
},
'dense_coco_2017_val': {
_IM_DIR:
_DATA_DIR + '/coco/images/val2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_val2017.json',
},
'dense_coco_2017_test': {
_IM_DIR:
_DATA_DIR + '/coco/images/test2017',
_ANN_FN:
_DATA_DIR + '/coco/annotations/DensePoseData/densepose_coco_test.json',
},
'CIHP_train': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/CIHP/train_img',
_ANN_FN:
_DATA_DIR + '/CIHP/annotations/CIHP_train.json',
},
'CIHP_val': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/CIHP/val_img',
_ANN_FN:
_DATA_DIR + '/CIHP/annotations/CIHP_val.json',
},
'CIHP_test': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/CIHP/test_img',
_ANN_FN:
_DATA_DIR + '/CIHP/annotations/CIHP_test.json',
},
'MHP-v2_train': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/train_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_train.json',
},
'MHP-v2_val': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/val_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_val.json',
},
'MHP-v2_test': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/test_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_all.json',
},
'MHP-v2_test_inter_top10': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/test_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_inter_top10.json',
},
'MHP-v2_test_inter_top20': { # new addition by wzh
_IM_DIR:
_DATA_DIR + '/MHP-v2/test_img',
_ANN_FN:
_DATA_DIR + '/MHP-v2/annotations/MHP-v2_test_inter_top20.json',
},
'PASCAL-Person-Part_train': { # new addition by soeaver
_IM_DIR:
_DATA_DIR + '/PASCAL-Person-Part/train_img',
_ANN_FN:
_DATA_DIR + '/PASCAL-Person-Part/annotations/pascal_person_part_train.json',
},
'PASCAL-Person-Part_test': { # new addition by soeaver
_IM_DIR:
_DATA_DIR + '/PASCAL-Person-Part/test_img',
_ANN_FN:
_DATA_DIR + '/PASCAL-Person-Part/annotations/pascal_person_part_test.json',
}
} | en | 0.97697 | # Root directory of project # Path to data dir # Required dataset entry keys # Available datasets # new addition by wzh # new addition by wzh # new addition by wzh # new addition by wzh # new addition by wzh # new addition by wzh # new addition by wzh # new addition by wzh # new addition by soeaver # new addition by soeaver | 1.99173 | 2 |
prepareDataSet.py | Dakewe-DS1000/LapRSNet | 6 | 7539 | # Prepare my dataset for Digital Pathology
import os
import cv2
rootFolder = r"F:\DataBase\LymphnodePathology"
trainFolder = rootFolder + "\\trainDataSet"
testFolder = rootFolder + "\\testDataSet"
srcTrainFilePath = trainFolder + "\\20X\\"
dstTrainFilePath = trainFolder + "\\5X\\"
srcTestFilePath = testFolder + "\\20X\\"
dstTestFilePath = testFolder + "\\5X\\"
factor = 4
if __name__ == '__main__':
srcTrainFileNameList = os.listdir(srcTrainFilePath)
srcTestFileNameList = os.listdir(srcTestFilePath)
for srcTrainFileName in srcTrainFileNameList:
srcTrainImage = cv2.imread(srcTrainFilePath + srcTrainFileName)
imgHeight, imgWidth, _ = srcTrainImage.shape
newWidth = int(imgWidth / factor)
newHeight = int(imgHeight / factor)
newSize = (newWidth, newHeight)
dstTrainImage = cv2.resize(srcTrainImage, newSize, interpolation=cv2.INTER_AREA)
print("Train File Name : %s, (%d, %d) => (%d, %d)" %(srcTrainFileName, imgWidth, imgHeight, newSize[0], newSize[1]))
cv2.imwrite(dstTrainFilePath + srcTrainFileName, dstTrainImage)
for srcTestFileName in srcTestFileNameList:
srcTestImage = cv2.imread(srcTestFilePath + srcTestFileName)
imgHeight, imgWidth, _ = srcTestImage.shape
newWidth = int(imgWidth / factor)
newHeight = int(imgHeight / factor)
newSize = (newWidth, newHeight)
dstTestImage = cv2.resize(srcTestImage, newSize, interpolation=cv2.INTER_AREA)
print("Test File Name : %s, (%d, %d) => (%d, %d)" %(srcTestFileName, imgWidth, imgHeight, newSize[0], newSize[1]))
cv2.imwrite(dstTestFilePath + srcTestFileName, dstTestImage)
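# Editor's note (illustration only): the same 4x shrink can let OpenCV derive
# the target size from scale factors; INTER_AREA is the right choice when
# downscaling, as in the loops above.
def downscale(image, factor=4):
    return cv2.resize(image, None, fx=1.0 / factor, fy=1.0 / factor,
                      interpolation=cv2.INTER_AREA)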
| # Prepare my dataset for Digital Pathology
import os
import math
import cv2
import pdb
rootFolder = "F:\DataBase\LymphnodePathology"
trainFolder = rootFolder + "\\trainDataSet"
testFolder = rootFolder + "\\testDataSet"
srcTrainFilePath = trainFolder + "\\20X\\"
dstTrainFilePath = trainFolder + "\\5X\\"
srcTestFilePath = testFolder + "\\20X\\"
dstTestFilePath = testFolder + "\\5X\\"
factor = 4
if __name__ == '__main__':
srcTrainFileNameList = os.listdir(srcTrainFilePath)
srcTestFileNameList = os.listdir(srcTestFilePath)
for srcTrainFileName in srcTrainFileNameList:
srcTrainImage = cv2.imread(srcTrainFilePath + srcTrainFileName)
imgHeight, imgWidth, _ = srcTrainImage.shape
newWidth = int(imgWidth / factor)
newHeight = int(imgHeight / factor)
newSize = (newWidth, newHeight)
dstTrainImage = cv2.resize(srcTrainImage, newSize, interpolation=cv2.INTER_AREA)
print("Train File Name : %s, (%d, %d) => (%d, %d)" %(srcTrainFileName, imgWidth, imgHeight, newSize[0], newSize[1]))
cv2.imwrite(dstTrainFilePath + srcTrainFileName, dstTrainImage)
for srcTestFileName in srcTestFileNameList:
srcTestImage = cv2.imread(srcTestFilePath + srcTestFileName)
imgHeight, imgWidth, _ = srcTestImage.shape
newWidth = int(imgWidth / factor)
newHeight = int(imgHeight / factor)
newSize = (newWidth, newHeight)
dstTestImage = cv2.resize(srcTestImage, newSize, interpolation=cv2.INTER_AREA)
print("Test File Name : %s, (%d, %d) => (%d, %d)" %(srcTestFileName, imgWidth, imgHeight, newSize[0], newSize[1]))
cv2.imwrite(dstTestFilePath + srcTestFileName, dstTestImage)
| en | 0.596148 | # Prepare my dataset for Digital Pathology | 2.150905 | 2 |
sample_project/sample_content/serializers.py | zentrumnawi/solid-backend | 1 | 7540 | <gh_stars>1-10
from rest_framework import serializers
from solid_backend.media_object.serializers import MediaObjectSerializer
from .models import SampleProfile
class SampleProfileSerializer(serializers.ModelSerializer):
media_objects = MediaObjectSerializer(many=True)
class Meta:
model = SampleProfile
fields = "__all__"
depth = 1
| from rest_framework import serializers
from solid_backend.media_object.serializers import MediaObjectSerializer
from .models import SampleProfile
class SampleProfileSerializer(serializers.ModelSerializer):
media_objects = MediaObjectSerializer(many=True)
class Meta:
model = SampleProfile
fields = "__all__"
        depth = 1 | none | 1 | 1.748312 | 2 |
tests/reshape_4/generate_pb.py | wchsieh/utensor_cgen | 0 | 7541 | # -*- coding: utf8 -*-
import os
from utensor_cgen.utils import save_consts, save_graph, save_idx
import numpy as np
import tensorflow as tf
def generate():
test_dir = os.path.dirname(__file__)
graph = tf.Graph()
with graph.as_default():
x = tf.constant(np.random.randn(10),
dtype=tf.float32,
name='x')
output_x = tf.reshape(x, [5, 2], name="output_x")
with tf.Session(graph=graph) as sess:
save_consts(sess, test_dir)
save_graph(graph, 'test_reshape_4', test_dir)
np_output = output_x.eval()
save_idx(np_output, os.path.join(test_dir, 'output_x.idx'))
# test_reshape_4.pb is the same as test_quant_reshape_4.pb
# hack, since we do not have QuantizedReshape yet
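# Editor's note (illustrative): tf.reshape is row-major, so the flat
# 10-vector fills 5 rows of 2 exactly like NumPy's default order.
def _numpy_equivalent():
    return np.arange(10).reshape(5, 2)  # row 0 is [0, 1]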
if __name__ == "__main__":
generate()
| # -*- coding: utf8 -*-
import os
from utensor_cgen.utils import save_consts, save_graph, save_idx
import numpy as np
import tensorflow as tf
def generate():
test_dir = os.path.dirname(__file__)
graph = tf.Graph()
with graph.as_default():
x = tf.constant(np.random.randn(10),
dtype=tf.float32,
name='x')
output_x = tf.reshape(x, [5, 2], name="output_x")
with tf.Session(graph=graph) as sess:
save_consts(sess, test_dir)
save_graph(graph, 'test_reshape_4', test_dir)
np_output = output_x.eval()
save_idx(np_output, os.path.join(test_dir, 'output_x.idx'))
# test_reshape_4.pb is the same as test_quant_reshape_4.pb
# hack, since we do not have QuantizedReshape yet
if __name__ == "__main__":
generate()
| en | 0.922751 | # -*- coding: utf8 -*- # test_reshape_4.pb is the same as test_quant_reshape_4.pb # hack, since we do not have QuantizedReshape yet | 2.217694 | 2 |
junn-predict/junn_predict/common/logging.py | modsim/junn | 0 | 7542 | """Logging helpers."""
import logging
import sys
import colorlog
import tqdm
class TqdmLoggingHandler(logging.StreamHandler):
"""TqdmLoggingHandler, outputs log messages to the console compatible with tqdm."""
def emit(self, record): # noqa: D102
message = self.format(record)
tqdm.tqdm.write(message)
class DelayedFileLog(logging.StreamHandler):
"""DelayedFileLog will cache messages till it can write them to a specified file."""
def __init__(self): # noqa: D107
super().__init__()
self.file_name = None
self.buffer = []
def emit(self, record): # noqa: D102
if self.file_name is None:
message = self.format(record)
self.buffer.append(message)
else:
super().emit(record)
def setFilename(self, file_name, mode='a'):
"""
Set the filename to write the log messages to.
:param file_name: File name to use.
:param mode: File open mode, by default 'a'.
:return: None
"""
self.file_name = file_name
stream = open(file_name, mode)
for old_message in self.buffer:
stream.write(old_message + self.terminator)
self.setStream(stream)
def setup_logging(level):
"""
Set the logging up to the specified level.
:param level: Log level
:return: None
"""
name_to_log_level = get_name_to_log_level_dict()
if level in name_to_log_level:
level = name_to_log_level[level]
tqdm_log_handler = TqdmLoggingHandler()
log_format = (
"%(asctime)-15s.%(msecs)03d %(process)d %(levelname)s %(name)s %(message)s"
)
log_datefmt = '%Y-%m-%d %H:%M:%S'
tqdm_log_handler.setFormatter(
colorlog.TTYColoredFormatter(
fmt='%(log_color)s' + log_format, datefmt=log_datefmt, stream=sys.stdout
)
)
buffer = DelayedFileLog()
log_handlers = [tqdm_log_handler, buffer]
# noinspection PyArgumentList
logging.basicConfig(
level=level, format=log_format, datefmt=log_datefmt, handlers=log_handlers
)
def get_name_to_log_level_dict():
"""
Return a dict with a mapping of log levels.
:return: The dict
"""
# noinspection PyProtectedMember
name_to_log_level = logging._nameToLevel.copy()
return name_to_log_level
def get_log_levels():
"""
Return supported log levels.
:return: List of log levels
"""
log_levels = [
k for k, v in sorted(get_name_to_log_level_dict().items(), key=lambda ab: ab[1])
]
log_levels.remove('NOTSET')
return log_levels
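# Editor's usage sketch (not part of the original module): typical wiring of
# setup_logging with the delayed file handler once an output path is known.
def _example_usage(log_path='run.log'):
    setup_logging('INFO')
    for handler in logging.getLogger().handlers:
        if isinstance(handler, DelayedFileLog):
            handler.setFilename(log_path)  # flushes buffered records to the file
    logging.getLogger(__name__).info('logging initialised')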
| """Logging helpers."""
import logging
import sys
import colorlog
import tqdm
class TqdmLoggingHandler(logging.StreamHandler):
"""TqdmLoggingHandler, outputs log messages to the console compatible with tqdm."""
def emit(self, record): # noqa: D102
message = self.format(record)
tqdm.tqdm.write(message)
class DelayedFileLog(logging.StreamHandler):
"""DelayedFileLog will cache messages till it can write them to a specified file."""
def __init__(self): # noqa: D107
super().__init__()
self.file_name = None
self.buffer = []
def emit(self, record): # noqa: D102
if self.file_name is None:
message = self.format(record)
self.buffer.append(message)
else:
super().emit(record)
def setFilename(self, file_name, mode='a'):
"""
Set the filename to write the log messages to.
:param file_name: File name to use.
:param mode: File open mode, by default 'a'.
:return: None
"""
self.file_name = file_name
stream = open(file_name, mode)
for old_message in self.buffer:
stream.write(old_message + self.terminator)
self.setStream(stream)
def setup_logging(level):
"""
Set the logging up to the specified level.
:param level: Log level
:return: None
"""
name_to_log_level = get_name_to_log_level_dict()
if level in name_to_log_level:
level = name_to_log_level[level]
tqdm_log_handler = TqdmLoggingHandler()
log_format = (
"%(asctime)-15s.%(msecs)03d %(process)d %(levelname)s %(name)s %(message)s"
)
log_datefmt = '%Y-%m-%d %H:%M:%S'
tqdm_log_handler.setFormatter(
colorlog.TTYColoredFormatter(
fmt='%(log_color)s' + log_format, datefmt=log_datefmt, stream=sys.stdout
)
)
buffer = DelayedFileLog()
log_handlers = [tqdm_log_handler, buffer]
# noinspection PyArgumentList
logging.basicConfig(
level=level, format=log_format, datefmt=log_datefmt, handlers=log_handlers
)
def get_name_to_log_level_dict():
"""
Return a dict with a mapping of log levels.
:return: The dict
"""
# noinspection PyProtectedMember
name_to_log_level = logging._nameToLevel.copy()
return name_to_log_level
def get_log_levels():
"""
Return supported log levels.
:return: List of log levels
"""
log_levels = [
k for k, v in sorted(get_name_to_log_level_dict().items(), key=lambda ab: ab[1])
]
log_levels.remove('NOTSET')
return log_levels
| en | 0.574987 | Logging helpers. TqdmLoggingHandler, outputs log messages to the console compatible with tqdm. # noqa: D102 DelayedFileLog will cache messages till it can write them to a specified file. # noqa: D107 # noqa: D102 Set the filename to write the log messages to. :param file_name: File name to use. :param mode: File open mode, by default 'a'. :return: None Set the logging up to the specified level. :param level: Log level :return: None # noinspection PyArgumentList Return a dict with a mapping of log levels. :return: The dict # noinspection PyProtectedMember Return supported log levels. :return: List of log levels | 2.629661 | 3 |
subpartcode/ultrasonic_basic_code.py | LesterYHZ/Automated-Bridge-Inspection-Robot-Project | 1 | 7543 | #Basic Ultrasonic sensor (HC-SR04) code
import RPi.GPIO as GPIO #GPIO RPI library
import time # makes sure Pi waits between steps
GPIO.setmode(GPIO.BCM) #sets GPIO pin numbering
#GPIO.setmode(GPIO.BOARD)
#Remove warnings
GPIO.setwarnings(False)
#Create loop variable
#loop = 1
#BCM
TRIG = 23 #output pin - triggers the sensor
ECHO = 24 #input pin - reads the return signal from the sensor
#BOARD
#TRIG=16
#ECHO=18
#Looping not necessary
#Print a message to let the user know that distance measurement is in progress
print ("Distance Measurement In Progress")
#Set two GPIO ports as inputs/outputs
GPIO.setup(TRIG,GPIO.OUT)
GPIO.setup(ECHO,GPIO.IN)
#while loop == 1: #Looping forever
while True: #Looping forever
#Ensure the trigger pin is set low
GPIO.output(TRIG, False)
#Give the sensor a second to settle
print ("Waiting for Sensor to Settle")
#time.sleep(2)
time.sleep(1)
#Create trigger pulse
GPIO.output(TRIG,True)
#Set trigger pin high for 10uS
time.sleep(0.00001)
#Set it low again
GPIO.output(TRIG,False)
#Record the last low timestamp for ECHO (just before the return signal is received and the pin goes high)
while GPIO.input(ECHO)==0:
pulse_start = time.time()
#Once a signal is received, the value changes from low to high, and the signal will remain high for the duration of the echo pulse
while GPIO.input(ECHO)==1:
pulse_end = time.time()
#speed=distance/time
#speed of sound at sea level = 343m/s
#34300 = distance/(time/2)
#17150 = distance/time
#17150*time = distance
#Calculating...
pulse_duration = pulse_end - pulse_start
distance_cm = pulse_duration*17150
#distance_cm = pulse_duration*0.034/2;
distance_cm = round(distance_cm,2)
distance_inch = distance_cm/2.54 #2.54 cm in 1 inch
#distance_inch = pulse_duration*0.0133/2
distance_inch = round(distance_inch,2)
distance_feet = distance_inch/12
distance_feet = round(distance_feet,2)
#Print distance
#print ("Distance:",distance_cm,"cm")
#print ("Distance:",distance_inch,"in")
print ("Distance:",distance_feet,"ft")
#Delay
time.sleep(2)
#Clean GPIO pins to ensure all inputs/outputs are reset
GPIO.cleanup()
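# Worked example of the conversion used above (added for illustration): sound
# covers about 34300 cm/s and the echo travels out and back, so distance uses
# the half-speed constant 17150 cm/s. A 2.9 ms pulse therefore means
# 0.0029 s * 17150 cm/s ~= 49.7 cm (~19.58 in, ~1.63 ft). The helper below is
# a sketch repackaging the same arithmetic; it is not part of the original script.
def pulse_to_distances(pulse_duration_s):
    """Return (cm, inches, feet) for an echo pulse duration in seconds."""
    cm = pulse_duration_s * 17150 # half of 34300 cm/s: out-and-back travel
    inches = cm / 2.54 # 2.54 cm per inch
    feet = inches / 12
    return round(cm, 2), round(inches, 2), round(feet, 2)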
| #Basic Ultrasonic sensor (HC-SR04) code
import RPi.GPIO as GPIO #GPIO RPI library
import time # makes sure Pi waits between steps
GPIO.setmode(GPIO.BCM) #sets GPIO pin numbering
#GPIO.setmode(GPIO.BOARD)
#Remove warnings
GPIO.setwarnings(False)
#Create loop variable
#loop = 1
#BCM
TRIG = 23 #output pin - triggers the sensor
ECHO = 24 #input pin - reads the return signal from the sensor
#BOARD
#TRIG=16
#ECHO=18
#Looping not necessary
#Print a message to let the user know that distance measurement is in progress
print ("Distance Measurement In Progress")
#Set two GPIO ports as inputs/outputs
GPIO.setup(TRIG,GPIO.OUT)
GPIO.setup(ECHO,GPIO.IN)
#while loop == 1: #Looping forever
while True: #Looping forever
#Ensure the trigger pin is set low
GPIO.output(TRIG, False)
#Give the sensor a second to settle
print ("Waiting for Sensor to Settle")
#time.sleep(2)
time.sleep(1)
#Create trigger pulse
GPIO.output(TRIG,True)
#Set trigger pin high for 10uS
time.sleep(0.00001)
#Set it low again
GPIO.output(TRIG,False)
#Record the last low timestamp for ECHO (just before the return signal is received and the pin goes high)
pulse_start = pulse_end = time.time() #initialize both so a missed echo cannot leave them undefined below
while GPIO.input(ECHO)==0:
pulse_start = time.time()
#Once a signal is received, the value changes from low to high, and the signal will remain high for the duration of the echo pulse
while GPIO.input(ECHO)==1:
pulse_end = time.time()
#speed=distance/time
#speed of sound at sea level = 343m/s
#34300 = distance/(time/2)
#17150 = distance/time
#17150*time = distance
#Calculating...
pulse_duration = pulse_end - pulse_start
distance_cm = pulse_duration*17150
#distance_cm = pulse_duration*0.034/2;
distance_cm = round(distance_cm,2)
distance_inch = distance_cm/2.54 #2.54 cm in 1 inch
#distance_inch = pulse_duration*0.0133/2
distance_inch = round(distance_inch,2)
distance_feet = distance_inch/12
distance_feet = round(distance_feet,2)
#Print distance
#print ("Distance:",distance_cm,"cm")
#print ("Distance:",distance_inch,"in")
print ("Distance:",distance_feet,"ft")
#Delay
time.sleep(2)
#Clean GPIO pins to ensure all inputs/outputs are reset
GPIO.cleanup()
| en | 0.821497 | #Basic Ultrasonic sensor (HC-SR04) code #GPIO RPI library # makes sure Pi waits between steps #sets GPIO pin numbering #GPIO.setmode(GPIO.BOARD) #Remove warnings #Create loop variable #loop = 1 #BCM #output pin - triggers the sensor #input pin - reads the return signal from the sensor #BOARD #TRIG=16 #ECHO=18 #Looping not necessary #Print a message to let the user know that distance measurement is in progress #Set two GPIO ports as inputs/outputs #while loop == 1: #Looping forever #Looping forever #Ensure the trigger pin is set low #Give the sensor a second to settle #time.sleep(2) #Create trigger pulse #Set trigger pin high for 10uS #Set it low again #Record the last low timestamp for ECHO (just before the return signal is received and the pin goes high) #Once a signal is received, the value changes from low to high, and the signal will remain high for the duration of the echo pulse #speed=distance/time #speed of sound at sea level = 343m/s #34300 = distance/(time/2) #17150 = distance/time #17150*time = distance #Calculating... #distance_cm = pulse_duration*0.034/2; #2.54 cm in 1 inch #distance_inch = pulse_duration*0.0133/2 #Print distance #print ("Distance:",distance_cm,"cm") #print ("Distance:",distance_inch,"in") #Delay #Clean GPIO pins to ensure all inputs/outputs are reset | 3.420359 | 3 |
Mentorama/Modulo 3 - POO/Retangulo.py | MOURAIGOR/python | 0 | 7544 | class Retangulo:
# Atributos
def __init__(self, comprimento, altura):
self.setcomprimento(comprimento)
self.setAltura(altura)
# Métodos
def setcomprimento(self, comprimento):
self.comprimento = comprimento
def getcomprimento(self):
return self.comprimento
def setAltura(self, altura):
self.altura = altura
def getAltura(self):
return self.altura
def calculaArea(self):
return self.comprimento * self.altura
def calculaPerimetro(self):
return 2 * self.comprimento + 2 * self.altura
# Executando
comprimento = int(input('Valor do comprimento: '))
altura = int(input('Valor da altura: '))
retangulo = Retangulo(comprimento, altura)
print('A area do retangulo é: %d' % retangulo.calculaArea())
print('O perimetro do retangulo é : %d' % retangulo.calculaPerimetro())
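# Example run (added for illustration, with fixed values instead of input()):
# Retangulo(5, 3).calculaArea() == 15 and Retangulo(5, 3).calculaPerimetro() == 16,
# since area = comprimento * altura and perimeter = 2*comprimento + 2*altura.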
| class Retangulo:
# Atributos
def __init__(self, comprimento, altura):
self.setcomprimento(comprimento)
self.setAltura(altura)
# Métodos
def setcomprimento(self, comprimento):
self.comprimento = comprimento
def getcomprimento(self):
return self.comprimento
def setAltura(self, altura):
self.altura = altura
def getAltura(self):
return self.altura
def calculaArea(self):
return self.comprimento * self.altura
def calculaPerimetro(self):
return 2 * self.comprimento + 2 * self.altura
# Executando
comprimento = int(input('Valor do comprimento: '))
altura = int(input('Valor da altura: '))
retangulo = Retangulo(comprimento, altura)
print('A area do retangulo é: %d' % retangulo.calculaArea())
print('O perimetro do retangulo é : %d' % retangulo.calculaPerimetro())
| pt | 0.36695 | # Atributos # Métodos # Executando | 3.859154 | 4 |
DEMs/denmark/download_dk_dem.py | PeterFogh/digital_elevation_model_use_cases | 0 | 7545 | <filename>DEMs/denmark/download_dk_dem.py
"""
Fetch all files from Kortforsyningen FTP server folder.
Copyright (c) 2021 <NAME>
See also command line alternative in `download_dk_dem.sh`
"""
from ftplib import FTP, error_perm
import os
from pathlib import Path
import time
import operator
import functools
import shutil
# TODO: use logging to std instead of print(time.ctime())
from environs import Env
# Functions
def download_FTP_tree(ftp, remote_dir, local_dir):
"""
Download FTP directory and all content to local directory.
Inspired by https://stackoverflow.com/a/55127679/7796217.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
remote_dir : pathlib.Path
FTP directory to download.
local_dir : pathlib.Path
Local directory to store downloaded content.
"""
# Set up empty local dir and FTP current work dir before tree traversal.
shutil.rmtree(local_dir, ignore_errors=True) # tolerate a missing directory on the first run
ftp.cwd(remote_dir.parent.as_posix())
local_dir.mkdir(parents=True, exist_ok=True)
return _recursive_download_FTP_tree(ftp, remote_dir, local_dir)
def _is_ftp_dir(ftp, name):
"""
Check if FTP entry is a directory.
Modified from here https://www.daniweb.com/programming/software-development/threads/243712/ftplib-isdir-or-isfile
to accommodate not necessarily being in the top-level directory.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
name: str
Name of FTP file system entry to check if directory or not.
"""
try:
current_dir = ftp.pwd()
ftp.cwd(name)
#print(f'File system entry "{name=}" is a directory.')
ftp.cwd(current_dir)
return True
except error_perm as e:
#print(f'File system entry "{name=}" is a file.')
return False
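# Note on the probe above (added for illustration): plain FTP has no portable
# "is this a directory?" query, so the helper attempts cwd() into the entry --
# a directory succeeds (and is immediately cwd()'d back out of), while a file
# raises error_perm. Servers that support MLSD can avoid the extra round trips:
#
# for entry_name, facts in ftp.mlsd():
# if facts.get('type') == 'dir':
# ... # treat entry_name as a directory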
def _recursive_download_FTP_tree(ftp, remote_dir, local_dir):
"""
Download FTP directory and all content to local directory.
Inspired by https://stackoverflow.com/a/55127679/7796217.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
remote_dir : pathlib.Path
FTP directory to download.
local_dir : pathlib.Path
Local directory to store downloaded content.
"""
print(f'{remote_dir=}')
print(f'{local_dir=}')
ftp.cwd(remote_dir.name)
local_dir.mkdir(exist_ok=True)
print(f'{time.ctime()}: Fetching file & directory names within "{remote_dir}".')
dir_entries = ftp.nlst()
print(f'{time.ctime()}: Fetched file & directory names within "{remote_dir}".')
dirs = []
for filename in sorted(dir_entries)[-5:]: # TODO: remove restriction on downloaded of entries
if _is_ftp_dir(ftp, filename):
dirs.append(filename)
else:
local_file = local_dir/filename
print(f'{time.ctime()}: Downloading "{local_file}".')
ftp.retrbinary(
cmd=f'RETR {filename}',
callback=local_file.open('wb').write)
print(f'{time.ctime()}: Downloaded "{local_file}".')
print(f'Traverse dir tree to "{dirs=}"')
map_download_FTP_tree = map(lambda dir: _recursive_download_FTP_tree(
ftp, remote_dir/dir, local_dir/dir), dirs)
return functools.reduce(operator.iand, map_download_FTP_tree, True)
if __name__ == '__main__':
# Load environment variables from local `.env` file.
env = Env()
env.read_env()
# Set up server and source/destination paths.
ftp_host = 'ftp.kortforsyningen.dk'
dem_ftp_dir = Path('dhm_danmarks_hoejdemodel/DTM')
local_ftp_dir = env.path('LOCAL_FTP_DIR', './')
local_dem_ftp_dir = local_ftp_dir/'kortforsyningen'/dem_ftp_dir
# Perform FTP download.
print(f'{time.ctime()}: Connect to {ftp_host}')
ftp = FTP(ftp_host)
ftp.login(env('KORTFORSYNING_USERNAME'), env('KORTFORSYNING_PASSWORD'))
download_FTP_tree(ftp, dem_ftp_dir, local_dem_ftp_dir)
ftp.close()
print(f'{time.ctime()}: Finished')
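# Offline sketch of the traversal contract (added for illustration): it mirrors
# the recursion shape of _recursive_download_FTP_tree without a live server.
# The nested-dict layout below stands in for an FTP listing and is invented.
def _walk_like_ftp(tree, prefix=Path('.')):
    """Yield the file paths a full download of `tree` would create locally."""
    for entry_name, value in sorted(tree.items()):
        if isinstance(value, dict): # sub-directory: recurse like the FTP walker
            yield from _walk_like_ftp(value, prefix / entry_name)
        else: # plain file entry
            yield prefix / entry_name

# list(_walk_like_ftp({'DTM': {'a.zip': 1, 'sub': {'b.zip': 1}}}))
# -> [PosixPath('DTM/a.zip'), PosixPath('DTM/sub/b.zip')] on POSIX systems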
| <filename>DEMs/denmark/download_dk_dem.py
"""
Fetch all files from Kortforsyningen FTP server folder.
Copyright (c) 2021 <NAME>
See also command line alternative in `download_dk_dem.sh`
"""
from ftplib import FTP, error_perm
import os
from pathlib import Path
import time
import operator
import functools
import shutil
# TODO: use logging to std instead of print(time.ctime())
from environs import Env
# Functions
def download_FTP_tree(ftp, remote_dir, local_dir):
"""
Download FTP directory and all content to local directory.
Inspired by https://stackoverflow.com/a/55127679/7796217.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
remote_dir : pathlib.Path
FTP directory to download.
local_dir : pathlib.Path
Local directory to store downloaded content.
"""
# Set up empty local dir and FTP current work dir before tree traversal.
shutil.rmtree(local_dir, ignore_errors=True) # tolerate a missing directory on the first run
ftp.cwd(remote_dir.parent.as_posix())
local_dir.mkdir(parents=True, exist_ok=True)
return _recursive_download_FTP_tree(ftp, remote_dir, local_dir)
def _is_ftp_dir(ftp, name):
"""
Check if FTP entry is a directory.
Modified from here https://www.daniweb.com/programming/software-development/threads/243712/ftplib-isdir-or-isfile
to accommodate not necessarily being in the top-level directory.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
name: str
Name of FTP file system entry to check if directory or not.
"""
try:
current_dir = ftp.pwd()
ftp.cwd(name)
#print(f'File system entry "{name=}" is a directory.')
ftp.cwd(current_dir)
return True
except error_perm as e:
#print(f'File system entry "{name=}" is a file.')
return False
def _recursive_download_FTP_tree(ftp, remote_dir, local_dir):
"""
Download FTP directory and all content to local directory.
Inspired by https://stackoverflow.com/a/55127679/7796217.
Parameters:
ftp : ftplib.FTP
Established FTP connection after login.
remote_dir : pathlib.Path
FTP directory to download.
local_dir : pathlib.Path
Local directory to store downloaded content.
"""
print(f'{remote_dir=}')
print(f'{local_dir=}')
ftp.cwd(remote_dir.name)
local_dir.mkdir(exist_ok=True)
print(f'{time.ctime()}: Fetching file & directory names within "{remote_dir}".')
dir_entries = ftp.nlst()
print(f'{time.ctime()}: Fetched file & directory names within "{remote_dir}".')
dirs = []
for filename in sorted(dir_entries)[-5:]: # TODO: remove restriction on downloaded of entries
if _is_ftp_dir(ftp, filename):
dirs.append(filename)
else:
local_file = local_dir/filename
print(f'{time.ctime()}: Downloading "{local_file}".')
ftp.retrbinary(
cmd=f'RETR {filename}',
callback=local_file.open('wb').write)
print(f'{time.ctime()}: Downloaded "{local_file}".')
print(f'Traverse dir tree to "{dirs=}"')
map_download_FTP_tree = map(lambda dir: _recursive_download_FTP_tree(
ftp, remote_dir/dir, local_dir/dir), dirs)
return functools.reduce(operator.iand, map_download_FTP_tree, True)
if __name__ == '__main__':
# Load environment variables from local `.env` file.
env = Env()
env.read_env()
# Set up server and source/destination paths.
ftp_host = 'ftp.kortforsyningen.dk'
dem_ftp_dir = Path('dhm_danmarks_hoejdemodel/DTM')
local_ftp_dir = env.path('LOCAL_FTP_DIR', './')
local_dem_ftp_dir = local_ftp_dir/'kortforsyningen'/dem_ftp_dir
# Perform FTP download.
print(f'{time.ctime()}: Connect to {ftp_host}')
ftp = FTP(ftp_host)
ftp.login(env('KORTFORSYNING_USERNAME'), env('KORTFORSYNING_PASSWORD'))
download_FTP_tree(ftp, dem_ftp_dir, local_dem_ftp_dir)
ftp.close()
print(f'{time.ctime()}: Finished')
| en | 0.680394 | Fetch all files from Kortforsyningen FTP server folder. Copyright (c) 2021 <NAME> See also command line alternative in `download_dk_dem.sh` # TODO: use logging to std instead of print(time.ctime()) # Functions Download FTP directory and all content to local directory. Inspired by https://stackoverflow.com/a/55127679/7796217. Parameters: ftp : ftplib.FTP Established FTP connection after login. remote_dir : pathlib.Path FTP directory to download. local_dir : pathlib.Path Local directory to store downloaded content. # Set up empty local dir and FTP current work dir before tree traversal. Check if FTP entry is a directory. Modified from here https://www.daniweb.com/programming/software-development/threads/243712/ftplib-isdir-or-isfile to accommodate not necessarily being in the top-level directory. Parameters: ftp : ftplib.FTP Established FTP connection after login. name: str Name of FTP file system entry to check if directory or not. #print(f'File system entry "{name=}" is a directory.') #print(f'File system entry "{name=}" is a file.') Download FTP directory and all content to local directory. Inspired by https://stackoverflow.com/a/55127679/7796217. Parameters: ftp : ftplib.FTP Established FTP connection after login. remote_dir : pathlib.Path FTP directory to download. local_dir : pathlib.Path Local directory to store downloaded content. # TODO: remove restriction on downloaded of entries # Load environment variables from local `.env` file. # Set up server and source/destination paths. # Perform FTP download. | 2.92118 | 3 |
6_refin_widgets.py | jiaxinjiang2919/Refinance-Calculator | 14 | 7546 | <gh_stars>10-100
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 24 15:02:37 2019
@author: <NAME>
"""
from tkinter import *
import numpy as np
class LoanCalculator:
def __init__(self):
window = Tk()
window.title("Loan Calculator")
Label(window, text="Loan Amount").grid(row=1, column=1, sticky=W)
Label(window, text="Interest rate").grid(row=2, column=1, sticky=W)
Label(window, text="Term (years)").grid(row=3, column=1, sticky=W)
Label(window, text=None).grid(row=4,column=1) # space between inputs and outputs
Label(window, text="Payment:").grid(row=5, column=1, sticky=W)
Label(window, text="Total Payments:").grid(row=6, column=1, sticky=W)
# variables to store loan inputs
self.pv = StringVar()
self.interest_rate = StringVar()
self.term = StringVar()
# variables for loan outputs
self.pmt = StringVar()
self.total = StringVar()
# text boxes to hold inputs and outputs
Entry(window, textvariable = self.pv,
justify=RIGHT).grid(row=1,column=2, padx=(0,5))
Entry(window, textvariable = self.interest_rate,
justify=RIGHT).grid(row=2,column=2, padx=(0,5))
Entry(window, textvariable = self.term,
justify=RIGHT).grid(row=3,column=2, padx=(0,5))
Label(window, textvariable = self.pmt,
font="Helvetica 12 bold",
justify=RIGHT).grid(row=5,column=2,sticky= E )
Label(window, textvariable = self.total,
font="Helvetica 12 bold",
justify=RIGHT).grid(row=6,column=2, sticky= E)
Button(window, text="Calculate Payment", command=self.calcPayment).grid(row=7,column=2, padx= (60,5), pady=5)
# Refinance variables
self.old_pmt = StringVar()
self.time_left = StringVar()
self.refi_cost = StringVar()
# Refinance widgets
Label(window, text="Current Payment").grid(row=8,column=1)
Label(window, text="Time Left").grid(row=9,column=1)
Label(window, text="Cost of Refi").grid(row=10,column=1)
Entry(window, textvariable=self.old_pmt, justify=RIGHT).grid(row=8,column=2, padx=(0,5))
Entry(window, textvariable=self.time_left, justify=RIGHT).grid(row=9,column=2, padx=(0,5))
Entry(window, textvariable=self.refi_cost, justify=RIGHT).grid(row=10,column=2, padx=(0,5))
# Refi output variables
self.monthly_savings = StringVar()
self.payback = StringVar()
self.overall_savings = StringVar()
Label(window, text="Payback Months:").grid(row=11,column=1)
Label(window, text="Monthly Savings:").grid(row=12,column=1)
Label(window, text="Overall Savings:").grid(row=13,column=1)
Button(window, text="Evaluate Refi", command=self.evalRefi).grid(row=14,column=2, padx= (100,5), pady=5)
window.mainloop()
def calcPayment(self):
pv = float(self.pv.get())
rate = float(self.interest_rate.get())
term = int(self.term.get())
pmt = np.pmt(rate / 1200, term * 12, -pv,0)
total = pmt * term * 12
self.pmt.set("$" + format(pmt, "5,.2f"))
self.total.set("$" + format(total, "8,.2f"))
def evalRefi(self): # takes self: it is invoked as a bound method by the Evaluate Refi button
pass
LoanCalculator()
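# Payment math behind calcPayment (added for illustration). Note that np.pmt
# was deprecated in NumPy 1.18 and removed in 1.20 (it now lives in the
# separate numpy_financial package), so the equivalent closed-form annuity
# payment is sketched here; the sample loan figures are invented.
def annuity_payment(principal, annual_rate_pct, years):
    """Monthly payment: P*r / (1 - (1+r)**-n), with r the monthly rate."""
    r = annual_rate_pct / 1200 # e.g. 4.5% APR -> 0.00375 per month
    n = years * 12
    if r == 0:
        return principal / n # zero-rate loans amortize linearly
    return principal * r / (1 - (1 + r) ** -n)

# annuity_payment(300000, 4.5, 30) ~= 1520.06, matching np.pmt(0.00375, 360, -300000, 0)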
| # -*- coding: utf-8 -*-
"""
Created on Sun Mar 24 15:02:37 2019
@author: <NAME>
"""
from tkinter import *
import numpy as np
class LoanCalculator:
def __init__(self):
window = Tk()
window.title("Loan Calculator")
Label(window, text="Loan Amount").grid(row=1, column=1, sticky=W)
Label(window, text="Interest rate").grid(row=2, column=1, sticky=W)
Label(window, text="Term (years)").grid(row=3, column=1, sticky=W)
Label(window, text=None).grid(row=4,column=1) # space between inputs and outputs
Label(window, text="Payment:").grid(row=5, column=1, sticky=W)
Label(window, text="Total Payments:").grid(row=6, column=1, sticky=W)
# variables to store loan inputs
self.pv = StringVar()
self.interest_rate = StringVar()
self.term = StringVar()
# variables for loan outputs
self.pmt = StringVar()
self.total = StringVar()
# text boxes to hold inputs and outputs
Entry(window, textvariable = self.pv,
justify=RIGHT).grid(row=1,column=2, padx=(0,5))
Entry(window, textvariable = self.interest_rate,
justify=RIGHT).grid(row=2,column=2, padx=(0,5))
Entry(window, textvariable = self.term,
justify=RIGHT).grid(row=3,column=2, padx=(0,5))
Label(window, textvariable = self.pmt,
font="Helvetica 12 bold",
justify=RIGHT).grid(row=5,column=2,sticky= E )
Label(window, textvariable = self.total,
font="Helvetica 12 bold",
justify=RIGHT).grid(row=6,column=2, sticky= E)
Button(window, text="Calculate Payment", command=self.calcPayment).grid(row=7,column=2, padx= (60,5), pady=5)
# Refinance variables
self.old_pmt = StringVar()
self.time_left = StringVar()
self.refi_cost = StringVar()
# Refinance widgets
Label(window, text="Current Payment").grid(row=8,column=1)
Label(window, text="Time Left").grid(row=9,column=1)
Label(window, text="Cost of Refi").grid(row=10,column=1)
Entry(window, textvariable=self.old_pmt, justify=RIGHT).grid(row=8,column=2, padx=(0,5))
Entry(window, textvariable=self.time_left, justify=RIGHT).grid(row=9,column=2, padx=(0,5))
Entry(window, textvariable=self.refi_cost, justify=RIGHT).grid(row=10,column=2, padx=(0,5))
# Refi output variables
self.monthly_savings = StringVar()
self.payback = StringVar()
self.overall_savings = StringVar()
Label(window, text="Payback Months:").grid(row=11,column=1)
Label(window, text="Monthly Savings:").grid(row=12,column=1)
Label(window, text="Overall Savings:").grid(row=13,column=1)
Button(window, text="Evaluate Refi", command=self.evalRefi).grid(row=14,column=2, padx= (100,5), pady=5)
window.mainloop()
def calcPayment(self):
pv = float(self.pv.get())
rate = float(self.interest_rate.get())
term = int(self.term.get())
pmt = np.pmt(rate / 1200, term * 12, -pv,0)
total = pmt * term * 12
self.pmt.set("$" + format(pmt, "5,.2f"))
self.total.set("$" + format(total, "8,.2f"))
def evalRefi(self): # takes self: it is invoked as a bound method by the Evaluate Refi button
pass
LoanCalculator() | en | 0.680345 | # -*- coding: utf-8 -*- Created on Sun Mar 24 15:02:37 2019
@author: <NAME> # space between inputs and outputs # variables to store loan inputs # variables for loan outputs # text boxes to hold inputs and outputs # Refinance variables # Refinance widgets # Refi output variables | 3.547802 | 4 |
ndctl.py | davelarsen58/pmemtool | 3 | 7547 | <reponame>davelarsen58/pmemtool
#!/usr/bin/python3
#
# PMTOOL NDCTL Python Module
# Copyright (C) <NAME>
# Released under MIT License
import os
import json
from common import message, get_linenumber, pretty_print
from common import V0, V1, V2, V3, V4, V5, D0, D1, D2, D3, D4, D5
import common as c
import time
DEFAULT_FSTAB_FILE = "/etc/fstab"
DEFAULT_NDCTL_FILE = "/tmp/ndctl_list_NDRH.txt"
DEBUG = 0
VERBOSE = c.VERBOSE
tmp_dir = '/tmp'
timers = []
# If working in a test sandbox, change paths
# to start with path to sandbox
#
if not os.getenv('SANDBOX'):
SANDBOX = ''
else:
SANDBOX = os.environ['SANDBOX']
print('Enabling Sandbox at:', SANDBOX)
# FSTAB = SANDBOX + '/etc/fstab'
DEVDIR = SANDBOX + '/dev'
DEV_UUID = DEVDIR + '/disk/by-uuid/'
NDCTL_FILE = SANDBOX + "/tmp/ndctl_list_NDRH.txt"
ndctl = {}
# ---------------------------------------------------------------------
def clean_up():
'''clean up all tmp files associated with this module'''
name = 'clean_up()'
tic = time.perf_counter()
status = False
file_name = '/tmp/ndctl*.txt'
status = c.clean_up(file_name)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return status
def get_nmem_dev_list(node):
''' returns list of nmems['nmem0' 'nmem1' 'nmem2' 'nmem3' 'nmem4' 'nmem5']
ndctl list -D -U 0
{
"dev":"nmem2",
"id":"8089-a2-1836-00002716",
"handle":33,
"phys_id":42,
"flag_failed_flush":true,
"flag_smart_event":true,
"security":"disabled"
}
'''
name = 'get_nmem_dev_list()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-D_-U_node' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -D -U " + str(node) + " > " + file_name
if not os.path.exists(file_name):
os.system(cmd)
tmp = {}
my_list = []
with open(file_name, 'r') as f:
tmp = json.load(f)
for t in range(len(tmp)):
my_list.append(tmp[t]['dev']) # index by t, not 0, so every device is collected
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def get_region_dev_list(node):
''' returns list of regions devices, ie: "region0"
ndctl list -U 0
[
{
"dev":"region0",
"size":1623497637888,
"available_size":0,
"max_available_extent":0,
"type":"pmem",
"iset_id":-7155516910447809332,
"persistence_domain":"memory_controller"
}
]
'''
name = 'get_region_dev_list()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-R_-U_node' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -R -U " + str(node) + " > " + file_name
if not os.path.exists(file_name):
os.system(cmd)
#
tmp = {}
with open(file_name, 'r') as f:
tmp = json.load(f)
my_list = []
for t in range(len(tmp)):
my_list.append(tmp[t]['dev']) # index by t, not 0, so every region is collected
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def get_ns_dev(node):
''' returns list of namespace names, ie: "namespace0.0"
ndctl list -U 0
[
{
"dev":"namespace0.0",
"mode":"fsdax",
"map":"dev",
"size":1598128390144,
"uuid":"115ff8e8-bd52-47b8-a678-9b200902d864",
"sector_size":512,
"align":2097152,
"blockdev":"pmem0"
}
]
'''
name = 'get_ns_dev()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-N_-U' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -N -U " + str(node) + " > " + file_name
os.system(cmd)
#
tmp = {}
with open(file_name, 'r') as f:
tmp = json.load(f)
#
my_list = []
for t in range(len(tmp)):
my_list.append(tmp[t]['dev']) # index by t, not 0, so every namespace is collected
#
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def get_ns_block_dev(node):
''' returns list of ns blockdevs, ie: "pmem0"
ndctl list -U 0
[
{
"dev":"namespace0.0",
"mode":"fsdax",
"map":"dev",
"size":1598128390144,
"uuid":"115ff8e8-bd52-47b8-a678-9b200902d864",
"sector_size":512,
"align":2097152,
"blockdev":"pmem0"
}
]
'''
name = 'get_ns_block_dev()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-N_-U' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -N -U " + str(node) + " > " + file_name
os.system(cmd)
#
tmp = {}
with open(file_name, 'r') as f:
tmp = json.load(f)
#
my_list = []
for t in range(len(tmp)):
my_list.append(tmp[t]['blockdev']) # index by t, not 0, so every blockdev is collected
#
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def dump(file_name = NDCTL_FILE):
"""
dump the config to a file to parse
"""
name = 'dump()'
tic = time.perf_counter()
# message("Function:", __name__, "File:", file_name )
# if VERBOSE: print(' Querying ndctl data:', file_name, end="...")
# ndctl list -NDRH
cmd = "/usr/bin/ndctl list -NDRH > " + file_name
os.system(cmd)
# if VERBOSE: print('Done')
def parse(file_name = NDCTL_FILE):
"""
parse ndctl dump file into dict: ndctl
"""
name = 'parse()'
tic = time.perf_counter()
global ndctl
# if DEBUG: print("DEBUG: Function:", __name__, "File:", file_name )
# if VERBOSE: print(' Parsing ndctl data:', file_name, end="...")
with open(file_name, 'r') as f:
ndctl = json.load(f)
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", ndctl)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ndctl
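# Shape of the parsed dict, for orientation (added for illustration; the key
# names mirror the accessors below, but the sample values are invented):
#
# ndctl = {
# 'dimms': [{'dev': 'nmem0', 'health': {'health_state': 'ok', ...}, ...}, ...],
# 'regions': [{'dev': 'region0',
# 'mappings': [{'dimm': 'nmem0', ...}, ...],
# 'namespaces': [{'dev': 'namespace0.0', 'blockdev': 'pmem0', ...}, ...],
# ...}, ...],
# }
#
# so get_region_dimm_list('region0') walks ndctl['regions'][r]['mappings'] and
# get_region_ns_device_list('region0') walks ndctl['regions'][r]['namespaces'].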
# - +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +
# Accessor Functions
#
def get_region_dimm_list(region):
"""
returns list of pmem dimms associated with pmem region
"""
name = 'get_region_dimm_list()'
tic = time.perf_counter()
global ndctl
dimm_list = []
# if DEBUG: print("DEBUG: Function:", __name__, "Region:", region )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
# if this region matches arg, get DIMM mappings
if ndctl['regions'][r]['dev'] == region:
for d in range(len(ndctl['regions'][r]['mappings'])):
if DEBUG: print(' ndctl[regions][r]mappings', ndctl['regions'][r]['mappings'][d]['dimm'])
dimm_list.append(ndctl['regions'][r]['mappings'][d]['dimm'])
continue
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, region, "DIMMS", dimm_list)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return dimm_list
def get_region_list():
"""
Region List
returns list of all pmem regions
"""
name = 'get_region_list()'
tic = time.perf_counter()
global ndctl
region_list = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
region_list.append(ndctl['regions'][r]['dev'])
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", region_list)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return region_list
def get_region_ns_device_list(region):
"""
Region Namespace Device List
returns list of all pmem namespaces names associated w/ pmem region
"""
name = 'get_region_ns_device_list()'
tic = time.perf_counter()
ns_list = []
# if DEBUG: print("DEBUG: Function:", __name__, "Region:", region )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
# if this region matches arg, get DIMM mappings
if ndctl['regions'][r]['dev'] == region:
for d in range(len(ndctl['regions'][r]['namespaces'])):
if DEBUG: print(' ndctl[regions][r]mappings', ndctl['regions'][r]['mappings'][d]['dimm'])
ns_list.append(ndctl['regions'][r]['namespaces'][d]['blockdev'])
continue
# if VERBOSE: print('Done')
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ns_list
def get_region_ns_name_list(region):
"""
Region Namespace List
returns list of all pmem namespaces names associated w/ pmem region
"""
name = 'get_region_ns_name_list()'
tic = time.perf_counter()
ns_list = []
# if DEBUG: print("DEBUG: Function:", __name__, "Region:", region )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
# if this region matches arg, get DIMM mappings
if ndctl['regions'][r]['dev'] == region:
for d in range(len(ndctl['regions'][r]['namespaces'])):
if DEBUG: print(' ndctl[regions][r]mappings', ndctl['regions'][r]['mappings'][d]['dimm'])
ns_list.append(ndctl['regions'][r]['namespaces'][d]['dev'])
continue
# if VERBOSE: print('Done')
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ns_list
def get_dimm_status(dimm):
"""
DIMM List
returns status of given dimm
"""
name = 'get_dimm_status()'
tic = time.perf_counter()
status = 'unknown' # default so the return below is defined even if the dimm is not found
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
for d in range(len(ndctl['dimms'])):
if DEBUG: print(ndctl['dimms'][d]['dev'], ndctl['dimms'][d]['health']['health_state'])
if ndctl['dimms'][d]['dev'] == dimm:
status = ndctl['dimms'][d]['health']['health_state']
break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", dimmList)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return status
def get_dimm_list():
"""
DIMM List
returns list of all pmem devices in system
"""
name = 'get_dimm_list()'
tic = time.perf_counter()
dimm_list = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
for d in range(len(ndctl['dimms'])):
dimm_list.append(ndctl['dimms'][d]['dev'])
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", dimmList)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return dimm_list
def get_region_by_dimm(dimm):
"""
Get Region by DIMM
returns region associated with PMEM device
"""
name = 'get_region_by_dimm()'
tic = time.perf_counter()
region = "regionX"
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
# loop through regions, get dimmList for each, check if match
for r in range(len(ndctl['regions'])):
region = ndctl['regions'][r]['dev']
dimmList = get_region_dimm_list(region)
# print("get_region_by_dimm.r", r, region, dimmList )
if dimm in dimmList: break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", region)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return region
def get_ns_name_list_by_dimm(dimm):
"""
Get PMEM Namespace name by DIMM
returns list of pmem namespaces associated with name
"""
name = 'get_ns_name_list_by_dimm()'
tic = time.perf_counter()
nsNameList = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
# loop through regions, get dimmList for each, check if match
for r in range(len(ndctl['regions'])):
region = ndctl['regions'][r]['dev']
dimmList = get_region_dimm_list(region)
# we should have a region to lookup namespaces
nsNameList = get_region_ns_name_list(region)
if dimm in dimmList: break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", nsNameList)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return nsNameList
def get_ns_device_list_by_dimm(dimm):
"""
Get Namespace Devices by DIMM
returns pmem namespace device for given DIMM
"""
name = 'get_ns_device_list_by_dimm()'
tic = time.perf_counter()
ns_device_list = []
dimm_list = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
# loop through regions, get dimmList for each, check if match
for r in range(len(ndctl['regions'])):
region = ndctl['regions'][r]['dev']
dimm_list = get_region_dimm_list(region)
# we should have a region to lookup namespaces
ns_device_list = get_region_ns_device_list(region)
if dimm in dimm_list: break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", ns_device_list)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ns_device_list
def list_dimm_table():
name = 'list_dimm_table()'
tic = time.perf_counter()
print()
print("Optane Persistent Memory DIMM Status")
print()
print("%-7s %-21s %-6s %-6s %-6s %-6s" % ("Linux", "DIMM", "DIMM", "DIMM", "Cntrl", "Remaining") )
print("%-7s %-21s %-6s %-6s %-6s %-6s" % ("Device", "UID", "Health", "Temp", "Temp", "Life") )
print("%-7s %-21s %-6s %-6s %-6s %-6s" % ("-------", "--------------------", "------", "------", "------", "----") )
for x in range(len(ndctl['dimms'])):
print("%-7s %-21s %6s %-6s %-6s %-6s" % (
ndctl['dimms'][x]['dev'], \
ndctl['dimms'][x]['id'], \
ndctl['dimms'][x]['health']['health_state'], \
ndctl['dimms'][x]['health']['temperature_celsius'], \
ndctl['dimms'][x]['health']['controller_temperature_celsius'], \
ndctl['dimms'][x]['health']['spares_percentage'] \
))
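# Sample of the table this prints (added for illustration; the row values are
# invented):
#
# Linux DIMM DIMM DIMM Cntrl Remaining
# Device UID Health Temp Temp Life
# ------- -------------------- ------ ------ ------ ----
# nmem0 8089-a2-1836-00002716 ok 32 35 98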
def module_test():
name = 'module_test()'
tic = time.perf_counter()
import sys
import os
global VERBOSE
global DEBUG
VERBOSE = 0
DEBUG = 0
# Dicts
ndctl = {}
# Lists
regionList = []
dimmList = []
nsList = []
nsDeviceList = []
nsNameList = []
region = "region1"
dimm = "nmem0"
print("Module: ndctl.py: Testing Functions")
dump()
ndctl = parse()
# OK
dimmList = get_dimm_list()
print(" MAIN:get_dimm_list:dimmList:", dimmList)
# OK
regionList = get_region_list()
print(" MAIN:get_region_list:regionList:", regionList)
# OK
dimmList = get_region_dimm_list(region)
print(" MAIN:get_region_dimm_list:dimmList", " Region:", region, "DIMM's", dimmList)
# OK
region = "region0"
nsList = get_region_ns_name_list(region)
print(" MAIN:get_region_ns_name_list:nsList", " Region:", region, "NS", nsList)
# OK
region = "region1"
nsList = get_region_ns_device_list(region)
print(" MAIN:get_region_ns_device_list:nsList", " Region:", region, "NS", nsList)
dimm = "nmem1"
region = get_region_by_dimm(dimm)
print(" MAIN:get_region_by_dimm:region", " DIMM:", dimm, "Region:", region)
nsDeviceList = get_ns_device_list_by_dimm(dimm)
print(" MAIN:get_ns_device_list_by_dimm:nsDeviceList", nsDeviceList)
nsNameList = get_ns_name_list_by_dimm(dimm)
print(" MAIN:get_ns_name_list_by_dimm:nsNameList", nsNameList)
dimm = "nmem8"
dimmStatus = get_dimm_status(dimm)
print(" MAIN:get_dimm_status:dimmStatus", dimm, dimmStatus)
print(" MAIN:listDimmsFull")
list_dimm_table()
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
def print_timers(t = timers):
'''
------------ndctl function timers---------------------
Function Elapsed Start End
-------------------- --------- ----------- ------------
show_socket() 0.5140 941291.4208 941291.9348
parse_socket() 0.0004 941291.9348 941291.9352
show_dimm() 2.0074 941291.9352 941293.9426
parse_dimm() 0.0068 941293.9426 941293.9494
show_region() 3.8237 941293.9494 941297.7731
parse_region() 0.0006 941297.7732 941297.7737
show_dimm() 2.5911 941297.7781 941300.3692
parse_dimm() 0.0051 941300.3692 941300.3743
get_dimms() 2.5962 941297.7781 941300.3744
list_dimms() 0.0004 941300.3744 941300.3748
'''
print('------------Start ndctl function timers---------------')
print('%30s %8s %11s %11s' % ('Function', 'Elapsed', 'Start', 'End') )
print('%30s %8s %11s %11s' % ('------------------------------', '---------', '-----------', '------------') )
first = t[0]['tic']
last = t[len(t) -1]['toc']
for i in t:
print('%30s %9.4f %11.4f %11.4f' % (i['name'], i['elapsed'], i['tic'], i['toc']) )
print('%30s %9.4f %11.4f %11.4f' % ('NDCTL Overall', last - first, first, last) )
print()
print('------------End ndctl function timers-----------------')
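# The tic/toc bookkeeping repeated in every function above could be factored
# into a decorator; a minimal sketch follows (added for illustration, not used
# by the module). It relies only on the module-level `time` import and
# `timers` list.
def timed(fn):
    """Wrap fn so each call appends a {'name', 'elapsed', 'tic', 'toc'} record."""
    import functools
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        tic = time.perf_counter()
        result = fn(*args, **kwargs)
        toc = time.perf_counter()
        timers.append({'name': fn.__name__ + '()', 'elapsed': toc - tic,
                       'tic': tic, 'toc': toc})
        return result
    return wrapper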
def main():
name = 'main()'
tic = time.perf_counter()
print("This module is not intended to run standalone")
print("import this module into your script to use or use")
print("Persistent Memory Tool, pmt")
module_test()
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
print_timers()
if __name__ == "__main__":
main()
| #!/usr/bin/python3
#
# PMTOOL NDCTL Python Module
# Copyright (C) <NAME>
# Released under MIT License
import os
import json
from common import message, get_linenumber, pretty_print
from common import V0, V1, V2, V3, V4, V5, D0, D1, D2, D3, D4, D5
import common as c
import time
DEFAULT_FSTAB_FILE = "/etc/fstab"
DEFAULT_NDCTL_FILE = "/tmp/ndctl_list_NDRH.txt"
DEBUG = 0
VERBOSE = c.VERBOSE
tmp_dir = '/tmp'
timers = []
# If working in a test sandbox, change paths
# to start with path to sandbox
#
if not os.getenv('SANDBOX'):
SANDBOX = ''
else:
SANDBOX = os.environ['SANDBOX']
print('Enabling Sandbox at:', SANDBOX)
# FSTAB = SANDBOX + '/etc/fstab'
DEVDIR = SANDBOX + '/dev'
DEV_UUID = DEVDIR + '/disk/by-uuid/'
NDCTL_FILE = SANDBOX + "/tmp/ndctl_list_NDRH.txt"
ndctl = {}
# ---------------------------------------------------------------------
def clean_up():
'''clean up all tmp files associated with this module'''
name = 'clean_up()'
tic = time.perf_counter()
status = False
file_name = '/tmp/ndctl*.txt'
status = c.clean_up(file_name)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return status
def get_nmem_dev_list(node):
''' returns list of nmems['nmem0' 'nmem1' 'nmem2' 'nmem3' 'nmem4' 'nmem5']
ndctl list -D -U 0
{
"dev":"nmem2",
"id":"8089-a2-1836-00002716",
"handle":33,
"phys_id":42,
"flag_failed_flush":true,
"flag_smart_event":true,
"security":"disabled"
}
'''
name = 'get_nmem_dev_list()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-D_-U_node' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -D -U " + str(node) + " > " + file_name
if not os.path.exists(file_name):
os.system(cmd)
tmp = {}
my_list = []
with open(file_name, 'r') as f:
tmp = json.load(f)
for t in range(len(tmp)):
my_list.append(tmp[t]['dev']) # index by t, not 0, so every device is collected
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def get_region_dev_list(node):
''' returns list of regions devices, ie: "region0"
ndctl list -U 0
[
{
"dev":"region0",
"size":1623497637888,
"available_size":0,
"max_available_extent":0,
"type":"pmem",
"iset_id":-7155516910447809332,
"persistence_domain":"memory_controller"
}
]
'''
name = 'get_region_dev_list()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-R_-U_node' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -R -U " + str(node) + " > " + file_name
if not os.path.exists(file_name):
os.system(cmd)
#
tmp = {}
with open(file_name, 'r') as f:
tmp = json.load(f)
my_list = []
for t in range(len(tmp)):
my_list.append(tmp[t]['dev']) # index by t, not 0, so every region is collected
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def get_ns_dev(node):
''' returns list of namespace names, ie: "namespace0.0"
ndctl list -U 0
[
{
"dev":"namespace0.0",
"mode":"fsdax",
"map":"dev",
"size":1598128390144,
"uuid":"115ff8e8-bd52-47b8-a678-9b200902d864",
"sector_size":512,
"align":2097152,
"blockdev":"pmem0"
}
]
'''
name = 'get_ns_dev()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-N_-U' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -N -U " + str(node) + " > " + file_name
os.system(cmd)
#
tmp = {}
with open(file_name, 'r') as f:
tmp = json.load(f)
#
my_list = []
for t in range(len(tmp)):
my_list.append(tmp[t]['dev']) # index by t, not 0, so every namespace is collected
#
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def get_ns_block_dev(node):
''' returns list of ns blockdevs, ie: "pmem0"
ndctl list -U 0
[
{
"dev":"namespace0.0",
"mode":"fsdax",
"map":"dev",
"size":1598128390144,
"uuid":"115ff8e8-bd52-47b8-a678-9b200902d864",
"sector_size":512,
"align":2097152,
"blockdev":"pmem0"
}
]
'''
name = 'get_ns_block_dev()'
tic = time.perf_counter()
file_name = '/tmp/ndctl_list_-N_-U' + str(node) + '.txt'
cmd = "/usr/bin/ndctl list -N -U " + str(node) + " > " + file_name
os.system(cmd)
#
tmp = {}
with open(file_name, 'r') as f:
tmp = json.load(f)
#
my_list = []
for t in range(len(tmp)):
my_list.append(tmp[t]['blockdev']) # index by t, not 0, so every blockdev is collected
#
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return my_list
# ---------------------------------------------------------------------
def dump(file_name = NDCTL_FILE):
"""
dump the config to a file to parse
"""
name = 'dump()'
tic = time.perf_counter()
# message("Function:", __name__, "File:", file_name )
# if VERBOSE: print(' Querying ndctl data:', file_name, end="...")
# ndctl list -NDRH
cmd = "/usr/bin/ndctl list -NDRH > " + file_name
os.system(cmd)
# if VERBOSE: print('Done')
def parse(file_name = NDCTL_FILE):
"""
parse ndctl dump file into dict: ndctl
"""
name = 'parse()'
tic = time.perf_counter()
global ndctl
# if DEBUG: print("DEBUG: Function:", __name__, "File:", file_name )
# if VERBOSE: print(' Parsing ndctl data:', file_name, end="...")
with open(file_name, 'r') as f:
ndctl = json.load(f)
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", ndctl)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ndctl
# - +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +
# Accessor Functions
#
def get_region_dimm_list(region):
"""
returns list of pmem dimms associated with pmem region
"""
name = 'get_region_dimm_list()'
tic = time.perf_counter()
global ndctl
dimm_list = []
# if DEBUG: print("DEBUG: Function:", __name__, "Region:", region )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
# if this region matches arg, get DIMM mappings
if ndctl['regions'][r]['dev'] == region:
for d in range(len(ndctl['regions'][r]['mappings'])):
if DEBUG: print(' ndctl[regions][r]mappings', ndctl['regions'][r]['mappings'][d]['dimm'])
dimm_list.append(ndctl['regions'][r]['mappings'][d]['dimm'])
continue
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, region, "DIMMS", dimm_list)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return dimm_list
def get_region_list():
"""
Region List
returns list of all pmem regions
"""
name = 'get_region_list()'
tic = time.perf_counter()
global ndctl
region_list = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
region_list.append(ndctl['regions'][r]['dev'])
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", region_list)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return region_list
def get_region_ns_device_list(region):
"""
Region Namespace Device List
returns list of all pmem namespaces names associated w/ pmem region
"""
name = 'get_region_ns_device_list()'
tic = time.perf_counter()
ns_list = []
# if DEBUG: print("DEBUG: Function:", __name__, "Region:", region )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
# if this region matches arg, get DIMM mappings
if ndctl['regions'][r]['dev'] == region:
for d in range(len(ndctl['regions'][r]['namespaces'])):
if DEBUG: print(' ndctl[regions][r]mappings', ndctl['regions'][r]['mappings'][d]['dimm'])
ns_list.append(ndctl['regions'][r]['namespaces'][d]['blockdev'])
continue
# if VERBOSE: print('Done')
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ns_list
def get_region_ns_name_list(region):
"""
Region Namespace List
returns list of all pmem namespaces names associated w/ pmem region
"""
name = 'get_region_ns_name_list()'
tic = time.perf_counter()
ns_list = []
# if DEBUG: print("DEBUG: Function:", __name__, "Region:", region )
# if VERBOSE: print(' getting:', __name__, end="...")
for r in range(len(ndctl['regions'])):
# if this region matches arg, get DIMM mappings
if ndctl['regions'][r]['dev'] == region:
for d in range(len(ndctl['regions'][r]['namespaces'])):
if DEBUG: print(' ndctl[regions][r]mappings', ndctl['regions'][r]['mappings'][d]['dimm'])
ns_list.append(ndctl['regions'][r]['namespaces'][d]['dev'])
continue
# if VERBOSE: print('Done')
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ns_list
def get_dimm_status(dimm):
"""
DIMM List
returns status of given dimm
"""
name = 'get_dimm_status()'
tic = time.perf_counter()
status = 'unknown' # default so the return below is defined even if the dimm is not found
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
for d in range(len(ndctl['dimms'])):
if DEBUG: print(ndctl['dimms'][d]['dev'], ndctl['dimms'][d]['health']['health_state'])
if ndctl['dimms'][d]['dev'] == dimm:
status = ndctl['dimms'][d]['health']['health_state']
break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", dimmList)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return status
def get_dimm_list():
"""
DIMM List
returns list of all pmem devices in system
"""
name = 'get_dimm_list()'
tic = time.perf_counter()
dimm_list = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
for d in range(len(ndctl['dimms'])):
dimm_list.append(ndctl['dimms'][d]['dev'])
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", dimmList)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return dimm_list
def get_region_by_dimm(dimm):
"""
Get Region by DIMM
returns region associated with PMEM device
"""
name = 'get_region_by_dimm()'
tic = time.perf_counter()
region = "regionX"
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
# loop through regions, get dimmList for each, check if match
for r in range(len(ndctl['regions'])):
region = ndctl['regions'][r]['dev']
dimmList = get_region_dimm_list(region)
# print("get_region_by_dimm.r", r, region, dimmList )
if dimm in dimmList: break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", region)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return region
def get_ns_name_list_by_dimm(dimm):
"""
Get PMEM Namespace name by DIMM
returns list of pmem namespaces associated with name
"""
name = 'get_ns_name_list_by_dimm()'
tic = time.perf_counter()
nsNameList = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
# loop through regions, get dimmList for each, check if match
for r in range(len(ndctl['regions'])):
region = ndctl['regions'][r]['dev']
dimmList = get_region_dimm_list(region)
# we should have a region to lookup namespaces
nsNameList = get_region_ns_name_list(region)
if dimm in dimmList: break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", nsNameList)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return nsNameList
def get_ns_device_list_by_dimm(dimm):
"""
Get Namespace Devices by DIMM
returns pmem namespace device for given DIMM
"""
name = 'get_ns_device_list_by_dimm()'
tic = time.perf_counter()
ns_device_list = []
dimm_list = []
# if DEBUG: print("DEBUG: Function:", __name__ )
# if VERBOSE: print(' getting:', __name__, end="...")
# loop through regions, get dimmList for each, check if match
for r in range(len(ndctl['regions'])):
region = ndctl['regions'][r]['dev']
dimm_list = get_region_dimm_list(region)
# we should have a region to lookup namespaces
ns_device_list = get_region_ns_device_list(region)
if dimm in dimm_list: break
# if VERBOSE: print('Done')
# if DEBUG: print("Debug:", __name__, ":", ns_device_list)
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
return ns_device_list
def list_dimm_table():
name = 'list_dimm_table()'
tic = time.perf_counter()
print()
print("Optane Persistent Memory DIMM Status")
print()
print("%-7s %-21s %-6s %-6s %-6s %-6s" % ("Linux", "DIMM", "DIMM", "DIMM", "Cntrl", "Remaining") )
print("%-7s %-21s %-6s %-6s %-6s %-6s" % ("Device", "UID", "Health", "Temp", "Temp", "Life") )
print("%-7s %-21s %-6s %-6s %-6s %-6s" % ("-------", "--------------------", "------", "------", "------", "----") )
for x in range(len(ndctl['dimms'])):
print("%-7s %-21s %6s %-6s %-6s %-6s" % (
ndctl['dimms'][x]['dev'], \
ndctl['dimms'][x]['id'], \
ndctl['dimms'][x]['health']['health_state'], \
ndctl['dimms'][x]['health']['temperature_celsius'], \
ndctl['dimms'][x]['health']['controller_temperature_celsius'], \
ndctl['dimms'][x]['health']['spares_percentage'] \
))
def module_test():
name = 'module_test()'
tic = time.perf_counter()
import sys
import os
global VERBOSE
global DEBUG
VERBOSE = 0
DEBUG = 0
# Dicts
ndctl = {}
# Lists
regionList = []
dimmList = []
nsList = []
nsDeviceList = []
nsNameList = []
region = "region1"
dimm = "nmem0"
print("Module: ndctl.py: Testing Functions")
dump()
ndctl = parse()
# OK
dimmList = get_dimm_list()
print(" MAIN:get_dimm_list:dimmList:", dimmList)
# OK
regionList = get_region_list()
print(" MAIN:get_region_list:regionList:", regionList)
# OK
dimmList = get_region_dimm_list(region)
print(" MAIN:get_region_dimm_list:dimmList", " Region:", region, "DIMM's", dimmList)
# OK
region = "region0"
nsList = get_region_ns_name_list(region)
print(" MAIN:get_region_ns_name_list:nsList", " Region:", region, "NS", nsList)
# OK
region = "region1"
nsList = get_region_ns_device_list(region)
print(" MAIN:get_region_ns_device_list:nsList", " Region:", region, "NS", nsList)
dimm = "nmem1"
region = get_region_by_dimm(dimm)
print(" MAIN:get_region_by_dimm:region", " DIMM:", dimm, "Region:", region)
nsDeviceList = get_ns_device_list_by_dimm(dimm)
print(" MAIN:get_ns_device_list_by_dimm:nsDeviceList", nsDeviceList)
nsNameList = get_ns_name_list_by_dimm(dimm)
print(" MAIN:get_ns_name_list_by_dimm:nsNameList", nsNameList)
dimm = "nmem8"
dimmStatus = get_dimm_status(dimm)
print(" MAIN:get_dimm_status:dimmStatus", dimm, dimmStatus)
print(" MAIN:listDimmsFull")
list_dimm_table()
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
def print_timers(t = timers):
'''
------------ndctl function timers---------------------
Function Elapsed Start End
-------------------- --------- ----------- ------------
show_socket() 0.5140 941291.4208 941291.9348
parse_socket() 0.0004 941291.9348 941291.9352
show_dimm() 2.0074 941291.9352 941293.9426
parse_dimm() 0.0068 941293.9426 941293.9494
show_region() 3.8237 941293.9494 941297.7731
parse_region() 0.0006 941297.7732 941297.7737
show_dimm() 2.5911 941297.7781 941300.3692
parse_dimm() 0.0051 941300.3692 941300.3743
get_dimms() 2.5962 941297.7781 941300.3744
list_dimms() 0.0004 941300.3744 941300.3748
'''
print('------------Start ndctl function timers---------------')
print('%30s %8s %11s %11s' % ('Function', 'Elapsed', 'Start', 'End') )
print('%30s %8s %11s %11s' % ('------------------------------', '---------', '-----------', '------------') )
first = t[0]['tic']
last = t[len(t) -1]['toc']
for i in t:
print('%30s %9.4f %11.4f %11.4f' % (i['name'], i['elapsed'], i['tic'], i['toc']) )
print('%30s %9.4f %11.4f %11.4f' % ('NDCTL Overall', last - first, first, last) )
print()
print('------------End ndctl function timers-----------------')
def main():
name = 'main()'
tic = time.perf_counter()
print("This module is not intended to run standalone")
print("import this module into your script to use or use")
print("Persistent Memory Tool, pmt")
module_test()
toc = time.perf_counter()
delta_t = toc - tic
td = {'name': name, "elapsed": delta_t, 'tic': tic, 'toc': toc}
timers.append(td)
print_timers()
if __name__ == "__main__":
main() | en | 0.325317 | #!/usr/bin/python3 # # PMTOOL NDCTL Python Module # Copyright (C) <NAME> # Released under MIT License # If working in a test sandbox, change paths # to start with path to sandbox # # FSTAB = SANDBOX + '/etc/fstab' # --------------------------------------------------------------------- clean up all tmp files associated with this mdule returns list of nmems['nmem0' 'nmem1' 'nmem2' 'nmem3' 'nmem4' 'nmem5'] ndctl list -D -U 0 { "dev":"nmem2", "id":"8089-a2-1836-00002716", "handle":33, "phys_id":42, "flag_failed_flush":true, "flag_smart_event":true, "security":"disabled" } # --------------------------------------------------------------------- returns list of regions devices, ie: "region0" ndctl list -U 0 [ { "dev":"region0", "size":1623497637888, "available_size":0, "max_available_extent":0, "type":"pmem", "iset_id":-7155516910447809332, "persistence_domain":"memory_controller" } ] # # --------------------------------------------------------------------- returns list of namespace names, ie: "namespace0.0" ndctl list -U 0 [ { "dev":"namespace0.0", "mode":"fsdax", "map":"dev", "size":1598128390144, "uuid":"115ff8e8-bd52-47b8-a678-9b200902d864", "sector_size":512, "align":2097152, "blockdev":"pmem0" } ] # # # # --------------------------------------------------------------------- returns list of ns blockdevs, ie: "pmem0" ndctl list -U 0 [ { "dev":"namespace0.0", "mode":"fsdax", "map":"dev", "size":1598128390144, "uuid":"115ff8e8-bd52-47b8-a678-9b200902d864", "sector_size":512, "align":2097152, "blockdev":"pmem0" } ] # # # # --------------------------------------------------------------------- dump the config to a file to parse # message("Function:", __name__, "File:", file_name ) # if VERBOSE: print(' Querying ndctl data:', file_name, end="...") # ndctl list -NDRH # if VERBOSE: print('Done') parse ndctl dump file into dict: ndctl # if DEBUG: print("DEBUG: Function:", __name__, "File:", file_name ) # if VERBOSE: print(' Parsing ndctl data:', file_name, end="...") # if VERBOSE: print('Done') # if DEBUG: print("Debug:", __name__, ":", ndctl) # - +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- +- + # Accessor Functions # returns list of pmem dimms assocaited with pmem region # if DEBUG: print("DEBUG: Function:", __name__, "Region:", region ) # if VERBOSE: print(' getting:', __name__, end="...") # if this region matches arg, get DIMM mappings # if VERBOSE: print('Done') # if DEBUG: print("Debug:", __name__, region, "DIMMS", dimm_list) Region List returns list of all pmem regions # if DEBUG: print("DEBUG: Function:", __name__ ) # if VERBOSE: print(' getting:', __name__, end="...") # if VERBOSE: print('Done') # if DEBUG: print("Debug:", __name__, ":", region_list) Region Namespace Device List returns list of all pmem namespaces names associated w/ pmem region # if DEBUG: print("DEBUG: Function:", __name__, "Region:", region ) # if VERBOSE: print(' getting:', __name__, end="...") # if this region matches arg, get DIMM mappings # if VERBOSE: print('Done') Region Namespace List returns list of all pmem namespaces names associated w/ pmem region # if DEBUG: print("DEBUG: Function:", __name__, "Region:", region ) # if VERBOSE: print(' getting:', __name__, end="...") # if this region matches arg, get DIMM mappings # if VERBOSE: print('Done') DIMM List returns status of given dimm # dimm_list = [] # if DEBUG: print("DEBUG: Function:", __name__ ) # if VERBOSE: print(' getting:', __name__, end="...") # if VERBOSE: print('Done') # if DEBUG: print("Debug:", __name__, ":", dimmList) 
DIMM List returns list of all pmem devices in system # if DEBUG: print("DEBUG: Function:", __name__ ) # if VERBOSE: print(' getting:', __name__, end="...") # if VERBOSE: print('Done') # if DEBUG: print("Debug:", __name__, ":", dimmList) Get Region by DIMM returns region associated with PMEM device # if DEBUG: print("DEBUG: Function:", __name__ ) # if VERBOSE: print(' getting:', __name__, end="...") # loop through regions, get dimmList for each, check if match # print("get_region_by_dimm.r", r, region, dimmList ) # if VERBOSE: print('Done') # if DEBUG: print("Debug:", __name__, ":", region) Get PMEM Namespace name by DIMM returns list of pmem namespaces associated with name # if DEBUG: print("DEBUG: Function:", __name__ ) # if VERBOSE: print(' getting:', __name__, end="...") # loop through regions, get dimmList for each, check if match # we should have a region to lookup namespaces # if VERBOSE: print('Done') # if DEBUG: print("Debug:", __name__, ":", nsNameList) Get Namespace Devices by DIMM returns pmem namespace device for given DIMM # if DEBUG: print("DEBUG: Function:", __name__ ) # if VERBOSE: print(' getting:', __name__, end="...") # loop through regions, get dimmList for each, check if match # we should have a region to lookup namespaces # if VERBOSE: print('Done') # if DEBUG: print("Debug:", __name__, ":", ns_device_list) # Dicts # Lists # OK # OK # OK # OK # OK ------------ndctl function timers--------------------- Function Elapsed Start End -------------------- --------- ----------- ------------ show_socket() 0.5140 941291.4208 941291.9348 parse_socket() 0.0004 941291.9348 941291.9352 show_dimm() 2.0074 941291.9352 941293.9426 parse_dimm() 0.0068 941293.9426 941293.9494 show_region() 3.8237 941293.9494 941297.7731 parse_region() 0.0006 941297.7732 941297.7737 show_dimm() 2.5911 941297.7781 941300.3692 parse_dimm() 0.0051 941300.3692 941300.3743 get_dimms() 2.5962 941297.7781 941300.3744 list_dimms() 0.0004 941300.3744 941300.3748 | 2.077803 | 2 |
tb/sources/__init__.py | DronMDF/manabot | 1 | 7548 | from .admin import ReviewListAdmin, SoAdminReviewIsOut, SoReviewForAdmin
from .admin_commands import (
AdminCommands,
AdminFilteredCommands,
ReviewListByCommands,
SoIgnoreReview,
SoSubmitReview
)
from .gerrit import ReviewOnServer, SoNewReview, SoOutReview, SoUpdateReview
from .reaction import (
ReactionAlways,
ReactionChoiced,
ReactionRestrict,
ReactionReview
)
from .review_list import (
ReviewDifference,
ReviewForUpdate,
ReviewIgnored,
ReviewIsNeed,
ReviewOne,
ReviewUnderControl,
ReviewVerified
)
from .telegram import (
SoNoTelegramTimeout,
SoTelegram,
TelegramBot,
TelegramOffsetFromDb
)
from .utility import SoJoin, SoSafe
| from .admin import ReviewListAdmin, SoAdminReviewIsOut, SoReviewForAdmin
from .admin_commands import (
AdminCommands,
AdminFilteredCommands,
ReviewListByCommands,
SoIgnoreReview,
SoSubmitReview
)
from .gerrit import ReviewOnServer, SoNewReview, SoOutReview, SoUpdateReview
from .reaction import (
ReactionAlways,
ReactionChoiced,
ReactionRestrict,
ReactionReview
)
from .review_list import (
ReviewDifference,
ReviewForUpdate,
ReviewIgnored,
ReviewIsNeed,
ReviewOne,
ReviewUnderControl,
ReviewVerified
)
from .telegram import (
SoNoTelegramTimeout,
SoTelegram,
TelegramBot,
TelegramOffsetFromDb
)
from .utility import SoJoin, SoSafe
| none | 1 | 1.047243 | 1 |
|
sdk/python/pulumi_gcp/accesscontextmanager/service_perimeter.py | sisisin/pulumi-gcp | 121 | 7549 |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ServicePerimeterArgs', 'ServicePerimeter']
@pulumi.input_type
class ServicePerimeterArgs:
def __init__(__self__, *,
parent: pulumi.Input[str],
title: pulumi.Input[str],
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input['ServicePerimeterSpecArgs']] = None,
status: Optional[pulumi.Input['ServicePerimeterStatusArgs']] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a ServicePerimeter resource.
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
        :param pulumi.Input[str] title: Human-readable title. Must be unique within the Policy.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
               resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input['ServicePerimeterSpecArgs'] spec: Proposed (or dry run) ServicePerimeter configuration.
               This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input['ServicePerimeterStatusArgs'] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
               be set to True if any of the fields in the spec are set to non-default values.
"""
pulumi.set(__self__, "parent", parent)
pulumi.set(__self__, "title", title)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if perimeter_type is not None:
pulumi.set(__self__, "perimeter_type", perimeter_type)
if spec is not None:
pulumi.set(__self__, "spec", spec)
if status is not None:
pulumi.set(__self__, "status", status)
if use_explicit_dry_run_spec is not None:
pulumi.set(__self__, "use_explicit_dry_run_spec", use_explicit_dry_run_spec)
@property
@pulumi.getter
def parent(self) -> pulumi.Input[str]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@parent.setter
def parent(self, value: pulumi.Input[str]):
pulumi.set(self, "parent", value)
@property
@pulumi.getter
def title(self) -> pulumi.Input[str]:
"""
        Human-readable title. Must be unique within the Policy.
"""
return pulumi.get(self, "title")
@title.setter
def title(self, value: pulumi.Input[str]):
pulumi.set(self, "title", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the ServicePerimeter and its use. Does not affect
behavior.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="perimeterType")
def perimeter_type(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
        resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
"""
return pulumi.get(self, "perimeter_type")
@perimeter_type.setter
def perimeter_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "perimeter_type", value)
@property
@pulumi.getter
def spec(self) -> Optional[pulumi.Input['ServicePerimeterSpecArgs']]:
"""
Proposed (or dry run) ServicePerimeter configuration.
        This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
"""
return pulumi.get(self, "spec")
@spec.setter
def spec(self, value: Optional[pulumi.Input['ServicePerimeterSpecArgs']]):
pulumi.set(self, "spec", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input['ServicePerimeterStatusArgs']]:
"""
ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input['ServicePerimeterStatusArgs']]):
pulumi.set(self, "status", value)
@property
@pulumi.getter(name="useExplicitDryRunSpec")
def use_explicit_dry_run_spec(self) -> Optional[pulumi.Input[bool]]:
"""
Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
        be set to True if any of the fields in the spec are set to non-default values.
"""
return pulumi.get(self, "use_explicit_dry_run_spec")
@use_explicit_dry_run_spec.setter
def use_explicit_dry_run_spec(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "use_explicit_dry_run_spec", value)
@pulumi.input_type
class _ServicePerimeterState:
def __init__(__self__, *,
create_time: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input['ServicePerimeterSpecArgs']] = None,
status: Optional[pulumi.Input['ServicePerimeterStatusArgs']] = None,
title: Optional[pulumi.Input[str]] = None,
update_time: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None):
"""
Input properties used for looking up and filtering ServicePerimeter resources.
:param pulumi.Input[str] create_time: Time the AccessPolicy was created in UTC.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
               resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input['ServicePerimeterSpecArgs'] spec: Proposed (or dry run) ServicePerimeter configuration.
               This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input['ServicePerimeterStatusArgs'] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
        :param pulumi.Input[str] title: Human-readable title. Must be unique within the Policy.
:param pulumi.Input[str] update_time: Time the AccessPolicy was updated in UTC.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
               be set to True if any of the fields in the spec are set to non-default values.
"""
if create_time is not None:
pulumi.set(__self__, "create_time", create_time)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if parent is not None:
pulumi.set(__self__, "parent", parent)
if perimeter_type is not None:
pulumi.set(__self__, "perimeter_type", perimeter_type)
if spec is not None:
pulumi.set(__self__, "spec", spec)
if status is not None:
pulumi.set(__self__, "status", status)
if title is not None:
pulumi.set(__self__, "title", title)
if update_time is not None:
pulumi.set(__self__, "update_time", update_time)
if use_explicit_dry_run_spec is not None:
pulumi.set(__self__, "use_explicit_dry_run_spec", use_explicit_dry_run_spec)
@property
@pulumi.getter(name="createTime")
def create_time(self) -> Optional[pulumi.Input[str]]:
"""
Time the AccessPolicy was created in UTC.
"""
return pulumi.get(self, "create_time")
@create_time.setter
def create_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "create_time", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the ServicePerimeter and its use. Does not affect
behavior.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def parent(self) -> Optional[pulumi.Input[str]]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@parent.setter
def parent(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "parent", value)
@property
@pulumi.getter(name="perimeterType")
def perimeter_type(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
        resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
"""
return pulumi.get(self, "perimeter_type")
@perimeter_type.setter
def perimeter_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "perimeter_type", value)
@property
@pulumi.getter
def spec(self) -> Optional[pulumi.Input['ServicePerimeterSpecArgs']]:
"""
Proposed (or dry run) ServicePerimeter configuration.
        This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
"""
return pulumi.get(self, "spec")
@spec.setter
def spec(self, value: Optional[pulumi.Input['ServicePerimeterSpecArgs']]):
pulumi.set(self, "spec", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input['ServicePerimeterStatusArgs']]:
"""
ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input['ServicePerimeterStatusArgs']]):
pulumi.set(self, "status", value)
@property
@pulumi.getter
def title(self) -> Optional[pulumi.Input[str]]:
"""
        Human-readable title. Must be unique within the Policy.
"""
return pulumi.get(self, "title")
@title.setter
def title(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "title", value)
@property
@pulumi.getter(name="updateTime")
def update_time(self) -> Optional[pulumi.Input[str]]:
"""
Time the AccessPolicy was updated in UTC.
"""
return pulumi.get(self, "update_time")
@update_time.setter
def update_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "update_time", value)
@property
@pulumi.getter(name="useExplicitDryRunSpec")
def use_explicit_dry_run_spec(self) -> Optional[pulumi.Input[bool]]:
"""
Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
        be set to True if any of the fields in the spec are set to non-default values.
"""
return pulumi.get(self, "use_explicit_dry_run_spec")
@use_explicit_dry_run_spec.setter
def use_explicit_dry_run_spec(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "use_explicit_dry_run_spec", value)
class ServicePerimeter(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]] = None,
status: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
ServicePerimeter describes a set of GCP resources which can freely import
and export data amongst themselves, but not export outside of the
ServicePerimeter. If a request with a source within this ServicePerimeter
has a target outside of the ServicePerimeter, the request will be blocked.
Otherwise the request is allowed. There are two types of Service Perimeter
- Regular and Bridge. Regular Service Perimeters cannot overlap, a single
GCP project can only belong to a single regular Service Perimeter. Service
Perimeter Bridges can contain only GCP projects as members, a single GCP
project may belong to multiple Service Perimeter Bridges.
To get more information about ServicePerimeter, see:
* [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters)
* How-to Guides
* [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart)
> **Warning:** If you are using User ADCs (Application Default Credentials) with this resource,
you must specify a `billing_project` and set `user_project_override` to true
in the provider configuration. Otherwise the ACM API will return a 403 error.
Your account must have the `serviceusage.services.use` permission on the
`billing_project` you defined.
## Example Usage
### Access Context Manager Service Perimeter Basic
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
title="restrict_storage")
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
require_screen_lock=False,
),
regions=[
"CH",
"IT",
"US",
],
)],
),
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="chromeos_no_lock")
```
### Access Context Manager Service Perimeter Secure Data Exchange
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
secure_data_exchange = gcp.accesscontextmanager.ServicePerimeters("secure-data-exchange",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
service_perimeters=[
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
),
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["bigtable.googleapis.com"],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=["bigquery.googleapis.com"],
),
),
),
])
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="secure_data_exchange",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
require_screen_lock=False,
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
),
regions=[
"CH",
"IT",
"US",
],
)],
))
test_access = gcp.accesscontextmanager.ServicePerimeter("test-access",
parent=f"accessPolicies/{google_access_context_manager_access_policy['test-access']['name']}",
title="%s",
perimeter_type="PERIMETER_TYPE_REGULAR",
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
access_levels=[access_level.name],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
),
ingress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyArgs(
ingress_from=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromArgs(
sources=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromSourceArgs(
access_level=google_access_context_manager_access_level["test-access"]["name"],
)],
identity_type="ANY_IDENTITY",
),
ingress_to=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToArgs(
resources=["*"],
operations=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="bigquery.googleapis.com",
method_selectors=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="BigQueryStorage.ReadRows",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="TableService.ListTables",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
permission="bigquery.jobs.get",
),
],
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="storage.googleapis.com",
method_selectors=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="google.storage.objects.create",
)],
),
],
),
)],
egress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyArgs(
egress_from=gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyEgressFromArgs(
identity_type="ANY_USER_ACCOUNT",
),
)],
))
```
### Access Context Manager Service Perimeter Dry Run
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
spec=gcp.accesscontextmanager.ServicePerimeterSpecArgs(
restricted_services=["storage.googleapis.com"],
),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["bigquery.googleapis.com"],
),
title="restrict_bigquery_dryrun_storage",
use_explicit_dry_run_spec=True)
```
## Import
        ServicePerimeter can be imported using any of these accepted formats:
```sh
$ pulumi import gcp:accesscontextmanager/servicePerimeter:ServicePerimeter default {{name}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
               resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']] spec: Proposed (or dry run) ServicePerimeter configuration.
               This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
        :param pulumi.Input[str] title: Human-readable title. Must be unique within the Policy.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
               be set to True if any of the fields in the spec are set to non-default values.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ServicePerimeterArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
ServicePerimeter describes a set of GCP resources which can freely import
and export data amongst themselves, but not export outside of the
ServicePerimeter. If a request with a source within this ServicePerimeter
has a target outside of the ServicePerimeter, the request will be blocked.
Otherwise the request is allowed. There are two types of Service Perimeter
- Regular and Bridge. Regular Service Perimeters cannot overlap, a single
GCP project can only belong to a single regular Service Perimeter. Service
Perimeter Bridges can contain only GCP projects as members, a single GCP
project may belong to multiple Service Perimeter Bridges.
To get more information about ServicePerimeter, see:
* [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters)
* How-to Guides
* [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart)
> **Warning:** If you are using User ADCs (Application Default Credentials) with this resource,
you must specify a `billing_project` and set `user_project_override` to true
in the provider configuration. Otherwise the ACM API will return a 403 error.
Your account must have the `serviceusage.services.use` permission on the
`billing_project` you defined.
## Example Usage
### Access Context Manager Service Perimeter Basic
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
title="restrict_storage")
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
require_screen_lock=False,
),
regions=[
"CH",
"IT",
"US",
],
)],
),
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="chromeos_no_lock")
```
### Access Context Manager Service Perimeter Secure Data Exchange
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
secure_data_exchange = gcp.accesscontextmanager.ServicePerimeters("secure-data-exchange",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
service_perimeters=[
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
),
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["bigtable.googleapis.com"],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=["bigquery.googleapis.com"],
),
),
),
])
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="secure_data_exchange",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
require_screen_lock=False,
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
),
regions=[
"CH",
"IT",
"US",
],
)],
))
test_access = gcp.accesscontextmanager.ServicePerimeter("test-access",
parent=f"accessPolicies/{google_access_context_manager_access_policy['test-access']['name']}",
title="%s",
perimeter_type="PERIMETER_TYPE_REGULAR",
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
access_levels=[access_level.name],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
),
ingress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyArgs(
ingress_from=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromArgs(
sources=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromSourceArgs(
access_level=google_access_context_manager_access_level["test-access"]["name"],
)],
identity_type="ANY_IDENTITY",
),
ingress_to=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToArgs(
resources=["*"],
operations=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="bigquery.googleapis.com",
method_selectors=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="BigQueryStorage.ReadRows",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="TableService.ListTables",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
permission="bigquery.jobs.get",
),
],
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="storage.googleapis.com",
method_selectors=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="google.storage.objects.create",
)],
),
],
),
)],
egress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyArgs(
egress_from=gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyEgressFromArgs(
identity_type="ANY_USER_ACCOUNT",
),
)],
))
```
### Access Context Manager Service Perimeter Dry Run
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
spec=gcp.accesscontextmanager.ServicePerimeterSpecArgs(
restricted_services=["storage.googleapis.com"],
),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["bigquery.googleapis.com"],
),
title="restrict_bigquery_dryrun_storage",
use_explicit_dry_run_spec=True)
```
## Import
        ServicePerimeter can be imported using any of these accepted formats:
```sh
$ pulumi import gcp:accesscontextmanager/servicePerimeter:ServicePerimeter default {{name}}
```
:param str resource_name: The name of the resource.
:param ServicePerimeterArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ServicePerimeterArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
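    # Editor's sketch: the dispatcher above makes the following two call forms
    # equivalent (the policy id is a placeholder assumption):
    #     ServicePerimeter("p", ServicePerimeterArgs(parent="accessPolicies/123456", title="p"))
    #     ServicePerimeter("p", parent="accessPolicies/123456", title="p")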
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]] = None,
status: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ServicePerimeterArgs.__new__(ServicePerimeterArgs)
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
if parent is None and not opts.urn:
raise TypeError("Missing required property 'parent'")
__props__.__dict__["parent"] = parent
__props__.__dict__["perimeter_type"] = perimeter_type
__props__.__dict__["spec"] = spec
__props__.__dict__["status"] = status
if title is None and not opts.urn:
raise TypeError("Missing required property 'title'")
__props__.__dict__["title"] = title
__props__.__dict__["use_explicit_dry_run_spec"] = use_explicit_dry_run_spec
__props__.__dict__["create_time"] = None
__props__.__dict__["update_time"] = None
super(ServicePerimeter, __self__).__init__(
'gcp:accesscontextmanager/servicePerimeter:ServicePerimeter',
resource_name,
__props__,
opts)
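    # Editor's note: in _internal_init above, "parent" and "title" are the only
    # required inputs (enforced by the explicit TypeError checks), while
    # "create_time" and "update_time" are seeded as None because they are
    # provider-computed outputs rather than user-supplied values.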
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
create_time: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]] = None,
status: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
update_time: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None) -> 'ServicePerimeter':
"""
Get an existing ServicePerimeter resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] create_time: Time the AccessPolicy was created in UTC.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
               resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']] spec: Proposed (or dry run) ServicePerimeter configuration.
               This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
        :param pulumi.Input[str] title: Human-readable title. Must be unique within the Policy.
:param pulumi.Input[str] update_time: Time the AccessPolicy was updated in UTC.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
               be set to True if any of the fields in the spec are set to non-default values.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ServicePerimeterState.__new__(_ServicePerimeterState)
__props__.__dict__["create_time"] = create_time
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
__props__.__dict__["parent"] = parent
__props__.__dict__["perimeter_type"] = perimeter_type
__props__.__dict__["spec"] = spec
__props__.__dict__["status"] = status
__props__.__dict__["title"] = title
__props__.__dict__["update_time"] = update_time
__props__.__dict__["use_explicit_dry_run_spec"] = use_explicit_dry_run_spec
return ServicePerimeter(resource_name, opts=opts, __props__=__props__)
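    # Editor's sketch (hypothetical names): adopting an already-provisioned
    # perimeter into a program by its full resource id via get():
    #     existing = ServicePerimeter.get(
    #         "imported-perimeter",
    #         id="accessPolicies/123456/servicePerimeters/restrict_storage",
    #     )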
@property
@pulumi.getter(name="createTime")
def create_time(self) -> pulumi.Output[str]:
"""
Time the AccessPolicy was created in UTC.
"""
return pulumi.get(self, "create_time")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Description of the ServicePerimeter and its use. Does not affect
behavior.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def parent(self) -> pulumi.Output[str]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@property
@pulumi.getter(name="perimeterType")
def perimeter_type(self) -> pulumi.Output[Optional[str]]:
"""
Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
        resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
"""
return pulumi.get(self, "perimeter_type")
@property
@pulumi.getter
def spec(self) -> pulumi.Output[Optional['outputs.ServicePerimeterSpec']]:
"""
Proposed (or dry run) ServicePerimeter configuration.
        This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
"""
return pulumi.get(self, "spec")
@property
@pulumi.getter
def status(self) -> pulumi.Output[Optional['outputs.ServicePerimeterStatus']]:
"""
ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter
def title(self) -> pulumi.Output[str]:
"""
        Human-readable title. Must be unique within the Policy.
"""
return pulumi.get(self, "title")
@property
@pulumi.getter(name="updateTime")
def update_time(self) -> pulumi.Output[str]:
"""
Time the AccessPolicy was updated in UTC.
"""
return pulumi.get(self, "update_time")
@property
@pulumi.getter(name="useExplicitDryRunSpec")
def use_explicit_dry_run_spec(self) -> pulumi.Output[Optional[bool]]:
"""
Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
        be set to True if any of the fields in the spec are set to non-default values.
"""
return pulumi.get(self, "use_explicit_dry_run_spec")
| # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ServicePerimeterArgs', 'ServicePerimeter']
@pulumi.input_type
class ServicePerimeterArgs:
def __init__(__self__, *,
parent: pulumi.Input[str],
title: pulumi.Input[str],
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input['ServicePerimeterSpecArgs']] = None,
status: Optional[pulumi.Input['ServicePerimeterStatusArgs']] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a ServicePerimeter resource.
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
        :param pulumi.Input[str] title: Human-readable title. Must be unique within the Policy.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
               resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input['ServicePerimeterSpecArgs'] spec: Proposed (or dry run) ServicePerimeter configuration.
               This configuration allows you to specify and test ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input['ServicePerimeterStatusArgs'] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
               be set to True if any of the fields in the spec are set to non-default values.
"""
pulumi.set(__self__, "parent", parent)
pulumi.set(__self__, "title", title)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if perimeter_type is not None:
pulumi.set(__self__, "perimeter_type", perimeter_type)
if spec is not None:
pulumi.set(__self__, "spec", spec)
if status is not None:
pulumi.set(__self__, "status", status)
if use_explicit_dry_run_spec is not None:
pulumi.set(__self__, "use_explicit_dry_run_spec", use_explicit_dry_run_spec)
@property
@pulumi.getter
def parent(self) -> pulumi.Input[str]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@parent.setter
def parent(self, value: pulumi.Input[str]):
pulumi.set(self, "parent", value)
@property
@pulumi.getter
def title(self) -> pulumi.Input[str]:
"""
        Human-readable title. Must be unique within the Policy.
"""
return pulumi.get(self, "title")
@title.setter
def title(self, value: pulumi.Input[str]):
pulumi.set(self, "title", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the ServicePerimeter and its use. Does not affect
behavior.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Resource name for the ServicePerimeter. The short_name component must
begin with a letter and only include alphanumeric and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="perimeterType")
def perimeter_type(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). A Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that the resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
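As a minimal, hypothetical sketch (the policy id and project numbers are
placeholders), a bridge perimeter could be declared as:

```python
import pulumi_gcp as gcp

# Sketch only: assumes an existing access policy and two projects that
# already belong to regular perimeters under the same policy. A bridge
# perimeter's status holds only resources.
bridge = gcp.accesscontextmanager.ServicePerimeter("bridge",
    parent="accessPolicies/123456789",
    title="shared_data_bridge",
    perimeter_type="PERIMETER_TYPE_BRIDGE",
    status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
        resources=[
            "projects/111111111111",
            "projects/222222222222",
        ],
    ))
```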
"""
return pulumi.get(self, "perimeter_type")
@perimeter_type.setter
def perimeter_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "perimeter_type", value)
@property
@pulumi.getter
def spec(self) -> Optional[pulumi.Input['ServicePerimeterSpecArgs']]:
"""
Proposed (or dry run) ServicePerimeter configuration.
This configuration allows one to specify and test a ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
"""
return pulumi.get(self, "spec")
@spec.setter
def spec(self, value: Optional[pulumi.Input['ServicePerimeterSpecArgs']]):
pulumi.set(self, "spec", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input['ServicePerimeterStatusArgs']]:
"""
ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input['ServicePerimeterStatusArgs']]):
pulumi.set(self, "status", value)
@property
@pulumi.getter(name="useExplicitDryRunSpec")
def use_explicit_dry_run_spec(self) -> Optional[pulumi.Input[bool]]:
"""
Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
"""
return pulumi.get(self, "use_explicit_dry_run_spec")
@use_explicit_dry_run_spec.setter
def use_explicit_dry_run_spec(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "use_explicit_dry_run_spec", value)
@pulumi.input_type
class _ServicePerimeterState:
def __init__(__self__, *,
create_time: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input['ServicePerimeterSpecArgs']] = None,
status: Optional[pulumi.Input['ServicePerimeterStatusArgs']] = None,
title: Optional[pulumi.Input[str]] = None,
update_time: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None):
"""
Input properties used for looking up and filtering ServicePerimeter resources.
:param pulumi.Input[str] create_time: Time the ServicePerimeter was created in UTC.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and include only alphanumeric characters and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). A Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that the resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input['ServicePerimeterSpecArgs'] spec: Proposed (or dry run) ServicePerimeter configuration.
This configuration allows one to specify and test a ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input['ServicePerimeterStatusArgs'] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
:param pulumi.Input[str] title: Human-readable title. Must be unique within the Policy.
:param pulumi.Input[str] update_time: Time the ServicePerimeter was updated in UTC.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
"""
if create_time is not None:
pulumi.set(__self__, "create_time", create_time)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if parent is not None:
pulumi.set(__self__, "parent", parent)
if perimeter_type is not None:
pulumi.set(__self__, "perimeter_type", perimeter_type)
if spec is not None:
pulumi.set(__self__, "spec", spec)
if status is not None:
pulumi.set(__self__, "status", status)
if title is not None:
pulumi.set(__self__, "title", title)
if update_time is not None:
pulumi.set(__self__, "update_time", update_time)
if use_explicit_dry_run_spec is not None:
pulumi.set(__self__, "use_explicit_dry_run_spec", use_explicit_dry_run_spec)
@property
@pulumi.getter(name="createTime")
def create_time(self) -> Optional[pulumi.Input[str]]:
"""
Time the ServicePerimeter was created in UTC.
"""
return pulumi.get(self, "create_time")
@create_time.setter
def create_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "create_time", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of the ServicePerimeter and its use. Does not affect
behavior.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Resource name for the ServicePerimeter. The short_name component must
begin with a letter and include only alphanumeric characters and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def parent(self) -> Optional[pulumi.Input[str]]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@parent.setter
def parent(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "parent", value)
@property
@pulumi.getter(name="perimeterType")
def perimeter_type(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). A Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that the resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
"""
return pulumi.get(self, "perimeter_type")
@perimeter_type.setter
def perimeter_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "perimeter_type", value)
@property
@pulumi.getter
def spec(self) -> Optional[pulumi.Input['ServicePerimeterSpecArgs']]:
"""
Proposed (or dry run) ServicePerimeter configuration.
This configuration allows one to specify and test a ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
"""
return pulumi.get(self, "spec")
@spec.setter
def spec(self, value: Optional[pulumi.Input['ServicePerimeterSpecArgs']]):
pulumi.set(self, "spec", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input['ServicePerimeterStatusArgs']]:
"""
ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input['ServicePerimeterStatusArgs']]):
pulumi.set(self, "status", value)
@property
@pulumi.getter
def title(self) -> Optional[pulumi.Input[str]]:
"""
Human-readable title. Must be unique within the Policy.
"""
return pulumi.get(self, "title")
@title.setter
def title(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "title", value)
@property
@pulumi.getter(name="updateTime")
def update_time(self) -> Optional[pulumi.Input[str]]:
"""
Time the ServicePerimeter was updated in UTC.
"""
return pulumi.get(self, "update_time")
@update_time.setter
def update_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "update_time", value)
@property
@pulumi.getter(name="useExplicitDryRunSpec")
def use_explicit_dry_run_spec(self) -> Optional[pulumi.Input[bool]]:
"""
Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
"""
return pulumi.get(self, "use_explicit_dry_run_spec")
@use_explicit_dry_run_spec.setter
def use_explicit_dry_run_spec(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "use_explicit_dry_run_spec", value)
class ServicePerimeter(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]] = None,
status: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
ServicePerimeter describes a set of GCP resources which can freely import
and export data amongst themselves, but not export outside of the
ServicePerimeter. If a request with a source within this ServicePerimeter
has a target outside of the ServicePerimeter, the request will be blocked.
Otherwise the request is allowed. There are two types of Service Perimeter:
Regular and Bridge. Regular Service Perimeters cannot overlap; a single
GCP project can belong to at most one regular Service Perimeter. Service
Perimeter Bridges can contain only GCP projects as members; a single GCP
project may belong to multiple Service Perimeter Bridges.
To get more information about ServicePerimeter, see:
* [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters)
* How-to Guides
* [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart)
> **Warning:** If you are using User ADCs (Application Default Credentials) with this resource,
you must specify a `billing_project` and set `user_project_override` to true
in the provider configuration. Otherwise the ACM API will return a 403 error.
Your account must have the `serviceusage.services.use` permission on the
`billing_project` you defined.
## Example Usage
### Access Context Manager Service Perimeter Basic
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
title="restrict_storage")
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
require_screen_lock=False,
),
regions=[
"CH",
"IT",
"US",
],
)],
),
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="chromeos_no_lock")
```
### Access Context Manager Service Perimeter Secure Data Exchange
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
secure_data_exchange = gcp.accesscontextmanager.ServicePerimeters("secure-data-exchange",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
service_perimeters=[
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
),
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["bigtable.googleapis.com"],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=["bigquery.googleapis.com"],
),
),
),
])
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="secure_data_exchange",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
require_screen_lock=False,
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
),
regions=[
"CH",
"IT",
"US",
],
)],
))
test_access = gcp.accesscontextmanager.ServicePerimeter("test-access",
parent=f"accessPolicies/{google_access_context_manager_access_policy['test-access']['name']}",
title="%s",
perimeter_type="PERIMETER_TYPE_REGULAR",
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
access_levels=[access_level.name],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
),
ingress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyArgs(
ingress_from=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromArgs(
sources=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromSourceArgs(
access_level=google_access_context_manager_access_level["test-access"]["name"],
)],
identity_type="ANY_IDENTITY",
),
ingress_to=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToArgs(
resources=["*"],
operations=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="bigquery.googleapis.com",
method_selectors=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="BigQueryStorage.ReadRows",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="TableService.ListTables",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
permission="bigquery.jobs.get",
),
],
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="storage.googleapis.com",
method_selectors=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="google.storage.objects.create",
)],
),
],
),
)],
egress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyArgs(
egress_from=gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyEgressFromArgs(
identity_type="ANY_USER_ACCOUNT",
),
)],
))
```
### Access Context Manager Service Perimeter Dry Run
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
spec=gcp.accesscontextmanager.ServicePerimeterSpecArgs(
restricted_services=["storage.googleapis.com"],
),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["bigquery.googleapis.com"],
),
title="restrict_bigquery_dryrun_storage",
use_explicit_dry_run_spec=True)
```
## Import
ServicePerimeter can be imported using any of these accepted formats:
```sh
$ pulumi import gcp:accesscontextmanager/servicePerimeter:ServicePerimeter default {{name}}
```
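For example, with a placeholder policy id and perimeter name:

```sh
$ pulumi import gcp:accesscontextmanager/servicePerimeter:ServicePerimeter default accessPolicies/123456789/servicePerimeters/restrict_storage
```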
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and include only alphanumeric characters and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). A Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that the resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']] spec: Proposed (or dry run) ServicePerimeter configuration.
This configuration allows one to specify and test a ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
:param pulumi.Input[str] title: Human-readable title. Must be unique within the Policy.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ServicePerimeterArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
ServicePerimeter describes a set of GCP resources which can freely import
and export data amongst themselves, but not export outside of the
ServicePerimeter. If a request with a source within this ServicePerimeter
has a target outside of the ServicePerimeter, the request will be blocked.
Otherwise the request is allowed. There are two types of Service Perimeter:
Regular and Bridge. Regular Service Perimeters cannot overlap; a single
GCP project can belong to at most one regular Service Perimeter. Service
Perimeter Bridges can contain only GCP projects as members; a single GCP
project may belong to multiple Service Perimeter Bridges.
To get more information about ServicePerimeter, see:
* [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters)
* How-to Guides
* [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart)
> **Warning:** If you are using User ADCs (Application Default Credentials) with this resource,
you must specify a `billing_project` and set `user_project_override` to true
in the provider configuration. Otherwise the ACM API will return a 403 error.
Your account must have the `serviceusage.services.use` permission on the
`billing_project` you defined.
## Example Usage
### Access Context Manager Service Perimeter Basic
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
title="restrict_storage")
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
require_screen_lock=False,
),
regions=[
"CH",
"IT",
"US",
],
)],
),
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="chromeos_no_lock")
```
### Access Context Manager Service Perimeter Secure Data Exchange
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
secure_data_exchange = gcp.accesscontextmanager.ServicePerimeters("secure-data-exchange",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
service_perimeters=[
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
),
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
title="",
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["bigtable.googleapis.com"],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=["bigquery.googleapis.com"],
),
),
),
])
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="secure_data_exchange",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
require_screen_lock=False,
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
),
regions=[
"CH",
"IT",
"US",
],
)],
))
test_access = gcp.accesscontextmanager.ServicePerimeter("test-access",
parent=f"accessPolicies/{google_access_context_manager_access_policy['test-access']['name']}",
title="%s",
perimeter_type="PERIMETER_TYPE_REGULAR",
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
access_levels=[access_level.name],
vpc_accessible_services=gcp.accesscontextmanager.ServicePerimeterStatusVpcAccessibleServicesArgs(
enable_restriction=True,
allowed_services=[
"bigquery.googleapis.com",
"storage.googleapis.com",
],
),
ingress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyArgs(
ingress_from=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromArgs(
sources=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromSourceArgs(
access_level=google_access_context_manager_access_level["test-access"]["name"],
)],
identity_type="ANY_IDENTITY",
),
ingress_to=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToArgs(
resources=["*"],
operations=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="bigquery.googleapis.com",
method_selectors=[
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="BigQueryStorage.ReadRows",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="TableService.ListTables",
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
permission="bigquery.jobs.get",
),
],
),
gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs(
service_name="storage.googleapis.com",
method_selectors=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs(
method="google.storage.objects.create",
)],
),
],
),
)],
egress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyArgs(
egress_from=gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyEgressFromArgs(
identity_type="ANY_USER_ACCOUNT",
),
)],
))
```
### Access Context Manager Service Perimeter Dry Run
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
spec=gcp.accesscontextmanager.ServicePerimeterSpecArgs(
restricted_services=["storage.googleapis.com"],
),
status=gcp.accesscontextmanager.ServicePerimeterStatusArgs(
restricted_services=["bigquery.googleapis.com"],
),
title="restrict_bigquery_dryrun_storage",
use_explicit_dry_run_spec=True)
```
## Import
ServicePerimeter can be imported using any of these accepted formats:
```sh
$ pulumi import gcp:accesscontextmanager/servicePerimeter:ServicePerimeter default {{name}}
```
:param str resource_name: The name of the resource.
:param ServicePerimeterArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
# Dispatch between the two __init__ overloads: either a bundled
# ServicePerimeterArgs object or individual keyword arguments.
resource_args, opts = _utilities.get_resource_args_opts(ServicePerimeterArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]] = None,
status: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ServicePerimeterArgs.__new__(ServicePerimeterArgs)
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
if parent is None and not opts.urn:
raise TypeError("Missing required property 'parent'")
__props__.__dict__["parent"] = parent
__props__.__dict__["perimeter_type"] = perimeter_type
__props__.__dict__["spec"] = spec
__props__.__dict__["status"] = status
if title is None and not opts.urn:
raise TypeError("Missing required property 'title'")
__props__.__dict__["title"] = title
__props__.__dict__["use_explicit_dry_run_spec"] = use_explicit_dry_run_spec
__props__.__dict__["create_time"] = None
__props__.__dict__["update_time"] = None
super(ServicePerimeter, __self__).__init__(
'gcp:accesscontextmanager/servicePerimeter:ServicePerimeter',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
create_time: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
parent: Optional[pulumi.Input[str]] = None,
perimeter_type: Optional[pulumi.Input[str]] = None,
spec: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]] = None,
status: Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]] = None,
title: Optional[pulumi.Input[str]] = None,
update_time: Optional[pulumi.Input[str]] = None,
use_explicit_dry_run_spec: Optional[pulumi.Input[bool]] = None) -> 'ServicePerimeter':
"""
Get an existing ServicePerimeter resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] create_time: Time the ServicePerimeter was created in UTC.
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect
behavior.
:param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must
begin with a letter and include only alphanumeric characters and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). A Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that the resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
:param pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']] spec: Proposed (or dry run) ServicePerimeter configuration.
This configuration allows one to specify and test a ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']] status: ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
:param pulumi.Input[str] title: Human-readable title. Must be unique within the Policy.
:param pulumi.Input[str] update_time: Time the ServicePerimeter was updated in UTC.
:param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
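A minimal lookup sketch (the id below is a placeholder):

```python
import pulumi_gcp as gcp

# Sketch only: adopt an existing perimeter into the program by its full
# resource name.
existing = gcp.accesscontextmanager.ServicePerimeter.get(
    "existing-perimeter",
    id="accessPolicies/123456789/servicePerimeters/restrict_storage")
```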
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ServicePerimeterState.__new__(_ServicePerimeterState)
__props__.__dict__["create_time"] = create_time
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
__props__.__dict__["parent"] = parent
__props__.__dict__["perimeter_type"] = perimeter_type
__props__.__dict__["spec"] = spec
__props__.__dict__["status"] = status
__props__.__dict__["title"] = title
__props__.__dict__["update_time"] = update_time
__props__.__dict__["use_explicit_dry_run_spec"] = use_explicit_dry_run_spec
return ServicePerimeter(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="createTime")
def create_time(self) -> pulumi.Output[str]:
"""
Time the ServicePerimeter was created in UTC.
"""
return pulumi.get(self, "create_time")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Description of the ServicePerimeter and its use. Does not affect
behavior.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name for the ServicePerimeter. The short_name component must
begin with a letter and include only alphanumeric characters and '_'.
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name}
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def parent(self) -> pulumi.Output[str]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@property
@pulumi.getter(name="perimeterType")
def perimeter_type(self) -> pulumi.Output[Optional[str]]:
"""
Specifies the type of the Perimeter. There are two types: regular and
bridge. Regular Service Perimeter contains resources, access levels,
and restricted services. Every resource can be in at most
ONE regular Service Perimeter.
In addition to being in a regular service perimeter, a resource can also
be in zero or more perimeter bridges. A perimeter bridge only contains
resources. Cross-project operations are permitted if all affected
resources share some perimeter (whether bridge or regular). A Perimeter
Bridge does not contain access levels or services: those are governed
entirely by the regular perimeter that the resource is in.
Perimeter Bridges are typically useful when building more complex
topologies with many independent perimeters that need to share some data
with a common perimeter, but should not be able to share data among
themselves.
Default value is `PERIMETER_TYPE_REGULAR`.
Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`.
"""
return pulumi.get(self, "perimeter_type")
@property
@pulumi.getter
def spec(self) -> pulumi.Output[Optional['outputs.ServicePerimeterSpec']]:
"""
Proposed (or dry run) ServicePerimeter configuration.
This configuration allows one to specify and test a ServicePerimeter configuration
without enforcing actual access restrictions. Only allowed to be set when
the `useExplicitDryRunSpec` flag is set.
Structure is documented below.
"""
return pulumi.get(self, "spec")
@property
@pulumi.getter
def status(self) -> pulumi.Output[Optional['outputs.ServicePerimeterStatus']]:
"""
ServicePerimeter configuration. Specifies sets of resources,
restricted services and access levels that determine
perimeter content and boundaries.
Structure is documented below.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter
def title(self) -> pulumi.Output[str]:
"""
Human-readable title. Must be unique within the Policy.
"""
return pulumi.get(self, "title")
@property
@pulumi.getter(name="updateTime")
def update_time(self) -> pulumi.Output[str]:
"""
Time the ServicePerimeter was updated in UTC.
"""
return pulumi.get(self, "update_time")
@property
@pulumi.getter(name="useExplicitDryRunSpec")
def use_explicit_dry_run_spec(self) -> pulumi.Output[Optional[bool]]:
"""
Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists
for all Service Perimeters, and that spec is identical to the status for those
Service Perimeters. When this flag is set, it inhibits the generation of the
implicit spec, thereby allowing the user to explicitly provide a
configuration ("spec") to use in a dry-run version of the Service Perimeter.
This allows the user to test changes to the enforced config ("status") without
actually enforcing them. This testing is done through analyzing the differences
between currently enforced and suggested restrictions. useExplicitDryRunSpec must
be set to True if any of the fields in the spec are set to non-default values.
"""
return pulumi.get(self, "use_explicit_dry_run_spec") | en | 0.650665 | # coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** The set of arguments for constructing a ServicePerimeter resource. :param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} :param pulumi.Input[str] title: Human readable title. Must be unique within the Policy. :param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect behavior. :param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} :param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross project operations are permitted if all effected resources share some perimeter (whether bridge or regular). Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that resource is in. Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. :param pulumi.Input['ServicePerimeterSpecArgs'] spec: Proposed (or dry run) ServicePerimeter configuration. This configuration allows to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. :param pulumi.Input['ServicePerimeterStatusArgs'] status: ServicePerimeter configuration. Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. :param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must bet set to True if any of the fields in the spec are set to non-default values. The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} Human readable title. Must be unique within the Policy. Description of the ServicePerimeter and its use. Does not affect behavior. Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. 
Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross project operations are permitted if all effected resources share some perimeter (whether bridge or regular). Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that resource is in. Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. Proposed (or dry run) ServicePerimeter configuration. This configuration allows to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. ServicePerimeter configuration. Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must bet set to True if any of the fields in the spec are set to non-default values. Input properties used for looking up and filtering ServicePerimeter resources. :param pulumi.Input[str] create_time: Time the AccessPolicy was created in UTC. :param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect behavior. :param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} :param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} :param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross project operations are permitted if all effected resources share some perimeter (whether bridge or regular). Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that resource is in. 
Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. :param pulumi.Input['ServicePerimeterSpecArgs'] spec: Proposed (or dry run) ServicePerimeter configuration. This configuration allows to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. :param pulumi.Input['ServicePerimeterStatusArgs'] status: ServicePerimeter configuration. Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. :param pulumi.Input[str] title: Human readable title. Must be unique within the Policy. :param pulumi.Input[str] update_time: Time the AccessPolicy was updated in UTC. :param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must bet set to True if any of the fields in the spec are set to non-default values. Time the AccessPolicy was created in UTC. Description of the ServicePerimeter and its use. Does not affect behavior. Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross project operations are permitted if all effected resources share some perimeter (whether bridge or regular). Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that resource is in. Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. Proposed (or dry run) ServicePerimeter configuration. This configuration allows to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. ServicePerimeter configuration. 
Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. Human readable title. Must be unique within the Policy. Time the AccessPolicy was updated in UTC. Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must bet set to True if any of the fields in the spec are set to non-default values. ServicePerimeter describes a set of GCP resources which can freely import and export data amongst themselves, but not export outside of the ServicePerimeter. If a request with a source within this ServicePerimeter has a target outside of the ServicePerimeter, the request will be blocked. Otherwise the request is allowed. There are two types of Service Perimeter - Regular and Bridge. Regular Service Perimeters cannot overlap, a single GCP project can only belong to a single regular Service Perimeter. Service Perimeter Bridges can contain only GCP projects as members, a single GCP project may belong to multiple Service Perimeter Bridges. To get more information about ServicePerimeter, see: * [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters) * How-to Guides * [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart) > **Warning:** If you are using User ADCs (Application Default Credentials) with this resource, you must specify a `billing_project` and set `user_project_override` to true in the provider configuration. Otherwise the ACM API will return a 403 error. Your account must have the `serviceusage.services.use` permission on the `billing_project` you defined. 
## Example Usage ### Access Context Manager Service Perimeter Basic ```python import pulumi import pulumi_gcp as gcp access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy", parent="organizations/123456789", title="my policy") service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), status=gcp.accesscontextmanager.ServicePerimeterStatusArgs( restricted_services=["storage.googleapis.com"], ), title="restrict_storage") access_level = gcp.accesscontextmanager.AccessLevel("access-level", basic=gcp.accesscontextmanager.AccessLevelBasicArgs( conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs( device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs( os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs( os_type="DESKTOP_CHROME_OS", )], require_screen_lock=False, ), regions=[ "CH", "IT", "US", ], )], ), parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), title="chromeos_no_lock") ``` ### Access Context Manager Service Perimeter Secure Data Exchange ```python import pulumi import pulumi_gcp as gcp access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy", parent="organizations/123456789", title="my policy") secure_data_exchange = gcp.accesscontextmanager.ServicePerimeters("secure-data-exchange", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), service_perimeters=[ gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs( name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"), title="", status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs( restricted_services=["storage.googleapis.com"], ), ), gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs( name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"), title="", status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs( restricted_services=["bigtable.googleapis.com"], vpc_accessible_services=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusVpcAccessibleServicesArgs( enable_restriction=True, allowed_services=["bigquery.googleapis.com"], ), ), ), ]) access_level = gcp.accesscontextmanager.AccessLevel("access-level", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), title="secure_data_exchange", basic=gcp.accesscontextmanager.AccessLevelBasicArgs( conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs( device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs( require_screen_lock=False, os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs( os_type="DESKTOP_CHROME_OS", )], ), regions=[ "CH", "IT", "US", ], )], )) test_access = gcp.accesscontextmanager.ServicePerimeter("test-access", parent=f"accessPolicies/{google_access_context_manager_access_policy['test-access']['name']}", title="%s", perimeter_type="PERIMETER_TYPE_REGULAR", status=gcp.accesscontextmanager.ServicePerimeterStatusArgs( restricted_services=[ "bigquery.googleapis.com", "storage.googleapis.com", ], access_levels=[access_level.name], vpc_accessible_services=gcp.accesscontextmanager.ServicePerimeterStatusVpcAccessibleServicesArgs( enable_restriction=True, allowed_services=[ "bigquery.googleapis.com", "storage.googleapis.com", ], ), ingress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyArgs( 
ingress_from=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromArgs( sources=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromSourceArgs( access_level=google_access_context_manager_access_level["test-access"]["name"], )], identity_type="ANY_IDENTITY", ), ingress_to=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToArgs( resources=["*"], operations=[ gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs( service_name="bigquery.googleapis.com", method_selectors=[ gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( method="BigQueryStorage.ReadRows", ), gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( method="TableService.ListTables", ), gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( permission="bigquery.jobs.get", ), ], ), gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs( service_name="storage.googleapis.com", method_selectors=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( method="google.storage.objects.create", )], ), ], ), )], egress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyArgs( egress_from=gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyEgressFromArgs( identity_type="ANY_USER_ACCOUNT", ), )], )) ``` ### Access Context Manager Service Perimeter Dry Run ```python import pulumi import pulumi_gcp as gcp access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy", parent="organizations/123456789", title="my policy") service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), spec=gcp.accesscontextmanager.ServicePerimeterSpecArgs( restricted_services=["storage.googleapis.com"], ), status=gcp.accesscontextmanager.ServicePerimeterStatusArgs( restricted_services=["bigquery.googleapis.com"], ), title="restrict_bigquery_dryrun_storage", use_explicit_dry_run_spec=True) ``` ## Import ServicePerimeter can be imported using any of these accepted formats: ```sh $ pulumi import gcp:accesscontextmanager/servicePerimeter:ServicePerimeter default {{name}} ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect behavior. :param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} :param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} :param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross-project operations are permitted if all affected resources share some perimeter (whether bridge or regular). 
Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that the resource is in. Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. :param pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']] spec: Proposed (or dry run) ServicePerimeter configuration. This configuration allows you to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. :param pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']] status: ServicePerimeter configuration. Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. :param pulumi.Input[str] title: Human readable title. Must be unique within the Policy. :param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must be set to True if any of the fields in the spec are set to non-default values. ServicePerimeter describes a set of GCP resources which can freely import and export data amongst themselves, but not export outside of the ServicePerimeter. If a request with a source within this ServicePerimeter has a target outside of the ServicePerimeter, the request will be blocked. Otherwise the request is allowed. There are two types of Service Perimeter - Regular and Bridge. Regular Service Perimeters cannot overlap; a single GCP project can only belong to a single regular Service Perimeter. Service Perimeter Bridges can contain only GCP projects as members; a single GCP project may belong to multiple Service Perimeter Bridges. To get more information about ServicePerimeter, see: * [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters) * How-to Guides * [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart) > **Warning:** If you are using User ADCs (Application Default Credentials) with this resource, you must specify a `billing_project` and set `user_project_override` to true in the provider configuration. Otherwise the ACM API will return a 403 error. Your account must have the `serviceusage.services.use` permission on the `billing_project` you defined. 
:param str resource_name: The name of the resource. :param ServicePerimeterArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. Get an existing ServicePerimeter resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] create_time: Time the AccessPolicy was created in UTC. :param pulumi.Input[str] update_time: Time the AccessPolicy was updated in UTC. | 1.862727 | 2 |
core/swift3.1.1Action/swift3runner.py | ianpartridge/incubator-openwhisk-runtime-swift | 2 | 7550 | """Python proxy to run Swift action.
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
"""
import os
import glob
import sys
import subprocess
import codecs
import json
sys.path.append('../actionProxy')
from actionproxy import ActionRunner, main, setRunner # noqa
SRC_EPILOGUE_FILE = '/swift3Action/epilogue.swift'
DEST_SCRIPT_FILE = '/swift3Action/spm-build/main.swift'
DEST_SCRIPT_DIR = '/swift3Action/spm-build'
DEST_BIN_FILE = '/swift3Action/spm-build/.build/release/Action'
BUILD_PROCESS = ['./swiftbuildandlink.sh']
class Swift3Runner(ActionRunner):
def __init__(self):
ActionRunner.__init__(self, DEST_SCRIPT_FILE, DEST_BIN_FILE)
# remove pre-existing binary before receiving a new binary
def preinit(self):
try:
os.remove(self.binary)
        except OSError:
            pass
def epilogue(self, init_message):
# skip if executable already exists (was unzipped)
if os.path.isfile(self.binary):
return
if 'main' in init_message:
main_function = init_message['main']
else:
main_function = 'main'
# make sure there is a main.swift file
open(DEST_SCRIPT_FILE, 'a').close()
with codecs.open(DEST_SCRIPT_FILE, 'a', 'utf-8') as fp:
os.chdir(DEST_SCRIPT_DIR)
for file in glob.glob("*.swift"):
if file not in ["Package.swift", "main.swift", "_WhiskJSONUtils.swift", "_Whisk.swift"]:
with codecs.open(file, 'r', 'utf-8') as f:
fp.write(f.read())
with codecs.open(SRC_EPILOGUE_FILE, 'r', 'utf-8') as ep:
fp.write(ep.read())
fp.write('_run_main(mainFunction: %s)\n' % main_function)
def build(self, init_message):
# short circuit the build, if there already exists a binary
# from the zip file
if os.path.isfile(self.binary):
# file may not have executable permission, set it
os.chmod(self.binary, 0o555)
return
p = subprocess.Popen(
BUILD_PROCESS,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=DEST_SCRIPT_DIR)
# run the process and wait until it completes.
# stdout/stderr will not be None because we passed PIPEs to Popen
(o, e) = p.communicate()
# stdout/stderr may be either text or bytes, depending on Python
# version, so if bytes, decode to text. Note that in Python 2
# a string will match both types; so also skip decoding in that case
if isinstance(o, bytes) and not isinstance(o, str):
o = o.decode('utf-8')
if isinstance(e, bytes) and not isinstance(e, str):
e = e.decode('utf-8')
if o:
sys.stdout.write(o)
sys.stdout.flush()
if e:
sys.stderr.write(e)
sys.stderr.flush()
def env(self, message):
env = ActionRunner.env(self, message)
args = message.get('value', {}) if message else {}
env['WHISK_INPUT'] = json.dumps(args)
return env
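# Hedged usage sketch (not part of the original proxy): shows what env()
# contributes for a hypothetical /run message; the message shape below is an
# assumption for illustration only.
def _demo_env():
    runner = Swift3Runner()
    env = runner.env({'value': {'name': 'world'}})
    # WHISK_INPUT carries the JSON-serialized action arguments
    assert json.loads(env['WHISK_INPUT']) == {'name': 'world'}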
if __name__ == '__main__':
setRunner(Swift3Runner())
main()
| en | 0.849496 | Python proxy to run Swift action. /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ # noqa # remove pre-existing binary before receiving a new binary # skip if executable already exists (was unzipped) # make sure there is a main.swift file # short circuit the build, if there already exists a binary # from the zip file # file may not have executable permission, set it # run the process and wait until it completes. # stdout/stderr will not be None because we passed PIPEs to Popen # stdout/stderr may be either text or bytes, depending on Python # version, so if bytes, decode to text. Note that in Python 2 # a string will match both types; so also skip decoding in that case | 2.212555 | 2 |
src/ychaos/utils/types.py | vanderh0ff/ychaos | 8 | 7551 |
from typing import Dict, List, TypeVar, Union
JsonTypeVar = TypeVar("JsonTypeVar")
JsonPrimitive = Union[str, float, int, bool, None]
JsonDict = Dict[str, JsonTypeVar]
JsonArray = List[JsonTypeVar]
Json = Union[JsonPrimitive, JsonDict, JsonArray]
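# Hedged illustration (not part of the original module): annotating a JSON
# payload with the Json alias; the payload value is invented.
payload: Json = {"ok": True, "count": 3, "items": [1, 2.5, "x", None]}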
| none | 1 | 2.682554 | 3 |
|
yandex_market_language/models/promo.py | stefanitsky/yandex_market_language | 7 | 7552 | import typing as t
from yandex_market_language import models
from yandex_market_language.models.abstract import XMLElement, XMLSubElement
class Promo(models.AbstractModel):
"""
Docs: https://yandex.ru/support/partnermarket/elements/promo-gift.html
"""
MAPPING = {
"start-date": "start_date",
"end-date": "end_date",
"description": "description",
"url": "url",
}
__slots__ = [
'promo_id',
'promo_type',
'purchase',
'promo_gifts',
'start_date',
'end_date',
'description',
'url'
]
def __init__(
self,
promo_id: str,
promo_type: str,
purchase: "Purchase",
promo_gifts: t.List["PromoGift"],
start_date=None,
end_date=None,
description=None,
url=None,
):
self.promo_id = promo_id
self.promo_type = promo_type
self.start_date = start_date
self.end_date = end_date
self.description = description
self.url = url
self.purchase = purchase
self.promo_gifts = promo_gifts
def create_dict(self, **kwargs) -> dict:
return dict(
promo_id=self.promo_id,
promo_type=self.promo_type,
start_date=self.start_date,
end_date=self.end_date,
description=self.description,
url=self.url,
purchase=self.purchase.to_dict(),
promo_gifts=[pg.to_dict() for pg in self.promo_gifts],
)
def create_xml(self, **kwargs) -> XMLElement:
attribs = {"id": self.promo_id, "type": self.promo_type}
promo_el = XMLElement("promo", attribs)
for tag, attr in self.MAPPING.items():
v = getattr(self, attr)
if v:
el = XMLSubElement(promo_el, tag)
el.text = v
# Add purchase el
self.purchase.to_xml(promo_el)
# Add promo gifts
promo_gifts_el = XMLSubElement(promo_el, "promo-gifts")
for pg in self.promo_gifts:
pg.to_xml(promo_gifts_el)
return promo_el
@classmethod
def from_xml(cls, promo_el: XMLElement) -> "Promo":
kwargs = dict(
promo_id=promo_el.attrib.get("id"),
promo_type=promo_el.attrib.get("type"),
promo_gifts=[]
)
for el in promo_el:
if el.tag in cls.MAPPING:
kwargs[cls.MAPPING[el.tag]] = el.text
elif el.tag == "purchase":
kwargs["purchase"] = Purchase.from_xml(el)
elif el.tag == "promo-gifts":
for pg_el in el:
kwargs["promo_gifts"].append(PromoGift.from_xml(pg_el))
return Promo(**kwargs)
class Purchase(models.AbstractModel):
"""
Docs: https://yandex.ru/support/partnermarket/elements/promo-gift.html
"""
__slots__ = [
'products',
'required_quantity'
]
def __init__(self, products: t.List["Product"], required_quantity="1"):
self.required_quantity = required_quantity
self.products = products
def create_dict(self, **kwargs) -> dict:
return dict(
required_quantity=self.required_quantity,
products=[p.to_dict() for p in self.products]
)
def create_xml(self, **kwargs) -> XMLElement:
purchase_el = XMLElement("purchase")
# Add required quantity el
required_quantity_el = XMLSubElement(purchase_el, "required-quantity")
required_quantity_el.text = self.required_quantity
# Add products el
for p in self.products:
p.to_xml(purchase_el)
return purchase_el
@staticmethod
def from_xml(purchase_el: XMLElement) -> "Purchase":
kwargs = {"products": []}
for el in purchase_el:
if el.tag == "required-quantity":
kwargs["required_quantity"] = el.text
elif el.tag == "product":
kwargs["products"].append(Product.from_xml(el))
return Purchase(**kwargs)
class Product(models.AbstractModel):
"""
Docs: https://yandex.ru/support/partnermarket/elements/promo-gift.html
"""
__slots__ = [
'offer_id',
'category_id'
]
def __init__(self, offer_id: str = None, category_id: str = None):
self.offer_id = offer_id
self.category_id = category_id
def create_dict(self, **kwargs) -> dict:
return dict(
offer_id=self.offer_id,
category_id=self.category_id,
)
def create_xml(self, **kwargs) -> XMLElement:
attribs = {}
if self.offer_id:
attribs["offer-id"] = self.offer_id
if self.category_id:
attribs["category-id"] = self.category_id
return XMLElement("product", attribs)
@staticmethod
def from_xml(product_el: XMLElement) -> "Product":
return Product(
offer_id=product_el.attrib.get("offer-id"),
category_id=product_el.attrib.get("category-id")
)
class PromoGift(models.AbstractModel):
"""
Docs:
https://yandex.ru/support/partnermarket/elements/promo-gift.html
"""
__slots__ = [
'offer_id',
'gift_id'
]
def __init__(self, offer_id: str = None, gift_id: str = None):
self.offer_id = offer_id
self.gift_id = gift_id
def create_dict(self, **kwargs) -> dict:
return dict(offer_id=self.offer_id, gift_id=self.gift_id)
def create_xml(self, **kwargs) -> XMLElement:
attribs = {}
if self.offer_id:
attribs["offer-id"] = self.offer_id
elif self.gift_id:
attribs["gift-id"] = self.gift_id
return XMLElement("promo-gift", attribs)
@staticmethod
def from_xml(el: XMLElement) -> "PromoGift":
return PromoGift(
offer_id=el.attrib.get("offer-id"),
gift_id=el.attrib.get("gift-id")
)
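# Hedged usage sketch (not from the original module): field values are
# invented, and to_xml()/to_dict() are inherited from AbstractModel elsewhere
# in the package.
def _demo_promo_roundtrip():
    promo = Promo(
        promo_id="PROMO1",
        promo_type="gift with purchase",
        purchase=Purchase(products=[Product(offer_id="offer-1")]),
        promo_gifts=[PromoGift(offer_id="offer-1")],
    )
    return Promo.from_xml(promo.to_xml()).to_dict()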
| ru | 0.315486 | Docs: https://yandex.ru/support/partnermarket/elements/promo-gift.html # Add purchase el # Add promo gifts Docs: https://yandex.ru/support/partnermarket/elements/promo-gift.html # Add required quantity el # Add products el Docs: https://yandex.ru/support/partnermarket/elements/promo-gift.html Docs: https://yandex.ru/support/partnermarket/elements/promo-gift.html | 2.675572 | 3 |
src/testCmd.py | skogsbaer/check-assignments | 0 | 7553 |
import shell
from dataclasses import dataclass
from utils import *
from ownLogging import *
from typing import *
from ansi import *
import re
import os
import testHaskell
import testPython
import testJava
@dataclass
class TestArgs:
dirs: List[str]
assignments: List[str] # take all if empty
interactive: bool
startAt: str
INSPECT_COMMAND = 'inspect'
RERUN_COMMAND = 'rerun'
CONTINUE_COMMAND = 'continue'
HELP_COMMAND = 'help'
def readCommand(cfg, args, studentDir, assignment):
f = assignment.getMainFile(studentDir)
commands = [('h', HELP_COMMAND, 'Print this help message')]
if f:
commands.append( ('i', INSPECT_COMMAND, f'Inspect file {f}') )
commands.append( ('r', RERUN_COMMAND, f'Re-run tests') )
commands.append( ('c', CONTINUE_COMMAND, f'Continue with next assignment/student') )
def printHelp():
for char, cmd, help in commands:
print(f' {char}: {help}')
shortcutHelp = [x[0] for x in commands]
while True:
try:
c = input(f'What to do next? {"/".join(shortcutHelp)} ')
except EOFError:
raise KeyboardInterrupt()
for chars, cmd, help in commands:
if c in chars:
if cmd == HELP_COMMAND:
printHelp()
else:
return cmd
break
else:
print(f'Unknown command {c}.')
printHelp()
def inspectFile(cfg, args, studentDir, assignment):
f = assignment.getMainFile(studentDir)
editor = cfg.editor()
os.system(f"{editor} '{f}'")
TEST_DICT = {
'python': testPython.runPythonTests,
'java': testJava.runJavaTests,
'haskell': testHaskell.runHaskellTests
}
def prettyStudent(cfg, studentDir):
try:
(name, matrikel) = parseSubmissionDir(cfg, studentDir)
return f'{name} ({matrikel})'
except ValueError:
x = shell.basename(studentDir)
if not x:
x = studentDir
x = stripLeadingSlash(x)
x = stripTrailingSlash(x)
return x
def runTestsForAssignment(cfg, args, studentDir, assignment):
print(blue(f'Checking assignment {assignment.id} for student {prettyStudent(cfg, studentDir)}'))
k = assignment.kind
if k in TEST_DICT:
fun = TEST_DICT[k]
fun(cfg, args, studentDir, assignment)
else:
abort(f"Don't know how to run tests for assignment kind {k}")
def interactiveLoop(cfg, args, studentDir, a):
runTestsForAssignment(cfg, args, studentDir, a)
if args.interactive:
while True:
print()
print(blue(f'Just checked assignment {a.id} for student {prettyStudent(cfg, studentDir)}'))
cmd = readCommand(cfg, args, studentDir, a)
if cmd == INSPECT_COMMAND:
inspectFile(cfg, args, studentDir, a)
elif cmd == RERUN_COMMAND:
runTestsForAssignment(cfg, args, studentDir, a)
elif cmd == CONTINUE_COMMAND:
return
def runTests(cfg, args):
dirs = args.dirs
if not dirs:
dirs = collectSubmissionDirs(cfg)
dirs = sorted(dirs)
if args.startAt:
l = dirs
dirs = []
for x in l:
if shell.basename(x) >= args.startAt:
dirs.append(x)
else:
print(f'Skipping {x} as requested')
for d in dirs:
assignments = cfg.assignments
if args.assignments:
assignments = []
for a in cfg.assignments:
if a.id in args.assignments:
assignments.append(a)
if not assignments:
print(f'No assignments found or selected!')
for i, a in enumerate(assignments):
interactiveLoop(cfg, args, d, a)
if i > 0:
print()
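# Hedged invocation sketch (not part of the original file): the cfg object is
# built elsewhere in this repo, so its construction is omitted here.
def _demo_run(cfg):
    args = TestArgs(dirs=[], assignments=['a1'], interactive=False, startAt='')
    runTests(cfg, args)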
| en | 0.184606 | # take all if empty | 2.683551 | 3 |
kipoi_containers/singularityhelper.py | kipoi/kipoi-containers | 0 | 7554 |
from collections import Counter
from datetime import datetime
import os
import requests
from subprocess import Popen, PIPE
from pathlib import Path
import json
from typing import Dict, Union, TYPE_CHECKING
from kipoi_utils.external.torchvision.dataset_utils import download_url
if TYPE_CHECKING:
import zenodoclient
ZENODO_BASE = "https://zenodo.org"
ZENODO_DEPOSITION = f"{ZENODO_BASE}/api/deposit/depositions"
PathType = Union[str, Path]
def cleanup(singularity_file_path: PathType) -> None:
"""
Deletes the singularity image that was created by build_singularity_image
"""
if isinstance(singularity_file_path, str):
singularity_file_path = Path(singularity_file_path)
if singularity_file_path.exists():
singularity_file_path.unlink()
def build_singularity_image(
name_of_docker_image: str,
singularity_image_name: str,
singularity_image_folder: PathType,
) -> PathType:
"""
This function builds a singularity image from a dockerhub image
using singularity pull. The resulting .sif is stored in <singularity_image_folder> and
the filepath is returned.
"""
if isinstance(singularity_image_folder, Path):
singularity_image_folder = str(singularity_image_folder)
pull_cmd = [
"singularity",
"pull",
"--name",
f"{singularity_image_folder}/{singularity_image_name}",
"--force",
f"docker://{name_of_docker_image}",
]
print(f"Building {singularity_image_name} - {' '.join(pull_cmd)}")
process = Popen(pull_cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate()
if process.returncode != 0:
print(stderr)
print(stdout)
        raise ValueError(
            f"Singularity image {singularity_image_name} cannot be built"
)
singularity_image_path = (
f"{singularity_image_folder}/{singularity_image_name}"
)
return singularity_image_path
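# Hedged usage sketch (not part of the original module): the docker tag and
# target folder below are invented for illustration.
def _demo_build_and_cleanup():
    sif_path = build_singularity_image(
        name_of_docker_image="kipoi/kipoi-docker:sharedpy3keras2",
        singularity_image_name="kipoi-docker_sharedpy3keras2.sif",
        singularity_image_folder="/tmp",
    )
    cleanup(sif_path)  # remove the .sif once it is no longer needed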
def test_singularity_image(
singularity_image_folder: PathType, singularity_image_name: str, model: str
) -> None:
"""Tests a singularity image residing in singularity_image_folder
with kipoi test <model> --source=kipoi
Raises:
        ValueError: Raised if the test is not successful"""
print(
f"Testing {model} with {singularity_image_folder}/{singularity_image_name}"
)
if model == "Basenji":
test_cmd = [
"kipoi",
"test",
f"{model}",
"--source=kipoi",
"--batch_size=2",
]
else:
test_cmd = ["kipoi", "test", f"{model}", "--source=kipoi"]
if isinstance(singularity_image_folder, str):
singularity_image_folder = Path(singularity_image_folder)
if isinstance(singularity_image_name, str):
singularity_image_name = Path(singularity_image_name)
exec_cmd = [
"singularity",
"exec",
f"{singularity_image_folder}/{singularity_image_name}",
]
exec_cmd.extend(test_cmd)
process = Popen(exec_cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate()
if process.returncode != 0:
print(stdout)
print(stderr)
raise ValueError(
f"Singularity image {singularity_image_name} for {model} did not pass relevant tests"
)
def create_new_deposition(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> str:
    """Creates a new version of an existing deposition on zenodo and returns the
corresponding id"""
status_code, response = zenodo_client.post_content(
f"{ZENODO_DEPOSITION}/{deposition_id}/actions/newversion"
)
return response["links"]["latest_draft"].split("/")[-1]
def get_deposit(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> Dict:
"""Returns the response body of a get request for an existing deposition"""
response = zenodo_client.get_content(
f"{ZENODO_DEPOSITION}/{deposition_id}"
)
return response
def upload_file(
zenodo_client: "zenodoclient.Client",
url: str,
singularity_image_folder: PathType,
filename: str,
) -> None:
"""Upload singularity_image_folder/filename to a url"""
path = Path(singularity_image_folder) / Path(filename)
zenodo_client.put_content(url, data=path)
def upload_metadata(
zenodo_client: "zenodoclient.Client",
url: str,
model_group: str = "",
shared_env: str = "",
) -> None:
"""Upload metadata for a model group to a given url"""
if not model_group and not shared_env:
        raise ValueError(
            "You need to provide at least a shared env name or a model group name"
)
if model_group:
data = {
"metadata": {
"title": f"{model_group} singularity container",
"upload_type": "physicalobject",
"description": "This is a singularity container for models "
f"under https://kipoi.org/models/{model_group}/",
"creators": [
{"name": "<NAME>", "affiliation": "EMBL"}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
elif shared_env:
if "shared" in shared_env:
data = {
"metadata": {
"title": f"{shared_env} singularity container",
"upload_type": "physicalobject",
"description": "Singularity container with conda environment "
f"https://github.com/kipoi/kipoi-containers/blob/main/envfiles/{shared_env}.yml",
"creators": [
{
"name": "<NAME>",
"affiliation": "EMBL",
}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
elif shared_env == "mmsplice":
data = {
"metadata": {
"title": "MMSplice singularity container except mtsplice",
"upload_type": "physicalobject",
"description": "Singularity container for MMSplice models except mtsplice "
"under http://kipoi.org/models/MMSplice/",
"creators": [
{
"name": "<NAME>",
"affiliation": "EMBL",
}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
else:
raise ValueError(
"Available options are - mmsplice, sharedpy3keras2tf1, sharedpy3keras2tf2, sharedpy3keras1.2"
)
zenodo_client.put_content(url, data=data)
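# Hedged sketch (not from the original module): attaching model-group metadata
# to a draft deposition; the deposition id below is invented.
def _demo_upload_metadata(zenodo_client):
    upload_metadata(
        zenodo_client,
        f"{ZENODO_DEPOSITION}/1234567",  # hypothetical deposition id
        model_group="Basset",
    )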
def push_deposition(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> Dict:
    """Pushes a deposition to zenodo. An additional get request is made to the newly pushed
deposition and a response body is returned"""
status_code, response = zenodo_client.post_content(
f"{ZENODO_DEPOSITION}/{deposition_id}/actions/publish"
)
response = get_deposit(zenodo_client, deposition_id)
return response
def update_existing_singularity_container(
zenodo_client: "zenodoclient.Client",
singularity_dict: Dict,
singularity_image_folder: PathType,
model_group: str,
file_to_upload: str = "",
push: bool = True,
) -> None:
"""This function creates a new draft version of an existing image's zenodo entry with updated
metadata and file after deleting the old file. If push is True, the draft version is finalized
and the url, name and md5 fields are updated and the new deposition id and file id is added to
singularity dict which contains information about the existing image. Otherwise, only
the new deposotion id and file id is added to the dictionary. This modified dictionary is
returned"""
# Create a new version of an existing deposition
deposition_id = singularity_dict["url"].split("/")[4]
new_deposition_id = create_new_deposition(zenodo_client, deposition_id)
response = get_deposit(zenodo_client, new_deposition_id)
bucket_url = response["links"]["bucket"]
filename = (
file_to_upload if file_to_upload else f"{singularity_dict['name']}.sif"
)
file_id = ""
for fileobj in response["files"]:
if fileobj["filename"] == filename:
file_id = fileobj["id"] # Assuming only 1 version is added
# Delete existing file from this new version
if file_id:
zenodo_client.delete_content(
f"{ZENODO_DEPOSITION}/{new_deposition_id}/files/{file_id}"
)
# Add a new file to this new version
upload_file(
zenodo_client,
f"{bucket_url}/{filename}",
singularity_image_folder,
filename,
)
url = f"{ZENODO_DEPOSITION}/{new_deposition_id}"
if (
"shared" in singularity_dict["name"]
or singularity_dict["name"] == "kipoi-docker_mmsplice-slim"
):
shared_env_name = (
singularity_dict["name"]
.replace("kipoi-docker_", "")
.replace("-slim", "")
)
upload_metadata(zenodo_client, url, shared_env=shared_env_name)
else:
upload_metadata(zenodo_client, url, model_group=model_group)
# publish the newly created revision
if push:
response = push_deposition(zenodo_client, new_deposition_id)
record_id = response["metadata"]["prereserve_doi"]["recid"]
file_id, file_name, file_md5 = "", "", ""
for fileobj in response["files"]:
if fileobj["filename"] == filename:
file_id = fileobj["id"] # Assuming only 1 version is added
file_name = fileobj["filename"].replace(".sif", "")
file_md5 = fileobj["checksum"]
return {
"new_deposition_id": new_deposition_id,
"file_id": file_id,
"url": f"{ZENODO_BASE}/record/{record_id}/files/{filename}?download=1",
"name": file_name,
"md5": file_md5,
}
else:
return singularity_dict | {
"new_deposition_id": new_deposition_id,
"file_id": "",
}
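# Hedged call sketch (not from the original module): zenodo_client creation
# and the real singularity_dict contents live elsewhere in this repo.
def _demo_update_container(zenodo_client, singularity_dict):
    return update_existing_singularity_container(
        zenodo_client=zenodo_client,
        singularity_dict=singularity_dict,
        singularity_image_folder=Path("/tmp"),
        model_group="Basset",  # assumed model group name
        push=False,  # draft only; nothing is published
    )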
def push_new_singularity_image(
zenodo_client: "zenodoclient.Client",
singularity_image_folder: PathType,
singularity_dict: Dict,
model_group: str,
file_to_upload: str = "",
path: str = "",
push: bool = True,
) -> None:
"""This function creates a draft version of a new zenodo entry with the
metadata and singularity image. If push is True, the draft version is finalized
    and the url, name and md5 fields are updated and the new deposition id and file id are added to
    the singularity dict, which contains empty strings as url and md5. Otherwise, only
    the new deposition id and file id are added to the dictionary. This modified dictionary is
returned"""
status_code, response = zenodo_client.post_content(f"{ZENODO_DEPOSITION}")
deposition_id = response["id"]
bucket_url = response["links"]["bucket"]
filename = (
file_to_upload if file_to_upload else f"{singularity_dict['name']}.sif"
)
upload_file(
zenodo_client,
f"{bucket_url}/{filename}",
singularity_image_folder,
filename,
)
url = f"{ZENODO_DEPOSITION}/{deposition_id}"
if "shared" in singularity_dict["name"]:
shared_env_name = (
singularity_dict["name"]
.replace("kipoi-docker_", "")
.replace("-slim", "")
)
upload_metadata(zenodo_client, url, shared_env=shared_env_name)
else:
upload_metadata(zenodo_client, url, model_group=model_group)
if push:
push_deposition(zenodo_client, deposition_id)
response = get_deposit(zenodo_client, deposition_id)
record_id = response["metadata"]["prereserve_doi"]["recid"]
return {
"new_deposition_id": deposition_id,
"file_id": response["files"][0]["id"],
"url": f"{ZENODO_BASE}/record/{record_id}/files/{filename}?download=1",
"name": response["files"][0]["filename"].replace(".sif", ""),
"md5": response["files"][0]["checksum"],
}
else:
return singularity_dict | {
"new_deposition_id": deposition_id,
"file_id": "",
}
def get_singularity_image(
singularity_image_folder: PathType,
singularity_image_dict: Dict,
model_or_model_group: str,
) -> PathType:
"""This function downloads the singularity image corresponding to the given model or
model group from zenodo to singularity_image_folder and returns the name of the image"""
if (
model_or_model_group in singularity_image_dict
): # Special case for MMSPlice/mtsplice, APARENT/veff
image_name = (
f"{singularity_image_dict[model_or_model_group]['name']}.sif"
)
image_url = f"{singularity_image_dict[model_or_model_group]['url']}"
image_md5 = f"{singularity_image_dict[model_or_model_group]['md5']}"
else:
model_group = model_or_model_group.split("/")[0]
image_name = f"{singularity_image_dict[model_group]['name']}.sif"
image_url = f"{singularity_image_dict[model_group]['url']}"
image_md5 = f"{singularity_image_dict[model_group]['md5']}"
if isinstance(singularity_image_folder, str):
singularity_image_folder = Path(singularity_image_folder)
if isinstance(image_name, str):
image_name = Path(image_name)
if not (singularity_image_folder / image_name).exists():
download_url(
url=image_url,
root=singularity_image_folder,
filename=image_name,
md5=image_md5,
)
return image_name
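# Hedged fetch sketch (not part of the original module): the dict entry below
# is invented; the real image dicts ship with the kipoi-containers repo.
def _demo_fetch_image():
    images = {
        "Basset": {
            "name": "kipoi-docker_sharedpy3keras2",
            "url": "https://zenodo.org/record/0000000/files/kipoi-docker_sharedpy3keras2.sif?download=1",
            "md5": "00000000000000000000000000000000",
        }
    }
    return get_singularity_image("/tmp", images, "Basset")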
| from collections import Counter
from datetime import datetime
import os
import requests
from subprocess import Popen, PIPE
from pathlib import Path
import json
from typing import Dict, Union, TYPE_CHECKING
from kipoi_utils.external.torchvision.dataset_utils import download_url
if TYPE_CHECKING:
import zenodoclient
ZENODO_BASE = "https://zenodo.org"
ZENODO_DEPOSITION = f"{ZENODO_BASE}/api/deposit/depositions"
PathType = Union[str, Path]
def cleanup(singularity_file_path: PathType) -> None:
"""
Deletes the singularity image that was created by build_singularity_image
"""
if isinstance(singularity_file_path, str):
singularity_file_path = Path(singularity_file_path)
if singularity_file_path.exists():
singularity_file_path.unlink()
def build_singularity_image(
name_of_docker_image: str,
singularity_image_name: str,
singularity_image_folder: PathType,
) -> PathType:
"""
This function builds a singularity image from a dockerhub image
using singularity pull. The resulting .sif is stored in <singularity_image_folder> and
the filepath is returned.
"""
if isinstance(singularity_image_folder, Path):
singularity_image_folder = str(singularity_image_folder)
pull_cmd = [
"singularity",
"pull",
"--name",
f"{singularity_image_folder}/{singularity_image_name}",
"--force",
f"docker://{name_of_docker_image}",
]
print(f"Building {singularity_image_name} - {' '.join(pull_cmd)}")
process = Popen(pull_cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate()
if process.returncode != 0:
print(stderr)
print(stdout)
        raise ValueError(
            f"Singularity image {singularity_image_name} cannot be built"
)
singularity_image_path = (
f"{singularity_image_folder}/{singularity_image_name}"
)
return singularity_image_path
def test_singularity_image(
singularity_image_folder: PathType, singularity_image_name: str, model: str
) -> None:
"""Tests a singularity image residing in singularity_image_folder
with kipoi test <model> --source=kipoi
Raises:
ValueError: Raise valueerror if the test is not successful"""
print(
f"Testing {model} with {singularity_image_folder}/{singularity_image_name}"
)
if model == "Basenji":
test_cmd = [
"kipoi",
"test",
f"{model}",
"--source=kipoi",
"--batch_size=2",
]
else:
test_cmd = ["kipoi", "test", f"{model}", "--source=kipoi"]
if isinstance(singularity_image_folder, str):
singularity_image_folder = Path(singularity_image_folder)
if isinstance(singularity_image_name, str):
singularity_image_name = Path(singularity_image_name)
exec_cmd = [
"singularity",
"exec",
f"{singularity_image_folder}/{singularity_image_name}",
]
exec_cmd.extend(test_cmd)
process = Popen(exec_cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate()
if process.returncode != 0:
print(stdout)
print(stderr)
raise ValueError(
f"Singularity image {singularity_image_name} for {model} did not pass relevant tests"
)
def create_new_deposition(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> str:
"""Creates a new version of an existing depsosition on zenodo and returns the
corresponding id"""
status_code, response = zenodo_client.post_content(
f"{ZENODO_DEPOSITION}/{deposition_id}/actions/newversion"
)
return response["links"]["latest_draft"].split("/")[-1]
def get_deposit(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> Dict:
"""Returns the response body of a get request for an existing deposition"""
response = zenodo_client.get_content(
f"{ZENODO_DEPOSITION}/{deposition_id}"
)
return response
def upload_file(
zenodo_client: "zenodoclient.Client",
url: str,
singularity_image_folder: PathType,
filename: str,
) -> None:
"""Upload singularity_image_folder/filename to a url"""
path = Path(singularity_image_folder) / Path(filename)
zenodo_client.put_content(url, data=path)
def upload_metadata(
zenodo_client: "zenodoclient.Client",
url: str,
model_group: str = "",
shared_env: str = "",
) -> None:
"""Upload metadata for a model group to a given url"""
if not model_group and not shared_env:
raise ValueError(
"You need to provide atlease a shared env name or a model group name"
)
if model_group:
data = {
"metadata": {
"title": f"{model_group} singularity container",
"upload_type": "physicalobject",
"description": "This is a singularity container for models "
f"under https://kipoi.org/models/{model_group}/",
"creators": [
{"name": "<NAME>", "affiliation": "EMBL"}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
elif shared_env:
if "shared" in shared_env:
data = {
"metadata": {
"title": f"{shared_env} singularity container",
"upload_type": "physicalobject",
"description": "Singularity container with conda environment "
f"https://github.com/kipoi/kipoi-containers/blob/main/envfiles/{shared_env}.yml",
"creators": [
{
"name": "<NAME>",
"affiliation": "EMBL",
}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
elif shared_env == "mmsplice":
data = {
"metadata": {
"title": "MMSplice singularity container except mtsplice",
"upload_type": "physicalobject",
"description": "Singularity container for MMSplice models except mtsplice "
"under http://kipoi.org/models/MMSplice/",
"creators": [
{
"name": "<NAME>",
"affiliation": "EMBL",
}
],
"publication_date": datetime.today().strftime("%Y-%m-%d"),
"license": "MIT",
}
}
else:
raise ValueError(
"Available options are - mmsplice, sharedpy3keras2tf1, sharedpy3keras2tf2, sharedpy3keras1.2"
)
zenodo_client.put_content(url, data=data)
def push_deposition(
zenodo_client: "zenodoclient.Client", deposition_id: str
) -> Dict:
"""Pushes a deposition to zenodo. An additional get request is made to the newy pushed
deposition and a response body is returned"""
status_code, response = zenodo_client.post_content(
f"{ZENODO_DEPOSITION}/{deposition_id}/actions/publish"
)
response = get_deposit(zenodo_client, deposition_id)
return response
def update_existing_singularity_container(
zenodo_client: "zenodoclient.Client",
singularity_dict: Dict,
singularity_image_folder: PathType,
model_group: str,
file_to_upload: str = "",
push: bool = True,
) -> None:
"""This function creates a new draft version of an existing image's zenodo entry with updated
metadata and file after deleting the old file. If push is True, the draft version is finalized
and the url, name and md5 fields are updated and the new deposition id and file id is added to
singularity dict which contains information about the existing image. Otherwise, only
the new deposotion id and file id is added to the dictionary. This modified dictionary is
returned"""
# Create a new version of an existing deposition
deposition_id = singularity_dict["url"].split("/")[4]
new_deposition_id = create_new_deposition(zenodo_client, deposition_id)
response = get_deposit(zenodo_client, new_deposition_id)
bucket_url = response["links"]["bucket"]
filename = (
file_to_upload if file_to_upload else f"{singularity_dict['name']}.sif"
)
file_id = ""
for fileobj in response["files"]:
if fileobj["filename"] == filename:
file_id = fileobj["id"] # Assuming only 1 version is added
# Delete existing file from this new version
if file_id:
zenodo_client.delete_content(
f"{ZENODO_DEPOSITION}/{new_deposition_id}/files/{file_id}"
)
# Add a new file to this new version
upload_file(
zenodo_client,
f"{bucket_url}/{filename}",
singularity_image_folder,
filename,
)
url = f"{ZENODO_DEPOSITION}/{new_deposition_id}"
if (
"shared" in singularity_dict["name"]
or singularity_dict["name"] == "kipoi-docker_mmsplice-slim"
):
shared_env_name = (
singularity_dict["name"]
.replace("kipoi-docker_", "")
.replace("-slim", "")
)
upload_metadata(zenodo_client, url, shared_env=shared_env_name)
else:
upload_metadata(zenodo_client, url, model_group=model_group)
# publish the newly created revision
if push:
response = push_deposition(zenodo_client, new_deposition_id)
record_id = response["metadata"]["prereserve_doi"]["recid"]
file_id, file_name, file_md5 = "", "", ""
for fileobj in response["files"]:
if fileobj["filename"] == filename:
file_id = fileobj["id"] # Assuming only 1 version is added
file_name = fileobj["filename"].replace(".sif", "")
file_md5 = fileobj["checksum"]
return {
"new_deposition_id": new_deposition_id,
"file_id": file_id,
"url": f"{ZENODO_BASE}/record/{record_id}/files/{filename}?download=1",
"name": file_name,
"md5": file_md5,
}
else:
return singularity_dict | {
"new_deposition_id": new_deposition_id,
"file_id": "",
}
def push_new_singularity_image(
zenodo_client: "zenodoclient.Client",
singularity_image_folder: PathType,
singularity_dict: Dict,
model_group: str,
file_to_upload: str = "",
path: str = "",
push: bool = True,
) -> None:
"""This function creates a draft version of a new zenodo entry with the
metadata and singularity image. If push is True, the draft version is finalized
and the url, name and md5 fields are updated and the new deposition id and file id is added to
singularity dict which contains empty strings as url and md5. Otherwise, only
the new deposotion id and file id is added to the dictionary. This modified dictionary is
returned"""
status_code, response = zenodo_client.post_content(f"{ZENODO_DEPOSITION}")
deposition_id = response["id"]
bucket_url = response["links"]["bucket"]
filename = (
file_to_upload if file_to_upload else f"{singularity_dict['name']}.sif"
)
upload_file(
zenodo_client,
f"{bucket_url}/{filename}",
singularity_image_folder,
filename,
)
url = f"{ZENODO_DEPOSITION}/{deposition_id}"
if "shared" in singularity_dict["name"]:
shared_env_name = (
singularity_dict["name"]
.replace("kipoi-docker_", "")
.replace("-slim", "")
)
upload_metadata(zenodo_client, url, shared_env=shared_env_name)
else:
upload_metadata(zenodo_client, url, model_group=model_group)
if push:
push_deposition(zenodo_client, deposition_id)
response = get_deposit(zenodo_client, deposition_id)
record_id = response["metadata"]["prereserve_doi"]["recid"]
return {
"new_deposition_id": deposition_id,
"file_id": response["files"][0]["id"],
"url": f"{ZENODO_BASE}/record/{record_id}/files/{filename}?download=1",
"name": response["files"][0]["filename"].replace(".sif", ""),
"md5": response["files"][0]["checksum"],
}
else:
return singularity_dict | {
"new_deposition_id": deposition_id,
"file_id": "",
}
def get_singularity_image(
singularity_image_folder: PathType,
singularity_image_dict: Dict,
model_or_model_group: str,
) -> PathType:
"""This function downloads the singularity image corresponding to the given model or
model group from zenodo to singularity_image_folder and returns the name of the image"""
if (
model_or_model_group in singularity_image_dict
): # Special case for MMSPlice/mtsplice, APARENT/veff
image_name = (
f"{singularity_image_dict[model_or_model_group]['name']}.sif"
)
image_url = f"{singularity_image_dict[model_or_model_group]['url']}"
image_md5 = f"{singularity_image_dict[model_or_model_group]['md5']}"
else:
model_group = model_or_model_group.split("/")[0]
image_name = f"{singularity_image_dict[model_group]['name']}.sif"
image_url = f"{singularity_image_dict[model_group]['url']}"
image_md5 = f"{singularity_image_dict[model_group]['md5']}"
if isinstance(singularity_image_folder, str):
singularity_image_folder = Path(singularity_image_folder)
if isinstance(image_name, str):
image_name = Path(image_name)
if not (singularity_image_folder / image_name).exists():
download_url(
url=image_url,
root=singularity_image_folder,
filename=image_name,
md5=image_md5,
)
return image_name | en | 0.875892 | Deletes the singularity image that was created by build_singularity_image This function builds a singularity image from a dockerhub image using singularity pull. The resulting .sif is stored in <singularity_image_folder> and the filepath is returned. Tests a singularity image residing in singularity_image_folder with kipoi test <model> --source=kipoi Raises: ValueError: Raise valueerror if the test is not successful Creates a new version of an existing depsosition on zenodo and returns the corresponding id Returns the response body of a get request for an existing deposition Upload singularity_image_folder/filename to a url Upload metadata for a model group to a given url Pushes a deposition to zenodo. An additional get request is made to the newy pushed deposition and a response body is returned This function creates a new draft version of an existing image's zenodo entry with updated metadata and file after deleting the old file. If push is True, the draft version is finalized and the url, name and md5 fields are updated and the new deposition id and file id is added to singularity dict which contains information about the existing image. Otherwise, only the new deposotion id and file id is added to the dictionary. This modified dictionary is returned # Create a new version of an existing deposition # Assuming only 1 version is added # Delete existing file from this new version # Add a new file to this new version # publish the newly created revision # Assuming only 1 version is added This function creates a draft version of a new zenodo entry with the metadata and singularity image. If push is True, the draft version is finalized and the url, name and md5 fields are updated and the new deposition id and file id is added to singularity dict which contains empty strings as url and md5. Otherwise, only the new deposotion id and file id is added to the dictionary. This modified dictionary is returned This function downloads the singularity image corresponding to the given model or model group from zenodo to singularity_image_folder and returns the name of the image # Special case for MMSPlice/mtsplice, APARENT/veff | 2.374942 | 2 |
policy/_cache.py | garenchan/policy | 5 | 7555 | # -*- coding: utf-8 -*-
"""
policy._cache
~~~~~~~~~~~~~~~
Cache for policy file.
"""
import os
import logging
LOG = logging.getLogger(__name__)
# Global file cache
CACHE = {}
def read_file(filename: str, force_reload=False):
"""Read a file if it has been modified.
    :param filename: Name of the file to read from.
    :param force_reload: Whether to force a re-read of the file.
    :returns: A tuple of (reloaded, data): whether the file was re-read, and the file contents.
"""
if force_reload:
_delete_cached_file(filename)
reloaded = False
mtime = os.path.getmtime(filename)
cache_info = CACHE.setdefault(filename, {})
if not cache_info or mtime > cache_info.get('mtime', 0):
LOG.debug('Reloading cached file %s', filename)
with open(filename) as fp:
cache_info['data'] = fp.read()
cache_info['mtime'] = mtime
reloaded = True
return reloaded, cache_info['data']
def _delete_cached_file(filename: str):
"""Delete cached file if present.
:param filename: Filename to delete
"""
try:
del CACHE[filename]
except KeyError:
pass
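# Usage sketch (illustrative; not part of the original module). Assumes the
# given path exists on disk:
#
# reloaded, data = read_file("/etc/policy.json")  # first call: reads and caches
# reloaded, data = read_file("/etc/policy.json")  # mtime unchanged: reloaded is False
# reloaded, data = read_file("/etc/policy.json", force_reload=True)  # always re-reads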
| en | 0.841239 | 2.878641 | 3 |
contrib/opencensus-ext-django/opencensus/ext/django/middleware.py | samn/opencensus-python | 0 | 7556 |
# Copyright 2017, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Django middleware helper to capture and trace a request."""
import logging
from opencensus.ext.django.config import (settings, convert_to_import)
from opencensus.trace import attributes_helper
from opencensus.trace import execution_context
from opencensus.trace import span as span_module
from opencensus.trace import tracer as tracer_module
from opencensus.trace import utils
from opencensus.trace.samplers import probability
try:
from django.utils.deprecation import MiddlewareMixin
except ImportError: # pragma: NO COVER
MiddlewareMixin = object
HTTP_METHOD = attributes_helper.COMMON_ATTRIBUTES['HTTP_METHOD']
HTTP_URL = attributes_helper.COMMON_ATTRIBUTES['HTTP_URL']
HTTP_STATUS_CODE = attributes_helper.COMMON_ATTRIBUTES['HTTP_STATUS_CODE']
REQUEST_THREAD_LOCAL_KEY = 'django_request'
SPAN_THREAD_LOCAL_KEY = 'django_span'
BLACKLIST_PATHS = 'BLACKLIST_PATHS'
GCP_EXPORTER_PROJECT = 'GCP_EXPORTER_PROJECT'
SAMPLING_RATE = 'SAMPLING_RATE'
TRANSPORT = 'TRANSPORT'
SERVICE_NAME = 'SERVICE_NAME'
ZIPKIN_EXPORTER_SERVICE_NAME = 'ZIPKIN_EXPORTER_SERVICE_NAME'
ZIPKIN_EXPORTER_HOST_NAME = 'ZIPKIN_EXPORTER_HOST_NAME'
ZIPKIN_EXPORTER_PORT = 'ZIPKIN_EXPORTER_PORT'
ZIPKIN_EXPORTER_PROTOCOL = 'ZIPKIN_EXPORTER_PROTOCOL'
JAEGER_EXPORTER_HOST_NAME = 'JAEGER_EXPORTER_HOST_NAME'
JAEGER_EXPORTER_PORT = 'JAEGER_EXPORTER_PORT'
JAEGER_EXPORTER_AGENT_HOST_NAME = 'JAEGER_EXPORTER_AGENT_HOST_NAME'
JAEGER_EXPORTER_AGENT_PORT = 'JAEGER_EXPORTER_AGENT_PORT'
JAEGER_EXPORTER_SERVICE_NAME = 'JAEGER_EXPORTER_SERVICE_NAME'
OCAGENT_TRACE_EXPORTER_ENDPOINT = 'OCAGENT_TRACE_EXPORTER_ENDPOINT'
BLACKLIST_HOSTNAMES = 'BLACKLIST_HOSTNAMES'
log = logging.getLogger(__name__)
class _DjangoMetaWrapper(object):
"""
    Wrapper class that takes an HTTP header name and retrieves the value from
Django request.META
"""
def __init__(self, meta=None):
self.meta = meta or _get_django_request().META
def get(self, key):
return self.meta.get('HTTP_' + key.upper().replace('-', '_'))
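# Illustration (not part of the original source): Django stores incoming HTTP
# headers in request.META as 'HTTP_' + upper-cased, underscore-separated names,
# which is exactly the translation get() performs above.
#
# wrapper = _DjangoMetaWrapper({'HTTP_X_CLOUD_TRACE_CONTEXT': 'abc/123'})
# wrapper.get('X-Cloud-Trace-Context')  # -> 'abc/123'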
def _get_django_request():
"""Get Django request from thread local.
    :rtype: HttpRequest
:returns: Django request.
"""
return execution_context.get_opencensus_attr(REQUEST_THREAD_LOCAL_KEY)
def _get_django_span():
"""Get Django span from thread local.
    :rtype: Span
    :returns: Django span.
"""
return execution_context.get_opencensus_attr(SPAN_THREAD_LOCAL_KEY)
def _get_current_tracer():
"""Get the current request tracer."""
return execution_context.get_opencensus_tracer()
def _set_django_attributes(span, request):
"""Set the django related attributes."""
django_user = getattr(request, 'user', None)
if django_user is None:
return
user_id = django_user.pk
try:
user_name = django_user.get_username()
except AttributeError:
# AnonymousUser in some older versions of Django doesn't implement
# get_username
return
# User id is the django autofield for User model as the primary key
if user_id is not None:
span.add_attribute('django.user.id', str(user_id))
if user_name is not None:
span.add_attribute('django.user.name', str(user_name))
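# Sketch of the mapping above with minimal stand-in objects (illustrative only;
# these stub classes are not part of the original module).
#
# class _StubSpan:
#     def __init__(self): self.attributes = {}
#     def add_attribute(self, key, value): self.attributes[key] = value
#
# class _StubUser:
#     pk = 42
#     def get_username(self): return 'alice'
#
# class _StubRequest:
#     user = _StubUser()
#
# span = _StubSpan()
# _set_django_attributes(span, _StubRequest())
# # span.attributes == {'django.user.id': '42', 'django.user.name': 'alice'}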
class OpencensusMiddleware(MiddlewareMixin):
"""Saves the request in thread local"""
def __init__(self, get_response=None):
# One-time configuration and initialization.
self.get_response = get_response
self._sampler = settings.SAMPLER
self._exporter = settings.EXPORTER
self._propagator = settings.PROPAGATOR
self._blacklist_paths = settings.params.get(BLACKLIST_PATHS)
# Initialize the sampler
if self._sampler.__name__ == 'ProbabilitySampler':
_rate = settings.params.get(
SAMPLING_RATE, probability.DEFAULT_SAMPLING_RATE)
self.sampler = self._sampler(_rate)
else:
self.sampler = self._sampler()
# Initialize the exporter
transport = convert_to_import(settings.params.get(TRANSPORT))
if self._exporter.__name__ == 'GoogleCloudExporter':
_project_id = settings.params.get(GCP_EXPORTER_PROJECT, None)
self.exporter = self._exporter(
project_id=_project_id,
transport=transport)
elif self._exporter.__name__ == 'ZipkinExporter':
_service_name = self._get_service_name(settings.params)
_zipkin_host_name = settings.params.get(
ZIPKIN_EXPORTER_HOST_NAME, 'localhost')
_zipkin_port = settings.params.get(
ZIPKIN_EXPORTER_PORT, 9411)
_zipkin_protocol = settings.params.get(
ZIPKIN_EXPORTER_PROTOCOL, 'http')
self.exporter = self._exporter(
service_name=_service_name,
host_name=_zipkin_host_name,
port=_zipkin_port,
protocol=_zipkin_protocol,
transport=transport)
elif self._exporter.__name__ == 'TraceExporter':
_service_name = self._get_service_name(settings.params)
_endpoint = settings.params.get(
OCAGENT_TRACE_EXPORTER_ENDPOINT, None)
self.exporter = self._exporter(
service_name=_service_name,
endpoint=_endpoint,
transport=transport)
elif self._exporter.__name__ == 'JaegerExporter':
_service_name = settings.params.get(
JAEGER_EXPORTER_SERVICE_NAME,
self._get_service_name(settings.params))
_jaeger_host_name = settings.params.get(
JAEGER_EXPORTER_HOST_NAME, None)
_jaeger_port = settings.params.get(
JAEGER_EXPORTER_PORT, None)
_jaeger_agent_host_name = settings.params.get(
JAEGER_EXPORTER_AGENT_HOST_NAME, 'localhost')
_jaeger_agent_port = settings.params.get(
JAEGER_EXPORTER_AGENT_PORT, 6831)
self.exporter = self._exporter(
service_name=_service_name,
host_name=_jaeger_host_name,
port=_jaeger_port,
agent_host_name=_jaeger_agent_host_name,
agent_port=_jaeger_agent_port,
transport=transport)
else:
self.exporter = self._exporter(transport=transport)
self.blacklist_hostnames = settings.params.get(
BLACKLIST_HOSTNAMES, None)
# Initialize the propagator
self.propagator = self._propagator()
def process_request(self, request):
"""Called on each request, before Django decides which view to execute.
:type request: :class:`~django.http.request.HttpRequest`
:param request: Django http request.
"""
# Do not trace if the url is blacklisted
if utils.disable_tracing_url(request.path, self._blacklist_paths):
return
# Add the request to thread local
execution_context.set_opencensus_attr(
REQUEST_THREAD_LOCAL_KEY,
request)
execution_context.set_opencensus_attr(
'blacklist_hostnames',
self.blacklist_hostnames)
try:
# Start tracing this request
span_context = self.propagator.from_headers(
_DjangoMetaWrapper(_get_django_request().META))
# Reload the tracer with the new span context
tracer = tracer_module.Tracer(
span_context=span_context,
sampler=self.sampler,
exporter=self.exporter,
propagator=self.propagator)
# Span name is being set at process_view
span = tracer.start_span()
span.span_kind = span_module.SpanKind.SERVER
tracer.add_attribute_to_current_span(
attribute_key=HTTP_METHOD,
attribute_value=request.method)
tracer.add_attribute_to_current_span(
attribute_key=HTTP_URL,
attribute_value=str(request.path))
# Add the span to thread local
# in some cases (exceptions, timeouts) currentspan in
# response event will be one of a child spans.
# let's keep reference to 'django' span and
# use it in response event
execution_context.set_opencensus_attr(
SPAN_THREAD_LOCAL_KEY,
span)
except Exception: # pragma: NO COVER
log.error('Failed to trace request', exc_info=True)
def process_view(self, request, view_func, *args, **kwargs):
"""Process view is executed before the view function, here we get the
function name add set it as the span name.
"""
# Do not trace if the url is blacklisted
if utils.disable_tracing_url(request.path, self._blacklist_paths):
return
try:
# Get the current span and set the span name to the current
# function name of the request.
tracer = _get_current_tracer()
span = tracer.current_span()
span.name = utils.get_func_name(view_func)
except Exception: # pragma: NO COVER
log.error('Failed to trace request', exc_info=True)
def process_response(self, request, response):
# Do not trace if the url is blacklisted
if utils.disable_tracing_url(request.path, self._blacklist_paths):
return response
try:
span = _get_django_span()
span.add_attribute(
attribute_key=HTTP_STATUS_CODE,
attribute_value=str(response.status_code))
_set_django_attributes(span, request)
tracer = _get_current_tracer()
tracer.end_span()
tracer.finish()
except Exception: # pragma: NO COVER
log.error('Failed to trace request', exc_info=True)
finally:
return response
def _get_service_name(self, params):
_service_name = params.get(
SERVICE_NAME, None)
if _service_name is None:
_service_name = params.get(
ZIPKIN_EXPORTER_SERVICE_NAME, 'my_service')
return _service_name
| <filename>contrib/opencensus-ext-django/opencensus/ext/django/middleware.py
# Copyright 2017, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Django middleware helper to capture and trace a request."""
import logging
from opencensus.ext.django.config import (settings, convert_to_import)
from opencensus.trace import attributes_helper
from opencensus.trace import execution_context
from opencensus.trace import span as span_module
from opencensus.trace import tracer as tracer_module
from opencensus.trace import utils
from opencensus.trace.samplers import probability
try:
from django.utils.deprecation import MiddlewareMixin
except ImportError: # pragma: NO COVER
MiddlewareMixin = object
HTTP_METHOD = attributes_helper.COMMON_ATTRIBUTES['HTTP_METHOD']
HTTP_URL = attributes_helper.COMMON_ATTRIBUTES['HTTP_URL']
HTTP_STATUS_CODE = attributes_helper.COMMON_ATTRIBUTES['HTTP_STATUS_CODE']
REQUEST_THREAD_LOCAL_KEY = 'django_request'
SPAN_THREAD_LOCAL_KEY = 'django_span'
BLACKLIST_PATHS = 'BLACKLIST_PATHS'
GCP_EXPORTER_PROJECT = 'GCP_EXPORTER_PROJECT'
SAMPLING_RATE = 'SAMPLING_RATE'
TRANSPORT = 'TRANSPORT'
SERVICE_NAME = 'SERVICE_NAME'
ZIPKIN_EXPORTER_SERVICE_NAME = 'ZIPKIN_EXPORTER_SERVICE_NAME'
ZIPKIN_EXPORTER_HOST_NAME = 'ZIPKIN_EXPORTER_HOST_NAME'
ZIPKIN_EXPORTER_PORT = 'ZIPKIN_EXPORTER_PORT'
ZIPKIN_EXPORTER_PROTOCOL = 'ZIPKIN_EXPORTER_PROTOCOL'
JAEGER_EXPORTER_HOST_NAME = 'JAEGER_EXPORTER_HOST_NAME'
JAEGER_EXPORTER_PORT = 'JAEGER_EXPORTER_PORT'
JAEGER_EXPORTER_AGENT_HOST_NAME = 'JAEGER_EXPORTER_AGENT_HOST_NAME'
JAEGER_EXPORTER_AGENT_PORT = 'JAEGER_EXPORTER_AGENT_PORT'
JAEGER_EXPORTER_SERVICE_NAME = 'JAEGER_EXPORTER_SERVICE_NAME'
OCAGENT_TRACE_EXPORTER_ENDPOINT = 'OCAGENT_TRACE_EXPORTER_ENDPOINT'
BLACKLIST_HOSTNAMES = 'BLACKLIST_HOSTNAMES'
log = logging.getLogger(__name__)
class _DjangoMetaWrapper(object):
"""
Wrapper class which takes HTTP header name and retrieve the value from
Django request.META
"""
def __init__(self, meta=None):
self.meta = meta or _get_django_request().META
def get(self, key):
return self.meta.get('HTTP_' + key.upper().replace('-', '_'))
def _get_django_request():
"""Get Django request from thread local.
:rtype: str
:returns: Django request.
"""
return execution_context.get_opencensus_attr(REQUEST_THREAD_LOCAL_KEY)
def _get_django_span():
"""Get Django span from thread local.
:rtype: str
:returns: Django request.
"""
return execution_context.get_opencensus_attr(SPAN_THREAD_LOCAL_KEY)
def _get_current_tracer():
"""Get the current request tracer."""
return execution_context.get_opencensus_tracer()
def _set_django_attributes(span, request):
"""Set the django related attributes."""
django_user = getattr(request, 'user', None)
if django_user is None:
return
user_id = django_user.pk
try:
user_name = django_user.get_username()
except AttributeError:
# AnonymousUser in some older versions of Django doesn't implement
# get_username
return
# User id is the django autofield for User model as the primary key
if user_id is not None:
span.add_attribute('django.user.id', str(user_id))
if user_name is not None:
span.add_attribute('django.user.name', str(user_name))
class OpencensusMiddleware(MiddlewareMixin):
"""Saves the request in thread local"""
def __init__(self, get_response=None):
# One-time configuration and initialization.
self.get_response = get_response
self._sampler = settings.SAMPLER
self._exporter = settings.EXPORTER
self._propagator = settings.PROPAGATOR
self._blacklist_paths = settings.params.get(BLACKLIST_PATHS)
# Initialize the sampler
if self._sampler.__name__ == 'ProbabilitySampler':
_rate = settings.params.get(
SAMPLING_RATE, probability.DEFAULT_SAMPLING_RATE)
self.sampler = self._sampler(_rate)
else:
self.sampler = self._sampler()
# Initialize the exporter
transport = convert_to_import(settings.params.get(TRANSPORT))
if self._exporter.__name__ == 'GoogleCloudExporter':
_project_id = settings.params.get(GCP_EXPORTER_PROJECT, None)
self.exporter = self._exporter(
project_id=_project_id,
transport=transport)
elif self._exporter.__name__ == 'ZipkinExporter':
_service_name = self._get_service_name(settings.params)
_zipkin_host_name = settings.params.get(
ZIPKIN_EXPORTER_HOST_NAME, 'localhost')
_zipkin_port = settings.params.get(
ZIPKIN_EXPORTER_PORT, 9411)
_zipkin_protocol = settings.params.get(
ZIPKIN_EXPORTER_PROTOCOL, 'http')
self.exporter = self._exporter(
service_name=_service_name,
host_name=_zipkin_host_name,
port=_zipkin_port,
protocol=_zipkin_protocol,
transport=transport)
elif self._exporter.__name__ == 'TraceExporter':
_service_name = self._get_service_name(settings.params)
_endpoint = settings.params.get(
OCAGENT_TRACE_EXPORTER_ENDPOINT, None)
self.exporter = self._exporter(
service_name=_service_name,
endpoint=_endpoint,
transport=transport)
elif self._exporter.__name__ == 'JaegerExporter':
_service_name = settings.params.get(
JAEGER_EXPORTER_SERVICE_NAME,
self._get_service_name(settings.params))
_jaeger_host_name = settings.params.get(
JAEGER_EXPORTER_HOST_NAME, None)
_jaeger_port = settings.params.get(
JAEGER_EXPORTER_PORT, None)
_jaeger_agent_host_name = settings.params.get(
JAEGER_EXPORTER_AGENT_HOST_NAME, 'localhost')
_jaeger_agent_port = settings.params.get(
JAEGER_EXPORTER_AGENT_PORT, 6831)
self.exporter = self._exporter(
service_name=_service_name,
host_name=_jaeger_host_name,
port=_jaeger_port,
agent_host_name=_jaeger_agent_host_name,
agent_port=_jaeger_agent_port,
transport=transport)
else:
self.exporter = self._exporter(transport=transport)
self.blacklist_hostnames = settings.params.get(
BLACKLIST_HOSTNAMES, None)
# Initialize the propagator
self.propagator = self._propagator()
def process_request(self, request):
"""Called on each request, before Django decides which view to execute.
:type request: :class:`~django.http.request.HttpRequest`
:param request: Django http request.
"""
# Do not trace if the url is blacklisted
if utils.disable_tracing_url(request.path, self._blacklist_paths):
return
# Add the request to thread local
execution_context.set_opencensus_attr(
REQUEST_THREAD_LOCAL_KEY,
request)
execution_context.set_opencensus_attr(
'blacklist_hostnames',
self.blacklist_hostnames)
try:
# Start tracing this request
span_context = self.propagator.from_headers(
_DjangoMetaWrapper(_get_django_request().META))
# Reload the tracer with the new span context
tracer = tracer_module.Tracer(
span_context=span_context,
sampler=self.sampler,
exporter=self.exporter,
propagator=self.propagator)
# Span name is being set at process_view
span = tracer.start_span()
span.span_kind = span_module.SpanKind.SERVER
tracer.add_attribute_to_current_span(
attribute_key=HTTP_METHOD,
attribute_value=request.method)
tracer.add_attribute_to_current_span(
attribute_key=HTTP_URL,
attribute_value=str(request.path))
# Add the span to thread local
# in some cases (exceptions, timeouts) currentspan in
# response event will be one of a child spans.
# let's keep reference to 'django' span and
# use it in response event
execution_context.set_opencensus_attr(
SPAN_THREAD_LOCAL_KEY,
span)
except Exception: # pragma: NO COVER
log.error('Failed to trace request', exc_info=True)
def process_view(self, request, view_func, *args, **kwargs):
"""Process view is executed before the view function, here we get the
function name add set it as the span name.
"""
# Do not trace if the url is blacklisted
if utils.disable_tracing_url(request.path, self._blacklist_paths):
return
try:
# Get the current span and set the span name to the current
# function name of the request.
tracer = _get_current_tracer()
span = tracer.current_span()
span.name = utils.get_func_name(view_func)
except Exception: # pragma: NO COVER
log.error('Failed to trace request', exc_info=True)
def process_response(self, request, response):
# Do not trace if the url is blacklisted
if utils.disable_tracing_url(request.path, self._blacklist_paths):
return response
try:
span = _get_django_span()
span.add_attribute(
attribute_key=HTTP_STATUS_CODE,
attribute_value=str(response.status_code))
_set_django_attributes(span, request)
tracer = _get_current_tracer()
tracer.end_span()
tracer.finish()
except Exception: # pragma: NO COVER
log.error('Failed to trace request', exc_info=True)
finally:
return response
def _get_service_name(self, params):
_service_name = params.get(
SERVICE_NAME, None)
if _service_name is None:
_service_name = params.get(
ZIPKIN_EXPORTER_SERVICE_NAME, 'my_service')
return _service_name
| en | 0.818695 | # Copyright 2017, OpenCensus Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Django middleware helper to capture and trace a request. # pragma: NO COVER Wrapper class which takes HTTP header name and retrieve the value from Django request.META Get Django request from thread local. :rtype: str :returns: Django request. Get Django span from thread local. :rtype: str :returns: Django request. Get the current request tracer. Set the django related attributes. # AnonymousUser in some older versions of Django doesn't implement # get_username # User id is the django autofield for User model as the primary key Saves the request in thread local # One-time configuration and initialization. # Initialize the sampler # Initialize the exporter # Initialize the propagator Called on each request, before Django decides which view to execute. :type request: :class:`~django.http.request.HttpRequest` :param request: Django http request. # Do not trace if the url is blacklisted # Add the request to thread local # Start tracing this request # Reload the tracer with the new span context # Span name is being set at process_view # Add the span to thread local # in some cases (exceptions, timeouts) currentspan in # response event will be one of a child spans. # let's keep reference to 'django' span and # use it in response event # pragma: NO COVER Process view is executed before the view function, here we get the function name add set it as the span name. # Do not trace if the url is blacklisted # Get the current span and set the span name to the current # function name of the request. # pragma: NO COVER # Do not trace if the url is blacklisted # pragma: NO COVER | 1.711986 | 2 |
codeblockCar/codingPage/tests.py | ICT2x01-p2-4/ICT2x01-p2-4 | 0 | 7557 | from django.test import TestCase, Client
from challenge.models import Challenge
from codingPage.models import Command, Log
from django.core.exceptions import ValidationError
from django.urls import reverse
class CodingPageTest(TestCase):
def setUp(self) -> None:
self.client = Client(HTTP_USER_AGENT='Mozilla/5.0')
self.challenge = Challenge.objects.create(name='abc', map='0,0,0,0,0,0,0,0,0', size=3, difficulty='Easy')
self.command = Command.objects.create(action='Dodo', code=1)
self.log = Log.objects.create(data='123', challenge = self.challenge)
return super().setUp()
    def test_validation(self):
        """Test that validation rejects an invalid new command"""
        # assertRaises must wrap the call for the assertion to take effect
        with self.assertRaises(ValidationError):
            Command.objects.create(action='asd', code=5)
def test_check_code(self):
"""Test if code checkers dont upload to database if log false is given"""
response = self.client.post(
reverse('ajax_view'),
data = {
'code': '1\n2\n3\n',
'log': False,
'challenge_id': 1
},
HTTP_X_REQUESTED_WITH='XMLHttpRequest',
url = '/codingPage/test_code'
)
        self.assertEqual(response.content.decode(), '123')
| en | 0.569502 | 2.491186 | 2 |
app/hint/models.py | vigov5/oshougatsu2015 | 0 | 7558 |
import os
import datetime
from app import app, db
class Hint(db.Model):
__tablename__ = 'hints'
id = db.Column(db.Integer, primary_key=True)
description = db.Column(db.Text)
is_open = db.Column(db.Boolean)
problem_id = db.Column(db.Integer, db.ForeignKey('problems.id'))
def __repr__(self):
return '<Hint %r>' % (self.description)
def __init__(self, description='', is_open=False, problem=None):
self.description = description
self.is_open = is_open
        self.problem = problem | none | 1 | 2.600569 | 3 |
base/urls.py | almustafa-noureddin/Portfolio-website | 0 | 7559 | from django.urls import path
from . import views
app_name = "base"
urlpatterns = [
path('', views.IndexView.as_view(), name="home"),
    path('contact/', views.ContactView.as_view(), name="contact"),] | none | 1 | 1.754924 | 2 |
app/main/views/templates.py | cds-snc/notification-admin | 16 | 7560 |
from datetime import datetime, timedelta
from string import ascii_uppercase
from dateutil.parser import parse
from flask import abort, flash, jsonify, redirect, render_template, request, url_for
from flask_babel import _
from flask_babel import lazy_gettext as _l
from flask_login import current_user
from markupsafe import Markup
from notifications_python_client.errors import HTTPError
from notifications_utils.formatters import nl2br
from notifications_utils.recipients import first_column_headings
from app import (
current_service,
service_api_client,
template_api_prefill_client,
template_folder_api_client,
template_statistics_client,
)
from app.main import main
from app.main.forms import (
CreateTemplateForm,
EmailTemplateForm,
LetterTemplateForm,
LetterTemplatePostageForm,
SearchByNameForm,
SetTemplateSenderForm,
SMSTemplateForm,
TemplateAndFoldersSelectionForm,
TemplateFolderForm,
)
from app.main.views.send import get_example_csv_rows, get_sender_details
from app.models.service import Service
from app.models.template_list import TemplateList, TemplateLists
from app.template_previews import TemplatePreview, get_page_count_for_letter
from app.utils import (
email_or_sms_not_enabled,
get_template,
should_skip_template_page,
user_has_permissions,
user_is_platform_admin,
)
form_objects = {
"email": EmailTemplateForm,
"sms": SMSTemplateForm,
"letter": LetterTemplateForm,
}
def get_email_preview_template(template, template_id, service_id):
email_preview_template = get_template(
template,
current_service,
letter_preview_url=url_for(
".view_letter_template_preview",
service_id=service_id,
template_id=template_id,
filetype="png",
),
show_recipient=True,
page_count=get_page_count_for_letter(template),
)
return email_preview_template
@main.route("/services/<service_id>/templates/<uuid:template_id>")
@user_has_permissions()
def view_template(service_id, template_id):
template = current_service.get_template(template_id)
template_folder = current_service.get_template_folder(template["folder"])
user_has_template_permission = current_user.has_template_folder_permission(template_folder)
if should_skip_template_page(template["template_type"]):
return redirect(url_for(".send_one_off", service_id=service_id, template_id=template_id))
return render_template(
"views/templates/template.html",
template=get_email_preview_template(template, template_id, service_id),
template_postage=template["postage"],
user_has_template_permission=user_has_template_permission,
)
@main.route("/services/<service_id>/start-tour/<uuid:template_id>")
@user_has_permissions("view_activity")
def start_tour(service_id, template_id):
template = current_service.get_template(template_id)
if template["template_type"] != "email":
abort(404)
return render_template(
"views/templates/start-tour.html",
template=get_template(
template,
current_service,
show_recipient=True,
),
help="1",
)
@main.route("/services/<service_id>/templates", methods=["GET", "POST"])
@main.route(
"/services/<service_id>/templates/folders/<template_folder_id>",
methods=["GET", "POST"],
)
@main.route("/services/<service_id>/templates/<template_type>", methods=["GET", "POST"])
@main.route(
"/services/<service_id>/templates/<template_type>/folders/<template_folder_id>",
methods=["GET", "POST"],
)
@user_has_permissions()
def choose_template(service_id, template_type="all", template_folder_id=None):
template_folder = current_service.get_template_folder(template_folder_id)
user_has_template_folder_permission = current_user.has_template_folder_permission(template_folder)
template_list = TemplateList(current_service, template_type, template_folder_id, current_user)
templates_and_folders_form = TemplateAndFoldersSelectionForm(
all_template_folders=current_service.get_user_template_folders(current_user),
template_list=template_list,
template_type=template_type,
allow_adding_letter_template=current_service.has_permission("letter"),
allow_adding_copy_of_template=(current_service.all_templates or len(current_user.service_ids) > 1),
)
option_hints = {template_folder_id: "current folder"}
if request.method == "POST" and templates_and_folders_form.validate_on_submit():
if not current_user.has_permissions("manage_templates"):
abort(403)
try:
return process_folder_management_form(templates_and_folders_form, template_folder_id)
except HTTPError as e:
flash(e.message)
if "templates_and_folders" in templates_and_folders_form.errors:
flash(_("Select at least one template or folder"))
initial_state = request.args.get("initial_state")
if request.method == "GET" and initial_state:
templates_and_folders_form.op = initial_state
sending_view = request.args.get("view") == "sending"
return render_template(
"views/templates/choose.html",
current_template_folder_id=template_folder_id,
template_folder_path=current_service.get_template_folder_path(template_folder_id),
template_list=template_list,
show_search_box=current_service.count_of_templates_and_folders > 7,
show_template_nav=(current_service.has_multiple_template_types and (len(current_service.all_templates) > 2)),
sending_view=sending_view,
template_nav_items=get_template_nav_items(template_folder_id, sending_view),
template_type=template_type,
search_form=SearchByNameForm(),
templates_and_folders_form=templates_and_folders_form,
move_to_children=templates_and_folders_form.move_to.children(),
user_has_template_folder_permission=user_has_template_folder_permission,
option_hints=option_hints,
)
def process_folder_management_form(form, current_folder_id):
current_service.get_template_folder_with_user_permission_or_403(current_folder_id, current_user)
new_folder_id = None
if form.is_add_folder_op:
new_folder_id = template_folder_api_client.create_template_folder(
current_service.id, name=form.get_folder_name(), parent_id=current_folder_id
)
if form.is_move_op:
# if we've just made a folder, we also want to move there
move_to_id = new_folder_id or form.move_to.data
current_service.move_to_folder(ids_to_move=form.templates_and_folders.data, move_to=move_to_id)
return redirect(request.url)
def get_template_nav_label(value):
return {
"all": _l("All"),
"sms": _l("Text message"),
"email": _l("Email"),
"letter": _l("Letter"),
}[value]
def get_template_nav_items(template_folder_id, sending_view):
return [
(
get_template_nav_label(key),
key,
url_for(
".choose_template",
service_id=current_service.id,
template_type=key,
template_folder_id=template_folder_id,
view="sending" if sending_view else None,
),
"",
)
for key in ["all"] + current_service.available_template_types
]
@main.route("/services/<service_id>/templates/<template_id>.<filetype>")
@user_has_permissions()
def view_letter_template_preview(service_id, template_id, filetype):
if filetype not in ("pdf", "png"):
abort(404)
db_template = current_service.get_template(template_id)
return TemplatePreview.from_database_object(db_template, filetype, page=request.args.get("page"))
@main.route("/templates/letter-preview-image/<filename>")
@user_is_platform_admin
def letter_branding_preview_image(filename):
template = {
"subject": "An example letter",
"content": (
"Lorem Ipsum is simply dummy text of the printing and typesetting "
"industry.\n\nLorem Ipsum has been the industry’s standard dummy "
"text ever since the 1500s, when an unknown printer took a galley "
"of type and scrambled it to make a type specimen book.\n\n"
"# History\n\nIt has survived not only\n\n"
"* five centuries\n"
"* but also the leap into electronic typesetting\n\n"
"It was popularised in the 1960s with the release of Letraset "
"sheets containing Lorem Ipsum passages, and more recently with "
"desktop publishing software like Aldus PageMaker including "
"versions of Lorem Ipsum.\n\n"
"The point of using Lorem Ipsum is that it has a more-or-less "
"normal distribution of letters, as opposed to using ‘Content "
"here, content here’, making it look like readable English."
),
}
filename = None if filename == "no-branding" else filename
return TemplatePreview.from_example_template(template, filename)
def _view_template_version(service_id, template_id, version, letters_as_pdf=False):
return dict(
template=get_template(
current_service.get_template(template_id, version=version),
current_service,
letter_preview_url=url_for(
".view_template_version_preview",
service_id=service_id,
template_id=template_id,
version=version,
filetype="png",
)
if not letters_as_pdf
else None,
)
)
@main.route("/services/<service_id>/templates/<template_id>/version/<int:version>")
@user_has_permissions()
def view_template_version(service_id, template_id, version):
return render_template(
"views/templates/template_history.html",
**_view_template_version(service_id=service_id, template_id=template_id, version=version),
)
@main.route("/services/<service_id>/templates/<template_id>/version/<int:version>.<filetype>")
@user_has_permissions()
def view_template_version_preview(service_id, template_id, version, filetype):
db_template = current_service.get_template(template_id, version=version)
return TemplatePreview.from_database_object(db_template, filetype)
def _add_template_by_type(template_type, template_folder_id):
if template_type == "copy-existing":
return redirect(
url_for(
".choose_template_to_copy",
service_id=current_service.id,
)
)
if template_type == "letter":
blank_letter = service_api_client.create_service_template(
"New letter template",
"letter",
"Body",
current_service.id,
"Main heading",
"normal",
template_folder_id,
)
return redirect(
url_for(
".view_template",
service_id=current_service.id,
template_id=blank_letter["data"]["id"],
)
)
if email_or_sms_not_enabled(template_type, current_service.permissions):
return redirect(
url_for(
".action_blocked",
service_id=current_service.id,
notification_type=template_type,
return_to="add_new_template",
template_id="0",
)
)
else:
return redirect(
url_for(
".add_service_template",
service_id=current_service.id,
template_type=template_type,
template_folder_id=template_folder_id,
)
)
@main.route("/services/<service_id>/templates/create", methods=["GET", "POST"])
@main.route("/services/<service_id>/templates/folders/<template_folder_id>/create", methods=["GET", "POST"])
@main.route("/services/<service_id>/templates/<template_type>/create", methods=["GET", "POST"])
@main.route("/services/<service_id>/templates/<template_type>/folders/<template_folder_id>/create", methods=["GET", "POST"])
@user_has_permissions("manage_templates")
def create_template(service_id, template_type="all", template_folder_id=None):
form = CreateTemplateForm()
if request.method == "POST" and form.validate_on_submit():
try:
return _add_template_by_type(
form.what_type.data,
template_folder_id,
)
except HTTPError as e:
flash(e.message)
return render_template(
"views/templates/create.html",
service_id=service_id,
template_folder_id=template_folder_id,
template_type=template_type,
form=form,
disabled_options={},
option_hints={},
)
@main.route("/services/<service_id>/templates/copy")
@main.route("/services/<service_id>/templates/all/copy")
@main.route("/services/<service_id>/templates/email/copy")
@main.route("/services/<service_id>/templates/sms/copy")
@main.route("/services/<service_id>/templates/copy/from-folder/<uuid:from_folder>")
@main.route("/services/<service_id>/templates/copy/from-service/<uuid:from_service>")
@main.route("/services/<service_id>/templates/copy/from-service/<uuid:from_service>/from-folder/<uuid:from_folder>")
@main.route("/services/<service_id>/templates/all/folders/<uuid:from_folder>/copy")
@user_has_permissions("manage_templates")
def choose_template_to_copy(
service_id,
from_service=None,
from_folder=None,
):
if from_folder and from_service is None:
from_service = service_id
if from_service:
current_user.belongs_to_service_or_403(from_service)
service = Service(service_api_client.get_service(from_service)["data"])
return render_template(
"views/templates/copy.html",
services_templates_and_folders=TemplateList(service, template_folder_id=from_folder, user=current_user),
template_folder_path=service.get_template_folder_path(from_folder),
from_service=service,
search_form=SearchByNameForm(),
)
else:
return render_template(
"views/templates/copy.html",
services_templates_and_folders=TemplateLists(current_user),
search_form=SearchByNameForm(),
)
@main.route("/services/<service_id>/templates/copy/<uuid:template_id>", methods=["GET", "POST"])
@user_has_permissions("manage_templates")
def copy_template(service_id, template_id):
from_service = request.args.get("from_service")
current_user.belongs_to_service_or_403(from_service)
template = service_api_client.get_service_template(from_service, str(template_id))["data"]
template_folder = template_folder_api_client.get_template_folder(from_service, template["folder"])
if not current_user.has_template_folder_permission(template_folder):
abort(403)
if request.method == "POST":
return add_service_template(
service_id,
template["template_type"],
template_folder_id=template_folder.get("id"),
)
template["template_content"] = template["content"]
template["name"] = _get_template_copy_name(template, current_service.all_templates)
form = form_objects[template["template_type"]](**template)
return render_template(
f"views/edit-{template['template_type']}-template.html",
form=form,
template=template,
heading=_l("Copy email template") if template["template_type"] == "email" else _l("Copy text message template"),
service_id=service_id,
services=current_user.service_ids,
)
def _get_template_copy_name(template, existing_templates):
template_names = [existing["name"] for existing in existing_templates]
for index in reversed(range(1, 10)):
if "{} (copy {})".format(template["name"], index) in template_names:
return "{} (copy {})".format(template["name"], index + 1)
if "{} (copy)".format(template["name"]) in template_names:
return "{} (copy 2)".format(template["name"])
return "{} (copy)".format(template["name"])
@main.route("/services/<service_id>/templates/action-blocked/<notification_type>/<return_to>/<template_id>")
@user_has_permissions("manage_templates")
def action_blocked(service_id, notification_type, return_to, template_id):
if notification_type == "sms":
notification_type = "text messages"
elif notification_type == "email":
notification_type = "emails"
return render_template(
"views/templates/action_blocked.html",
service_id=service_id,
notification_type=notification_type,
return_to=return_to,
template_id=template_id,
)
@main.route(
"/services/<service_id>/templates/folders/<template_folder_id>/manage",
methods=["GET", "POST"],
)
@user_has_permissions("manage_templates")
def manage_template_folder(service_id, template_folder_id):
template_folder = current_service.get_template_folder_with_user_permission_or_403(template_folder_id, current_user)
form = TemplateFolderForm(
name=template_folder["name"],
users_with_permission=template_folder.get("users_with_permission", None),
all_service_users=[user for user in current_service.active_users if user.id != current_user.id],
)
if form.validate_on_submit():
if current_user.has_permissions("manage_service") and form.users_with_permission.all_service_users:
users_with_permission = form.users_with_permission.data + [current_user.id]
else:
users_with_permission = None
template_folder_api_client.update_template_folder(
current_service.id,
template_folder_id,
name=form.name.data,
users_with_permission=users_with_permission,
)
return redirect(
url_for(
".choose_template",
service_id=service_id,
template_folder_id=template_folder_id,
)
)
return render_template(
"views/templates/manage-template-folder.html",
form=form,
template_folder_path=current_service.get_template_folder_path(template_folder_id),
current_service_id=current_service.id,
template_folder_id=template_folder_id,
template_type="all",
)
@main.route(
"/services/<service_id>/templates/folders/<template_folder_id>/delete",
methods=["GET", "POST"],
)
@user_has_permissions("manage_templates")
def delete_template_folder(service_id, template_folder_id):
template_folder = current_service.get_template_folder_with_user_permission_or_403(template_folder_id, current_user)
if len(current_service.get_template_folders_and_templates(template_type="all", template_folder_id=template_folder_id)) > 0:
flash(_l("You must empty this folder before you can delete it"), "info")
return redirect(
url_for(
".choose_template",
service_id=service_id,
template_type="all",
template_folder_id=template_folder_id,
)
)
if request.method == "POST":
try:
template_folder_api_client.delete_template_folder(current_service.id, template_folder_id)
return redirect(
url_for(
".choose_template",
service_id=service_id,
template_folder_id=template_folder["parent_id"],
)
)
except HTTPError as e:
msg = _l("Folder is not empty")
if e.status_code == 400 and msg in e.message:
flash(_("You must empty this folder before you can delete it"), "info")
return redirect(
url_for(
".choose_template",
service_id=service_id,
template_type="all",
template_folder_id=template_folder_id,
)
)
else:
abort(500, e)
else:
flash(
"{} ‘{}’ {}".format(
_l("Are you sure you want to delete the"),
template_folder["name"],
_l("folder?"),
),
"delete",
)
return manage_template_folder(service_id, template_folder_id)
@main.route("/services/templates/<template_id>/get-data", methods=["POST"])
def get_template_data(template_id):
data = template_api_prefill_client.get_template(template_id)
return jsonify({"result": data})
@main.route("/services/<service_id>/templates/add-<template_type>", methods=["GET", "POST"])
@main.route(
"/services/<service_id>/templates/folders/<template_folder_id>/add-<template_type>",
methods=["GET", "POST"],
)
@user_has_permissions("manage_templates")
def add_service_template(service_id, template_type, template_folder_id=None):
if template_type not in ["sms", "email", "letter"]:
abort(404)
if not current_service.has_permission("letter") and template_type == "letter":
abort(403)
form = form_objects[template_type]()
if form.validate_on_submit():
if form.process_type.data != "normal":
abort_403_if_not_admin_user()
try:
new_template = service_api_client.create_service_template(
form.name.data,
template_type,
form.template_content.data,
service_id,
form.subject.data if hasattr(form, "subject") else None,
form.process_type.data,
template_folder_id,
)
except HTTPError as e:
if (
e.status_code == 400
and "content" in e.message
and any(["character count greater than" in x for x in e.message["content"]])
):
form.template_content.errors.extend(e.message["content"])
else:
raise e
else:
flash(_("'{}' template saved").format(form.name.data), "default_with_tick")
return redirect(
url_for(
".view_template",
service_id=service_id,
template_id=new_template["data"]["id"],
)
)
if email_or_sms_not_enabled(template_type, current_service.permissions):
return redirect(
url_for(
".action_blocked",
service_id=service_id,
notification_type=template_type,
template_folder_id=template_folder_id,
return_to="templates",
template_id="0",
)
)
else:
return render_template(
f"views/edit-{template_type}-template.html",
form=form,
template_type=template_type,
template_folder_id=template_folder_id,
service_id=service_id,
heading=_l("New email template") if template_type == "email" else _l("New text message template"),
)
def abort_403_if_not_admin_user():
if not current_user.platform_admin:
abort(403)
@main.route("/services/<service_id>/templates/<template_id>/edit", methods=["GET", "POST"])
@user_has_permissions("manage_templates")
def edit_service_template(service_id, template_id):
template = current_service.get_template_with_user_permission_or_403(template_id, current_user)
template["template_content"] = template["content"]
form = form_objects[template["template_type"]](**template)
if form.validate_on_submit():
if form.process_type.data != template["process_type"]:
abort_403_if_not_admin_user()
subject = form.subject.data if hasattr(form, "subject") else None
new_template_data = {
"name": form.name.data,
"content": form.template_content.data,
"subject": subject,
"template_type": template["template_type"],
"id": template["id"],
"process_type": form.process_type.data,
"reply_to_text": template["reply_to_text"],
}
new_template = get_template(new_template_data, current_service)
template_change = get_template(template, current_service).compare_to(new_template)
if template_change.placeholders_added and not request.form.get("confirm"):
example_column_headings = first_column_headings[new_template.template_type] + list(new_template.placeholders)
return render_template(
"views/templates/breaking-change.html",
template_change=template_change,
new_template=new_template,
column_headings=list(ascii_uppercase[: len(example_column_headings)]),
example_rows=[
example_column_headings,
get_example_csv_rows(new_template),
get_example_csv_rows(new_template),
],
form=form,
)
try:
service_api_client.update_service_template(
template_id,
form.name.data,
template["template_type"],
form.template_content.data,
service_id,
subject,
form.process_type.data,
)
except HTTPError as e:
if e.status_code == 400:
if "content" in e.message and any(["character count greater than" in x for x in e.message["content"]]):
form.template_content.errors.extend(e.message["content"])
else:
raise e
else:
raise e
else:
flash(_("'{}' template saved").format(form.name.data), "default_with_tick")
return redirect(url_for(".view_template", service_id=service_id, template_id=template_id))
if email_or_sms_not_enabled(template["template_type"], current_service.permissions):
return redirect(
url_for(
".action_blocked",
service_id=service_id,
notification_type=template["template_type"],
return_to="view_template",
template_id=template_id,
)
)
else:
return render_template(
f"views/edit-{template['template_type']}-template.html",
form=form,
template=template,
heading=_l("Edit email template") if template["template_type"] == "email" else _l("Edit text message template"),
)
@main.route("/services/<service_id>/templates/<template_id>/delete", methods=["GET", "POST"])
@user_has_permissions("manage_templates")
def delete_service_template(service_id, template_id):
template = current_service.get_template_with_user_permission_or_403(template_id, current_user)
if request.method == "POST":
service_api_client.delete_service_template(service_id, template_id)
return redirect(
url_for(
".choose_template",
service_id=service_id,
template_folder_id=template["folder"],
)
)
try:
last_used_notification = template_statistics_client.get_template_statistics_for_template(service_id, template["id"])
        if not last_used_notification:
            last_used_text = _l("more than seven days")
else:
last_used_date = parse(last_used_notification["created_at"]).replace(tzinfo=None)
last_used_text = get_human_readable_delta(last_used_date, datetime.utcnow())
message = "{} {} {}".format(_l("This template was last used"), last_used_text, _l("ago."))
except HTTPError as e:
if e.status_code == 404:
message = None
else:
raise e
flash(
[
"{} ‘{}’?".format(_l("Are you sure you want to delete"), template["name"]),
message,
],
"delete",
)
return render_template(
"views/templates/template.html",
template=get_email_preview_template(template, template["id"], service_id),
user_has_template_permission=True,
)
@main.route("/services/<service_id>/templates/<template_id>/redact", methods=["GET"])
@user_has_permissions("manage_templates")
def confirm_redact_template(service_id, template_id):
template = current_service.get_template_with_user_permission_or_403(template_id, current_user)
return render_template(
"views/templates/template.html",
template=get_email_preview_template(template, template["id"], service_id),
user_has_template_permission=True,
show_redaction_message=True,
)
@main.route("/services/<service_id>/templates/<template_id>/redact", methods=["POST"])
@user_has_permissions("manage_templates")
def redact_template(service_id, template_id):
service_api_client.redact_service_template(service_id, template_id)
flash(
_("Personalised content will be hidden for messages sent with this template"),
"default_with_tick",
)
return redirect(
url_for(
".view_template",
service_id=service_id,
template_id=template_id,
)
)
@main.route("/services/<service_id>/templates/<template_id>/versions")
@user_has_permissions("view_activity")
def view_template_versions(service_id, template_id):
return render_template(
"views/templates/choose_history.html",
versions=[
get_template(
template,
current_service,
letter_preview_url=url_for(
".view_template_version_preview",
service_id=service_id,
template_id=template_id,
version=template["version"],
filetype="png",
),
)
for template in service_api_client.get_service_template_versions(service_id, template_id)["data"]
],
)
@main.route(
"/services/<service_id>/templates/<template_id>/set-template-sender",
methods=["GET", "POST"],
)
@user_has_permissions("manage_templates")
def set_template_sender(service_id, template_id):
template = current_service.get_template_with_user_permission_or_403(template_id, current_user)
sender_details = get_template_sender_form_dict(service_id, template)
no_senders = sender_details.get("no_senders", False)
form = SetTemplateSenderForm(
sender=sender_details["current_choice"],
sender_choices=sender_details["value_and_label"],
)
option_hints = {sender_details["default_sender"]: "(Default)"}
if form.validate_on_submit():
service_api_client.update_service_template_sender(
service_id,
template_id,
form.sender.data if form.sender.data else None,
)
return redirect(url_for(".view_template", service_id=service_id, template_id=template_id))
return render_template(
"views/templates/set-template-sender.html",
form=form,
template_id=template_id,
no_senders=no_senders,
option_hints=option_hints,
)
@main.route(
"/services/<service_id>/templates/<template_id>/edit-postage",
methods=["GET", "POST"],
)
@user_has_permissions("manage_templates")
def edit_template_postage(service_id, template_id):
template = current_service.get_template_with_user_permission_or_403(template_id, current_user)
if template["template_type"] != "letter":
abort(404)
form = LetterTemplatePostageForm(**template)
if form.validate_on_submit():
postage = form.postage.data
service_api_client.update_service_template_postage(service_id, template_id, postage)
return redirect(url_for(".view_template", service_id=service_id, template_id=template_id))
return render_template(
"views/templates/edit-template-postage.html",
form=form,
service_id=service_id,
template_id=template_id,
template_postage=template["postage"],
)
def get_template_sender_form_dict(service_id, template):
context = {
"email": {"field_name": "email_address"},
"letter": {"field_name": "contact_block"},
"sms": {"field_name": "sms_sender"},
}[template["template_type"]]
sender_format = context["field_name"]
service_senders = get_sender_details(service_id, template["template_type"])
context["default_sender"] = next((x["id"] for x in service_senders if x["is_default"]), "Not set")
if not service_senders:
context["no_senders"] = True
context["value_and_label"] = [(sender["id"], Markup(nl2br(sender[sender_format]))) for sender in service_senders]
context["value_and_label"].insert(0, ("", "Blank")) # Add blank option to start of list
context["current_choice"] = template["service_letter_contact"] if template["service_letter_contact"] else ""
return context
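# Illustrative shape of the dict returned above for an email template whose
# service has one default sender (ids and address are hypothetical):
#
#     {
#         "field_name": "email_address",
#         "default_sender": "<sender-id>",
#         "value_and_label": [("", "Blank"), ("<sender-id>", Markup("ops@example.com"))],
#         "current_choice": "",
#     }
#
# "no_senders" only appears (set to True) when the service has no senders.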
def get_human_readable_delta(from_time, until_time):
delta = until_time - from_time
if delta < timedelta(seconds=60):
return "under a minute"
elif delta < timedelta(hours=1):
minutes = int(delta.seconds / 60)
return "{} minute{}".format(minutes, "" if minutes == 1 else "s")
elif delta < timedelta(days=1):
hours = int(delta.seconds / 3600)
return "{} hour{}".format(hours, "" if hours == 1 else "s")
else:
days = delta.days
return "{} day{}".format(days, "" if days == 1 else "s")
| en | 0.954352 | # if we've just made a folder, we also want to move there # Add blank option to start of list | 1.99336 | 2 |
linearRegression_gradientDescent/linearRegression_gradientDescent.py | MarcelloVendruscolo/DeepLearningForImageAnalysis | 0 | 7561 | <filename>linearRegression_gradientDescent/linearRegression_gradientDescent.py
import numpy as np
from load_auto import load_auto
import matplotlib.pyplot as plt
def initialize_parameters(observation_dimension):
# observation_dimension: number of features taken into consideration of the input
# returns weights as a vector and offset as a scalar
weights = np.zeros((observation_dimension, 1))
offset_b = 0
return weights, offset_b
def model_forward(train_dataset, weights, offset_b):
# train_dataset: input data points
# weights and offset_b: model parameters
# returns the output predictions as a vector corresponding to each input data point
number_observations = np.size(train_dataset, axis = 1)
predictions = np.zeros((1, number_observations))
for observation in range(0, number_observations):
        with np.errstate(over='raise', invalid='raise'):
            try:
                predictions[0, observation] = weights.T @ train_dataset[:, observation] + offset_b
            except FloatingPointError:
                predictions[0, observation] = np.inf
return predictions
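# A vectorized sketch equivalent to the loop above (same arithmetic, but without
# the per-element overflow guard; assumes weights is (d, 1) and train_dataset is (d, n)):
def model_forward_vectorized(train_dataset, weights, offset_b):
    # (1, d) @ (d, n) -> a (1, n) row of predictions
    return weights.T @ train_dataset + offset_b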
def compute_cost(predictions, train_labels):
# predictions: computed output values
# train_labels: true output values (ground truth)
# returns the cost function value
number_observations = np.size(predictions, axis = 1)
    total = 0
    with np.errstate(over='raise', invalid='raise'):
        try:
            for observation in range(0, number_observations):
                total += (train_labels[observation, 0] - predictions[0, observation])**2
        except FloatingPointError:
            return np.inf
    return total/number_observations
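# Equivalent one-line mean squared error (a sketch; matches compute_cost whenever
# no overflow occurs):
def compute_cost_vectorized(predictions, train_labels):
    # train_labels is (n, 1) and predictions is (1, n), so align the shapes first
    return np.mean((train_labels.reshape(1, -1) - predictions) ** 2)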
def model_backward(observation_dimension, train_dataset, predictions, train_labels):
# observation_dimension: number of features taken into consideration of the input
# train_dataset: input data points
# predictions: computed output values
# train_labels: true output values (ground truth)
# returns the gradient of the cost function with respect to all parameters
number_observations = np.size(train_dataset, axis = 1)
sum_weights = np.zeros((observation_dimension, 1))
sum_offset = 0
for observation in range(0, number_observations):
diff = predictions[0, observation] - train_labels[observation, 0]
        with np.errstate(over='raise', invalid='raise'):
            try:
                sum_weights += train_dataset[:, observation].reshape(observation_dimension,-1) * diff
                sum_offset += diff
            except FloatingPointError:
                return np.full(sum_weights.shape, np.inf), np.inf
gradient_weights = sum_weights * (2/number_observations)
gradient_offset = sum_offset * (2/number_observations)
return gradient_weights, gradient_offset
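# For J = (1/n) * sum_i (w.x_i + b - y_i)^2 the loop above computes
# dJ/dw = (2/n) * X @ (y_hat - y)^T and dJ/db = (2/n) * sum_i (y_hat_i - y_i).
# A vectorized sketch of the same gradients:
def model_backward_vectorized(train_dataset, predictions, train_labels):
    number_observations = train_dataset.shape[1]
    diff = predictions - train_labels.reshape(1, -1)  # (1, n) residuals
    gradient_weights = (2 / number_observations) * (train_dataset @ diff.T)  # (d, 1)
    gradient_offset = (2 / number_observations) * float(np.sum(diff))
    return gradient_weights, gradient_offset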
def update_parameters(weights, offset_b, gradient_weights, gradient_offset, learning_rate):
# weights and offset_b: parameters computed (or initialised) in this iteration
# gradient_weights and gradient_offset: gradients of the cost function
# learning_rate: step size
# returns the updated parameters for the next iteration
updated_weights = weights - (learning_rate * gradient_weights)
updated_offset = offset_b - (learning_rate * gradient_offset)
return updated_weights, updated_offset
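# This is plain batch gradient descent: each parameter takes one step of size
# learning_rate against its gradient, i.e. theta <- theta - alpha * dJ/dtheta.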
def predict(train_dataset, weights, offset_b):
return model_forward(train_dataset, weights, offset_b)
def train_linear_model(train_dataset, train_labels, number_iterations, learning_rate):
# train_dataset: input data points
# train_labels: true output values (ground truth)
# number_iterations and learning_rate: user-defined hyperparameters
# returns the model parameters and cost function values as a vector
cost = []
observation_dimension = np.size(train_dataset, axis = 0)
weights, offset_b = initialize_parameters(observation_dimension)
while number_iterations > 0:
predictions = predict(train_dataset, weights, offset_b)
cost.append(compute_cost(predictions, train_labels))
gradient_weights, gradient_offset = model_backward(observation_dimension, train_dataset, predictions, train_labels)
weights, offset_b = update_parameters(weights, offset_b, gradient_weights, gradient_offset, learning_rate)
number_iterations -= 1
return weights, offset_b, cost
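# Usage sketch on synthetic data (illustrative, in the spirit of the commented
# exercises below). Fitting y = 3x + 1 should drive w toward 3 and b toward 1:
# X = np.linspace(0, 1, 50).reshape(1, -1)
# y = (3 * X + 1).reshape(-1, 1)
# w, b, costs = train_linear_model(X, y, number_iterations=500, learning_rate=0.5)
# assert costs[-1] < costs[0]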
def plotting_cost_iteration(learning_rates, cost_consolidated):
for counter in range(0, cost_consolidated.shape[0]):
plt.plot(np.arange(start=1, stop = (cost_consolidated.shape[1] + 1), step= 1), cost_consolidated[counter,:], label=r'$\alpha = $' + str(learning_rates[counter]))
plt.xlabel('Iteration')
plt.ylabel('Cost')
plt.title('Cost per Iteration')
plt.ylim(0,720)
plt.legend()
plt.show()
def plotting_horsepower_mpg(train_dataset, train_labels, weights, offset_b):
plt.scatter(train_dataset[0,:], train_labels[:,0], label='Data points')
plt.plot(train_dataset[0,:], np.array(train_dataset[0,:]*weights + offset_b).reshape(train_labels.shape),'r-', label='Linear Regression')
plt.xlabel('(normalised) Horsepower')
plt.ylabel('MPG')
plt.title('MPG vs (normalised) Horsepower')
plt.legend()
plt.show()
PATH_DATASET = '/Users/marcellovendruscolo/Documents/vscode-workspace/DeepLearningForImageAnalysis/linearRegression_gradientDescent/Auto.csv'
train_dataset, train_labels = load_auto(PATH_DATASET)
train_dataset = np.array(train_dataset)
non_normalised_dataset = np.array(np.transpose(train_dataset))
non_normalised_horsepower = non_normalised_dataset[2,:].reshape(1,-1)
train_labels = np.array(train_labels)
mean = np.mean(train_dataset, axis=0)
sd = np.std(train_dataset, axis=0)
for col in range(0, train_dataset.shape[1]):
train_dataset[:,col] = (train_dataset[:,col] - mean[col])/sd[col]
normalised_dataset = np.transpose(train_dataset)
horsepower_dataset = normalised_dataset[2,:].reshape(1,-1)
# Exercise 1.4.1 and Exercise 1.4.2:
# learning_rate = 0.1
# number_iterations = 1000
# print('\nChoice of input dataset: (i) Only horsepower feature.')
# weights, offset_b, cost_function_value = train_linear_model(horsepower_dataset, train_labels, number_iterations, learning_rate)
# print('Number of iterations: ' +str(number_iterations))
# print('Learning rate: ' +str(learning_rate))
# print('Cost function value: ' +str(cost_function_value[len(cost_function_value) - 1]))
# print('Weights: ' +str(weights))
# print('Offset: ' +str(offset_b))
# print('\nChoice of input dataset: (ii) All features except name.')
# weights, offset_b, cost_function_value = train_linear_model(normalised_dataset, train_labels, number_iterations, learning_rate)
# print('Number of iterations: ' +str(number_iterations))
# print('Learning rate: ' +str(learning_rate))
# print('Cost function value: ' +str(cost_function_value[len(cost_function_value) - 1]))
# print('Weights: ' +str(weights))
# print('Offset: ' +str(offset_b) + '\n')
# Exercise 1.4.3:
# learning_rates = [1, 1e-1, 1e-2, 1e-3, 1e-4]
# number_iterations = 1000
# cost_consolidated = np.ndarray(shape=(len(learning_rates), number_iterations))
# for counter in range(0, len(learning_rates)):
# weights, offset_b, cost_consolidated[counter,:] = train_linear_model(normalised_dataset, train_labels, number_iterations, learning_rates[counter])
# plotting_cost_iteration(learning_rates, cost_consolidated)
# Exercise 1.4.4:
# learning_rate = [1, 1e-1, 1e-2, 1e-3, 1e-4]
# number_iterations = 1000
# cost_consolidated = np.ndarray(shape=(len(learning_rate), number_iterations))
# for counter in range(0, len(learning_rate)):
# weights, offset_b, cost_consolidated[counter,:] = train_linear_model(non_normalised_dataset, train_labels, number_iterations, learning_rate[counter])
# plotting_cost_iteration(learning_rate, cost_consolidated)
# Exercise 1.4.5:
# learning_rate = 0.1
# number_iterations = 1000
# weights, offset_b, cost_function_value = train_linear_model(horsepower_dataset, train_labels, number_iterations, learning_rate)
# plotting_horsepower_mpg(horsepower_dataset, train_labels, weights, offset_b)
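# Expected outcome note for Exercise 1.4.5 (an assumption based on the Auto MPG
# data, not a computed result): MPG falls as horsepower rises, so the fitted
# line should have a negative slope on the normalised horsepower feature.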
# plotting_horsepower_mpg(horsepower_dataset, train_labels, weights, offset_b) | en | 0.591342 | # observation_dimension: number of features taken into consideration of the input # returns weights as a vector and offset as a scalar # train_dataset: input data points # weights and offset_b: model parameters # returns the output predictions as a vector corresponding to each input data point # predictions: computed output values # train_labels: true output values (ground truth) # returns the cost function value # observation_dimension: number of features taken into consideration of the input # train_dataset: input data points # predictions: computed output values # train_labels: true output values (ground truth) # returns the gradient of the cost function with respect to all parameters # weights and offset_b: parameters computed (or initialised) in this iteration # gradient_weights and gradient_offset: gradients of the cost function # learning_rate: step size # returns the updated parameters for the next iteration # train_dataset: input data points # train_labels: true output values (ground truth) # number_iterations and learning_rate: user-defined hyperparameters # returns the model parameters and cost function values as a vector # Exercise 1.4.1 and Exercise 1.4.2: # learning_rate = 0.1 # number_iterations = 1000 # print('\nChoice of input dataset: (i) Only horsepower feature.') # weights, offset_b, cost_function_value = train_linear_model(horsepower_dataset, train_labels, number_iterations, learning_rate) # print('Number of iterations: ' +str(number_iterations)) # print('Learning rate: ' +str(learning_rate)) # print('Cost function value: ' +str(cost_function_value[len(cost_function_value) - 1])) # print('Weights: ' +str(weights)) # print('Offset: ' +str(offset_b)) # print('\nChoice of input dataset: (ii) All features except name.') # weights, offset_b, cost_function_value = train_linear_model(normalised_dataset, train_labels, number_iterations, learning_rate) # print('Number of iterations: ' +str(number_iterations)) # print('Learning rate: ' +str(learning_rate)) # print('Cost function value: ' +str(cost_function_value[len(cost_function_value) - 1])) # print('Weights: ' +str(weights)) # print('Offset: ' +str(offset_b) + '\n') # Exercise 1.4.3: # learning_rates = [1, 1e-1, 1e-2, 1e-3, 1e-4] # number_iterations = 1000 # cost_consolidated = np.ndarray(shape=(len(learning_rates), number_iterations)) # for counter in range(0, len(learning_rates)): # weights, offset_b, cost_consolidated[counter,:] = train_linear_model(normalised_dataset, train_labels, number_iterations, learning_rates[counter]) # plotting_cost_iteration(learning_rates, cost_consolidated) # Exercise 1.4.4: # learning_rate = [1, 1e-1, 1e-2, 1e-3, 1e-4] # number_iterations = 1000 # cost_consolidated = np.ndarray(shape=(len(learning_rate), number_iterations)) # for counter in range(0, len(learning_rate)): # weights, offset_b, cost_consolidated[counter,:] = train_linear_model(non_normalised_dataset, train_labels, number_iterations, learning_rate[counter]) # plotting_cost_iteration(learning_rate, cost_consolidated) # Exercise 1.4.5: # learning_rate = 0.1 # number_iterations = 1000 # weights, offset_b, cost_function_value = train_linear_model(horsepower_dataset, train_labels, number_iterations, learning_rate) # plotting_horsepower_mpg(horsepower_dataset, train_labels, weights, offset_b) | 3.478537 | 3 |
unit_tests/test_hr_calculations.py | mdholbrook/heart_rate_sentinel_server | 0 | 7562 | <filename>unit_tests/test_hr_calculations.py
import pytest
from functions.hr_calculations import *
@pytest.mark.parametrize("candidate, database, expected", [
('jack', [{'patient_id': 'jump'}, {'patient_id': 'jack'}], 1),
('jungle', [{'patient_id': 'jungle'}, {'patient_id': 'jack'}], 0),
('bo', [{'patient_id': 'james'}, {'patient_id': 'boo'},
{'patient_id': 'bo'}], 2)])
def test_find_id_ind(candidate, database, expected):
# Run the test
assert find_id_ind(candidate, database) == expected
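# Each tuple in the parametrize list above becomes one independent test case;
# pytest maps the tuple entries onto "candidate, database, expected" by position.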
@pytest.mark.parametrize("candidate, database, expected", [
('jump', [{'patient_id': 'jump', 'heart_rate': [50, 60, 70]},
{'patient_id': 'jack', 'heart_rate': [120, 112, 131]}],
[50, 60, 70]),
('jack', [{'patient_id': 'jump', 'heart_rate': [50, 60, 70]},
{'patient_id': 'jack', 'heart_rate': [120, 112, 131]}],
[120, 112, 131])
])
def test_get_heart_rates(candidate, database, expected):
# Run the test
assert get_heart_rates(candidate, database) == expected
@pytest.mark.parametrize("candidate, expected", [
([50, 60, 70], 60),
([50, 55, 56], 53.66),
([0, 50, 100], 50),
])
def test_average_heart_rate(candidate, expected):
# Run the test
    assert pytest.approx(average_heart_rate(candidate), rel=1e-2) == expected
@pytest.mark.parametrize("candidate", [
'2018-03-09 11:00:36.372339',
'2017-10-19 15:11:36.167854',
])
def test_get_date_as_numeric(candidate):
# Run the test
result = pytest.approx(get_date_as_numeric(candidate), rel=1e-6)
# Generate expected result
expression = "%Y-%m-%d %H:%M:%S.%f"
expected = datetime.strptime(candidate, expression).timestamp()
assert result == expected
@pytest.mark.parametrize("candidate, expected", [
('Mark', ['2018-03-09 11:00:36.372339', '2017-10-19 15:11:36.167854']),
('Matt', ['2018-03-10 11:00:32.372339', '2017-10-19 35:11:36.167854'])
])
def test_get_times(candidate, expected):
database = [{'patient_id': 'Mark', 'time':
['2018-03-09 11:00:36.372339', '2017-10-19 15:11:36.167854']},
{'patient_id': 'Matt', 'time':
['2018-03-10 11:00:32.372339', '2017-10-19 35:11:36.167854']}]
# Run the test
assert get_times(candidate, database) == expected
@pytest.mark.parametrize("ref_time, times, hr, expected", [
('2018-03-09 11:00:36.372339',
['2018-03-09 11:00:34.372339',
'2018-03-09 11:00:35.372339',
'2018-03-09 11:00:36.872339'], [0, 0, 0], [0]),
('2018-03-09 11:00:36.372339',
['2018-03-09 11:00:35.372339',
'2018-03-09 11:00:36.372359',
'2018-03-09 11:00:37.372339'], [0, 0, 0], [0, 0])])
def test_hr_after_time(ref_time, times, hr, expected):
# Run the test
assert hr_after_time(ref_time, times, hr) == expected
@pytest.mark.parametrize("times, ref_time, expected", [
([0, 1, 2, 3, 4], 3, 4),
([0, 1, 2, 3, 4], 2.5, 3),
([0, 1, 2, 3, 4], 1.5, 2)
])
def test_find_index_larger(ref_time, times, expected):
# Run the test
assert find_index_larger(times, ref_time) == expected
@pytest.mark.parametrize("times, ref_time, expected", [
([0, 1, 2, 3, 4], 3, True),
([0, 1, 2, 3, 4], 4, True),
([0, 1, 2, 3, 4], 4.5, False),
([0, 1, 2, 3, 4], 0, True)
])
def test_check_recent_timestamps(ref_time, times, expected):
# Run the test
assert check_recent_timestamps(times, ref_time) == expected
| <filename>unit_tests/test_hr_calculations.py
import pytest
from functions.hr_calculations import *
@pytest.mark.parametrize("candidate, database, expected", [
('jack', [{'patient_id': 'jump'}, {'patient_id': 'jack'}], 1),
('jungle', [{'patient_id': 'jungle'}, {'patient_id': 'jack'}], 0),
('bo', [{'patient_id': 'james'}, {'patient_id': 'boo'},
{'patient_id': 'bo'}], 2)])
def test_find_id_ind(candidate, database, expected):
# Run the test
assert find_id_ind(candidate, database) == expected
@pytest.mark.parametrize("candidate, database, expected", [
('jump', [{'patient_id': 'jump', 'heart_rate': [50, 60, 70]},
{'patient_id': 'jack', 'heart_rate': [120, 112, 131]}],
[50, 60, 70]),
('jack', [{'patient_id': 'jump', 'heart_rate': [50, 60, 70]},
{'patient_id': 'jack', 'heart_rate': [120, 112, 131]}],
[120, 112, 131])
])
def test_get_heart_rates(candidate, database, expected):
# Run the test
assert get_heart_rates(candidate, database) == expected
@pytest.mark.parametrize("candidate, expected", [
([50, 60, 70], 60),
([50, 55, 56], 53.66),
([0, 50, 100], 50),
])
def test_average_heart_rate(candidate, expected):
# Run the test
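    # note: the second positional argument of pytest.approx is the relative tolerance (rel)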
assert pytest.approx(average_heart_rate(candidate), 1e-2) == expected
@pytest.mark.parametrize("candidate", [
'2018-03-09 11:00:36.372339',
'2017-10-19 15:11:36.167854',
])
def test_get_date_as_numeric(candidate):
# Run the test
result = pytest.approx(get_date_as_numeric(candidate), rel=1e-6)
# Generate expected result
expression = "%Y-%m-%d %H:%M:%S.%f"
expected = datetime.strptime(candidate, expression).timestamp()
assert result == expected
@pytest.mark.parametrize("candidate, expected", [
('Mark', ['2018-03-09 11:00:36.372339', '2017-10-19 15:11:36.167854']),
('Matt', ['2018-03-10 11:00:32.372339', '2017-10-19 35:11:36.167854'])
])
def test_get_times(candidate, expected):
database = [{'patient_id': 'Mark', 'time':
['2018-03-09 11:00:36.372339', '2017-10-19 15:11:36.167854']},
{'patient_id': 'Matt', 'time':
['2018-03-10 11:00:32.372339', '2017-10-19 35:11:36.167854']}]
# Run the test
assert get_times(candidate, database) == expected
@pytest.mark.parametrize("ref_time, times, hr, expected", [
('2018-03-09 11:00:36.372339',
['2018-03-09 11:00:34.372339',
'2018-03-09 11:00:35.372339',
'2018-03-09 11:00:36.872339'], [0, 0, 0], [0]),
('2018-03-09 11:00:36.372339',
['2018-03-09 11:00:35.372339',
'2018-03-09 11:00:36.372359',
'2018-03-09 11:00:37.372339'], [0, 0, 0], [0, 0])])
def test_hr_after_time(ref_time, times, hr, expected):
# Run the test
assert hr_after_time(ref_time, times, hr) == expected
@pytest.mark.parametrize("times, ref_time, expected", [
([0, 1, 2, 3, 4], 3, 4),
([0, 1, 2, 3, 4], 2.5, 3),
([0, 1, 2, 3, 4], 1.5, 2)
])
def test_find_index_larger(ref_time, times, expected):
# Run the test
assert find_index_larger(times, ref_time) == expected
@pytest.mark.parametrize("times, ref_time, expected", [
([0, 1, 2, 3, 4], 3, True),
([0, 1, 2, 3, 4], 4, True),
([0, 1, 2, 3, 4], 4.5, False),
([0, 1, 2, 3, 4], 0, True)
])
def test_check_recent_timestamps(ref_time, times, expected):
# Run the test
assert check_recent_timestamps(times, ref_time) == expected
| en | 0.577346 | # Run the test # Run the test # Run the test # Run the test # Generate expected result # Run the test # Run the test # Run the test # Run the test | 2.828119 | 3 |
selfdrive/boardd/tests/test_boardd_api.py | 919bot/Tessa | 114 | 7563 | <reponame>919bot/Tessa
import random
import numpy as np
import selfdrive.boardd.tests.boardd_old as boardd_old
import selfdrive.boardd.boardd as boardd
from common.realtime import sec_since_boot
from cereal import log
import unittest
def generate_random_can_data_list():
can_list = []
cnt = random.randint(1, 64)
for j in range(cnt):
can_data = np.random.bytes(random.randint(1, 8))
can_list.append([random.randint(0, 128), random.randint(0, 128), can_data, random.randint(0, 128)])
return can_list, cnt
class TestBoarddApiMethods(unittest.TestCase):
def test_correctness(self):
for i in range(1000):
can_list, _ = generate_random_can_data_list()
# Sendcan
# Old API
m_old = boardd_old.can_list_to_can_capnp(can_list, 'sendcan').to_bytes()
# new API
m = boardd.can_list_to_can_capnp(can_list, 'sendcan')
ev_old = log.Event.from_bytes(m_old)
ev = log.Event.from_bytes(m)
self.assertEqual(ev_old.which(), ev.which())
self.assertEqual(len(ev.sendcan), len(ev_old.sendcan))
for i in range(len(ev.sendcan)):
attrs = ['address', 'busTime', 'dat', 'src']
for attr in attrs:
self.assertEqual(getattr(ev.sendcan[i], attr, 'new'), getattr(ev_old.sendcan[i], attr, 'old'))
# Can
m_old = boardd_old.can_list_to_can_capnp(can_list, 'can').to_bytes()
# new API
m = boardd.can_list_to_can_capnp(can_list, 'can')
ev_old = log.Event.from_bytes(m_old)
ev = log.Event.from_bytes(m)
self.assertEqual(ev_old.which(), ev.which())
self.assertEqual(len(ev.can), len(ev_old.can))
for i in range(len(ev.can)):
attrs = ['address', 'busTime', 'dat', 'src']
for attr in attrs:
self.assertEqual(getattr(ev.can[i], attr, 'new'), getattr(ev_old.can[i], attr, 'old'))
def test_performance(self):
can_list, cnt = generate_random_can_data_list()
recursions = 1000
n1 = sec_since_boot()
for i in range(recursions):
boardd_old.can_list_to_can_capnp(can_list, 'sendcan').to_bytes()
n2 = sec_since_boot()
elapsed_old = n2 - n1
# print('Old API, elapsed time: {} secs'.format(elapsed_old))
n1 = sec_since_boot()
for i in range(recursions):
boardd.can_list_to_can_capnp(can_list)
n2 = sec_since_boot()
elapsed_new = n2 - n1
# print('New API, elapsed time: {} secs'.format(elapsed_new))
self.assertTrue(elapsed_new < elapsed_old / 2)
if __name__ == '__main__':
unittest.main()
| import random
import numpy as np
import selfdrive.boardd.tests.boardd_old as boardd_old
import selfdrive.boardd.boardd as boardd
from common.realtime import sec_since_boot
from cereal import log
import unittest
def generate_random_can_data_list():
can_list = []
cnt = random.randint(1, 64)
for j in range(cnt):
can_data = np.random.bytes(random.randint(1, 8))
can_list.append([random.randint(0, 128), random.randint(0, 128), can_data, random.randint(0, 128)])
return can_list, cnt
class TestBoarddApiMethods(unittest.TestCase):
def test_correctness(self):
for i in range(1000):
can_list, _ = generate_random_can_data_list()
# Sendcan
# Old API
m_old = boardd_old.can_list_to_can_capnp(can_list, 'sendcan').to_bytes()
# new API
m = boardd.can_list_to_can_capnp(can_list, 'sendcan')
ev_old = log.Event.from_bytes(m_old)
ev = log.Event.from_bytes(m)
self.assertEqual(ev_old.which(), ev.which())
self.assertEqual(len(ev.sendcan), len(ev_old.sendcan))
for i in range(len(ev.sendcan)):
attrs = ['address', 'busTime', 'dat', 'src']
for attr in attrs:
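                    # distinct fallback defaults ('new' vs 'old') force a failure if either message is missing the attribute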
self.assertEqual(getattr(ev.sendcan[i], attr, 'new'), getattr(ev_old.sendcan[i], attr, 'old'))
# Can
m_old = boardd_old.can_list_to_can_capnp(can_list, 'can').to_bytes()
# new API
m = boardd.can_list_to_can_capnp(can_list, 'can')
ev_old = log.Event.from_bytes(m_old)
ev = log.Event.from_bytes(m)
self.assertEqual(ev_old.which(), ev.which())
self.assertEqual(len(ev.can), len(ev_old.can))
for i in range(len(ev.can)):
attrs = ['address', 'busTime', 'dat', 'src']
for attr in attrs:
self.assertEqual(getattr(ev.can[i], attr, 'new'), getattr(ev_old.can[i], attr, 'old'))
def test_performance(self):
can_list, cnt = generate_random_can_data_list()
recursions = 1000
n1 = sec_since_boot()
for i in range(recursions):
boardd_old.can_list_to_can_capnp(can_list, 'sendcan').to_bytes()
n2 = sec_since_boot()
elapsed_old = n2 - n1
# print('Old API, elapsed time: {} secs'.format(elapsed_old))
n1 = sec_since_boot()
for i in range(recursions):
boardd.can_list_to_can_capnp(can_list)
n2 = sec_since_boot()
elapsed_new = n2 - n1
# print('New API, elapsed time: {} secs'.format(elapsed_new))
self.assertTrue(elapsed_new < elapsed_old / 2)
if __name__ == '__main__':
unittest.main() | en | 0.440253 | # Sendcan # Old API # new API # Can # new API # print('Old API, elapsed time: {} secs'.format(elapsed_old)) # print('New API, elapsed time: {} secs'.format(elapsed_new)) | 2.35695 | 2 |
py_types/static/parse.py | zekna/py-types | 5 | 7564 | <gh_stars>1-10
import ast
import inspect
import sys
import argparse
from ..runtime.asserts import typecheck
@typecheck
def pretty_print_defs(defs: list) -> None:
for d in defs:
print("Function definition for {}".format(d["name"]))
print("Arguments:")
for arg in d["args"]:
arg_type = "untyped"
if arg[2]:
arg_type = arg[2].id
print("\t{} : type {}".format(arg[1], arg_type))
if len(d["args"]) == 0:
print("\tNo arguments.")
return_type = None
if d["return"]:
return_type = d["return"].id
print("Return type: {}".format(return_type))
print("")
@typecheck
def parse(filename: str) -> list:
"""Parses and does basic analysis of functions declared at the top level of a file."""
with open(filename, "r") as file_to_parse:
a = file_to_parse.read()
file_ast = ast.parse(a)
# initial pass -- get all function definitions, their names, args, and annotations
@typecheck
def get_name_annotations(block) -> dict:
if not isinstance(block, ast.FunctionDef):
            return {}  # non-function statements yield an empty dict, keeping the -> dict annotation satisfied
return_annotation = block.returns
arg_annotations = []
for i, arg in enumerate(block.args.args):
arg_annotations.append((i, arg.arg, arg.annotation))
fn_name = block.name
annotations = {
"name": fn_name,
"return": return_annotation,
"args": arg_annotations
}
return annotations
    # drop the empty results so pretty_print_defs only sees real function definitions
    definitions = [d for d in (get_name_annotations(block) for block in file_ast.body) if d]
pretty_print_defs(definitions)
# second pass -- find all expressions, double check origins of any arguments passed to any function in definitions
def depth_first_traversal(ast_tree, filter_type, results: list) -> ast.Module:
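        # placeholder for the planned second pass over all expressions; not yet implemented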
pass
return definitions
if __name__ == "__main__":
parse("static/example_parse_me.py")
| import ast
import inspect
import sys
import argparse
from ..runtime.asserts import typecheck
@typecheck
def pretty_print_defs(defs: list) -> None:
for d in defs:
print("Function definition for {}".format(d["name"]))
print("Arguments:")
for arg in d["args"]:
arg_type = "untyped"
if arg[2]:
arg_type = arg[2].id
print("\t{} : type {}".format(arg[1], arg_type))
if len(d["args"]) == 0:
print("\tNo arguments.")
return_type = None
if d["return"]:
return_type = d["return"].id
print("Return type: {}".format(return_type))
print("")
@typecheck
def parse(filename: str) -> list:
"""Parses and does basic analysis of functions declared at the top level of a file."""
with open(filename, "r") as file_to_parse:
a = file_to_parse.read()
file_ast = ast.parse(a)
# initial pass -- get all function definitions, their names, args, and annotations
@typecheck
def get_name_annotations(block) -> dict:
if not isinstance(block, ast.FunctionDef):
            return {}  # non-function statements yield an empty dict, keeping the -> dict annotation satisfied
return_annotation = block.returns
arg_annotations = []
for i, arg in enumerate(block.args.args):
arg_annotations.append((i, arg.arg, arg.annotation))
fn_name = block.name
annotations = {
"name": fn_name,
"return": return_annotation,
"args": arg_annotations
}
return annotations
    # drop the empty results so pretty_print_defs only sees real function definitions
    definitions = [d for d in (get_name_annotations(block) for block in file_ast.body) if d]
pretty_print_defs(definitions)
# second pass -- find all expressions, double check origins of any arguments passed to any function in definitions
def depth_first_traversal(ast_tree, filter_type, results: list) -> ast.Module:
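        # placeholder for the planned second pass over all expressions; not yet implemented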
pass
return definitions
if __name__ == "__main__":
parse("static/example_parse_me.py") | en | 0.769892 | Parses and does basic analysis of functions declared at the top level of a file. # initial pass -- get all function definitions, their names, args, and annotations # second pass -- find all expressions, double check origins of any arguments passed to any function in definitions | 3.0585 | 3 |
example/example/urls.py | pmaccamp/django-tastypie-swagger | 2 | 7565 | <filename>example/example/urls.py<gh_stars>1-10
from django.conf.urls import include, url
from django.contrib import admin
from demo.apis import api
urlpatterns = [
url(r'^api/', include(api.urls)),
url(r'^api/doc/', include(('tastypie_swagger.urls', 'tastypie_swagger'),
namespace='demo_api_swagger'),
kwargs={
"tastypie_api_module":"demo.apis.api",
"namespace":"demo_api_swagger",
"version": "0.1"}
),
url(r'^admin/', admin.site.urls),
]
| <filename>example/example/urls.py<gh_stars>1-10
from django.conf.urls import include, url
from django.contrib import admin
from demo.apis import api
urlpatterns = [
url(r'^api/', include(api.urls)),
url(r'^api/doc/', include(('tastypie_swagger.urls', 'tastypie_swagger'),
namespace='demo_api_swagger'),
kwargs={
"tastypie_api_module":"demo.apis.api",
"namespace":"demo_api_swagger",
"version": "0.1"}
),
url(r'^admin/', admin.site.urls),
]
| none | 1 | 1.779361 | 2 |
|
scrapy_framework/midwares/download_midware.py | savor007/scrapy_framework | 0 | 7566 | <gh_stars>0
from scrapy_framework.html.request import Request
from scrapy_framework.html.response import Response
import random
def get_ua():
first_num=random.randint(55,69)
third_num=random.randint(0,3200)
forth_num=random.randint(0, 140)
os_type = [
'(Windows NT 6.1; WOW64)', '(Windows NT 10.0; WOW64)', '(X11; Linux x86_64)',
'(Macintosh; Intel Mac OS X 10_12_6)'
]
chrome_version = 'Chrome/{}.0.{}.{}'.format(first_num, third_num, forth_num)
ua = ' '.join(['Mozilla/5.0', random.choice(os_type), 'AppleWebKit/537.36',
'(KHTML, like Gecko)', chrome_version, 'Safari/537.36']
)
return ua
class DownloadMidware(object):
def process_request(self, request):
        if request.headers is None:
            request.headers = dict()
request.headers["User-Agent"]=get_ua()
return request
def process_response(self, response):
return response | from scrapy_framework.html.request import Request
from scrapy_framework.html.response import Response
import random
def get_ua():
first_num=random.randint(55,69)
third_num=random.randint(0,3200)
forth_num=random.randint(0, 140)
os_type = [
'(Windows NT 6.1; WOW64)', '(Windows NT 10.0; WOW64)', '(X11; Linux x86_64)',
'(Macintosh; Intel Mac OS X 10_12_6)'
]
chrome_version = 'Chrome/{}.0.{}.{}'.format(first_num, third_num, forth_num)
ua = ' '.join(['Mozilla/5.0', random.choice(os_type), 'AppleWebKit/537.36',
'(KHTML, like Gecko)', chrome_version, 'Safari/537.36']
)
return ua
class DownloadMidware(object):
def process_request(self, request):
        if request.headers is None:
            request.headers = dict()
request.headers["User-Agent"]=get_ua()
return request
def process_response(self, response):
return response | none | 1 | 2.718433 | 3 |
|
tracardi/process_engine/action/v1/pro/scheduler/plugin.py | bytepl/tracardi | 0 | 7567 | from pydantic import BaseModel
from tracardi.domain.entity import Entity
from tracardi.domain.scheduler_config import SchedulerConfig
from tracardi.domain.resource import ResourceCredentials
from tracardi.service.storage.driver import storage
from tracardi.service.plugin.runner import ActionRunner
from tracardi.service.plugin.domain.register import Plugin, Spec, MetaData, Form, FormGroup, FormField, FormComponent
from tracardi.service.plugin.domain.result import Result
class Configuration(BaseModel):
source: Entity
event_type: str
properties: str = "{}"
postpone: str
def validate(config: dict) -> Configuration:
return Configuration(**config)
class SchedulerPlugin(ActionRunner):
@staticmethod
async def build(**kwargs) -> 'SchedulerPlugin':
config = validate(kwargs)
resource = await storage.driver.resource.load(config.source.id)
plugin = SchedulerPlugin(config, resource.credentials)
return plugin
def __init__(self, config: Configuration, credentials: ResourceCredentials):
self.config = config
self.credentials = credentials.get_credentials(
self,
output=SchedulerConfig) # type: SchedulerConfig
async def run(self, payload):
run_in_background = True
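        # both branches currently return an empty result; presumably the real scheduling
        # happens server-side in the Tracardi Pro service (assumption, not confirmed here)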
if not run_in_background:
return Result(port="response", value=None)
else:
return Result(port="response", value=None)
def register() -> Plugin:
return Plugin(
start=False,
spec=Spec(
module='tracardi.process_engine.action.v1.pro.scheduler.plugin',
className='SchedulerPlugin',
inputs=["payload"],
outputs=['response', 'error'],
version='0.6.2',
license="MIT",
author="<NAME>",
init= {
"source": {
"id": ""
},
"event_type": "",
"properties": "{}",
"postpone": "+1m"
}
),
metadata=MetaData(
name='Schedule event',
desc='This plugin schedules events',
icon='calendar',
group=["Time"],
tags=["Pro", "Scheduler"],
pro=True,
)
)
| from pydantic import BaseModel
from tracardi.domain.entity import Entity
from tracardi.domain.scheduler_config import SchedulerConfig
from tracardi.domain.resource import ResourceCredentials
from tracardi.service.storage.driver import storage
from tracardi.service.plugin.runner import ActionRunner
from tracardi.service.plugin.domain.register import Plugin, Spec, MetaData, Form, FormGroup, FormField, FormComponent
from tracardi.service.plugin.domain.result import Result
class Configuration(BaseModel):
source: Entity
event_type: str
properties: str = "{}"
postpone: str
def validate(config: dict) -> Configuration:
return Configuration(**config)
class SchedulerPlugin(ActionRunner):
@staticmethod
async def build(**kwargs) -> 'SchedulerPlugin':
config = validate(kwargs)
resource = await storage.driver.resource.load(config.source.id)
plugin = SchedulerPlugin(config, resource.credentials)
return plugin
def __init__(self, config: Configuration, credentials: ResourceCredentials):
self.config = config
self.credentials = credentials.get_credentials(
self,
output=SchedulerConfig) # type: SchedulerConfig
async def run(self, payload):
run_in_background = True
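        # both branches currently return an empty result; presumably the real scheduling
        # happens server-side in the Tracardi Pro service (assumption, not confirmed here)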
if not run_in_background:
return Result(port="response", value=None)
else:
return Result(port="response", value=None)
def register() -> Plugin:
return Plugin(
start=False,
spec=Spec(
module='tracardi.process_engine.action.v1.pro.scheduler.plugin',
className='SchedulerPlugin',
inputs=["payload"],
outputs=['response', 'error'],
version='0.6.2',
license="MIT",
author="<NAME>",
init= {
"source": {
"id": ""
},
"event_type": "",
"properties": "{}",
"postpone": "+1m"
}
),
metadata=MetaData(
name='Schedule event',
desc='This plugin schedules events',
icon='calendar',
group=["Time"],
tags=["Pro", "Scheduler"],
pro=True,
)
)
| en | 0.20864 | # type: SchedulerConfig | 1.889135 | 2 |
tests/test_covid_daily.py | alvarobartt/covid-daily | 13 | 7568 | <filename>tests/test_covid_daily.py
# Copyright 2020 <NAME>, alvarobartt @ GitHub
# See LICENSE for details.
import pytest
import covid_daily
def test_overview():
params = [
{
'as_json': True
},
{
'as_json': False
}
]
for param in params:
covid_daily.overview(as_json=param['as_json'])
def test_data():
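    # the chart id appears to mirror the graph element names on worldometers.info country pages (assumption)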
data = covid_daily.data(
country='france',
chart='graph-deaths-daily',
as_json=False
)
print(data.tail())
if __name__ == "__main__":
test_overview()
test_data()
| <filename>tests/test_covid_daily.py
# Copyright 2020 <NAME>, alvarobartt @ GitHub
# See LICENSE for details.
import pytest
import covid_daily
def test_overview():
params = [
{
'as_json': True
},
{
'as_json': False
}
]
for param in params:
covid_daily.overview(as_json=param['as_json'])
def test_data():
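    # the chart id appears to mirror the graph element names on worldometers.info country pages (assumption)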
data = covid_daily.data(
country='france',
chart='graph-deaths-daily',
as_json=False
)
print(data.tail())
if __name__ == "__main__":
test_overview()
test_data()
| en | 0.617168 | # Copyright 2020 <NAME>, alvarobartt @ GitHub # See LICENSE for details. | 2.337855 | 2 |
2021/HANFS/fence-agents/fence/agents/zvm/fence_zvmip.py | BryanWhitehurst/HPCCEA | 10 | 7569 | <gh_stars>1-10
#!@PYTHON@ -tt
import sys
import atexit
import socket
import struct
import logging
sys.path.append("@FENCEAGENTSLIBDIR@")
from fencing import *
from fencing import fail, fail_usage, run_delay, EC_LOGIN_DENIED, EC_TIMED_OUT
#BEGIN_VERSION_GENERATION
RELEASE_VERSION=""
REDHAT_COPYRIGHT=""
BUILD_DATE=""
#END_VERSION_GENERATION
INT4 = 4
def open_socket(options):
try:
if "--inet6-only" in options:
protocol = socket.AF_INET6
elif "--inet4-only" in options:
protocol = socket.AF_INET
else:
protocol = 0
(_, _, _, _, addr) = socket.getaddrinfo( \
options["--ip"], options["--ipport"], protocol,
0, socket.IPPROTO_TCP, socket.AI_PASSIVE
)[0]
except socket.gaierror:
fail(EC_LOGIN_DENIED)
conn = socket.socket()
conn.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
conn.settimeout(float(options["--shell-timeout"]))
try:
conn.connect(addr)
except socket.error:
fail(EC_LOGIN_DENIED)
return conn
def smapi_pack_string(string):
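    # SMAPI wire format: each string is a 4-byte big-endian length followed by the raw bytes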
return struct.pack("!i%ds" % (len(string)), len(string), string)
def prepare_smapi_command(options, smapi_function, additional_args):
packet_size = 3*INT4 + len(smapi_function) + len(options["--username"]) + len(options["--password"])
for arg in additional_args:
packet_size += INT4 + len(arg)
command = struct.pack("!i", packet_size)
command += smapi_pack_string(smapi_function)
command += smapi_pack_string(options["--username"])
command += smapi_pack_string(options["--password"])
for arg in additional_args:
command += smapi_pack_string(arg)
return command
def get_power_status(conn, options):
del conn
if options.get("--original-action", None) == "monitor":
(return_code, reason_code, images_active) = \
get_list_of_images(options, "Check_Authentication", None)
logging.debug("Check_Authenticate (%d,%d)", return_code, reason_code)
if return_code == 0:
return {}
else:
fail(EC_LOGIN_DENIED)
if options["--action"] == "list":
# '*' = list all active images
options["--plug"] = "*"
(return_code, reason_code, images_active) = \
get_list_of_images(options, "Image_Status_Query", options["--plug"])
logging.debug("Image_Status_Query results are (%d,%d)", return_code, reason_code)
if not options["--action"] == "list":
if (return_code == 0) and (reason_code == 0):
return "on"
elif (return_code == 0) and (reason_code == 12):
            # we always run with --missing-as-off because we cannot check whether the
            # image is defined or not (see rhbz#1188750)
return "off"
else:
return "unknown"
else:
(return_code, reason_code, images_defined) = \
get_list_of_images(options, "Image_Name_Query_DM", options["--username"])
logging.debug("Image_Name_Query_DM results are (%d,%d)", return_code, reason_code)
return dict([(i, ("", "on" if i in images_active else "off")) for i in images_defined])
def set_power_status(conn, options):
conn = open_socket(options)
packet = None
if options["--action"] == "on":
packet = prepare_smapi_command(options, "Image_Activate", [options["--plug"]])
elif options["--action"] == "off":
packet = prepare_smapi_command(options, "Image_Deactivate", [options["--plug"], "IMMED"])
conn.send(packet)
request_id = struct.unpack("!i", conn.recv(INT4))[0]
(output_len, request_id, return_code, reason_code) = struct.unpack("!iiii", conn.recv(INT4 * 4))
logging.debug("Image_(De)Activate results are (%d,%d)", return_code, reason_code)
conn.close()
return
def get_list_of_images(options, command, data_as_plug):
conn = open_socket(options)
if data_as_plug is None:
packet = prepare_smapi_command(options, command, [])
else:
packet = prepare_smapi_command(options, command, [data_as_plug])
conn.send(packet)
request_id = struct.unpack("!i", conn.recv(INT4))[0]
(output_len, request_id, return_code, reason_code) = struct.unpack("!iiii", conn.recv(INT4 * 4))
images = set()
if output_len > 3*INT4:
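        # response payload: a 4-byte array length followed by repeated (length, name) records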
array_len = struct.unpack("!i", conn.recv(INT4))[0]
data = ""
while True:
read_data = conn.recv(1024, socket.MSG_WAITALL)
data += read_data
if array_len == len(data):
break
elif not read_data:
logging.error("Failed: Not enough data read from socket")
fail(EC_TIMED_OUT)
parsed_len = 0
while parsed_len < array_len:
string_len = struct.unpack("!i", data[parsed_len:parsed_len+INT4])[0]
parsed_len += INT4
image_name = struct.unpack("!%ds" % (string_len), data[parsed_len:parsed_len+string_len])[0]
parsed_len += string_len
images.add(image_name)
conn.close()
return (return_code, reason_code, images)
def main():
device_opt = ["ipaddr", "login", "passwd", "port", "method", "missing_as_off"]
atexit.register(atexit_handler)
all_opt["ipport"]["default"] = "44444"
all_opt["shell_timeout"]["default"] = "5.0"
all_opt["missing_as_off"]["default"] = "1"
options = check_input(device_opt, process_input(device_opt), other_conditions=True)
if len(options.get("--plug", "")) > 8:
fail_usage("Failed: Name of image can not be longer than 8 characters")
if options["--action"] == "validate-all":
sys.exit(0)
docs = {}
docs["shortdesc"] = "Fence agent for use with z/VM Virtual Machines"
docs["longdesc"] = """The fence_zvm agent is intended to be used with with z/VM SMAPI service via TCP/IP
To use this agent the z/VM SMAPI service needs to be configured to allow the virtual machine running this agent to connect to it and issue
the image_recycle operation. This involves updating the VSMWORK1 AUTHLIST VMSYS:VSMWORK1. file. The entry should look something similar to
this:
Column 1 Column 66 Column 131
| | |
V V V
XXXXXXXX ALL IMAGE_OPERATIONS
Where XXXXXXX is the name of the virtual machine used in the authuser field of the request.
"""
docs["vendorurl"] = "http://www.ibm.com"
show_docs(options, docs)
run_delay(options)
result = fence_action(None, options, set_power_status, get_power_status, get_power_status)
sys.exit(result)
if __name__ == "__main__":
main()
| #!@PYTHON@ -tt
import sys
import atexit
import socket
import struct
import logging
sys.path.append("@FENCEAGENTSLIBDIR@")
from fencing import *
from fencing import fail, fail_usage, run_delay, EC_LOGIN_DENIED, EC_TIMED_OUT
#BEGIN_VERSION_GENERATION
RELEASE_VERSION=""
REDHAT_COPYRIGHT=""
BUILD_DATE=""
#END_VERSION_GENERATION
INT4 = 4
def open_socket(options):
try:
if "--inet6-only" in options:
protocol = socket.AF_INET6
elif "--inet4-only" in options:
protocol = socket.AF_INET
else:
protocol = 0
(_, _, _, _, addr) = socket.getaddrinfo( \
options["--ip"], options["--ipport"], protocol,
0, socket.IPPROTO_TCP, socket.AI_PASSIVE
)[0]
except socket.gaierror:
fail(EC_LOGIN_DENIED)
conn = socket.socket()
conn.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
conn.settimeout(float(options["--shell-timeout"]))
try:
conn.connect(addr)
except socket.error:
fail(EC_LOGIN_DENIED)
return conn
def smapi_pack_string(string):
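    # SMAPI wire format: each string is a 4-byte big-endian length followed by the raw bytes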
return struct.pack("!i%ds" % (len(string)), len(string), string)
def prepare_smapi_command(options, smapi_function, additional_args):
packet_size = 3*INT4 + len(smapi_function) + len(options["--username"]) + len(options["--password"])
for arg in additional_args:
packet_size += INT4 + len(arg)
command = struct.pack("!i", packet_size)
command += smapi_pack_string(smapi_function)
command += smapi_pack_string(options["--username"])
command += smapi_pack_string(options["--password"])
for arg in additional_args:
command += smapi_pack_string(arg)
return command
def get_power_status(conn, options):
del conn
if options.get("--original-action", None) == "monitor":
(return_code, reason_code, images_active) = \
get_list_of_images(options, "Check_Authentication", None)
logging.debug("Check_Authenticate (%d,%d)", return_code, reason_code)
if return_code == 0:
return {}
else:
fail(EC_LOGIN_DENIED)
if options["--action"] == "list":
# '*' = list all active images
options["--plug"] = "*"
(return_code, reason_code, images_active) = \
get_list_of_images(options, "Image_Status_Query", options["--plug"])
logging.debug("Image_Status_Query results are (%d,%d)", return_code, reason_code)
if not options["--action"] == "list":
if (return_code == 0) and (reason_code == 0):
return "on"
elif (return_code == 0) and (reason_code == 12):
            # we always run with --missing-as-off because we cannot check whether the
            # image is defined or not (see rhbz#1188750)
return "off"
else:
return "unknown"
else:
(return_code, reason_code, images_defined) = \
get_list_of_images(options, "Image_Name_Query_DM", options["--username"])
logging.debug("Image_Name_Query_DM results are (%d,%d)", return_code, reason_code)
return dict([(i, ("", "on" if i in images_active else "off")) for i in images_defined])
def set_power_status(conn, options):
conn = open_socket(options)
packet = None
if options["--action"] == "on":
packet = prepare_smapi_command(options, "Image_Activate", [options["--plug"]])
elif options["--action"] == "off":
packet = prepare_smapi_command(options, "Image_Deactivate", [options["--plug"], "IMMED"])
conn.send(packet)
request_id = struct.unpack("!i", conn.recv(INT4))[0]
(output_len, request_id, return_code, reason_code) = struct.unpack("!iiii", conn.recv(INT4 * 4))
logging.debug("Image_(De)Activate results are (%d,%d)", return_code, reason_code)
conn.close()
return
def get_list_of_images(options, command, data_as_plug):
conn = open_socket(options)
if data_as_plug is None:
packet = prepare_smapi_command(options, command, [])
else:
packet = prepare_smapi_command(options, command, [data_as_plug])
conn.send(packet)
request_id = struct.unpack("!i", conn.recv(INT4))[0]
(output_len, request_id, return_code, reason_code) = struct.unpack("!iiii", conn.recv(INT4 * 4))
images = set()
if output_len > 3*INT4:
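        # response payload: a 4-byte array length followed by repeated (length, name) records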
array_len = struct.unpack("!i", conn.recv(INT4))[0]
data = ""
while True:
read_data = conn.recv(1024, socket.MSG_WAITALL)
data += read_data
if array_len == len(data):
break
elif not read_data:
logging.error("Failed: Not enough data read from socket")
fail(EC_TIMED_OUT)
parsed_len = 0
while parsed_len < array_len:
string_len = struct.unpack("!i", data[parsed_len:parsed_len+INT4])[0]
parsed_len += INT4
image_name = struct.unpack("!%ds" % (string_len), data[parsed_len:parsed_len+string_len])[0]
parsed_len += string_len
images.add(image_name)
conn.close()
return (return_code, reason_code, images)
def main():
device_opt = ["ipaddr", "login", "passwd", "port", "method", "missing_as_off"]
atexit.register(atexit_handler)
all_opt["ipport"]["default"] = "44444"
all_opt["shell_timeout"]["default"] = "5.0"
all_opt["missing_as_off"]["default"] = "1"
options = check_input(device_opt, process_input(device_opt), other_conditions=True)
if len(options.get("--plug", "")) > 8:
fail_usage("Failed: Name of image can not be longer than 8 characters")
if options["--action"] == "validate-all":
sys.exit(0)
docs = {}
docs["shortdesc"] = "Fence agent for use with z/VM Virtual Machines"
docs["longdesc"] = """The fence_zvm agent is intended to be used with with z/VM SMAPI service via TCP/IP
To use this agent the z/VM SMAPI service needs to be configured to allow the virtual machine running this agent to connect to it and issue
the image_recycle operation. This involves updating the VSMWORK1 AUTHLIST VMSYS:VSMWORK1. file. The entry should look something similar to
this:
Column 1 Column 66 Column 131
| | |
V V V
XXXXXXXX ALL IMAGE_OPERATIONS
Where XXXXXXX is the name of the virtual machine used in the authuser field of the request.
"""
docs["vendorurl"] = "http://www.ibm.com"
show_docs(options, docs)
run_delay(options)
result = fence_action(None, options, set_power_status, get_power_status, get_power_status)
sys.exit(result)
if __name__ == "__main__":
main() | en | 0.780131 | #!@PYTHON@ -tt #BEGIN_VERSION_GENERATION #END_VERSION_GENERATION # '*' = list all active images # We are running always with --missing-as-off because we can not check if image # is defined or not (look at rhbz#1188750) The fence_zvm agent is intended to be used with with z/VM SMAPI service via TCP/IP To use this agent the z/VM SMAPI service needs to be configured to allow the virtual machine running this agent to connect to it and issue the image_recycle operation. This involves updating the VSMWORK1 AUTHLIST VMSYS:VSMWORK1. file. The entry should look something similar to this: Column 1 Column 66 Column 131 | | | V V V XXXXXXXX ALL IMAGE_OPERATIONS Where XXXXXXX is the name of the virtual machine used in the authuser field of the request. | 2.137382 | 2 |
2.5.9/test_splash/test_splash/spiders/with_splash.py | feel-easy/myspider | 1 | 7570 | <filename>2.5.9/test_splash/test_splash/spiders/with_splash.py
# -*- coding: utf-8 -*-
import scrapy
from scrapy_splash import SplashRequest  # request object provided by the scrapy_splash package
class WithSplashSpider(scrapy.Spider):
name = 'with_splash'
allowed_domains = ['baidu.com']
start_urls = ['https://www.baidu.com/s?wd=13161933309']
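    # assumes a running Splash instance wired up via SPLASH_URL and the scrapy_splash
    # downloader middlewares in settings.py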
def start_requests(self):
yield SplashRequest(self.start_urls[0],
callback=self.parse_splash,
                            args={'wait': 10},  # maximum wait time, in seconds
                            endpoint='render.html')  # fixed endpoint parameter for the Splash render service
def parse_splash(self, response):
with open('with_splash.html', 'w') as f:
f.write(response.body.decode())
| <filename>2.5.9/test_splash/test_splash/spiders/with_splash.py
# -*- coding: utf-8 -*-
import scrapy
from scrapy_splash import SplashRequest  # request object provided by the scrapy_splash package
class WithSplashSpider(scrapy.Spider):
name = 'with_splash'
allowed_domains = ['baidu.com']
start_urls = ['https://www.baidu.com/s?wd=13161933309']
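    # assumes a running Splash instance wired up via SPLASH_URL and the scrapy_splash
    # downloader middlewares in settings.py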
def start_requests(self):
yield SplashRequest(self.start_urls[0],
callback=self.parse_splash,
                            args={'wait': 10},  # maximum wait time, in seconds
                            endpoint='render.html')  # fixed endpoint parameter for the Splash render service
def parse_splash(self, response):
with open('with_splash.html', 'w') as f:
f.write(response.body.decode())
| zh | 0.852609 | # -*- coding: utf-8 -*- # request object provided by the scrapy_splash package # maximum wait time, in seconds # fixed endpoint parameter for the Splash render service | 2.710969 | 3
run.py | iudaichi/iu_linebot | 0 | 7571 | from main import app
import os
import uvicorn
if __name__ == '__main__':
port = int(os.getenv("PORT"))
uvicorn.run(app, host="0.0.0.0", port=port, workers=1, reload=True)
| from main import app
import os
import uvicorn
if __name__ == '__main__':
port = int(os.getenv("PORT"))
uvicorn.run(app, host="0.0.0.0", port=port, workers=1, reload=True)
| none | 1 | 1.792976 | 2 |
|
fastestimator/architecture/pytorch/unet.py | DwijayDS/fastestimator | 57 | 7572 | <reponame>DwijayDS/fastestimator
# Copyright 2019 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from typing import Tuple
import torch
import torch.nn as nn
from torch.nn import functional as F
from torch.nn.init import kaiming_normal_ as he_normal
class UNetEncoderBlock(nn.Module):
"""A UNet encoder block.
This class is intentionally not @traceable (models and layers are handled by a different process).
Args:
in_channels: How many channels enter the encoder.
out_channels: How many channels leave the encoder.
"""
def __init__(self, in_channels: int, out_channels: int) -> None:
super().__init__()
self.layers = nn.Sequential(nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
for layer in self.layers:
if isinstance(layer, nn.Conv2d):
he_normal(layer.weight.data)
layer.bias.data.zero_()
def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
out = self.layers(x)
return out, F.max_pool2d(out, 2)
class UNetDecoderBlock(nn.Module):
"""A UNet decoder block.
This class is intentionally not @traceable (models and layers are handled by a different process).
Args:
in_channels: How many channels enter the decoder.
mid_channels: How many channels are used for the decoder's intermediate layer.
out_channels: How many channels leave the decoder.
"""
def __init__(self, in_channels: int, mid_channels: int, out_channels: int) -> None:
super().__init__()
self.layers = nn.Sequential(nn.Conv2d(in_channels, mid_channels, 3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(mid_channels, mid_channels, 3, padding=1),
nn.ReLU(inplace=True),
nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True),
nn.Conv2d(mid_channels, out_channels, 3, padding=1),
nn.ReLU(inplace=True))
for layer in self.layers:
if isinstance(layer, nn.Conv2d):
he_normal(layer.weight.data)
layer.bias.data.zero_()
def forward(self, x: torch.Tensor) -> torch.Tensor:
return self.layers(x)
class UNet(nn.Module):
"""A standard UNet implementation in PyTorch.
This class is intentionally not @traceable (models and layers are handled by a different process).
Args:
input_size: The size of the input tensor (channels, height, width).
Raises:
ValueError: Length of `input_size` is not 3.
ValueError: `input_size`[1] or `input_size`[2] is not a multiple of 16.
"""
def __init__(self, input_size: Tuple[int, int, int] = (1, 128, 128)) -> None:
UNet._check_input_size(input_size)
super().__init__()
self.input_size = input_size
self.enc1 = UNetEncoderBlock(in_channels=input_size[0], out_channels=64)
self.enc2 = UNetEncoderBlock(in_channels=64, out_channels=128)
self.enc3 = UNetEncoderBlock(in_channels=128, out_channels=256)
self.enc4 = UNetEncoderBlock(in_channels=256, out_channels=512)
self.bottle_neck = UNetDecoderBlock(in_channels=512, mid_channels=1024, out_channels=512)
self.dec4 = UNetDecoderBlock(in_channels=1024, mid_channels=512, out_channels=256)
self.dec3 = UNetDecoderBlock(in_channels=512, mid_channels=256, out_channels=128)
self.dec2 = UNetDecoderBlock(in_channels=256, mid_channels=128, out_channels=64)
self.dec1 = nn.Sequential(nn.Conv2d(128, 64, 3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(64, 64, 3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(64, 1, 1),
nn.Sigmoid())
for layer in self.dec1:
if isinstance(layer, nn.Conv2d):
he_normal(layer.weight.data)
layer.bias.data.zero_()
def forward(self, x: torch.Tensor) -> torch.Tensor:
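        # encoder feature maps (x1..x4) are kept for the skip connections into the matching decoder stages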
x1, x_e1 = self.enc1(x)
x2, x_e2 = self.enc2(x_e1)
x3, x_e3 = self.enc3(x_e2)
x4, x_e4 = self.enc4(x_e3)
x_bottle_neck = self.bottle_neck(x_e4)
x_d4 = self.dec4(torch.cat((x_bottle_neck, x4), 1))
x_d3 = self.dec3(torch.cat((x_d4, x3), 1))
x_d2 = self.dec2(torch.cat((x_d3, x2), 1))
x_out = self.dec1(torch.cat((x_d2, x1), 1))
return x_out
@staticmethod
def _check_input_size(input_size):
if len(input_size) != 3:
raise ValueError("Length of `input_size` is not 3 (channel, height, width)")
_, height, width = input_size
if height < 16 or not (height / 16.0).is_integer() or width < 16 or not (width / 16.0).is_integer():
raise ValueError("Both height and width of input_size need to be multiples of 16 (16, 32, 48...)")
| # Copyright 2019 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from typing import Tuple
import torch
import torch.nn as nn
from torch.nn import functional as F
from torch.nn.init import kaiming_normal_ as he_normal
class UNetEncoderBlock(nn.Module):
"""A UNet encoder block.
This class is intentionally not @traceable (models and layers are handled by a different process).
Args:
in_channels: How many channels enter the encoder.
out_channels: How many channels leave the encoder.
"""
def __init__(self, in_channels: int, out_channels: int) -> None:
super().__init__()
self.layers = nn.Sequential(nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
nn.ReLU(inplace=True))
for layer in self.layers:
if isinstance(layer, nn.Conv2d):
he_normal(layer.weight.data)
layer.bias.data.zero_()
def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
out = self.layers(x)
return out, F.max_pool2d(out, 2)
class UNetDecoderBlock(nn.Module):
"""A UNet decoder block.
This class is intentionally not @traceable (models and layers are handled by a different process).
Args:
in_channels: How many channels enter the decoder.
mid_channels: How many channels are used for the decoder's intermediate layer.
out_channels: How many channels leave the decoder.
"""
def __init__(self, in_channels: int, mid_channels: int, out_channels: int) -> None:
super().__init__()
self.layers = nn.Sequential(nn.Conv2d(in_channels, mid_channels, 3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(mid_channels, mid_channels, 3, padding=1),
nn.ReLU(inplace=True),
nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True),
nn.Conv2d(mid_channels, out_channels, 3, padding=1),
nn.ReLU(inplace=True))
for layer in self.layers:
if isinstance(layer, nn.Conv2d):
he_normal(layer.weight.data)
layer.bias.data.zero_()
def forward(self, x: torch.Tensor) -> torch.Tensor:
return self.layers(x)
class UNet(nn.Module):
"""A standard UNet implementation in PyTorch.
This class is intentionally not @traceable (models and layers are handled by a different process).
Args:
input_size: The size of the input tensor (channels, height, width).
Raises:
ValueError: Length of `input_size` is not 3.
ValueError: `input_size`[1] or `input_size`[2] is not a multiple of 16.
"""
def __init__(self, input_size: Tuple[int, int, int] = (1, 128, 128)) -> None:
UNet._check_input_size(input_size)
super().__init__()
self.input_size = input_size
self.enc1 = UNetEncoderBlock(in_channels=input_size[0], out_channels=64)
self.enc2 = UNetEncoderBlock(in_channels=64, out_channels=128)
self.enc3 = UNetEncoderBlock(in_channels=128, out_channels=256)
self.enc4 = UNetEncoderBlock(in_channels=256, out_channels=512)
self.bottle_neck = UNetDecoderBlock(in_channels=512, mid_channels=1024, out_channels=512)
self.dec4 = UNetDecoderBlock(in_channels=1024, mid_channels=512, out_channels=256)
self.dec3 = UNetDecoderBlock(in_channels=512, mid_channels=256, out_channels=128)
self.dec2 = UNetDecoderBlock(in_channels=256, mid_channels=128, out_channels=64)
self.dec1 = nn.Sequential(nn.Conv2d(128, 64, 3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(64, 64, 3, padding=1),
nn.ReLU(inplace=True),
nn.Conv2d(64, 1, 1),
nn.Sigmoid())
for layer in self.dec1:
if isinstance(layer, nn.Conv2d):
he_normal(layer.weight.data)
layer.bias.data.zero_()
def forward(self, x: torch.Tensor) -> torch.Tensor:
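        # encoder feature maps (x1..x4) are kept for the skip connections into the matching decoder stages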
x1, x_e1 = self.enc1(x)
x2, x_e2 = self.enc2(x_e1)
x3, x_e3 = self.enc3(x_e2)
x4, x_e4 = self.enc4(x_e3)
x_bottle_neck = self.bottle_neck(x_e4)
x_d4 = self.dec4(torch.cat((x_bottle_neck, x4), 1))
x_d3 = self.dec3(torch.cat((x_d4, x3), 1))
x_d2 = self.dec2(torch.cat((x_d3, x2), 1))
x_out = self.dec1(torch.cat((x_d2, x1), 1))
return x_out
@staticmethod
def _check_input_size(input_size):
if len(input_size) != 3:
raise ValueError("Length of `input_size` is not 3 (channel, height, width)")
_, height, width = input_size
if height < 16 or not (height / 16.0).is_integer() or width < 16 or not (width / 16.0).is_integer():
raise ValueError("Both height and width of input_size need to be multiples of 16 (16, 32, 48...)") | en | 0.845853 | # Copyright 2019 The FastEstimator Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== A UNet encoder block. This class is intentionally not @traceable (models and layers are handled by a different process). Args: in_channels: How many channels enter the encoder. out_channels: How many channels leave the encoder. A UNet decoder block. This class is intentionally not @traceable (models and layers are handled by a different process). Args: in_channels: How many channels enter the decoder. mid_channels: How many channels are used for the decoder's intermediate layer. out_channels: How many channels leave the decoder. A standard UNet implementation in PyTorch. This class is intentionally not @traceable (models and layers are handled by a different process). Args: input_size: The size of the input tensor (channels, height, width). Raises: ValueError: Length of `input_size` is not 3. ValueError: `input_size`[1] or `input_size`[2] is not a multiple of 16. | 2.377219 | 2 |
generalfile/path.py | Mandera/generalfile | 0 | 7573 | <reponame>Mandera/generalfile
import pathlib
import os
from generallibrary import VerInfo, TreeDiagram, Recycle, classproperty, deco_cache
from generalfile.errors import InvalidCharacterError
from generalfile.path_lock import Path_ContextManager
from generalfile.path_operations import Path_Operations
from generalfile.path_strings import Path_Strings
from generalfile.optional_dependencies.path_spreadsheet import Path_Spreadsheet
from generalfile.optional_dependencies.path_text import Path_Text
from generalfile.optional_dependencies.path_cfg import Path_Cfg
from generalfile.optional_dependencies.path_pickle import Path_Pickle
class Path(TreeDiagram, Recycle, Path_ContextManager, Path_Operations, Path_Strings, Path_Spreadsheet, Path_Text, Path_Cfg, Path_Pickle):
""" Immutable cross-platform Path.
Built on pathlib and TreeDiagram.
    Implements rules to ensure cross-platform compatibility.
Adds useful methods.
Todo: Binary extension. """
verInfo = VerInfo()
_path_delimiter = verInfo.pathDelimiter
Path = ...
_recycle_keys = {"path": lambda path: Path.scrub("" if path is None else str(path))}
    _alternative_chars = {_path_delimiter: "&#47;", ":": "&#58;", ".": "&#46;"}  # HTML-entity stand-ins for reserved path characters
def __init__(self, path=None): # Don't have parent here because of Recycle
self.path = self.scrub(str_path="" if path is None else str(path))
self._path = pathlib.Path(self.path)
self._latest_listdir = set()
copy_node = NotImplemented # Maybe something like this to disable certain methods
@classproperty
def path_delimiter(cls):
return cls._path_delimiter
def spawn_parents(self):
if not self.get_parent(spawn=False) and self.path and not self.is_root():
try:
index = self.path.rindex(self.path_delimiter) + 1
except ValueError:
index = 0
self.set_parent(Path(path=self.path[:index]))
def spawn_children(self):
if self.is_folder():
old_children = {path.name() for path in self.get_children(spawn=False)}
try:
new_children = set(os.listdir(self.path if self.path else "."))
except PermissionError:
new_children = set()
for name in old_children.symmetric_difference(new_children):
path = Path(path=self / name)
path.set_parent(self if name in new_children else None)
def __str__(self):
return getattr(self, "path", "<Path not loaded yet>")
# return self.path
def __repr__(self):
return self.name()
def __fspath__(self):
return self.path
def __format__(self, format_spec):
return self.path.__format__(format_spec)
def __truediv__(self, other):
""" :rtype: generalfile.Path """
# print("here", self._recycle_instances)
return self.Path(self._path / str(other))
def __eq__(self, other):
if isinstance(other, Path):
other = other.path
else:
other = self._scrub("" if other is None else str(other))
return self.path == other
def __hash__(self):
return hash(self.path)
def __contains__(self, item):
return self.path.__contains__(item)
@classmethod
def _scrub(cls, str_path):
str_path = cls._replace_delimiters(str_path=str_path)
str_path = cls._invalid_characters(str_path=str_path)
str_path = cls._trim(str_path=str_path)
str_path = cls._delimiter_suffix_if_root(str_path=str_path)
return str_path
@classmethod
@deco_cache()
def scrub(cls, str_path):
return cls._scrub(str_path=str_path)
@classmethod
@deco_cache()
def _replace_delimiters(cls, str_path):
str_path = str_path.replace("/", cls.path_delimiter)
str_path = str_path.replace("\\", cls.path_delimiter)
return str_path
@classmethod
@deco_cache()
def _invalid_characters(cls, str_path):
# Simple invalid characters testing from Windows
for character in '<>"|?*':
if character in str_path:
raise InvalidCharacterError(f"Invalid character '{character}' in '{str_path}'")
if ":" in str_path:
if not cls.verInfo.pathRootHasColon:
raise InvalidCharacterError(f"Path has a colon but '{cls.verInfo.os}' doesn't use colon for path root: '{str_path}'")
if str_path[1] != ":":
raise InvalidCharacterError(f"Path has a colon but there's no colon at index 1: '{str_path}'")
if len(str_path) >= 3 and str_path[2] != cls.path_delimiter:
raise InvalidCharacterError(f"Path has a colon but index 2 is not a delimiter: '{str_path}'")
if ":" in str_path[2:]:
raise InvalidCharacterError(f"Path has a colon that's not at index 1: '{str_path}'")
if str_path.endswith("."):
raise InvalidCharacterError(f"Path cannot end with a dot ('.').")
return str_path
@classmethod
@deco_cache()
def _trim(cls, str_path):
if not cls.verInfo.pathRootIsDelimiter and str_path.startswith(cls.path_delimiter):
str_path = str_path[1:]
if str_path.endswith(cls.path_delimiter) and len(str_path) > 1:
str_path = str_path[0:-1]
return str_path
@classmethod
@deco_cache()
def _delimiter_suffix_if_root(cls, str_path):
if len(str_path) == 2 and str_path[1] == ":":
return f"{str_path}{cls.path_delimiter}"
return str_path
setattr(Path, "Path", Path)
| import pathlib
import os
from generallibrary import VerInfo, TreeDiagram, Recycle, classproperty, deco_cache
from generalfile.errors import InvalidCharacterError
from generalfile.path_lock import Path_ContextManager
from generalfile.path_operations import Path_Operations
from generalfile.path_strings import Path_Strings
from generalfile.optional_dependencies.path_spreadsheet import Path_Spreadsheet
from generalfile.optional_dependencies.path_text import Path_Text
from generalfile.optional_dependencies.path_cfg import Path_Cfg
from generalfile.optional_dependencies.path_pickle import Path_Pickle
class Path(TreeDiagram, Recycle, Path_ContextManager, Path_Operations, Path_Strings, Path_Spreadsheet, Path_Text, Path_Cfg, Path_Pickle):
""" Immutable cross-platform Path.
Built on pathlib and TreeDiagram.
    Implements rules to ensure cross-platform compatibility.
Adds useful methods.
Todo: Binary extension. """
verInfo = VerInfo()
_path_delimiter = verInfo.pathDelimiter
Path = ...
_recycle_keys = {"path": lambda path: Path.scrub("" if path is None else str(path))}
    _alternative_chars = {_path_delimiter: "&#47;", ":": "&#58;", ".": "&#46;"}  # HTML-entity stand-ins for reserved path characters
def __init__(self, path=None): # Don't have parent here because of Recycle
self.path = self.scrub(str_path="" if path is None else str(path))
self._path = pathlib.Path(self.path)
self._latest_listdir = set()
copy_node = NotImplemented # Maybe something like this to disable certain methods
@classproperty
def path_delimiter(cls):
return cls._path_delimiter
def spawn_parents(self):
if not self.get_parent(spawn=False) and self.path and not self.is_root():
try:
index = self.path.rindex(self.path_delimiter) + 1
except ValueError:
index = 0
self.set_parent(Path(path=self.path[:index]))
def spawn_children(self):
if self.is_folder():
old_children = {path.name() for path in self.get_children(spawn=False)}
try:
new_children = set(os.listdir(self.path if self.path else "."))
except PermissionError:
new_children = set()
for name in old_children.symmetric_difference(new_children):
path = Path(path=self / name)
path.set_parent(self if name in new_children else None)
def __str__(self):
return getattr(self, "path", "<Path not loaded yet>")
# return self.path
def __repr__(self):
return self.name()
def __fspath__(self):
return self.path
def __format__(self, format_spec):
return self.path.__format__(format_spec)
def __truediv__(self, other):
""" :rtype: generalfile.Path """
# print("here", self._recycle_instances)
return self.Path(self._path / str(other))
def __eq__(self, other):
if isinstance(other, Path):
other = other.path
else:
other = self._scrub("" if other is None else str(other))
return self.path == other
def __hash__(self):
return hash(self.path)
def __contains__(self, item):
return self.path.__contains__(item)
@classmethod
def _scrub(cls, str_path):
str_path = cls._replace_delimiters(str_path=str_path)
str_path = cls._invalid_characters(str_path=str_path)
str_path = cls._trim(str_path=str_path)
str_path = cls._delimiter_suffix_if_root(str_path=str_path)
return str_path
@classmethod
@deco_cache()
def scrub(cls, str_path):
return cls._scrub(str_path=str_path)
@classmethod
@deco_cache()
def _replace_delimiters(cls, str_path):
str_path = str_path.replace("/", cls.path_delimiter)
str_path = str_path.replace("\\", cls.path_delimiter)
return str_path
@classmethod
@deco_cache()
def _invalid_characters(cls, str_path):
# Simple invalid characters testing from Windows
for character in '<>"|?*':
if character in str_path:
raise InvalidCharacterError(f"Invalid character '{character}' in '{str_path}'")
if ":" in str_path:
if not cls.verInfo.pathRootHasColon:
raise InvalidCharacterError(f"Path has a colon but '{cls.verInfo.os}' doesn't use colon for path root: '{str_path}'")
if str_path[1] != ":":
raise InvalidCharacterError(f"Path has a colon but there's no colon at index 1: '{str_path}'")
if len(str_path) >= 3 and str_path[2] != cls.path_delimiter:
raise InvalidCharacterError(f"Path has a colon but index 2 is not a delimiter: '{str_path}'")
if ":" in str_path[2:]:
raise InvalidCharacterError(f"Path has a colon that's not at index 1: '{str_path}'")
if str_path.endswith("."):
raise InvalidCharacterError(f"Path cannot end with a dot ('.').")
return str_path
@classmethod
@deco_cache()
def _trim(cls, str_path):
if not cls.verInfo.pathRootIsDelimiter and str_path.startswith(cls.path_delimiter):
str_path = str_path[1:]
if str_path.endswith(cls.path_delimiter) and len(str_path) > 1:
str_path = str_path[0:-1]
return str_path
@classmethod
@deco_cache()
def _delimiter_suffix_if_root(cls, str_path):
if len(str_path) == 2 and str_path[1] == ":":
return f"{str_path}{cls.path_delimiter}"
return str_path
setattr(Path, "Path", Path) | en | 0.715967 | Immutable cross-platform Path. Built on pathlib and TreeDiagram. Implements rules to ensure cross-platform compatability. Adds useful methods. Todo: Binary extension. #47;", ":": ":", ".": "."} # Don't have parent here because of Recycle # Maybe something like this to disable certain methods # return self.path :rtype: generalfile.Path # print("here", self._recycle_instances) # Simple invalid characters testing from Windows | 2.112951 | 2 |
src/genui/models/models.py | Tontolda/genui | 15 | 7574 | <gh_stars>10-100
import os
from django.db import models
import uuid
# Create your models here.
from djcelery_model.models import TaskMixin
from polymorphic.models import PolymorphicModel
from genui.utils.models import NON_POLYMORPHIC_CASCADE, OverwriteStorage
from genui.utils.extensions.tasks.models import TaskShortcutsMixIn, PolymorphicTaskManager
from genui.projects.models import DataSet
class AlgorithmMode(models.Model):
name = models.CharField(unique=True, blank=False, max_length=32)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelFileFormat(models.Model):
fileExtension = models.CharField(max_length=32, blank=False, unique=True)
description = models.TextField(max_length=10000, blank=True)
class ImportableModelComponent(models.Model):
corePackage = models.CharField(blank=False, null=False, default='genui.models.genuimodels', max_length=1024)
class Meta:
abstract = True
class Algorithm(ImportableModelComponent):
name = models.CharField(blank=False, max_length=128, unique=True)
fileFormats = models.ManyToManyField(ModelFileFormat)
validModes = models.ManyToManyField(AlgorithmMode)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelParameter(models.Model):
STRING = 'string'
BOOL = 'bool'
INTEGER = 'integer'
FLOAT = 'float'
CONTENT_TYPES = [
(STRING, 'String'),
(BOOL, 'Logical'),
(INTEGER, 'Integer'),
(FLOAT, 'Float'),
]
name = models.CharField(max_length=128, blank=False)
algorithm = models.ForeignKey(Algorithm, on_delete=models.CASCADE, null=False, related_name='parameters')
contentType = models.CharField(max_length=32, choices=CONTENT_TYPES, default=STRING)
defaultValue = models.ForeignKey("ModelParameterValue", on_delete=models.SET_NULL, null=True)
class Meta:
unique_together = ('name', 'algorithm')
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelBuilder(ImportableModelComponent):
name = models.CharField(max_length=128, blank=False, unique=True)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelFile(models.Model):
MAIN = "main"
AUXILIARY = "aux"
KINDS = [
(MAIN, 'Main'),
(AUXILIARY, 'Auxiliary'),
]
class Rejected(Exception):
def __init__(self, msg):
super().__init__(msg)
class InvalidFileFormatError(Exception):
def __init__(self, msg):
super().__init__(msg)
modelInstance = models.ForeignKey("Model", null=False, related_name="files", on_delete=models.CASCADE)
kind = models.CharField(max_length=32, choices=KINDS, null=False, default=AUXILIARY)
note = models.CharField(max_length=128, blank=True)
format = models.ForeignKey(ModelFileFormat, null=True, on_delete=models.CASCADE)
file = models.FileField(null=True, upload_to='models/', storage=OverwriteStorage()) # TODO: add custom logic to save in a directory specific to the project where the model is
@property
def path(self):
return self.file.path
@staticmethod
def generateMainFileName(model, fileFormat):
return f"{model.trainingStrategy.algorithm.name}{model.id}_project{model.project.id}_{uuid.uuid4().hex}_main{fileFormat.fileExtension}"
@staticmethod
def generateAuxFileName(model, fileFormat):
return f"{model.trainingStrategy.algorithm.name}{model.id}_project{model.project.id}_{uuid.uuid4().hex}_aux{fileFormat.fileExtension}"
@staticmethod
def create(model, name, file_, kind=AUXILIARY, note=None):
if not note:
note = ''
algorithm = model.trainingStrategy.algorithm
if kind == ModelFile.MAIN and model.modelFile:
file_format = None
for format_ in algorithm.fileFormats.all():
if name.endswith(format_.fileExtension):
file_format = format_
break
if not file_format:
raise ModelFile.InvalidFileFormatError(f"The extension for file '{name}' of the submitted file did not match any of the known formats for algorithm: ({algorithm.name}).")
if model.modelFile.format.fileExtension == file_format.fileExtension:
model.modelFile.file.save(os.path.basename(model.modelFile.path), file_)
else:
model.modelFile.delete()
ModelFile.objects.create(
model=model,
kind=ModelFile.MAIN,
format=file_format,
note=note,
file=file_
)
return model.modelFile
else:
file_format = None
for format_ in ModelFileFormat.objects.all():
if name.endswith(format_.fileExtension):
file_format = format_
break
if kind == ModelFile.MAIN:
if not file_format:
raise ModelFile.InvalidFileFormatError(f"The extension for file '{name}' of the submitted file did not match any of the known formats for algorithm: ({algorithm.name}).")
ret = ModelFile.objects.create(
modelInstance=model,
kind=ModelFile.MAIN,
format=file_format,
note=note
)
ret.file.save(ret.generateMainFileName(model, file_format), file_)
else:
ret = ModelFile.objects.create(
modelInstance=model,
kind=kind,
format=file_format if file_format else ModelFileFormat.objects.get_or_create(
fileExtension='.' + name.split('.')[-1]
)[0],
note=note
)
ret.file.save(ret.generateAuxFileName(model, ret.format), file_)
return ret
class Model(TaskShortcutsMixIn, TaskMixin, DataSet):
objects = PolymorphicTaskManager()
builder = models.ForeignKey(ModelBuilder, on_delete=models.CASCADE, null=False)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
@property
def modelFile(self):
# TODO: exception when more than one main file found
main = self.files.filter(kind=ModelFile.MAIN)
if main:
return main.get()
else:
return None
def onFileSave(self, saved : ModelFile):
"""
This will be called when a file is being
saved to this model instance. You can throw
the ModelFile.Rejected exception if the file
is invalid.
:param saved:
:return:
"""
pass
# @modelFile.setter
# def modelFile(self, val):
# main = self.files.filter(kind=ModelFile.MAIN)
# if main:
# main.delete()
# val.kind = ModelFile.MAIN
# val.save()
# self.files.add(val)
# self.save()
@property
def trainingStrategy(self):
count = self.trainingStrategies.count()
if count == 1:
return self.trainingStrategies.get()
elif count == 0:
return None
else:
raise Exception("Training strategy returned more than one value. This indicates an integrity error in the database!")
@property
def validationStrategy(self):
count = self.validationStrategies.count()
if count == 1:
return self.validationStrategies.get()
elif count == 0:
return None
else:
raise Exception("Validation strategy returned more than one value. This indicates an integrity error in the database!")
class TrainingStrategy(PolymorphicModel):
algorithm = models.ForeignKey(Algorithm, on_delete=models.CASCADE, null=False)
mode = models.ForeignKey(AlgorithmMode, on_delete=models.CASCADE, null=False)
modelInstance = models.ForeignKey(Model, null=False, on_delete=models.CASCADE, related_name="trainingStrategies")
class ModelParameterValue(PolymorphicModel):
parameter = models.ForeignKey(ModelParameter, on_delete=models.CASCADE, null=False)
strategy = models.ForeignKey(TrainingStrategy, on_delete=NON_POLYMORPHIC_CASCADE, null=True, related_name='parameters')
@staticmethod
def parseValue(val):
return str(val)
class ModelParameterStr(ModelParameterValue):
value = models.CharField(max_length=1024)
class ModelParameterBool(ModelParameterValue):
value = models.BooleanField(null=False)
@staticmethod
def parseValue(val):
return bool(val)
class ModelParameterInt(ModelParameterValue):
value = models.IntegerField(null=False)
@staticmethod
def parseValue(val):
return int(val)
class ModelParameterFloat(ModelParameterValue):
value = models.FloatField(null=False)
@staticmethod
def parseValue(val):
return float(val)
PARAM_VALUE_CTYPE_TO_MODEL_MAP = {
ModelParameter.STRING : ModelParameterStr,
ModelParameter.INTEGER : ModelParameterInt,
ModelParameter.FLOAT : ModelParameterFloat,
ModelParameter.BOOL : ModelParameterBool
}
class ModelPerformanceMetric(ImportableModelComponent):
name = models.CharField(unique=True, blank=False, max_length=128)
validModes = models.ManyToManyField(AlgorithmMode, related_name='metrics')
validAlgorithms = models.ManyToManyField(Algorithm, related_name='metrics')
description = models.TextField(max_length=10000, blank=True)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ValidationStrategy(PolymorphicModel):
metrics = models.ManyToManyField(ModelPerformanceMetric)
modelInstance = models.ForeignKey(Model, null=False, on_delete=models.CASCADE, related_name='validationStrategies')
class CV(ValidationStrategy):
cvFolds = models.IntegerField(blank=False)
class Meta:
abstract = True
class ValidationSet(ValidationStrategy):
validSetSize = models.FloatField(blank=False)
class Meta:
abstract = True
class BasicValidationStrategy(ValidationSet, CV):
pass
class ModelPerformance(PolymorphicModel):
metric = models.ForeignKey(ModelPerformanceMetric, null=False, on_delete=models.CASCADE)
value = models.FloatField(blank=False)
model = models.ForeignKey(Model, null=False, on_delete=NON_POLYMORPHIC_CASCADE, related_name="performance")
class ModelPerformanceCV(ModelPerformance):
fold = models.IntegerField(blank=False)
class ModelPerfomanceNN(ModelPerformance):
epoch = models.IntegerField(null=False, blank=False)
step = models.IntegerField(null=False, blank=False)
class ROCCurvePoint(ModelPerformance):
fpr = models.FloatField(blank=False)
auc = models.ForeignKey(ModelPerformance, null=False, on_delete=NON_POLYMORPHIC_CASCADE, related_name="points")
@property
def tpr(self):
return self.value | import os
from django.db import models
import uuid
# Create your models here.
from djcelery_model.models import TaskMixin
from polymorphic.models import PolymorphicModel
from genui.utils.models import NON_POLYMORPHIC_CASCADE, OverwriteStorage
from genui.utils.extensions.tasks.models import TaskShortcutsMixIn, PolymorphicTaskManager
from genui.projects.models import DataSet
class AlgorithmMode(models.Model):
name = models.CharField(unique=True, blank=False, max_length=32)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelFileFormat(models.Model):
fileExtension = models.CharField(max_length=32, blank=False, unique=True)
description = models.TextField(max_length=10000, blank=True)
class ImportableModelComponent(models.Model):
corePackage = models.CharField(blank=False, null=False, default='genui.models.genuimodels', max_length=1024)
class Meta:
abstract = True
class Algorithm(ImportableModelComponent):
name = models.CharField(blank=False, max_length=128, unique=True)
fileFormats = models.ManyToManyField(ModelFileFormat)
validModes = models.ManyToManyField(AlgorithmMode)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelParameter(models.Model):
STRING = 'string'
BOOL = 'bool'
INTEGER = 'integer'
FLOAT = 'float'
CONTENT_TYPES = [
(STRING, 'String'),
(BOOL, 'Logical'),
(INTEGER, 'Integer'),
(FLOAT, 'Float'),
]
name = models.CharField(max_length=128, blank=False)
algorithm = models.ForeignKey(Algorithm, on_delete=models.CASCADE, null=False, related_name='parameters')
contentType = models.CharField(max_length=32, choices=CONTENT_TYPES, default=STRING)
defaultValue = models.ForeignKey("ModelParameterValue", on_delete=models.SET_NULL, null=True)
class Meta:
unique_together = ('name', 'algorithm')
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelBuilder(ImportableModelComponent):
name = models.CharField(max_length=128, blank=False, unique=True)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ModelFile(models.Model):
MAIN = "main"
AUXILIARY = "aux"
KINDS = [
(MAIN, 'Main'),
(AUXILIARY, 'Auxiliary'),
]
class Rejected(Exception):
def __init__(self, msg):
super().__init__(msg)
class InvalidFileFormatError(Exception):
def __init__(self, msg):
super().__init__(msg)
modelInstance = models.ForeignKey("Model", null=False, related_name="files", on_delete=models.CASCADE)
kind = models.CharField(max_length=32, choices=KINDS, null=False, default=AUXILIARY)
note = models.CharField(max_length=128, blank=True)
format = models.ForeignKey(ModelFileFormat, null=True, on_delete=models.CASCADE)
file = models.FileField(null=True, upload_to='models/', storage=OverwriteStorage()) # TODO: add custom logic to save in a directory specific to the project where the model is
@property
def path(self):
return self.file.path
@staticmethod
def generateMainFileName(model, fileFormat):
return f"{model.trainingStrategy.algorithm.name}{model.id}_project{model.project.id}_{uuid.uuid4().hex}_main{fileFormat.fileExtension}"
@staticmethod
def generateAuxFileName(model, fileFormat):
return f"{model.trainingStrategy.algorithm.name}{model.id}_project{model.project.id}_{uuid.uuid4().hex}_aux{fileFormat.fileExtension}"
@staticmethod
def create(model, name, file_, kind=AUXILIARY, note=None):
if not note:
note = ''
algorithm = model.trainingStrategy.algorithm
if kind == ModelFile.MAIN and model.modelFile:
file_format = None
for format_ in algorithm.fileFormats.all():
if name.endswith(format_.fileExtension):
file_format = format_
break
if not file_format:
raise ModelFile.InvalidFileFormatError(f"The extension for file '{name}' of the submitted file did not match any of the known formats for algorithm: ({algorithm.name}).")
if model.modelFile.format.fileExtension == file_format.fileExtension:
model.modelFile.file.save(os.path.basename(model.modelFile.path), file_)
else:
model.modelFile.delete()
ModelFile.objects.create(
model=model,
kind=ModelFile.MAIN,
format=file_format,
note=note,
file=file_
)
return model.modelFile
else:
file_format = None
for format_ in ModelFileFormat.objects.all():
if name.endswith(format_.fileExtension):
file_format = format_
break
if kind == ModelFile.MAIN:
if not file_format:
raise ModelFile.InvalidFileFormatError(f"The extension for file '{name}' of the submitted file did not match any of the known formats for algorithm: ({algorithm.name}).")
ret = ModelFile.objects.create(
modelInstance=model,
kind=ModelFile.MAIN,
format=file_format,
note=note
)
ret.file.save(ret.generateMainFileName(model, file_format), file_)
else:
ret = ModelFile.objects.create(
modelInstance=model,
kind=kind,
format=file_format if file_format else ModelFileFormat.objects.get_or_create(
fileExtension='.' + name.split('.')[-1]
)[0],
note=note
)
ret.file.save(ret.generateAuxFileName(model, ret.format), file_)
return ret
class Model(TaskShortcutsMixIn, TaskMixin, DataSet):
objects = PolymorphicTaskManager()
builder = models.ForeignKey(ModelBuilder, on_delete=models.CASCADE, null=False)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
@property
def modelFile(self):
# TODO: exception when more than one main file found
main = self.files.filter(kind=ModelFile.MAIN)
if main:
return main.get()
else:
return None
def onFileSave(self, saved : ModelFile):
"""
This will be called when a file is being
saved to this model instance. You can throw
the ModelFile.Rejected exception if the file
is invalid.
:param saved:
:return:
"""
pass
# @modelFile.setter
# def modelFile(self, val):
# main = self.files.filter(kind=ModelFile.MAIN)
# if main:
# main.delete()
# val.kind = ModelFile.MAIN
# val.save()
# self.files.add(val)
# self.save()
@property
def trainingStrategy(self):
count = self.trainingStrategies.count()
if count == 1:
return self.trainingStrategies.get()
elif count == 0:
return None
else:
raise Exception("Training strategy returned more than one value. This indicates an integrity error in the database!")
@property
def validationStrategy(self):
count = self.validationStrategies.count()
if count == 1:
return self.validationStrategies.get()
elif count == 0:
return None
else:
raise Exception("Validation strategy returned more than one value. This indicates an integrity error in the database!")
class TrainingStrategy(PolymorphicModel):
algorithm = models.ForeignKey(Algorithm, on_delete=models.CASCADE, null=False)
mode = models.ForeignKey(AlgorithmMode, on_delete=models.CASCADE, null=False)
modelInstance = models.ForeignKey(Model, null=False, on_delete=models.CASCADE, related_name="trainingStrategies")
class ModelParameterValue(PolymorphicModel):
parameter = models.ForeignKey(ModelParameter, on_delete=models.CASCADE, null=False)
strategy = models.ForeignKey(TrainingStrategy, on_delete=NON_POLYMORPHIC_CASCADE, null=True, related_name='parameters')
@staticmethod
def parseValue(val):
return str(val)
class ModelParameterStr(ModelParameterValue):
value = models.CharField(max_length=1024)
class ModelParameterBool(ModelParameterValue):
value = models.BooleanField(null=False)
@staticmethod
def parseValue(val):
return bool(val)
class ModelParameterInt(ModelParameterValue):
value = models.IntegerField(null=False)
@staticmethod
def parseValue(val):
return int(val)
class ModelParameterFloat(ModelParameterValue):
value = models.FloatField(null=False)
@staticmethod
def parseValue(val):
return float(val)
PARAM_VALUE_CTYPE_TO_MODEL_MAP = {
ModelParameter.STRING : ModelParameterStr,
ModelParameter.INTEGER : ModelParameterInt,
ModelParameter.FLOAT : ModelParameterFloat,
ModelParameter.BOOL : ModelParameterBool
}
class ModelPerformanceMetric(ImportableModelComponent):
name = models.CharField(unique=True, blank=False, max_length=128)
validModes = models.ManyToManyField(AlgorithmMode, related_name='metrics')
validAlgorithms = models.ManyToManyField(Algorithm, related_name='metrics')
description = models.TextField(max_length=10000, blank=True)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.name)
class ValidationStrategy(PolymorphicModel):
metrics = models.ManyToManyField(ModelPerformanceMetric)
modelInstance = models.ForeignKey(Model, null=False, on_delete=models.CASCADE, related_name='validationStrategies')
class CV(ValidationStrategy):
cvFolds = models.IntegerField(blank=False)
class Meta:
abstract = True
class ValidationSet(ValidationStrategy):
validSetSize = models.FloatField(blank=False)
class Meta:
abstract = True
class BasicValidationStrategy(ValidationSet, CV):
pass
class ModelPerformance(PolymorphicModel):
metric = models.ForeignKey(ModelPerformanceMetric, null=False, on_delete=models.CASCADE)
value = models.FloatField(blank=False)
model = models.ForeignKey(Model, null=False, on_delete=NON_POLYMORPHIC_CASCADE, related_name="performance")
class ModelPerformanceCV(ModelPerformance):
fold = models.IntegerField(blank=False)
class ModelPerfomanceNN(ModelPerformance):
epoch = models.IntegerField(null=False, blank=False)
step = models.IntegerField(null=False, blank=False)
class ROCCurvePoint(ModelPerformance):
fpr = models.FloatField(blank=False)
auc = models.ForeignKey(ModelPerformance, null=False, on_delete=NON_POLYMORPHIC_CASCADE, related_name="points")
@property
def tpr(self):
return self.value | en | 0.726423 | # Create your models here. # TODO: add custom logic to save in a directory specific to the project where the model is # TODO: exception when more than one main file found This will be called when a file is being saved to this model instance. You can throw the ModelFile.Rejected exception if the file is invalid. :param saved: :return: # @modelFile.setter # def modelFile(self, val): # main = self.files.filter(kind=ModelFile.MAIN) # if main: # main.delete() # val.kind = ModelFile.MAIN # val.save() # self.files.add(val) # self.save() | 2.082443 | 2 |
projectroles/tests/test_views_api.py | bihealth/sodar_core | 11 | 7575 | <filename>projectroles/tests/test_views_api.py
"""REST API view tests for the projectroles app"""
import base64
import json
import pytz
from django.conf import settings
from django.core import mail
from django.forms.models import model_to_dict
from django.test import override_settings
from django.urls import reverse
from django.utils import timezone
from knox.models import AuthToken
from test_plus.test import APITestCase
from projectroles import views_api
from projectroles.models import (
Project,
Role,
RoleAssignment,
ProjectInvite,
SODAR_CONSTANTS,
)
from projectroles.plugins import change_plugin_status, get_backend_api
from projectroles.remote_projects import RemoteProjectAPI
from projectroles.tests.test_models import (
ProjectMixin,
RoleAssignmentMixin,
ProjectInviteMixin,
RemoteSiteMixin,
RemoteProjectMixin,
AppSettingMixin,
)
from projectroles.tests.test_views import (
TestViewsBase,
PROJECT_TYPE_CATEGORY,
PROJECT_TYPE_PROJECT,
PROJECT_ROLE_OWNER,
PROJECT_ROLE_DELEGATE,
PROJECT_ROLE_CONTRIBUTOR,
PROJECT_ROLE_GUEST,
REMOTE_SITE_NAME,
REMOTE_SITE_URL,
SITE_MODE_SOURCE,
SITE_MODE_TARGET,
REMOTE_SITE_DESC,
REMOTE_SITE_SECRET,
)
from projectroles.utils import build_secret
CORE_API_MEDIA_TYPE_INVALID = 'application/vnd.bihealth.invalid'
CORE_API_VERSION_INVALID = '9.9.9'
INVALID_UUID = '11111111-1111-1111-1111-111111111111'
NEW_CATEGORY_TITLE = 'New Category'
NEW_PROJECT_TITLE = 'New Project'
UPDATED_TITLE = 'Updated Title'
UPDATED_DESC = 'Updated description'
UPDATED_README = 'Updated readme'
INVITE_USER_EMAIL = '<EMAIL>'
INVITE_USER2_EMAIL = '<EMAIL>'
INVITE_MESSAGE = 'Message'
# Base Classes -----------------------------------------------------------------
class SODARAPIViewTestMixin:
"""
Mixin for SODAR and SODAR Core API views with accept headers, knox token
authorization and general helper methods.
"""
# Default API header parameters are for external SODAR site APIs
# Override these for testing SODAR Core API views
media_type = settings.SODAR_API_MEDIA_TYPE
api_version = settings.SODAR_API_DEFAULT_VERSION
# Copied from Knox tests
@classmethod
def _get_basic_auth_header(cls, username, password):
return (
'Basic %s'
% base64.b64encode(
('%s:%s' % (username, password)).encode('ascii')
).decode()
)
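    # For example, _get_basic_auth_header('user', 'pass') returns
    # 'Basic dXNlcjpwYXNz' (base64 of 'user:pass').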
@classmethod
def get_token(cls, user, full_result=False):
"""
Get or create a knox token for a user.
:param user: User object
:param full_result: Return full result of AuthToken creation if True
:return: Token string or AuthToken creation tuple
"""
result = AuthToken.objects.create(user=user)
return result if full_result else result[1]
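    # For example, get_token(user) returns the token string, while
    # get_token(user, full_result=True) returns the (AuthToken, token)
    # tuple produced by AuthToken.objects.create().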
@classmethod
def get_serialized_user(cls, user):
"""
Return serialization for a user.
:param user: User object
:return: Dict
"""
return {
'email': user.email,
'name': user.name,
'sodar_uuid': str(user.sodar_uuid),
'username': user.username,
}
@classmethod
def get_drf_datetime(cls, obj_dt):
"""
Return datetime in DRF compatible format.
:param obj_dt: Object DateTime field
:return: String
"""
return timezone.localtime(
obj_dt, pytz.timezone(settings.TIME_ZONE)
).isoformat()
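    # For example, with TIME_ZONE set to 'Europe/Berlin' this returns an
    # ISO 8601 string such as '2021-06-01T14:00:00+02:00' (values are
    # illustrative).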
@classmethod
def get_accept_header(
cls,
media_type=None,
version=None,
):
"""
Return version accept header based on the media type and version string.
:param media_type: String (default = cls.media_type)
:param version: String (default = cls.api_version)
:return: Dict
"""
if not media_type:
media_type = cls.media_type
if not version:
version = cls.api_version
return {'HTTP_ACCEPT': '{}; version={}'.format(media_type, version)}
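    # For example, with a hypothetical media type and version this returns
    # {'HTTP_ACCEPT': 'application/vnd.example+json; version=1.0'}.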
@classmethod
def get_token_header(cls, token):
"""
Return auth header based on token.
:param token: Token string
:return: Dict
"""
return {'HTTP_AUTHORIZATION': 'token {}'.format(token)}
def request_knox(
self,
url,
method='GET',
format='json',
data=None,
token=None,
media_type=None,
version=None,
header=None,
):
"""
        Perform an HTTP request with Knox token auth.
        :param url: URL for the request
        :param method: Request method (string, default="GET")
        :param format: Request format (string, default="json")
        :param data: Optional data for request (dict)
        :param token: Knox token string (if None, use self.knox_token)
        :param media_type: String (default = cls.media_type)
        :param version: String (default = cls.api_version)
        :param header: Optional additional request headers (dict)
        :return: Response object
"""
if not token:
token = self.knox_token
req_kwargs = {
'format': format,
**self.get_accept_header(media_type, version),
**self.get_token_header(token),
}
if data:
req_kwargs['data'] = data
if header:
req_kwargs.update(header)
req_method = getattr(self.client, method.lower(), None)
if not req_method:
raise ValueError('Unsupported method "{}"'.format(method))
return req_method(url, **req_kwargs)
class TestAPIViewsBase(
ProjectMixin, RoleAssignmentMixin, SODARAPIViewTestMixin, APITestCase
):
"""Base API test view with knox authentication"""
def setUp(self):
# Show complete diff in case of failure
self.maxDiff = None
# Force disabling of taskflow plugin if it's available
if get_backend_api('taskflow'):
change_plugin_status(
                name='taskflow', status=1, plugin_type='backend'  # 1 = Disabled
)
# Init roles
self.role_owner = Role.objects.get_or_create(name=PROJECT_ROLE_OWNER)[0]
self.role_delegate = Role.objects.get_or_create(
name=PROJECT_ROLE_DELEGATE
)[0]
self.role_contributor = Role.objects.get_or_create(
name=PROJECT_ROLE_CONTRIBUTOR
)[0]
self.role_guest = Role.objects.get_or_create(name=PROJECT_ROLE_GUEST)[0]
# Init superuser
self.user = self.make_user('superuser')
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
# Set up category and project with owner role assignments
self.category = self._make_project(
'TestCategory', PROJECT_TYPE_CATEGORY, None
)
self.cat_owner_as = self._make_assignment(
self.category, self.user, self.role_owner
)
self.project = self._make_project(
'TestProject', PROJECT_TYPE_PROJECT, self.category
)
self.owner_as = self._make_assignment(
self.project, self.user, self.role_owner
)
# Get knox token for self.user
self.knox_token = self.get_token(self.user)
class TestCoreAPIViewsBase(TestAPIViewsBase):
"""Override of TestAPIViewsBase to be used with SODAR Core API views"""
media_type = views_api.CORE_API_MEDIA_TYPE
api_version = views_api.CORE_API_DEFAULT_VERSION
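# A minimal usage sketch for the base classes above (hypothetical app and URL
# names, shown for illustration only):
#
#     class TestMyAppDetailAPIView(TestAPIViewsBase):
#         def test_get(self):
#             url = reverse(
#                 'myapp:api_detail',
#                 kwargs={'project': self.project.sodar_uuid},
#             )
#             response = self.request_knox(url)
#             self.assertEqual(response.status_code, 200)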
# Tests ------------------------------------------------------------------------
class TestProjectListAPIView(TestCoreAPIViewsBase):
"""Tests for ProjectListAPIView"""
def test_get(self):
"""Test ProjectListAPIView get() as project owner"""
url = reverse('projectroles:api_project_list')
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 2)
expected = [
{
'title': self.category.title,
'type': self.category.type,
'parent': None,
'description': self.category.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.category.submit_status,
'roles': {
str(self.cat_owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.cat_owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
},
{
'title': self.project.title,
'type': self.project.type,
'parent': str(self.category.sodar_uuid),
'description': self.project.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.project.submit_status,
'roles': {
str(self.owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
def test_get_no_roles(self):
"""Test ProjectListAPIView get() without roles"""
user_no_roles = self.make_user('user_no_roles')
url = reverse('projectroles:api_project_list')
response = self.request_knox(url, token=self.get_token(user_no_roles))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 0)
def test_get_limited_roles(self):
"""Test ProjectListAPIView get() with only one role"""
user_no_roles = self.make_user('user_no_roles')
self._make_assignment(
self.project, user_no_roles, self.role_contributor
)
url = reverse('projectroles:api_project_list')
response = self.request_knox(url, token=self.get_token(user_no_roles))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 1)
class TestProjectRetrieveAPIView(AppSettingMixin, TestCoreAPIViewsBase):
"""Tests for ProjectRetrieveAPIView"""
def test_get_category(self):
"""Test ProjectRetrieveAPIView get() with a category"""
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.category.sodar_uuid},
)
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'title': self.category.title,
'type': self.category.type,
'parent': None,
'description': self.category.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.category.submit_status,
'roles': {
str(self.cat_owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.cat_owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
}
self.assertEqual(response_data, expected)
def test_get_project(self):
"""Test ProjectRetrieveAPIView get() with a project"""
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.project.sodar_uuid},
)
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'title': self.project.title,
'type': self.project.type,
'parent': str(self.category.sodar_uuid),
'description': self.project.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.project.submit_status,
'roles': {
str(self.owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
self.assertEqual(response_data, expected)
def test_get_not_found(self):
"""Test ProjectRetrieveAPIView get() with an invalid UUID"""
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': INVALID_UUID},
)
response = self.request_knox(url)
self.assertEqual(response.status_code, 404)
class TestIPAllowing(AppSettingMixin, TestCoreAPIViewsBase):
"""Tests for IP allowing settings using ProjectRetrieveAPIView"""
def _setup_ip_allowing(self, ip_list, role_suffix):
"""Setup users and roles for IP allowing test"""
# Create new user
user = self.make_user(role_suffix)
# Remove previously assigned owner role for superuser.
superuser_as_owner_role = RoleAssignment.objects.get(
project=self.project, user=self.user, role=self.role_owner
)
superuser_as_owner_role.delete()
# Assign requested role to user
user_as = self._make_assignment(
self.project, user, getattr(self, 'role_' + role_suffix)
)
user_cat_as = RoleAssignment.objects.get(
project=self.category, user=self.user, role=self.role_owner
)
if role_suffix == 'owner':
user_cat_as.delete()
user_cat_as = self._make_assignment(
self.category, user, getattr(self, 'role_' + role_suffix)
)
# Init IP restrict setting
self._make_setting(
app_name='projectroles',
name='ip_restrict',
setting_type='BOOLEAN',
value=True,
project=self.project,
)
# Init IP allowlist setting
self._make_setting(
app_name='projectroles',
name='ip_allowlist',
setting_type='JSON',
value=None,
value_json=ip_list,
project=self.project,
)
return user, user_as, user_cat_as
def _get_project_ip_allowing(
self, username, http_attribute, ip_list, blocked=None
):
"""Helper for IP allowing tests"""
if blocked is None:
raise Exception('Please set "blocked" argument (True/False)')
user, user_as, user_cat_as = self._setup_ip_allowing(ip_list, username)
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.project.sodar_uuid},
)
header = {http_attribute: '192.168.1.1'}
response = self.request_knox(
url, token=self.get_token(user), header=header
)
if username == 'owner':
role = PROJECT_ROLE_OWNER
elif username == 'delegate':
role = PROJECT_ROLE_DELEGATE
elif username == 'contributor':
role = PROJECT_ROLE_CONTRIBUTOR
else:
role = PROJECT_ROLE_GUEST
if blocked:
self.assertEqual(response.status_code, 403)
else:
expected = {
'title': self.project.title,
'type': self.project.type,
'parent': str(self.category.sodar_uuid),
'description': self.project.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.project.submit_status,
'roles': {
str(user_as.sodar_uuid): {
'user': {
'username': user.username,
'name': user.name,
'email': user.email,
'sodar_uuid': str(user.sodar_uuid),
},
'role': role,
'sodar_uuid': str(user_as.sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(response_data, expected)
def test_http_x_forwarded_for_block_all_owner(self):
self._get_project_ip_allowing(
'owner', 'HTTP_X_FORWARDED_FOR', [], blocked=False
)
def test_http_x_forwarded_for_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'HTTP_X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_http_x_forwarded_for_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'HTTP_X_FORWARDED_FOR', [], blocked=False
)
def test_http_x_forwarded_for_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'HTTP_X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_http_x_forwarded_for_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'HTTP_X_FORWARDED_FOR', [], blocked=True
)
def test_http_x_forwarded_for_allow_ip_contributor(self):
self._get_project_ip_allowing(
'contributor',
'HTTP_X_FORWARDED_FOR',
['192.168.1.1'],
blocked=False,
)
def test_http_x_forwarded_for_block_all_guest(self):
self._get_project_ip_allowing(
'guest', 'HTTP_X_FORWARDED_FOR', [], blocked=True
)
def test_http_x_forwarded_for_allow_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'HTTP_X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_x_forwarded_for_block_all_owner(self):
self._get_project_ip_allowing(
'owner', 'X_FORWARDED_FOR', [], blocked=False
)
def test_x_forwarded_for_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_x_forwarded_for_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'X_FORWARDED_FOR', [], blocked=False
)
def test_x_forwarded_for_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_x_forwarded_for_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'X_FORWARDED_FOR', [], blocked=True
)
    def test_x_forwarded_for_allow_ip_contributor(self):
        self._get_project_ip_allowing(
            'contributor', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
        )
    def test_x_forwarded_for_block_all_guest(self):
        self._get_project_ip_allowing(
            'guest', 'X_FORWARDED_FOR', [], blocked=True
        )
    def test_x_forwarded_for_allow_ip_guest(self):
        self._get_project_ip_allowing(
            'guest', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
        )
def test_forwarded_block_all_owner(self):
self._get_project_ip_allowing('owner', 'FORWARDED', [], blocked=False)
def test_forwarded_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_forwarded_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'FORWARDED', [], blocked=False
)
def test_forwarded_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_forwarded_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'FORWARDED', [], blocked=True
)
def test_forwarded_allow_ip_contributor(self):
self._get_project_ip_allowing(
'contributor', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_forwarded_block_all_guest(self):
self._get_project_ip_allowing('guest', 'FORWARDED', [], blocked=True)
def test_forwarded_allow_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_owner(self):
self._get_project_ip_allowing('owner', 'REMOTE_ADDR', [], blocked=False)
def test_remote_addr_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'REMOTE_ADDR', [], blocked=False
)
def test_remote_addr_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'REMOTE_ADDR', [], blocked=True
)
def test_remote_addr_allow_ip_contributor(self):
self._get_project_ip_allowing(
'contributor', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_guest(self):
self._get_project_ip_allowing('guest', 'REMOTE_ADDR', [], blocked=True)
def test_remote_addr_allow_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
def test_remote_addr_allow_network_guest(self):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.1.0/24'], blocked=False
)
def test_remote_addr_block_not_in_allowlist_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.1.2'], blocked=True
)
def test_remote_addr_block_not_in_allowlist_network_guest(
self,
):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.2.0/24'], blocked=True
)
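# The network test above passes because the allowlist accepts both plain IP
# addresses and CIDR networks such as '192.168.1.0/24'. A minimal sketch of
# such a check with the stdlib ipaddress module (an assumption for
# illustration, not the actual projectroles implementation):
#
#     import ipaddress
#
#     def ip_in_allowlist(client_ip, allowlist):
#         addr = ipaddress.ip_address(client_ip)
#         for entry in allowlist:
#             if '/' in entry:  # CIDR network entry
#                 if addr in ipaddress.ip_network(entry):
#                     return True
#             elif addr == ipaddress.ip_address(entry):
#                 return True
#         return False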
class TestProjectCreateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for ProjectCreateAPIView"""
def test_create_category(self):
"""Test creating a root category"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': '',
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
# Assert object content
new_category = Project.objects.get(title=NEW_CATEGORY_TITLE)
model_dict = model_to_dict(new_category)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': new_category.pk,
'title': new_category.title,
'type': new_category.type,
'parent': None,
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': new_category.title,
'sodar_uuid': new_category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(
RoleAssignment.objects.filter(
project=new_category, user=self.user, role=self.role_owner
).count(),
1,
)
# Assert API response
expected = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'sodar_uuid': str(new_category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_category_nested(self):
"""Test creating a category under an existing category"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
# Assert object content
new_category = Project.objects.get(title=NEW_CATEGORY_TITLE)
model_dict = model_to_dict(new_category)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': new_category.pk,
'title': new_category.title,
'type': new_category.type,
'parent': self.category.pk,
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + new_category.title,
'sodar_uuid': new_category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(
RoleAssignment.objects.filter(
project=new_category, user=self.user, role=self.role_owner
).count(),
1,
)
# Assert API response
expected = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': str(self.category.sodar_uuid),
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'sodar_uuid': str(new_category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_project(self):
"""Test creating a project under an existing category"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
# Assert object content
new_project = Project.objects.get(title=NEW_PROJECT_TITLE)
model_dict = model_to_dict(new_project)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': new_project.pk,
'title': new_project.title,
'type': new_project.type,
'parent': self.category.pk,
'description': new_project.description,
'readme': new_project.readme.raw,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + new_project.title,
'sodar_uuid': new_project.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(
RoleAssignment.objects.filter(
project=new_project, user=self.user, role=self.role_owner
).count(),
1,
)
# Assert API response
expected = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': new_project.description,
'readme': new_project.readme.raw,
'public_guest_access': False,
'sodar_uuid': str(new_project.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_project_root(self):
"""Test creating a project in root (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': None,
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
@override_settings(PROJECTROLES_DISABLE_CATEGORIES=True)
def test_create_project_disable_categories(self):
"""Test creating a project in root with disabled categories"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': '',
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
def test_create_project_duplicate_title(self):
"""Test creating a project with a title already in category (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': self.project.title,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
def test_create_project_unknown_user(self):
"""Test creating a project with a non-existent user (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': INVALID_UUID,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
def test_create_project_unknown_parent(self):
"""Test creating a project with a non-existent parent category (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': INVALID_UUID,
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
def test_create_project_invalid_parent(self):
"""Test creating a project with a project as parent (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.project.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_project_target_enabled(self):
"""Test creating a project as TARGET with target creation allowed"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_project_target_remote(self):
"""Test creating a project as TARGET under a remote category (should fail)"""
# Create source site
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
# Make category remote
self._make_remote_project(
project_uuid=self.category.sodar_uuid,
project=self.category,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
@override_settings(
PROJECTROLES_SITE_MODE=SITE_MODE_TARGET,
PROJECTROLES_TARGET_CREATE=False,
)
def test_create_project_target_disabled(self):
"""Test creating a project as TARGET with target creation disallowed (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
class TestProjectUpdateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for ProjectUpdateAPIView"""
def test_put_category(self):
"""Test put() for category updating"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.category.sodar_uuid},
)
put_data = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': '',
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.category.refresh_from_db()
model_dict = model_to_dict(self.category)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.category.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': UPDATED_TITLE,
'sodar_uuid': self.category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'roles': {
str(self.category.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.category.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_put_project(self):
"""Test put() for project updating"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
put_data = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.project.refresh_from_db()
model_dict = model_to_dict(self.project)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.project.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': self.category.pk,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + UPDATED_TITLE,
'sodar_uuid': self.project.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'roles': {
str(self.project.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.project.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_category(self):
"""Test patch() for updating category metadata"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.category.sodar_uuid},
)
patch_data = {
'title': UPDATED_TITLE,
'description': UPDATED_DESC,
'readme': UPDATED_README,
}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.category.refresh_from_db()
model_dict = model_to_dict(self.category)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.category.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': UPDATED_TITLE,
'sodar_uuid': self.category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(self.category.get_owner().user, self.user)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': False,
'roles': {
str(self.category.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.category.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_project(self):
"""Test patch() for updating project metadata"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {
'title': UPDATED_TITLE,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.project.refresh_from_db()
model_dict = model_to_dict(self.project)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.project.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': self.category.pk,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + UPDATED_TITLE,
'sodar_uuid': self.project.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(self.project.get_owner().user, self.user)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'roles': {
str(self.project.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.project.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_project_owner(self):
"""Test patch() for updating project owner (should fail)"""
new_owner = self.make_user('new_owner')
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'owner': str(new_owner.sodar_uuid)}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_patch_project_move(self):
"""Test patch() for moving project under a different category"""
# Assert preconditions
self.assertEqual(
self.project.full_title,
self.category.title + ' / ' + self.project.title,
)
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
self._make_assignment(new_category, self.user, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': str(new_category.sodar_uuid)}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 200, msg=response.content)
# Assert object content
self.project.refresh_from_db()
model_dict = model_to_dict(self.project)
self.assertEqual(model_dict['parent'], new_category.pk)
# Assert role assignment
self.assertEqual(self.project.get_owner().user, self.user)
# Assert child project full title update
self.assertEqual(
self.project.full_title,
new_category.title + ' / ' + self.project.title,
)
# Assert API response
self.assertEqual(
json.loads(response.content)['parent'], str(new_category.sodar_uuid)
)
def test_patch_project_move_unallowed(self):
"""Test patch() for moving project without permissions (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
new_owner = self.make_user('new_owner')
self._make_assignment(new_category, new_owner, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': str(new_category.sodar_uuid)}
# Disable superuser status from self.user and perform request
self.user.is_superuser = False
self.user.save()
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 403, msg=response.content)
def test_patch_project_move_root(self):
"""Test patch() for moving project without permissions (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
new_owner = self.make_user('new_owner')
self._make_assignment(new_category, new_owner, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': ''}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 200, msg=response.content)
def test_patch_project_move_root_unallowed(self):
"""Test patch() for moving project to root without permissions (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
new_owner = self.make_user('new_owner')
self._make_assignment(new_category, new_owner, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': ''}
# Disable superuser status from self.user and perform request
self.user.is_superuser = False
self.user.save()
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 403, msg=response.content)
def test_patch_project_move_child(self):
"""Test patch() for moving a category inside its child (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, self.category
)
self._make_assignment(new_category, self.user, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.category.sodar_uuid},
)
patch_data = {'parent': str(new_category.sodar_uuid)}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_patch_project_type_change(self):
"""Test patch() with a changed project type (should fail)"""
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'type': PROJECT_TYPE_CATEGORY}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_patch_project_remote(self):
"""Test patch() for updating remote project metadata (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {
'title': UPDATED_TITLE,
'description': UPDATED_DESC,
'readme': UPDATED_README,
}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
class TestRoleAssignmentCreateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentCreateAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
def test_create_contributor(self):
"""Test creating a contributor role for user"""
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Assert object
role_as = RoleAssignment.objects.filter(
project=self.project,
role=self.role_contributor,
user=self.assign_user,
).first()
self.assertIsNotNone(role_as)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(role_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_owner(self):
"""Test creating an owner role (should fail)"""
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_OWNER,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_create_delegate(self):
"""Test creating a delegate role for user as owner"""
# Disable superuser status from self.user
self.user.is_superuser = False
self.user.save()
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Assert object
role_as = RoleAssignment.objects.filter(
project=self.project, role=self.role_delegate, user=self.assign_user
).first()
self.assertIsNotNone(role_as)
def test_create_delegate_unauthorized(self):
"""Test creating a delegate role without authorization (should fail)"""
        # Create new user and grant contributor role
new_user = self.make_user('new_user')
self._make_assignment(self.project, new_user, self.role_contributor)
new_user_token = self.get_token(new_user)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(
url, method='POST', data=post_data, token=new_user_token
)
# Assert response
self.assertEqual(response.status_code, 403, msg=response.content)
def test_create_delegate_limit(self):
"""Test creating a delegate role with limit reached (should fail)"""
# Create new user and grant delegate role
new_user = self.make_user('new_user')
self._make_assignment(self.project, new_user, self.role_delegate)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
# NOTE: Post as owner
response = self.request_knox(url, method='POST', data=post_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_create_delegate_limit_inherit(self):
"""Test creating a delegate role existing role for inherited owner"""
# Set up category owner
new_user = self.make_user('new_user')
self.cat_owner_as.user = new_user
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
# NOTE: Post as owner
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Assert object
role_as = RoleAssignment.objects.filter(
project=self.project, role=self.role_delegate, user=self.assign_user
).first()
self.assertIsNotNone(role_as)
def test_create_delegate_category(self):
"""Test creating a non-owner role for category"""
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.category.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response
self.assertEqual(response.status_code, 201, msg=response.content)
def test_create_role_existing(self):
"""Test creating a role for user already in the project"""
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Post again
post_data = {
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_remote(self):
"""Test creating a role for a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
class TestRoleAssignmentUpdateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentUpdateAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
self.update_as = self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
def test_put_role(self):
"""Test put() for role assignment updating"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response and role status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
# Assert object content
self.update_as.refresh_from_db()
model_dict = model_to_dict(self.update_as)
expected = {
'id': self.update_as.pk,
'project': self.project.pk,
'role': self.role_guest.pk,
'user': self.assign_user.pk,
'sodar_uuid': self.update_as.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(self.update_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_put_delegate(self):
"""Test put() for delegate role assignment"""
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response
self.assertEqual(response.status_code, 200, msg=response.content)
# Assert object content
self.update_as.refresh_from_db()
model_dict = model_to_dict(self.update_as)
expected = {
'id': self.update_as.pk,
'project': self.project.pk,
'role': self.role_delegate.pk,
'user': self.assign_user.pk,
'sodar_uuid': self.update_as.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(self.update_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_put_owner(self):
"""Test put() for owner role assignment (should fail)"""
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_OWNER,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_put_change_user(self):
"""Test put() with a different user (should fail)"""
new_user = self.make_user('new_user')
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_GUEST,
'user': str(new_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_patch_role(self):
"""Test patch() for role assignment updating"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
patch_data = {'role': PROJECT_ROLE_GUEST}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and role status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
# Assert object content
self.update_as.refresh_from_db()
model_dict = model_to_dict(self.update_as)
expected = {
'id': self.update_as.pk,
'project': self.project.pk,
'role': self.role_guest.pk,
'user': self.assign_user.pk,
'sodar_uuid': self.update_as.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(self.update_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_change_user(self):
"""Test patch() with a different user (should fail)"""
new_user = self.make_user('new_user')
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
patch_data = {'user': str(new_user.sodar_uuid)}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_patch_role_remote(self):
"""Test patch() for updating a role in a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
patch_data = {'role': PROJECT_ROLE_GUEST}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
class TestRoleAssignmentDestroyAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentDestroyAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
self.update_as = self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
def test_delete_role(self):
"""Test delete for role assignment deletion"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
response = self.request_knox(url, method='DELETE')
# Assert response and role status
self.assertEqual(response.status_code, 204, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 2)
self.assertEqual(
RoleAssignment.objects.filter(
project=self.project, user=self.assign_user
).count(),
0,
)
def test_delete_delegate_unauthorized(self):
"""Test delete for delegate deletion without perms (should fail)"""
new_user = self.make_user('new_user')
delegate_as = self._make_assignment(
self.project, new_user, self.role_delegate
)
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 4)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': delegate_as.sodar_uuid},
)
        # NOTE: Perform request as contributor user
token = self.get_token(self.assign_user)
response = self.request_knox(url, method='DELETE', token=token)
# Assert response and role status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 4)
def test_delete_owner(self):
"""Test delete for owner deletion (should fail)"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': self.owner_as.sodar_uuid},
)
response = self.request_knox(url, method='DELETE')
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_delete_remote(self):
"""Test delete for a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
response = self.request_knox(url, method='DELETE')
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
class TestRoleAssignmentOwnerTransferAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentOwnerTransferAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
def test_transfer_owner(self):
"""Test transferring ownership for a project"""
# Assign role to new user
self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
# Assert preconditions
self.assertEqual(self.project.get_owner().user, self.user)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(self.project.get_owner().user, self.assign_user)
def test_transfer_owner_category(self):
"""Test transferring ownership for a category"""
# Assign role to new user
self._make_assignment(
self.category, self.assign_user, self.role_contributor
)
# Assert preconditions
self.assertEqual(self.category.get_owner().user, self.user)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.category.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(self.category.get_owner().user, self.assign_user)
def test_transfer_owner_inherit(self):
"""Test transferring ownership to an inherited owner"""
# Assign role to new user
self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
# Set alt owner to current project, make self.user inherited owner
alt_owner = self.make_user('alt_owner')
self.owner_as.user = alt_owner
self.owner_as.save()
# Assert preconditions
self.assertEqual(self.project.get_owner().user, alt_owner)
self.assertEqual(
self.project.get_owners(inherited_only=True)[0].user, self.user
)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(self.project.get_owner().user, self.user)
old_owner_as = RoleAssignment.objects.get(
project=self.project, user=alt_owner
)
self.assertEqual(old_owner_as.role, self.role_contributor)
def test_transfer_owner_no_roles(self):
"""Test transferring ownership to user with no existing roles (should fail)"""
# NOTE: No role given to user
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_transfer_remote(self):
"""Test transferring ownership for a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assign role to new user
self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
# Assert preconditions
self.assertEqual(self.project.get_owner().user, self.user)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(self.project.get_owner().user, self.user)
class TestProjectInviteListAPIView(ProjectInviteMixin, TestCoreAPIViewsBase):
"""Tests for ProjectInviteListAPIView"""
def setUp(self):
super().setUp()
# Create invites
self.invite = self._make_invite(
email=INVITE_USER_EMAIL,
project=self.project,
role=self.role_guest,
issuer=self.user,
message='',
secret=build_secret(),
)
self.invite2 = self._make_invite(
email=INVITE_USER2_EMAIL,
project=self.project,
role=self.role_contributor,
issuer=self.user,
message=INVITE_MESSAGE,
secret=build_secret(),
)
def test_get(self):
"""Test ProjectInviteListAPIView get()"""
url = reverse(
'projectroles:api_invite_list',
kwargs={'project': self.project.sodar_uuid},
)
response = self.request_knox(url, token=self.get_token(self.user))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 2)
expected = [
{
'email': INVITE_USER_EMAIL,
'project': str(self.project.sodar_uuid),
'role': self.role_guest.name,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(self.invite.date_created),
'date_expire': self.get_drf_datetime(self.invite.date_expire),
'message': '',
'sodar_uuid': str(self.invite.sodar_uuid),
},
{
'email': INVITE_USER2_EMAIL,
'project': str(self.project.sodar_uuid),
'role': self.role_contributor.name,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(
self.invite2.date_created
),
'date_expire': self.get_drf_datetime(self.invite2.date_expire),
'message': INVITE_MESSAGE,
'sodar_uuid': str(self.invite2.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
def test_get_inactive(self):
"""Test get() with an inactive invite"""
self.invite.active = False
self.invite.save()
url = reverse(
'projectroles:api_invite_list',
kwargs={'project': self.project.sodar_uuid},
)
response = self.request_knox(url, token=self.get_token(self.user))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 1)
expected = [
{
'email': INVITE_USER2_EMAIL,
'project': str(self.project.sodar_uuid),
'role': self.role_contributor.name,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(
self.invite2.date_created
),
'date_expire': self.get_drf_datetime(self.invite2.date_expire),
'message': INVITE_MESSAGE,
'sodar_uuid': str(self.invite2.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
class TestProjectInviteCreateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for ProjectInviteCreateAPIView"""
def test_create(self):
"""Test creating a contributor invite for user"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 1
)
# Assert data
invite = ProjectInvite.objects.first()
self.assertEqual(invite.email, INVITE_USER_EMAIL)
self.assertEqual(invite.role, self.role_contributor)
self.assertEqual(invite.issuer, self.user)
self.assertEqual(invite.message, INVITE_MESSAGE)
# Assert response
expected = {
'email': INVITE_USER_EMAIL,
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_CONTRIBUTOR,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(invite.date_created),
'date_expire': self.get_drf_datetime(invite.date_expire),
'message': invite.message,
'sodar_uuid': str(invite.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
self.assertEqual(len(mail.outbox), 1)
def test_create_owner(self):
"""Test creating an invite for an owner role (should fail)"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_OWNER,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_delegate(self):
"""Test creating an invite for an delegate role"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_DELEGATE,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 1
)
invite = ProjectInvite.objects.first()
self.assertEqual(invite.role, self.role_delegate)
self.assertEqual(len(mail.outbox), 1)
@override_settings(PROJECTROLES_DELEGATE_LIMIT=2)
def test_create_delegate_no_perms(self):
"""Test creating an delegate invite without perms (should fail)"""
del_user = self.make_user('delegate')
self._make_assignment(self.project, del_user, self.role_delegate)
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_DELEGATE,
'message': INVITE_MESSAGE,
}
response = self.request_knox(
url, method='POST', data=post_data, token=self.get_token(del_user)
)
# Assert response and data
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_delegate_limit(self):
"""Test creating an delegate invite with exceeded limit (should fail)"""
del_user = self.make_user('delegate')
self._make_assignment(self.project, del_user, self.role_delegate)
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_DELEGATE,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_invalid_email(self):
"""Test creating an invite with invalid email (should fail)"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': 'NOT_AN_EMAIL!',
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_existing_user(self):
"""Test creating an invite for an existing user (should fail)"""
user = self.make_user('new_user')
user.email = INVITE_USER_EMAIL
user.save()
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_remote(self):
"""Test creating an invite for a remote project (should fail)"""
# Set up remote site and project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
class TestProjectInviteRevokeAPIView(ProjectInviteMixin, TestCoreAPIViewsBase):
"""Tests for ProjectInviteRevokeAPIView"""
def setUp(self):
super().setUp()
# Create invite
self.invite = self._make_invite(
email=INVITE_USER_EMAIL,
project=self.project,
role=self.role_contributor,
issuer=self.user,
)
def test_revoke(self):
"""Test revoking an invite"""
# Assert preconditions
self.assertEqual(self.invite.active, True)
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and invite status
self.assertEqual(response.status_code, 200, msg=response.content)
self.invite.refresh_from_db()
self.assertEqual(self.invite.active, False)
def test_revoke_inactive(self):
"""Test revoking an already inactive invite (should fail)"""
self.invite.active = False
self.invite.save()
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and invite status
self.assertEqual(response.status_code, 400, msg=response.content)
def test_revoke_delegate(self):
"""Test revoking a delegate invite with sufficient perms"""
self.invite.role = self.role_delegate
self.invite.save()
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and invite status
self.assertEqual(response.status_code, 200, msg=response.content)
self.invite.refresh_from_db()
self.assertEqual(self.invite.active, False)
def test_revoke_delegate_no_perms(self):
"""Test revoking a delegate invite without perms (should fail)"""
self.invite.role = self.role_delegate
self.invite.save()
delegate = self.make_user('delegate')
self._make_assignment(self.project, delegate, self.role_delegate)
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(
url, method='POST', token=self.get_token(delegate)
)
# Assert response and invite status
self.assertEqual(response.status_code, 403, msg=response.content)
self.invite.refresh_from_db()
self.assertEqual(self.invite.active, True)
def test_revoke_not_found(self):
"""Test revoking invite with invalid UUID"""
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': INVALID_UUID},
)
response = self.request_knox(url, method='POST')
self.assertEqual(response.status_code, 404)
class TestProjectInviteResendAPIView(ProjectInviteMixin, TestCoreAPIViewsBase):
"""Tests for ProjectInviteResendAPIView"""
def setUp(self):
super().setUp()
# Create invite
self.invite = self._make_invite(
email=INVITE_USER_EMAIL,
project=self.project,
role=self.role_contributor,
issuer=self.user,
)
def test_resend(self):
"""Test resending an invite"""
# Assert preconditions
self.assertEqual(len(mail.outbox), 0)
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and mail status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(len(mail.outbox), 1)
def test_resend_inactive(self):
"""Test resending an inactive invite (should fail)"""
self.invite.active = False
self.invite.save()
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and mail status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(len(mail.outbox), 0)
def test_resend_delegate(self):
"""Test resending a delegate invite with sufficient perms"""
self.invite.role = self.role_delegate
self.invite.save()
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and mail status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(len(mail.outbox), 1)
def test_resend_delegate_no_perms(self):
"""Test resending a delegate invite without perms (should fail)"""
self.invite.role = self.role_delegate
self.invite.save()
delegate = self.make_user('delegate')
self._make_assignment(self.project, delegate, self.role_delegate)
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(
url, method='POST', token=self.get_token(delegate)
)
# Assert response and mail status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(len(mail.outbox), 0)
def test_resend_not_found(self):
"""Test resending invite with invalid UUID"""
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': INVALID_UUID},
)
response = self.request_knox(url, method='POST')
self.assertEqual(response.status_code, 404)
class TestUserListAPIView(TestCoreAPIViewsBase):
"""Tests for UserListAPIView"""
def setUp(self):
super().setUp()
# Create additional users
self.domain_user = self.make_user('domain_user@domain')
def test_get(self):
"""Test UserListAPIView get() as a regular user"""
url = reverse('projectroles:api_user_list')
response = self.request_knox(
url, token=self.get_token(self.domain_user)
)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 1) # System users not returned
expected = [
{
'username': self.domain_user.username,
'name': self.domain_user.name,
'email': self.domain_user.email,
'sodar_uuid': str(self.domain_user.sodar_uuid),
}
]
self.assertEqual(response_data, expected)
def test_get_superuser(self):
"""Test UserListAPIView get() as a superuser"""
url = reverse('projectroles:api_user_list')
response = self.request_knox(url) # Default token is for superuser
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 2)
expected = [
{
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
{
'username': self.domain_user.username,
'name': self.domain_user.name,
'email': self.domain_user.email,
'sodar_uuid': str(self.domain_user.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
class TestCurrentUserRetrieveAPIView(TestCoreAPIViewsBase):
"""Tests for CurrentUserRetrieveAPIView"""
def setUp(self):
super().setUp()
# Create additional users
self.domain_user = self.make_user('domain_user@domain')
def test_get(self):
"""Test CurrentUserRetrieveAPIView get() as a regular user"""
url = reverse('projectroles:api_user_current')
response = self.request_knox(
url, token=self.get_token(self.domain_user)
)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'username': self.domain_user.username,
'name': self.domain_user.name,
'email': self.domain_user.email,
'sodar_uuid': str(self.domain_user.sodar_uuid),
}
self.assertEqual(response_data, expected)
def test_get_superuser(self):
"""Test CurrentUserRetrieveAPIView get() as superuser"""
url = reverse('projectroles:api_user_current')
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
}
self.assertEqual(response_data, expected)
class TestAPIVersioning(TestCoreAPIViewsBase):
"""Tests for REST API view versioning using ProjectRetrieveAPIView"""
def setUp(self):
super().setUp()
self.url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.project.sodar_uuid},
)
def test_api_versioning(self):
"""Test SODAR API Access with correct version headers"""
response = self.request_knox(
self.url,
media_type=views_api.CORE_API_MEDIA_TYPE,
version=views_api.CORE_API_DEFAULT_VERSION,
)
self.assertEqual(response.status_code, 200)
def test_api_versioning_invalid_version(self):
"""Test SODAR API Access with unsupported version (should fail)"""
response = self.request_knox(
self.url,
media_type=views_api.CORE_API_MEDIA_TYPE,
version=CORE_API_VERSION_INVALID,
)
self.assertEqual(response.status_code, 406)
def test_api_versioning_invalid_media_type(self):
"""Test SODAR API Access with unsupported media type (should fail)"""
response = self.request_knox(
self.url,
media_type=CORE_API_MEDIA_TYPE_INVALID,
            version=views_api.CORE_API_DEFAULT_VERSION,
)
self.assertEqual(response.status_code, 406)
# TODO: To be updated once the legacy API view is redone for SODAR Core v1.0
class TestRemoteProjectGetAPIView(
ProjectMixin,
RoleAssignmentMixin,
RemoteSiteMixin,
RemoteProjectMixin,
SODARAPIViewTestMixin,
TestViewsBase,
):
"""Tests for remote project getting API view"""
media_type = views_api.CORE_API_MEDIA_TYPE
api_version = views_api.CORE_API_DEFAULT_VERSION
def setUp(self):
super().setUp()
# Set up projects
self.category = self._make_project(
'TestCategory', PROJECT_TYPE_CATEGORY, None
)
self.cat_owner_as = self._make_assignment(
self.category, self.user, self.role_owner
)
self.project = self._make_project(
'TestProject', PROJECT_TYPE_PROJECT, self.category
)
self.project_owner_as = self._make_assignment(
self.project, self.user, self.role_owner
)
# Create target site
self.target_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_TARGET,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
# Create remote project
self.remote_project = self._make_remote_project(
site=self.target_site,
project_uuid=self.project.sodar_uuid,
project=self.project,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_INFO'],
)
self.remote_api = RemoteProjectAPI()
def test_get(self):
"""Test retrieving project data to the target site"""
response = self.client.get(
reverse(
'projectroles:api_remote_get',
kwargs={'secret': REMOTE_SITE_SECRET},
)
)
self.assertEqual(response.status_code, 200)
expected = self.remote_api.get_source_data(self.target_site)
response_dict = json.loads(response.content.decode('utf-8'))
self.assertEqual(response_dict, expected)
def test_get_invalid_secret(self):
"""Test retrieving project data with an invalid secret (should fail)"""
response = self.client.get(
reverse(
'projectroles:api_remote_get', kwargs={'secret': build_secret()}
)
)
self.assertEqual(response.status_code, 401)
| <filename>projectroles/tests/test_views_api.py
"""REST API view tests for the projectroles app"""
import base64
import json
import pytz
from django.conf import settings
from django.core import mail
from django.forms.models import model_to_dict
from django.test import override_settings
from django.urls import reverse
from django.utils import timezone
from knox.models import AuthToken
from test_plus.test import APITestCase
from projectroles import views_api
from projectroles.models import (
Project,
Role,
RoleAssignment,
ProjectInvite,
SODAR_CONSTANTS,
)
from projectroles.plugins import change_plugin_status, get_backend_api
from projectroles.remote_projects import RemoteProjectAPI
from projectroles.tests.test_models import (
ProjectMixin,
RoleAssignmentMixin,
ProjectInviteMixin,
RemoteSiteMixin,
RemoteProjectMixin,
AppSettingMixin,
)
from projectroles.tests.test_views import (
TestViewsBase,
PROJECT_TYPE_CATEGORY,
PROJECT_TYPE_PROJECT,
PROJECT_ROLE_OWNER,
PROJECT_ROLE_DELEGATE,
PROJECT_ROLE_CONTRIBUTOR,
PROJECT_ROLE_GUEST,
REMOTE_SITE_NAME,
REMOTE_SITE_URL,
SITE_MODE_SOURCE,
SITE_MODE_TARGET,
REMOTE_SITE_DESC,
REMOTE_SITE_SECRET,
)
from projectroles.utils import build_secret
CORE_API_MEDIA_TYPE_INVALID = 'application/vnd.bihealth.invalid'
CORE_API_VERSION_INVALID = '9.9.9'
INVALID_UUID = '11111111-1111-1111-1111-111111111111'
NEW_CATEGORY_TITLE = 'New Category'
NEW_PROJECT_TITLE = 'New Project'
UPDATED_TITLE = 'Updated Title'
UPDATED_DESC = 'Updated description'
UPDATED_README = 'Updated readme'
INVITE_USER_EMAIL = '<EMAIL>'
INVITE_USER2_EMAIL = '<EMAIL>'
INVITE_MESSAGE = 'Message'
# Base Classes -----------------------------------------------------------------
class SODARAPIViewTestMixin:
"""
Mixin for SODAR and SODAR Core API views with accept headers, knox token
authorization and general helper methods.
"""
# Default API header parameters are for external SODAR site APIs
# Override these for testing SODAR Core API views
media_type = settings.SODAR_API_MEDIA_TYPE
api_version = settings.SODAR_API_DEFAULT_VERSION
# Copied from Knox tests
@classmethod
def _get_basic_auth_header(cls, username, password):
return (
'Basic %s'
% base64.b64encode(
('%s:%s' % (username, password)).encode('ascii')
).decode()
)
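    # Example with hypothetical credentials: _get_basic_auth_header('alice',
    # 'secret') returns 'Basic YWxpY2U6c2VjcmV0', for use as the
    # HTTP_AUTHORIZATION header value.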
@classmethod
def get_token(cls, user, full_result=False):
"""
Get or create a knox token for a user.
:param user: User object
:param full_result: Return full result of AuthToken creation if True
:return: Token string or AuthToken creation tuple
"""
result = AuthToken.objects.create(user=user)
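        # Knox's AuthToken.objects.create() returns an (instance, token string)
        # tuple, hence the indexing below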
return result if full_result else result[1]
@classmethod
def get_serialized_user(cls, user):
"""
Return serialization for a user.
:param user: User object
:return: Dict
"""
return {
'email': user.email,
'name': user.name,
'sodar_uuid': str(user.sodar_uuid),
'username': user.username,
}
@classmethod
def get_drf_datetime(cls, obj_dt):
"""
Return datetime in DRF compatible format.
:param obj_dt: Object DateTime field
:return: String
"""
return timezone.localtime(
obj_dt, pytz.timezone(settings.TIME_ZONE)
).isoformat()
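    # Illustrative example: with TIME_ZONE='Europe/Berlin' this yields strings
    # such as '2021-06-01T12:00:00+02:00', matching DRF's datetime rendering.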
@classmethod
def get_accept_header(
cls,
media_type=None,
version=None,
):
"""
Return version accept header based on the media type and version string.
:param media_type: String (default = cls.media_type)
:param version: String (default = cls.api_version)
:return: Dict
"""
if not media_type:
media_type = cls.media_type
if not version:
version = cls.api_version
return {'HTTP_ACCEPT': '{}; version={}'.format(media_type, version)}
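    # The returned dict has the form
    # {'HTTP_ACCEPT': '<media_type>; version=<api_version>'}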
@classmethod
def get_token_header(cls, token):
"""
Return auth header based on token.
:param token: Token string
:return: Dict
"""
return {'HTTP_AUTHORIZATION': 'token {}'.format(token)}
def request_knox(
self,
url,
method='GET',
format='json',
data=None,
token=None,
media_type=None,
version=None,
header=None,
):
"""
        Perform an HTTP request with Knox token auth.
        :param url: URL for the request
        :param method: Request method (string, default="GET")
        :param format: Request format (string, default="json")
        :param data: Optional data for request (dict)
        :param token: Knox token string (if None, use self.knox_token)
        :param media_type: String (default = cls.media_type)
        :param version: String (default = cls.api_version)
        :param header: Optional dict of extra headers to apply to the request
:return: Response object
"""
if not token:
token = self.knox_token
req_kwargs = {
'format': format,
**self.get_accept_header(media_type, version),
**self.get_token_header(token),
}
if data:
req_kwargs['data'] = data
if header:
req_kwargs.update(header)
req_method = getattr(self.client, method.lower(), None)
if not req_method:
raise ValueError('Unsupported method "{}"'.format(method))
return req_method(url, **req_kwargs)
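# Minimal usage sketch for the mixin above (self refers to a test case using
# the mixin; the URL name is the one used elsewhere in this module):
#
#   url = reverse('projectroles:api_project_list')
#   response = self.request_knox(url)  # GET as the default token user
#   response = self.request_knox(url, method='POST', data=post_data)
#   response = self.request_knox(url, token=self.get_token(other_user))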
class TestAPIViewsBase(
ProjectMixin, RoleAssignmentMixin, SODARAPIViewTestMixin, APITestCase
):
"""Base API test view with knox authentication"""
def setUp(self):
# Show complete diff in case of failure
self.maxDiff = None
# Force disabling of taskflow plugin if it's available
if get_backend_api('taskflow'):
change_plugin_status(
                name='taskflow', status=1, plugin_type='backend'  # 1 = disabled
)
# Init roles
self.role_owner = Role.objects.get_or_create(name=PROJECT_ROLE_OWNER)[0]
self.role_delegate = Role.objects.get_or_create(
name=PROJECT_ROLE_DELEGATE
)[0]
self.role_contributor = Role.objects.get_or_create(
name=PROJECT_ROLE_CONTRIBUTOR
)[0]
self.role_guest = Role.objects.get_or_create(name=PROJECT_ROLE_GUEST)[0]
# Init superuser
self.user = self.make_user('superuser')
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
# Set up category and project with owner role assignments
self.category = self._make_project(
'TestCategory', PROJECT_TYPE_CATEGORY, None
)
self.cat_owner_as = self._make_assignment(
self.category, self.user, self.role_owner
)
self.project = self._make_project(
'TestProject', PROJECT_TYPE_PROJECT, self.category
)
self.owner_as = self._make_assignment(
self.project, self.user, self.role_owner
)
# Get knox token for self.user
self.knox_token = self.get_token(self.user)
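    # Resulting fixture: TestCategory (owner: superuser) containing
    # TestProject (owner: superuser); self.knox_token authenticates requests
    # as the superuser.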
class TestCoreAPIViewsBase(TestAPIViewsBase):
"""Override of TestAPIViewsBase to be used with SODAR Core API views"""
media_type = views_api.CORE_API_MEDIA_TYPE
api_version = views_api.CORE_API_DEFAULT_VERSION
# Tests ------------------------------------------------------------------------
class TestProjectListAPIView(TestCoreAPIViewsBase):
"""Tests for ProjectListAPIView"""
def test_get(self):
"""Test ProjectListAPIView get() as project owner"""
url = reverse('projectroles:api_project_list')
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 2)
expected = [
{
'title': self.category.title,
'type': self.category.type,
'parent': None,
'description': self.category.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.category.submit_status,
'roles': {
str(self.cat_owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.cat_owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
},
{
'title': self.project.title,
'type': self.project.type,
'parent': str(self.category.sodar_uuid),
'description': self.project.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.project.submit_status,
'roles': {
str(self.owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
def test_get_no_roles(self):
"""Test ProjectListAPIView get() without roles"""
user_no_roles = self.make_user('user_no_roles')
url = reverse('projectroles:api_project_list')
response = self.request_knox(url, token=self.get_token(user_no_roles))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 0)
def test_get_limited_roles(self):
"""Test ProjectListAPIView get() with only one role"""
        user_limited = self.make_user('user_limited')
        self._make_assignment(
            self.project, user_limited, self.role_contributor
        )
        url = reverse('projectroles:api_project_list')
        response = self.request_knox(url, token=self.get_token(user_limited))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 1)
class TestProjectRetrieveAPIView(AppSettingMixin, TestCoreAPIViewsBase):
"""Tests for ProjectRetrieveAPIView"""
def test_get_category(self):
"""Test ProjectRetrieveAPIView get() with a category"""
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.category.sodar_uuid},
)
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'title': self.category.title,
'type': self.category.type,
'parent': None,
'description': self.category.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.category.submit_status,
'roles': {
str(self.cat_owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.cat_owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
}
self.assertEqual(response_data, expected)
def test_get_project(self):
"""Test ProjectRetrieveAPIView get() with a project"""
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.project.sodar_uuid},
)
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'title': self.project.title,
'type': self.project.type,
'parent': str(self.category.sodar_uuid),
'description': self.project.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.project.submit_status,
'roles': {
str(self.owner_as.sodar_uuid): {
'user': {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
'role': PROJECT_ROLE_OWNER,
'sodar_uuid': str(self.owner_as.sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
self.assertEqual(response_data, expected)
def test_get_not_found(self):
"""Test ProjectRetrieveAPIView get() with an invalid UUID"""
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': INVALID_UUID},
)
response = self.request_knox(url)
self.assertEqual(response.status_code, 404)
class TestIPAllowing(AppSettingMixin, TestCoreAPIViewsBase):
"""Tests for IP allowing settings using ProjectRetrieveAPIView"""
def _setup_ip_allowing(self, ip_list, role_suffix):
"""Setup users and roles for IP allowing test"""
# Create new user
user = self.make_user(role_suffix)
# Remove previously assigned owner role for superuser.
superuser_as_owner_role = RoleAssignment.objects.get(
project=self.project, user=self.user, role=self.role_owner
)
superuser_as_owner_role.delete()
# Assign requested role to user
user_as = self._make_assignment(
self.project, user, getattr(self, 'role_' + role_suffix)
)
user_cat_as = RoleAssignment.objects.get(
project=self.category, user=self.user, role=self.role_owner
)
if role_suffix == 'owner':
user_cat_as.delete()
user_cat_as = self._make_assignment(
self.category, user, getattr(self, 'role_' + role_suffix)
)
# Init IP restrict setting
self._make_setting(
app_name='projectroles',
name='ip_restrict',
setting_type='BOOLEAN',
value=True,
project=self.project,
)
# Init IP allowlist setting
self._make_setting(
app_name='projectroles',
name='ip_allowlist',
setting_type='JSON',
value=None,
value_json=ip_list,
project=self.project,
)
return user, user_as, user_cat_as
def _get_project_ip_allowing(
self, username, http_attribute, ip_list, blocked=None
):
"""Helper for IP allowing tests"""
        if blocked is None:
            raise ValueError('The "blocked" argument must be set (True/False)')
user, user_as, user_cat_as = self._setup_ip_allowing(ip_list, username)
url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.project.sodar_uuid},
)
header = {http_attribute: '192.168.1.1'}
response = self.request_knox(
url, token=self.get_token(user), header=header
)
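        # Map the role suffix to the serialized role name expected in the
        # response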
if username == 'owner':
role = PROJECT_ROLE_OWNER
elif username == 'delegate':
role = PROJECT_ROLE_DELEGATE
elif username == 'contributor':
role = PROJECT_ROLE_CONTRIBUTOR
else:
role = PROJECT_ROLE_GUEST
if blocked:
self.assertEqual(response.status_code, 403)
else:
expected = {
'title': self.project.title,
'type': self.project.type,
'parent': str(self.category.sodar_uuid),
'description': self.project.description,
'readme': '',
'public_guest_access': False,
'submit_status': self.project.submit_status,
'roles': {
str(user_as.sodar_uuid): {
'user': {
'username': user.username,
'name': user.name,
'email': user.email,
'sodar_uuid': str(user.sodar_uuid),
},
'role': role,
'sodar_uuid': str(user_as.sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(response_data, expected)
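    # The tests below cover each combination of client address attribute
    # (HTTP_X_FORWARDED_FOR, X_FORWARDED_FOR, FORWARDED, REMOTE_ADDR), project
    # role and allowlist content. Owners and delegates pass even with an empty
    # allowlist, while contributors and guests are blocked unless their
    # address matches an allowlist entry.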
def test_http_x_forwarded_for_block_all_owner(self):
self._get_project_ip_allowing(
'owner', 'HTTP_X_FORWARDED_FOR', [], blocked=False
)
def test_http_x_forwarded_for_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'HTTP_X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_http_x_forwarded_for_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'HTTP_X_FORWARDED_FOR', [], blocked=False
)
def test_http_x_forwarded_for_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'HTTP_X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_http_x_forwarded_for_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'HTTP_X_FORWARDED_FOR', [], blocked=True
)
def test_http_x_forwarded_for_allow_ip_contributor(self):
self._get_project_ip_allowing(
'contributor',
'HTTP_X_FORWARDED_FOR',
['192.168.1.1'],
blocked=False,
)
def test_http_x_forwarded_for_block_all_guest(self):
self._get_project_ip_allowing(
'guest', 'HTTP_X_FORWARDED_FOR', [], blocked=True
)
def test_http_x_forwarded_for_allow_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'HTTP_X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_x_forwarded_for_block_all_owner(self):
self._get_project_ip_allowing(
'owner', 'X_FORWARDED_FOR', [], blocked=False
)
def test_x_forwarded_for_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_x_forwarded_for_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'X_FORWARDED_FOR', [], blocked=False
)
def test_x_forwarded_for_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_x_forwarded_for_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'X_FORWARDED_FOR', [], blocked=True
)
    def test_x_forwarded_for_allow_ip_contributor(self):
self._get_project_ip_allowing(
'contributor', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
    def test_x_forwarded_for_block_all_guest(self):
self._get_project_ip_allowing(
'guest', 'X_FORWARDED_FOR', [], blocked=True
)
    def test_x_forwarded_for_allow_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'X_FORWARDED_FOR', ['192.168.1.1'], blocked=False
)
def test_forwarded_block_all_owner(self):
self._get_project_ip_allowing('owner', 'FORWARDED', [], blocked=False)
def test_forwarded_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_forwarded_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'FORWARDED', [], blocked=False
)
def test_forwarded_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_forwarded_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'FORWARDED', [], blocked=True
)
def test_forwarded_allow_ip_contributor(self):
self._get_project_ip_allowing(
'contributor', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_forwarded_block_all_guest(self):
self._get_project_ip_allowing('guest', 'FORWARDED', [], blocked=True)
def test_forwarded_allow_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'FORWARDED', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_owner(self):
self._get_project_ip_allowing('owner', 'REMOTE_ADDR', [], blocked=False)
def test_remote_addr_allow_ip_owner(self):
self._get_project_ip_allowing(
'owner', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_delegate(self):
self._get_project_ip_allowing(
'delegate', 'REMOTE_ADDR', [], blocked=False
)
def test_remote_addr_allow_ip_delegate(self):
self._get_project_ip_allowing(
'delegate', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_contributor(self):
self._get_project_ip_allowing(
'contributor', 'REMOTE_ADDR', [], blocked=True
)
def test_remote_addr_allow_ip_contributor(self):
self._get_project_ip_allowing(
'contributor', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
def test_remote_addr_block_all_guest(self):
self._get_project_ip_allowing('guest', 'REMOTE_ADDR', [], blocked=True)
def test_remote_addr_allow_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.1.1'], blocked=False
)
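    # Allowlist entries may be CIDR networks as well as single addresses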
def test_remote_addr_allow_network_guest(self):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.1.0/24'], blocked=False
)
def test_remote_addr_block_not_in_allowlist_ip_guest(self):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.1.2'], blocked=True
)
    def test_remote_addr_block_not_in_allowlist_network_guest(self):
self._get_project_ip_allowing(
'guest', 'REMOTE_ADDR', ['192.168.2.0/24'], blocked=True
)
class TestProjectCreateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for ProjectCreateAPIView"""
def test_create_category(self):
"""Test creating a root category"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': '',
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
# Assert object content
new_category = Project.objects.get(title=NEW_CATEGORY_TITLE)
model_dict = model_to_dict(new_category)
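        # readme is a markup field object in the model_to_dict() output, so
        # compare against its raw value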
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': new_category.pk,
'title': new_category.title,
'type': new_category.type,
'parent': None,
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': new_category.title,
'sodar_uuid': new_category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(
RoleAssignment.objects.filter(
project=new_category, user=self.user, role=self.role_owner
).count(),
1,
)
# Assert API response
expected = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'sodar_uuid': str(new_category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_category_nested(self):
"""Test creating a category under an existing category"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
# Assert object content
new_category = Project.objects.get(title=NEW_CATEGORY_TITLE)
model_dict = model_to_dict(new_category)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': new_category.pk,
'title': new_category.title,
'type': new_category.type,
'parent': self.category.pk,
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + new_category.title,
'sodar_uuid': new_category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(
RoleAssignment.objects.filter(
project=new_category, user=self.user, role=self.role_owner
).count(),
1,
)
# Assert API response
expected = {
'title': NEW_CATEGORY_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': str(self.category.sodar_uuid),
'description': new_category.description,
'readme': new_category.readme.raw,
'public_guest_access': False,
'sodar_uuid': str(new_category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_project(self):
"""Test creating a project under an existing category"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
# Assert object content
new_project = Project.objects.get(title=NEW_PROJECT_TITLE)
model_dict = model_to_dict(new_project)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': new_project.pk,
'title': new_project.title,
'type': new_project.type,
'parent': self.category.pk,
'description': new_project.description,
'readme': new_project.readme.raw,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + new_project.title,
'sodar_uuid': new_project.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(
RoleAssignment.objects.filter(
project=new_project, user=self.user, role=self.role_owner
).count(),
1,
)
# Assert API response
expected = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': new_project.description,
'readme': new_project.readme.raw,
'public_guest_access': False,
'sodar_uuid': str(new_project.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_project_root(self):
"""Test creating a project in root (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': None,
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
@override_settings(PROJECTROLES_DISABLE_CATEGORIES=True)
def test_create_project_disable_categories(self):
"""Test creating a project in root with disabled categories"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': '',
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
def test_create_project_duplicate_title(self):
"""Test creating a project with a title already in category (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': self.project.title,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
def test_create_project_unknown_user(self):
"""Test creating a project with a non-existent user (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': INVALID_UUID,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
def test_create_project_unknown_parent(self):
"""Test creating a project with a non-existent parent category (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': INVALID_UUID,
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
def test_create_project_invalid_parent(self):
"""Test creating a project with a project as parent (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.project.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400)
self.assertEqual(Project.objects.count(), 2)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_project_target_enabled(self):
"""Test creating a project as TARGET with target creation allowed"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(Project.objects.count(), 3)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_project_target_remote(self):
"""Test creating a project as TARGET under a remote category (should fail)"""
# Create source site
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
# Make category remote
self._make_remote_project(
project_uuid=self.category.sodar_uuid,
project=self.category,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
@override_settings(
PROJECTROLES_SITE_MODE=SITE_MODE_TARGET,
PROJECTROLES_TARGET_CREATE=False,
)
def test_create_project_target_disabled(self):
"""Test creating a project as TARGET with target creation disallowed (should fail)"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse('projectroles:api_project_create')
post_data = {
'title': NEW_PROJECT_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': 'description',
'readme': 'readme',
'public_guest_access': False,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
class TestProjectUpdateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for ProjectUpdateAPIView"""
def test_put_category(self):
"""Test put() for category updating"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.category.sodar_uuid},
)
put_data = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': '',
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.category.refresh_from_db()
model_dict = model_to_dict(self.category)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.category.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': UPDATED_TITLE,
'sodar_uuid': self.category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'roles': {
str(self.category.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.category.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_put_project(self):
"""Test put() for project updating"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
put_data = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'owner': str(self.user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.project.refresh_from_db()
model_dict = model_to_dict(self.project)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.project.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': self.category.pk,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + UPDATED_TITLE,
'sodar_uuid': self.project.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'roles': {
str(self.project.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.project.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_category(self):
"""Test patch() for updating category metadata"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.category.sodar_uuid},
)
patch_data = {
'title': UPDATED_TITLE,
'description': UPDATED_DESC,
'readme': UPDATED_README,
}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.category.refresh_from_db()
model_dict = model_to_dict(self.category)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.category.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': False,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': UPDATED_TITLE,
'sodar_uuid': self.category.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(self.category.get_owner().user, self.user)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_CATEGORY,
'parent': None,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': False,
'roles': {
str(self.category.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.category.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.category.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_project(self):
"""Test patch() for updating project metadata"""
# Assert preconditions
self.assertEqual(Project.objects.count(), 2)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {
'title': UPDATED_TITLE,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(Project.objects.count(), 2)
# Assert object content
self.project.refresh_from_db()
model_dict = model_to_dict(self.project)
model_dict['readme'] = model_dict['readme'].raw
expected = {
'id': self.project.pk,
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': self.category.pk,
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'full_title': self.category.title + ' / ' + UPDATED_TITLE,
'sodar_uuid': self.project.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert role assignment
self.assertEqual(self.project.get_owner().user, self.user)
# Assert API response
expected = {
'title': UPDATED_TITLE,
'type': PROJECT_TYPE_PROJECT,
'parent': str(self.category.sodar_uuid),
'submit_status': SODAR_CONSTANTS['SUBMIT_STATUS_OK'],
'description': UPDATED_DESC,
'readme': UPDATED_README,
'public_guest_access': True,
'roles': {
str(self.project.get_owner().sodar_uuid): {
'role': PROJECT_ROLE_OWNER,
'user': self.get_serialized_user(self.user),
'sodar_uuid': str(self.project.get_owner().sodar_uuid),
}
},
'sodar_uuid': str(self.project.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_project_owner(self):
"""Test patch() for updating project owner (should fail)"""
new_owner = self.make_user('new_owner')
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'owner': str(new_owner.sodar_uuid)}
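        # Owner can not be changed here: ownership changes go through the
        # separate owner transfer view (see
        # TestRoleAssignmentOwnerTransferAPIView below)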
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_patch_project_move(self):
"""Test patch() for moving project under a different category"""
# Assert preconditions
self.assertEqual(
self.project.full_title,
self.category.title + ' / ' + self.project.title,
)
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
self._make_assignment(new_category, self.user, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': str(new_category.sodar_uuid)}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 200, msg=response.content)
# Assert object content
self.project.refresh_from_db()
model_dict = model_to_dict(self.project)
self.assertEqual(model_dict['parent'], new_category.pk)
# Assert role assignment
self.assertEqual(self.project.get_owner().user, self.user)
        # Assert full title update for the moved project
self.assertEqual(
self.project.full_title,
new_category.title + ' / ' + self.project.title,
)
# Assert API response
self.assertEqual(
json.loads(response.content)['parent'], str(new_category.sodar_uuid)
)
def test_patch_project_move_unallowed(self):
"""Test patch() for moving project without permissions (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
new_owner = self.make_user('new_owner')
self._make_assignment(new_category, new_owner, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': str(new_category.sodar_uuid)}
        # Disable superuser status for self.user and perform request
self.user.is_superuser = False
self.user.save()
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 403, msg=response.content)
def test_patch_project_move_root(self):
"""Test patch() for moving project without permissions (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
new_owner = self.make_user('new_owner')
self._make_assignment(new_category, new_owner, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': ''}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 200, msg=response.content)
def test_patch_project_move_root_unallowed(self):
"""Test patch() for moving project to root without permissions (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, None
)
new_owner = self.make_user('new_owner')
self._make_assignment(new_category, new_owner, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'parent': ''}
        # Disable superuser status for self.user and perform request
self.user.is_superuser = False
self.user.save()
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 403, msg=response.content)
def test_patch_project_move_child(self):
"""Test patch() for moving a category inside its child (should fail)"""
new_category = self._make_project(
'NewCategory', PROJECT_TYPE_CATEGORY, self.category
)
self._make_assignment(new_category, self.user, self.role_owner)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.category.sodar_uuid},
)
patch_data = {'parent': str(new_category.sodar_uuid)}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_patch_project_type_change(self):
"""Test patch() with a changed project type (should fail)"""
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {'type': PROJECT_TYPE_CATEGORY}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_patch_project_remote(self):
"""Test patch() for updating remote project metadata (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
url = reverse(
'projectroles:api_project_update',
kwargs={'project': self.project.sodar_uuid},
)
patch_data = {
'title': UPDATED_TITLE,
'description': UPDATED_DESC,
'readme': UPDATED_README,
}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
class TestRoleAssignmentCreateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentCreateAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
def test_create_contributor(self):
"""Test creating a contributor role for user"""
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Assert object
role_as = RoleAssignment.objects.filter(
project=self.project,
role=self.role_contributor,
user=self.assign_user,
).first()
self.assertIsNotNone(role_as)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(role_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_create_owner(self):
"""Test creating an owner role (should fail)"""
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_OWNER,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_create_delegate(self):
"""Test creating a delegate role for user as owner"""
        # Disable superuser status for self.user
self.user.is_superuser = False
self.user.save()
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Assert object
role_as = RoleAssignment.objects.filter(
project=self.project, role=self.role_delegate, user=self.assign_user
).first()
self.assertIsNotNone(role_as)
def test_create_delegate_unauthorized(self):
"""Test creating a delegate role without authorization (should fail)"""
        # Create new user and grant contributor role
new_user = self.make_user('new_user')
self._make_assignment(self.project, new_user, self.role_contributor)
new_user_token = self.get_token(new_user)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(
url, method='POST', data=post_data, token=new_user_token
)
# Assert response
self.assertEqual(response.status_code, 403, msg=response.content)
def test_create_delegate_limit(self):
"""Test creating a delegate role with limit reached (should fail)"""
# Create new user and grant delegate role
new_user = self.make_user('new_user')
self._make_assignment(self.project, new_user, self.role_delegate)
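        # The delegate limit (PROJECTROLES_DELEGATE_LIMIT, 1 by default) is
        # now reached, so another delegate role must be rejected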
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
# NOTE: Post as owner
response = self.request_knox(url, method='POST', data=post_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_create_delegate_limit_inherit(self):
"""Test creating a delegate role existing role for inherited owner"""
# Set up category owner
new_user = self.make_user('new_user')
        self.cat_owner_as.user = new_user
        self.cat_owner_as.save()  # Persist the reassignment to the database
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
# NOTE: Post as owner
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Assert object
role_as = RoleAssignment.objects.filter(
project=self.project, role=self.role_delegate, user=self.assign_user
).first()
self.assertIsNotNone(role_as)
def test_create_delegate_category(self):
"""Test creating a non-owner role for category"""
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.category.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response
self.assertEqual(response.status_code, 201, msg=response.content)
def test_create_role_existing(self):
"""Test creating a role for user already in the project"""
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
# Post again
post_data = {
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 2
)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_remote(self):
"""Test creating a role for a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
url = reverse(
'projectroles:api_role_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'role': PROJECT_ROLE_CONTRIBUTOR,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
RoleAssignment.objects.filter(project=self.project).count(), 1
)
class TestRoleAssignmentUpdateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentUpdateAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
self.update_as = self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
def test_put_role(self):
"""Test put() for role assignment updating"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response and role status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
# Assert object content
self.update_as.refresh_from_db()
model_dict = model_to_dict(self.update_as)
expected = {
'id': self.update_as.pk,
'project': self.project.pk,
'role': self.role_guest.pk,
'user': self.assign_user.pk,
'sodar_uuid': self.update_as.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(self.update_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_put_delegate(self):
"""Test put() for delegate role assignment"""
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response
self.assertEqual(response.status_code, 200, msg=response.content)
# Assert object content
self.update_as.refresh_from_db()
model_dict = model_to_dict(self.update_as)
expected = {
'id': self.update_as.pk,
'project': self.project.pk,
'role': self.role_delegate.pk,
'user': self.assign_user.pk,
'sodar_uuid': self.update_as.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_DELEGATE,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(self.update_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_put_owner(self):
"""Test put() for owner role assignment (should fail)"""
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_OWNER,
'user': str(self.assign_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_put_change_user(self):
"""Test put() with a different user (should fail)"""
new_user = self.make_user('new_user')
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
put_data = {
'role': PROJECT_ROLE_GUEST,
'user': str(new_user.sodar_uuid),
}
response = self.request_knox(url, method='PUT', data=put_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
def test_patch_role(self):
"""Test patch() for role assignment updating"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
patch_data = {'role': PROJECT_ROLE_GUEST}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and role status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
# Assert object content
self.update_as.refresh_from_db()
model_dict = model_to_dict(self.update_as)
expected = {
'id': self.update_as.pk,
'project': self.project.pk,
'role': self.role_guest.pk,
'user': self.assign_user.pk,
'sodar_uuid': self.update_as.sodar_uuid,
}
self.assertEqual(model_dict, expected)
# Assert API response
expected = {
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_GUEST,
'user': str(self.assign_user.sodar_uuid),
'sodar_uuid': str(self.update_as.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
def test_patch_change_user(self):
"""Test patch() with a different user (should fail)"""
new_user = self.make_user('new_user')
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
patch_data = {'user': str(new_user.sodar_uuid)}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response
self.assertEqual(response.status_code, 400, msg=response.content)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_patch_role_remote(self):
"""Test patch() for updating a role in a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
url = reverse(
'projectroles:api_role_update',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
patch_data = {'role': PROJECT_ROLE_GUEST}
response = self.request_knox(url, method='PATCH', data=patch_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
class TestRoleAssignmentDestroyAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentDestroyAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
self.update_as = self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
def test_delete_role(self):
"""Test delete for role assignment deletion"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
response = self.request_knox(url, method='DELETE')
# Assert response and role status
self.assertEqual(response.status_code, 204, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 2)
self.assertEqual(
RoleAssignment.objects.filter(
project=self.project, user=self.assign_user
).count(),
0,
)
def test_delete_delegate_unauthorized(self):
"""Test delete for delegate deletion without perms (should fail)"""
new_user = self.make_user('new_user')
delegate_as = self._make_assignment(
self.project, new_user, self.role_delegate
)
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 4)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': delegate_as.sodar_uuid},
)
        # NOTE: Perform request as contributor user
token = self.get_token(self.assign_user)
response = self.request_knox(url, method='DELETE', token=token)
# Assert response and role status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 4)
def test_delete_owner(self):
"""Test delete for owner deletion (should fail)"""
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': self.owner_as.sodar_uuid},
)
response = self.request_knox(url, method='DELETE')
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_delete_remote(self):
"""Test delete for a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(RoleAssignment.objects.count(), 3)
url = reverse(
'projectroles:api_role_destroy',
kwargs={'roleassignment': self.update_as.sodar_uuid},
)
response = self.request_knox(url, method='DELETE')
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(RoleAssignment.objects.count(), 3)
class TestRoleAssignmentOwnerTransferAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for RoleAssignmentOwnerTransferAPIView"""
def setUp(self):
super().setUp()
self.assign_user = self.make_user('assign_user')
def test_transfer_owner(self):
"""Test transferring ownership for a project"""
# Assign role to new user
self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
# Assert preconditions
self.assertEqual(self.project.get_owner().user, self.user)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(self.project.get_owner().user, self.assign_user)
def test_transfer_owner_category(self):
"""Test transferring ownership for a category"""
# Assign role to new user
self._make_assignment(
self.category, self.assign_user, self.role_contributor
)
# Assert preconditions
self.assertEqual(self.category.get_owner().user, self.user)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.category.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(self.category.get_owner().user, self.assign_user)
def test_transfer_owner_inherit(self):
"""Test transferring ownership to an inherited owner"""
# Assign role to new user
self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
# Set alt owner to current project, make self.user inherited owner
alt_owner = self.make_user('alt_owner')
self.owner_as.user = alt_owner
self.owner_as.save()
# Assert preconditions
self.assertEqual(self.project.get_owner().user, alt_owner)
self.assertEqual(
self.project.get_owners(inherited_only=True)[0].user, self.user
)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(self.project.get_owner().user, self.user)
old_owner_as = RoleAssignment.objects.get(
project=self.project, user=alt_owner
)
self.assertEqual(old_owner_as.role, self.role_contributor)
def test_transfer_owner_no_roles(self):
"""Test transferring ownership to user with no existing roles (should fail)"""
# NOTE: No role given to user
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_transfer_remote(self):
"""Test transferring ownership for a remote project (should fail)"""
# Create source site and remote project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assign role to new user
self._make_assignment(
self.project, self.assign_user, self.role_contributor
)
# Assert preconditions
self.assertEqual(self.project.get_owner().user, self.user)
url = reverse(
'projectroles:api_role_owner_transfer',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'new_owner': self.assign_user.username,
'old_owner_role': self.role_contributor.name,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and project status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(self.project.get_owner().user, self.user)
class TestProjectInviteListAPIView(ProjectInviteMixin, TestCoreAPIViewsBase):
"""Tests for ProjectInviteListAPIView"""
def setUp(self):
super().setUp()
# Create invites
self.invite = self._make_invite(
email=INVITE_USER_EMAIL,
project=self.project,
role=self.role_guest,
issuer=self.user,
message='',
secret=build_secret(),
)
self.invite2 = self._make_invite(
email=INVITE_USER2_EMAIL,
project=self.project,
role=self.role_contributor,
issuer=self.user,
message=INVITE_MESSAGE,
secret=build_secret(),
)
def test_get(self):
"""Test ProjectInviteListAPIView get()"""
url = reverse(
'projectroles:api_invite_list',
kwargs={'project': self.project.sodar_uuid},
)
response = self.request_knox(url, token=self.get_token(self.user))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 2)
expected = [
{
'email': INVITE_USER_EMAIL,
'project': str(self.project.sodar_uuid),
'role': self.role_guest.name,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(self.invite.date_created),
'date_expire': self.get_drf_datetime(self.invite.date_expire),
'message': '',
'sodar_uuid': str(self.invite.sodar_uuid),
},
{
'email': INVITE_USER2_EMAIL,
'project': str(self.project.sodar_uuid),
'role': self.role_contributor.name,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(
self.invite2.date_created
),
'date_expire': self.get_drf_datetime(self.invite2.date_expire),
'message': INVITE_MESSAGE,
'sodar_uuid': str(self.invite2.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
def test_get_inactive(self):
"""Test get() with an inactive invite"""
self.invite.active = False
self.invite.save()
url = reverse(
'projectroles:api_invite_list',
kwargs={'project': self.project.sodar_uuid},
)
response = self.request_knox(url, token=self.get_token(self.user))
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 1)
expected = [
{
'email': INVITE_USER2_EMAIL,
'project': str(self.project.sodar_uuid),
'role': self.role_contributor.name,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(
self.invite2.date_created
),
'date_expire': self.get_drf_datetime(self.invite2.date_expire),
'message': INVITE_MESSAGE,
'sodar_uuid': str(self.invite2.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
class TestProjectInviteCreateAPIView(
RemoteSiteMixin, RemoteProjectMixin, TestCoreAPIViewsBase
):
"""Tests for ProjectInviteCreateAPIView"""
def test_create(self):
"""Test creating a contributor invite for user"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 1
)
# Assert data
invite = ProjectInvite.objects.first()
self.assertEqual(invite.email, INVITE_USER_EMAIL)
self.assertEqual(invite.role, self.role_contributor)
self.assertEqual(invite.issuer, self.user)
self.assertEqual(invite.message, INVITE_MESSAGE)
# Assert response
expected = {
'email': INVITE_USER_EMAIL,
'project': str(self.project.sodar_uuid),
'role': PROJECT_ROLE_CONTRIBUTOR,
'issuer': self.get_serialized_user(self.user),
'date_created': self.get_drf_datetime(invite.date_created),
'date_expire': self.get_drf_datetime(invite.date_expire),
'message': invite.message,
'sodar_uuid': str(invite.sodar_uuid),
}
self.assertEqual(json.loads(response.content), expected)
self.assertEqual(len(mail.outbox), 1)
def test_create_owner(self):
"""Test creating an invite for an owner role (should fail)"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_OWNER,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_delegate(self):
"""Test creating an invite for an delegate role"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_DELEGATE,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 201, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 1
)
invite = ProjectInvite.objects.first()
self.assertEqual(invite.role, self.role_delegate)
self.assertEqual(len(mail.outbox), 1)
@override_settings(PROJECTROLES_DELEGATE_LIMIT=2)
def test_create_delegate_no_perms(self):
"""Test creating an delegate invite without perms (should fail)"""
del_user = self.make_user('delegate')
self._make_assignment(self.project, del_user, self.role_delegate)
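        # The raised limit leaves room for another delegate, so the 403 below
        # tests the issuer's permissions rather than the delegate limit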
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_DELEGATE,
'message': INVITE_MESSAGE,
}
response = self.request_knox(
url, method='POST', data=post_data, token=self.get_token(del_user)
)
# Assert response and data
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_delegate_limit(self):
"""Test creating an delegate invite with exceeded limit (should fail)"""
del_user = self.make_user('delegate')
self._make_assignment(self.project, del_user, self.role_delegate)
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_DELEGATE,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_invalid_email(self):
"""Test creating an invite with invalid email (should fail)"""
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': 'NOT_AN_EMAIL!',
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and data
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
def test_create_existing_user(self):
"""Test creating an invite for an existing user (should fail)"""
user = self.make_user('new_user')
user.email = INVITE_USER_EMAIL
user.save()
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
@override_settings(PROJECTROLES_SITE_MODE=SITE_MODE_TARGET)
def test_create_remote(self):
"""Test creating an invite for a remote project (should fail)"""
# Set up remote site and project
source_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_SOURCE,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
self._make_remote_project(
project_uuid=self.project.sodar_uuid,
project=self.project,
site=source_site,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_ROLES'],
)
# Assert preconditions
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
url = reverse(
'projectroles:api_invite_create',
kwargs={'project': self.project.sodar_uuid},
)
post_data = {
'email': INVITE_USER_EMAIL,
'role': PROJECT_ROLE_CONTRIBUTOR,
'message': INVITE_MESSAGE,
}
response = self.request_knox(url, method='POST', data=post_data)
# Assert response and role status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(
ProjectInvite.objects.filter(project=self.project).count(), 0
)
self.assertEqual(len(mail.outbox), 0)
class TestProjectInviteRevokeAPIView(ProjectInviteMixin, TestCoreAPIViewsBase):
"""Tests for ProjectInviteRevokeAPIView"""
def setUp(self):
super().setUp()
# Create invite
self.invite = self._make_invite(
email=INVITE_USER_EMAIL,
project=self.project,
role=self.role_contributor,
issuer=self.user,
)
def test_revoke(self):
"""Test revoking an invite"""
# Assert preconditions
self.assertEqual(self.invite.active, True)
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and invite status
self.assertEqual(response.status_code, 200, msg=response.content)
self.invite.refresh_from_db()
self.assertEqual(self.invite.active, False)
def test_revoke_inactive(self):
"""Test revoking an already inactive invite (should fail)"""
self.invite.active = False
self.invite.save()
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and invite status
self.assertEqual(response.status_code, 400, msg=response.content)
def test_revoke_delegate(self):
"""Test revoking a delegate invite with sufficient perms"""
self.invite.role = self.role_delegate
self.invite.save()
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and invite status
self.assertEqual(response.status_code, 200, msg=response.content)
self.invite.refresh_from_db()
self.assertEqual(self.invite.active, False)
def test_revoke_delegate_no_perms(self):
"""Test revoking a delegate invite without perms (should fail)"""
self.invite.role = self.role_delegate
self.invite.save()
delegate = self.make_user('delegate')
self._make_assignment(self.project, delegate, self.role_delegate)
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(
url, method='POST', token=self.get_token(delegate)
)
# Assert response and invite status
self.assertEqual(response.status_code, 403, msg=response.content)
self.invite.refresh_from_db()
self.assertEqual(self.invite.active, True)
def test_revoke_not_found(self):
"""Test revoking invite with invalid UUID"""
url = reverse(
'projectroles:api_invite_revoke',
kwargs={'projectinvite': INVALID_UUID},
)
response = self.request_knox(url, method='POST')
self.assertEqual(response.status_code, 404)
class TestProjectInviteResendAPIView(ProjectInviteMixin, TestCoreAPIViewsBase):
"""Tests for ProjectInviteResendAPIView"""
def setUp(self):
super().setUp()
# Create invite
self.invite = self._make_invite(
email=INVITE_USER_EMAIL,
project=self.project,
role=self.role_contributor,
issuer=self.user,
)
def test_resend(self):
"""Test resending an invite"""
# Assert preconditions
self.assertEqual(len(mail.outbox), 0)
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and mail status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(len(mail.outbox), 1)
def test_resend_inactive(self):
"""Test resending an inactive invite (should fail)"""
self.invite.active = False
self.invite.save()
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and mail status
self.assertEqual(response.status_code, 400, msg=response.content)
self.assertEqual(len(mail.outbox), 0)
def test_resend_delegate(self):
"""Test resending a delegate invite with sufficient perms"""
self.invite.role = self.role_delegate
self.invite.save()
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(url, method='POST')
# Assert response and mail status
self.assertEqual(response.status_code, 200, msg=response.content)
self.assertEqual(len(mail.outbox), 1)
def test_resend_delegate_no_perms(self):
"""Test resending a delegate invite without perms (should fail)"""
self.invite.role = self.role_delegate
self.invite.save()
delegate = self.make_user('delegate')
self._make_assignment(self.project, delegate, self.role_delegate)
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': self.invite.sodar_uuid},
)
response = self.request_knox(
url, method='POST', token=self.get_token(delegate)
)
# Assert response and mail status
self.assertEqual(response.status_code, 403, msg=response.content)
self.assertEqual(len(mail.outbox), 0)
def test_resend_not_found(self):
"""Test resending invite with invalid UUID"""
url = reverse(
'projectroles:api_invite_resend',
kwargs={'projectinvite': INVALID_UUID},
)
response = self.request_knox(url, method='POST')
self.assertEqual(response.status_code, 404)
class TestUserListAPIView(TestCoreAPIViewsBase):
"""Tests for UserListAPIView"""
def setUp(self):
super().setUp()
# Create additional users
self.domain_user = self.make_user('domain_user@domain')
def test_get(self):
"""Test UserListAPIView get() as a regular user"""
url = reverse('projectroles:api_user_list')
response = self.request_knox(
url, token=self.get_token(self.domain_user)
)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 1) # System users not returned
expected = [
{
'username': self.domain_user.username,
'name': self.domain_user.name,
'email': self.domain_user.email,
'sodar_uuid': str(self.domain_user.sodar_uuid),
}
]
self.assertEqual(response_data, expected)
def test_get_superuser(self):
"""Test UserListAPIView get() as a superuser"""
url = reverse('projectroles:api_user_list')
response = self.request_knox(url) # Default token is for superuser
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
self.assertEqual(len(response_data), 2)
expected = [
{
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
},
{
'username': self.domain_user.username,
'name': self.domain_user.name,
'email': self.domain_user.email,
'sodar_uuid': str(self.domain_user.sodar_uuid),
},
]
self.assertEqual(response_data, expected)
class TestCurrentUserRetrieveAPIView(TestCoreAPIViewsBase):
"""Tests for CurrentUserRetrieveAPIView"""
def setUp(self):
super().setUp()
# Create additional users
self.domain_user = self.make_user('domain_user@domain')
def test_get(self):
"""Test CurrentUserRetrieveAPIView get() as a regular user"""
url = reverse('projectroles:api_user_current')
response = self.request_knox(
url, token=self.get_token(self.domain_user)
)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'username': self.domain_user.username,
'name': self.domain_user.name,
'email': self.domain_user.email,
'sodar_uuid': str(self.domain_user.sodar_uuid),
}
self.assertEqual(response_data, expected)
def test_get_superuser(self):
"""Test CurrentUserRetrieveAPIView get() as superuser"""
url = reverse('projectroles:api_user_current')
response = self.request_knox(url)
# Assert response
self.assertEqual(response.status_code, 200)
response_data = json.loads(response.content)
expected = {
'username': self.user.username,
'name': self.user.name,
'email': self.user.email,
'sodar_uuid': str(self.user.sodar_uuid),
}
self.assertEqual(response_data, expected)
class TestAPIVersioning(TestCoreAPIViewsBase):
"""Tests for REST API view versioning using ProjectRetrieveAPIView"""
def setUp(self):
super().setUp()
self.url = reverse(
'projectroles:api_project_retrieve',
kwargs={'project': self.project.sodar_uuid},
)
def test_api_versioning(self):
"""Test SODAR API Access with correct version headers"""
response = self.request_knox(
self.url,
media_type=views_api.CORE_API_MEDIA_TYPE,
version=views_api.CORE_API_DEFAULT_VERSION,
)
self.assertEqual(response.status_code, 200)
def test_api_versioning_invalid_version(self):
"""Test SODAR API Access with unsupported version (should fail)"""
response = self.request_knox(
self.url,
media_type=views_api.CORE_API_MEDIA_TYPE,
version=CORE_API_VERSION_INVALID,
)
self.assertEqual(response.status_code, 406)
def test_api_versioning_invalid_media_type(self):
"""Test SODAR API Access with unsupported media type (should fail)"""
response = self.request_knox(
self.url,
media_type=CORE_API_MEDIA_TYPE_INVALID,
version=views_api.CORE_API_DEFAULT_VERSION,  # valid version, so only the media type is invalid
)
self.assertEqual(response.status_code, 406)
# TODO: To be updated once the legacy API view is redone for SODAR Core v1.0
class TestRemoteProjectGetAPIView(
ProjectMixin,
RoleAssignmentMixin,
RemoteSiteMixin,
RemoteProjectMixin,
SODARAPIViewTestMixin,
TestViewsBase,
):
"""Tests for remote project getting API view"""
media_type = views_api.CORE_API_MEDIA_TYPE
api_version = views_api.CORE_API_DEFAULT_VERSION
def setUp(self):
super().setUp()
# Set up projects
self.category = self._make_project(
'TestCategory', PROJECT_TYPE_CATEGORY, None
)
self.cat_owner_as = self._make_assignment(
self.category, self.user, self.role_owner
)
self.project = self._make_project(
'TestProject', PROJECT_TYPE_PROJECT, self.category
)
self.project_owner_as = self._make_assignment(
self.project, self.user, self.role_owner
)
# Create target site
self.target_site = self._make_site(
name=REMOTE_SITE_NAME,
url=REMOTE_SITE_URL,
mode=SITE_MODE_TARGET,
description=REMOTE_SITE_DESC,
secret=REMOTE_SITE_SECRET,
)
# Create remote project
self.remote_project = self._make_remote_project(
site=self.target_site,
project_uuid=self.project.sodar_uuid,
project=self.project,
level=SODAR_CONSTANTS['REMOTE_LEVEL_READ_INFO'],
)
self.remote_api = RemoteProjectAPI()
def test_get(self):
"""Test retrieving project data to the target site"""
response = self.client.get(
reverse(
'projectroles:api_remote_get',
kwargs={'secret': REMOTE_SITE_SECRET},
)
)
self.assertEqual(response.status_code, 200)
expected = self.remote_api.get_source_data(self.target_site)
response_dict = json.loads(response.content.decode('utf-8'))
self.assertEqual(response_dict, expected)
def test_get_invalid_secret(self):
"""Test retrieving project data with an invalid secret (should fail)"""
response = self.client.get(
reverse(
'projectroles:api_remote_get', kwargs={'secret': build_secret()}
)
)
self.assertEqual(response.status_code, 401)
src/model/model.py | kwasnydam/animal_disambiguation | 0 | 7576 | <filename>src/model/model.py
"""Contains the classification model I am going to use in my problem and some utility functions.
Functions
build_mmdisambiguator - build the core application object with the collaborators info
Classes
MMDisambiguator - core class of the application
"""
import pickle
import os
import numpy as np
from sklearn.linear_model import LogisticRegression
import sklearn.metrics as metrics
from src.data import dataset
DEFAULT_CLASSIFIER_SETTINGS = {
'solver': 'liblinear',
'class_weight': 'balanced',
'C': 1.
}
up = os.path.dirname
DEFAULT_ROOT_DIRECTORY = up(up(up(__file__))) # Get directory two levels above
DEFAULT_MODEL_DIRECTORY = os.path.join(DEFAULT_ROOT_DIRECTORY, 'models')
def try_opening_file_pickle(path):
try:
with open(path, 'rb') as f:
file_content = pickle.load(f)
except FileNotFoundError as e:
print('FileNotFound exception occurred when trying to open: {}. Disambiguator build failed.'.format(
path
))
raise e
except Exception as e:
print('Exception occurred when trying to open {}: {}'.format(path, e))
raise e
return file_content
def build_mmdisambiguator(data_model_params, data_model_path, classificator_parameters, classificator_path=None):
"""Given collaborator parameters and /or load paths, build the MMDisambiguator"""
if classificator_path is None:
data_model = dataset.TextLabelsVectorizer(data_model_params)
data_model_saved = try_opening_file_pickle(data_model_path)
data_model.deserialize(data_model_saved)
classificator = LogisticRegression(**classificator_parameters)
disambiguator = MMDisambiguator(data_model, classificator)
else:
disambiguator_pieces = try_opening_file_pickle(classificator_path)
data_model = dataset.TextLabelsVectorizer(data_model_params)
data_model.deserialize(disambiguator_pieces['data_model'])
classificator = disambiguator_pieces['classificator']
disambiguator = MMDisambiguator(data_model, classificator)
return disambiguator
class NotTrainedException(Exception):
pass
class MMDisambiguator:
"""The class representing the core logic of the disambiguation app.
It uses data_model for feature and text manipulation and Logistic Regression for performing prediction
With the 'source' flag the user controls whether training/prediction is performed from precomputed numerical
features or from text. If it is done from text, the input is put through feature extraction first.
Methods:
train - fit the classifier or both data model and classifier from training data
predict - get predictions on data; the data can be a single sample or multiple samples
transform_labels - get numerical representation of labels
performance_report - generate summary of performance
serialize - get representation for saving
"""
def __init__(self, data_model:dataset.TextLabelsVectorizer, classificator: LogisticRegression):
self.data_model = data_model
self.classificator = classificator
def is_trained(self):
"""Returns True if the underlying classification model is trained"""
return hasattr(self.classificator, "coef_")
def train(self, data, classes, report=False, source='features'):
"""Train the model with training data DATA and training labels CLASSES
Args:
data - training data (text or features)
classes- training classes (text or numerical)
report - flag, if True generate training report
source - 'features': numerical, train directly. 'text': fit vectorizer, transform, then train classifier
"""
if source == 'text':
features, classes = self.data_model.fit_transform(data, classes)
else:
features = data
self.classificator.fit(features, classes)
if report:
return self.performance_report(self._classify(self.classificator.predict_proba(features)), classes)
else:
return None
def transform_labels(self, labels):
"""Returns numerical encoding of text labels"""
return self.data_model.transform_labels(labels)
def predict(self, unseen_features, mode='classification', threshold=0.5, format='text', source='features'):
"""Predict classes on unseen data.
Args:
unseen_features - 'string' or list/pandas Series of 'string' if source = 'text'.
numpy array if source = 'features'
mode -
'classification' - predict probabilities and then make the classification decision based on 'threshold'
'prediction' - return predicted probabilities
threshold - if mode = 'classification', threshold for the decision
source - 'text' if sentences, 'features' if input already transformed
"""
if not self.is_trained():
raise NotTrainedException('Attempted to perform prediction on a model that has not been trained')
if source == 'text':
unseen_features = self.data_model.transform(unseen_features)
predicted_probability = self.classificator.predict_proba(unseen_features)
if mode == 'classification':
classification_binary = self._classify(predicted_probability, threshold).astype(int)  # np.int was removed in modern NumPy
classification = classification_binary
if format == 'text':
classification = self.data_model.get_classes_name(classification_binary)
result = []
for idx in range(classification.shape[0]):
result.append([classification[idx], predicted_probability[idx,classification_binary[idx]]])
result = np.asarray(result)
elif mode == 'prediction':
    result = predicted_probability
else:
    raise ValueError("mode must be 'classification' or 'prediction', got {!r}".format(mode))
return result
def _classify(self, predicted_probabilities, threshold=0.5):
"""Decision: class based on predicted probability and threshold"""
classes = predicted_probabilities.copy()[:,1]
classes[classes >= threshold] = 1
classes[classes < threshold] = 0
return classes
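# Example: a predict_proba row of [0.3, 0.7] with threshold 0.5 classifies as 1;
# column 1 is read as the positive-class probability.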
def performance_report(self, predicted_classes, real_classes):
"""Generates performance of the given classifier given predicted and real classes
Args:
predicted_classes - iterable containing the prediction results, len(num_of_samples)
real_classes - iterable containing ground truth classes, len(num_of_samples)
Output:
report - dictionary containing the following fields:
'accuracy',
'precision',
'recall',
'f1',
'confussion_matrix'
"""
report = {
'accuracy': metrics.accuracy_score(real_classes, predicted_classes),
'precision': metrics.precision_score(real_classes, predicted_classes),
'recall': metrics.recall_score(real_classes, predicted_classes),
'f1': metrics.f1_score(real_classes, predicted_classes),
'confussion_matrix': metrics.confusion_matrix(real_classes, predicted_classes, labels = [1, 0]).tolist()
}
return report
def serialize(self):
"""Returns objects and parameters necessary to perform prediciton"""
to_serialize = {
'data_model': self.data_model.serialize(),
'classificator': self.classificator
}
return to_serialize
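# --- Illustrative usage sketch (not part of the original module) ---
# A minimal end-to-end flow under stated assumptions: the pickle path, the
# empty data_model_params, and the sample sentences below are hypothetical
# stand-ins for whatever TextLabelsVectorizer actually expects.
if __name__ == '__main__':
    disambiguator = build_mmdisambiguator(
        data_model_params={},  # hypothetical vectorizer settings
        data_model_path=os.path.join(DEFAULT_MODEL_DIRECTORY, 'data_model.pkl'),
        classificator_parameters=DEFAULT_CLASSIFIER_SETTINGS,
    )
    # Train from raw text, then classify an unseen sentence.
    texts = ['It rained 5 mm overnight', 'MM released a new album']
    labels = ['unit', 'name']
    disambiguator.train(texts, labels, source='text')
    print(disambiguator.predict(['about 3 mm of drizzle'], source='text'))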
v0.5.0/nvidia/submission/code/recommendation/pytorch/load.py | myelintek/results | 44 | 7577 | # Copyright (c) 2018, deepakn94. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import namedtuple
import pandas as pd
RatingData = namedtuple('RatingData',
['items', 'users', 'ratings', 'min_date', 'max_date'])
def describe_ratings(ratings):
info = RatingData(items=len(ratings['item_id'].unique()),
users=len(ratings['user_id'].unique()),
ratings=len(ratings),
min_date=ratings['timestamp'].min(),
max_date=ratings['timestamp'].max())
print("{ratings} ratings on {items} items from {users} users"
" from {min_date} to {max_date}"
.format(**(info._asdict())))
return info
def process_movielens(ratings, sort=True):
ratings['timestamp'] = pd.to_datetime(ratings['timestamp'], unit='s')
if sort:
ratings.sort_values(by='timestamp', inplace=True)
describe_ratings(ratings)
return ratings
def load_ml_100k(filename, sort=True):
names = ['user_id', 'item_id', 'rating', 'timestamp']
ratings = pd.read_csv(filename, sep='\t', names=names)
return process_movielens(ratings, sort=sort)
def load_ml_1m(filename, sort=True):
names = ['user_id', 'item_id', 'rating', 'timestamp']
ratings = pd.read_csv(filename, sep='::', names=names, engine='python')
return process_movielens(ratings, sort=sort)
def load_ml_10m(filename, sort=True):
names = ['user_id', 'item_id', 'rating', 'timestamp']
ratings = pd.read_csv(filename, sep='::', names=names, engine='python')
return process_movielens(ratings, sort=sort)
def load_ml_20m(filename, sort=True):
ratings = pd.read_csv(filename)
ratings['timestamp'] = pd.to_datetime(ratings['timestamp'], unit='s')
names = {'userId': 'user_id', 'movieId': 'item_id'}
ratings.rename(columns=names, inplace=True)
return process_movielens(ratings, sort=sort)
DATASETS = [k.replace('load_', '') for k in locals().keys() if "load_" in k]
def get_dataset_name(filename):
for dataset in DATASETS:
if dataset in filename.replace('-', '_').lower():
return dataset
raise NotImplementedError
def implicit_load(filename, sort=True):
func = globals()["load_" + get_dataset_name(filename)]
return func(filename, sort=sort)
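# --- Illustrative usage sketch (not part of the original module) ---
# implicit_load dispatches on the dataset name embedded in the file name
# ('-' is normalized to '_'); the path below is hypothetical.
if __name__ == '__main__':
    ratings = implicit_load('data/ml-20m/ratings.csv', sort=True)
    print(ratings.head())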
rpython/jit/backend/llsupport/test/test_rewrite.py | jptomo/pypy-lang-scheme | 1 | 7578 | from rpython.jit.backend.llsupport.descr import get_size_descr,\
get_field_descr, get_array_descr, ArrayDescr, FieldDescr,\
SizeDescr, get_interiorfield_descr
from rpython.jit.backend.llsupport.gc import GcLLDescr_boehm,\
GcLLDescr_framework
from rpython.jit.backend.llsupport import jitframe
from rpython.jit.metainterp.gc import get_description
from rpython.jit.tool.oparser import parse
from rpython.jit.metainterp.optimizeopt.util import equaloplists
from rpython.jit.metainterp.history import JitCellToken, FLOAT
from rpython.jit.metainterp.history import AbstractFailDescr
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rtyper import rclass
from rpython.jit.backend.x86.arch import WORD
class Evaluator(object):
def __init__(self, scope):
self.scope = scope
def __getitem__(self, key):
return eval(key, self.scope)
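# Evaluator makes '%'-templates evaluate full Python expressions against a
# scope: "%(a + b)d" % Evaluator({'a': 2, 'b': 3}) yields '5'. check_rewrite
# relies on this for sizes such as %(adescr.basesize + 10 * adescr.itemsize)d.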
class FakeLoopToken(object):
pass
o_vtable = lltype.malloc(rclass.OBJECT_VTABLE, immortal=True)
class RewriteTests(object):
def check_rewrite(self, frm_operations, to_operations, **namespace):
S = lltype.GcStruct('S', ('x', lltype.Signed),
('y', lltype.Signed))
sdescr = get_size_descr(self.gc_ll_descr, S)
sdescr.tid = 1234
#
T = lltype.GcStruct('T', ('y', lltype.Signed),
('z', lltype.Ptr(S)),
('t', lltype.Signed))
tdescr = get_size_descr(self.gc_ll_descr, T)
tdescr.tid = 5678
tzdescr = get_field_descr(self.gc_ll_descr, T, 'z')
#
A = lltype.GcArray(lltype.Signed)
adescr = get_array_descr(self.gc_ll_descr, A)
adescr.tid = 4321
alendescr = adescr.lendescr
#
B = lltype.GcArray(lltype.Char)
bdescr = get_array_descr(self.gc_ll_descr, B)
bdescr.tid = 8765
blendescr = bdescr.lendescr
#
C = lltype.GcArray(lltype.Ptr(S))
cdescr = get_array_descr(self.gc_ll_descr, C)
cdescr.tid = 8111
clendescr = cdescr.lendescr
#
E = lltype.GcStruct('Empty')
edescr = get_size_descr(self.gc_ll_descr, E)
edescr.tid = 9000
#
vtable_descr = self.gc_ll_descr.fielddescr_vtable
O = lltype.GcStruct('O', ('parent', rclass.OBJECT),
('x', lltype.Signed))
o_descr = self.cpu.sizeof(O, True)
o_vtable = globals()['o_vtable']
#
tiddescr = self.gc_ll_descr.fielddescr_tid
wbdescr = self.gc_ll_descr.write_barrier_descr
WORD = globals()['WORD']
#
strdescr = self.gc_ll_descr.str_descr
unicodedescr = self.gc_ll_descr.unicode_descr
strlendescr = strdescr.lendescr
unicodelendescr = unicodedescr.lendescr
strhashdescr = self.gc_ll_descr.str_hash_descr
unicodehashdescr = self.gc_ll_descr.unicode_hash_descr
casmdescr = JitCellToken()
clt = FakeLoopToken()
clt._ll_initial_locs = [0, 8]
frame_info = lltype.malloc(jitframe.JITFRAMEINFO, flavor='raw')
clt.frame_info = frame_info
frame_info.jfi_frame_depth = 13
frame_info.jfi_frame_size = 255
framedescrs = self.gc_ll_descr.getframedescrs(self.cpu)
framelendescr = framedescrs.arraydescr.lendescr
jfi_frame_depth = framedescrs.jfi_frame_depth
jfi_frame_size = framedescrs.jfi_frame_size
jf_frame_info = framedescrs.jf_frame_info
jf_savedata = framedescrs.jf_savedata
jf_force_descr = framedescrs.jf_force_descr
jf_descr = framedescrs.jf_descr
jf_guard_exc = framedescrs.jf_guard_exc
jf_forward = framedescrs.jf_forward
jf_extra_stack_depth = framedescrs.jf_extra_stack_depth
signedframedescr = self.cpu.signedframedescr
floatframedescr = self.cpu.floatframedescr
casmdescr.compiled_loop_token = clt
#
guarddescr = AbstractFailDescr()
#
namespace.update(locals())
#
for funcname in self.gc_ll_descr._generated_functions:
namespace[funcname] = self.gc_ll_descr.get_malloc_fn(funcname)
namespace[funcname + '_descr'] = getattr(self.gc_ll_descr,
'%s_descr' % funcname)
#
ops = parse(frm_operations, namespace=namespace)
expected = parse(to_operations % Evaluator(namespace),
namespace=namespace)
operations = self.gc_ll_descr.rewrite_assembler(self.cpu,
ops.operations,
[])
remap = {}
for a, b in zip(ops.inputargs, expected.inputargs):
remap[b] = a
equaloplists(operations, expected.operations, remap=remap)
lltype.free(frame_info, flavor='raw')
class FakeTracker(object):
pass
class BaseFakeCPU(object):
JITFRAME_FIXED_SIZE = 0
def __init__(self):
self.tracker = FakeTracker()
self._cache = {}
self.signedframedescr = ArrayDescr(3, 8, FieldDescr('len', 0, 0, 0), 0)
self.floatframedescr = ArrayDescr(5, 8, FieldDescr('len', 0, 0, 0), 0)
def getarraydescr_for_frame(self, tp):
if tp == FLOAT:
return self.floatframedescr
return self.signedframedescr
def unpack_arraydescr_size(self, d):
return 0, d.itemsize, 0
def unpack_fielddescr(self, d):
return d.offset
def arraydescrof(self, ARRAY):
try:
return self._cache[ARRAY]
except KeyError:
r = ArrayDescr(1, 2, FieldDescr('len', 0, 0, 0), 0)
self._cache[ARRAY] = r
return r
def fielddescrof(self, STRUCT, fname):
key = (STRUCT, fname)
try:
return self._cache[key]
except KeyError:
r = FieldDescr(fname, 1, 1, 1)
self._cache[key] = r
return r
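# Note: the _cache dicts above matter for the tests: repeated descr lookups
# must return the very same object so that the rewritten operations compare
# equal (via equaloplists) to the parsed expected operations.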
class TestBoehm(RewriteTests):
def setup_method(self, meth):
class FakeCPU(BaseFakeCPU):
def sizeof(self, STRUCT, is_object):
assert is_object
return SizeDescr(102, gc_fielddescrs=[],
vtable=o_vtable)
self.cpu = FakeCPU()
self.gc_ll_descr = GcLLDescr_boehm(None, None, None)
def test_new(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
jump()
""")
def test_no_collapsing(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new(descr=sdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
p1 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
jump()
""")
def test_new_array_fixed(self):
self.check_rewrite("""
[]
p0 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(adescr.basesize)d, \
10, \
%(adescr.itemsize)d, \
%(adescr.lendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
## should ideally be:
## p0 = call_malloc_gc(ConstClass(malloc_fixedsize), \
## %(adescr.basesize + 10 * adescr.itemsize)d, \
## descr=malloc_fixedsize_descr)
## setfield_gc(p0, 10, descr=alendescr)
def test_new_array_variable(self):
self.check_rewrite("""
[i1]
p0 = new_array(i1, descr=adescr)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(adescr.basesize)d, \
i1, \
%(adescr.itemsize)d, \
%(adescr.lendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
def test_new_with_vtable(self):
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), 102, \
descr=malloc_fixedsize_descr)
setfield_gc(p0, ConstClass(o_vtable), descr=vtable_descr)
jump()
""")
def test_newstr(self):
self.check_rewrite("""
[i1]
p0 = newstr(i1)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(strdescr.basesize)d, \
i1, \
%(strdescr.itemsize)d, \
%(strlendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
def test_newunicode(self):
self.check_rewrite("""
[i1]
p0 = newunicode(10)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(unicodedescr.basesize)d, \
10, \
%(unicodedescr.itemsize)d, \
%(unicodelendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
## should ideally be:
## p0 = call_malloc_gc(ConstClass(malloc_fixedsize), \
## %(unicodedescr.basesize + \
## 10 * unicodedescr.itemsize)d, \
## descr=malloc_fixedsize_descr)
## setfield_gc(p0, 10, descr=unicodelendescr)
class TestFramework(RewriteTests):
def setup_method(self, meth):
class config_(object):
class translation(object):
gc = 'minimark'
gcrootfinder = 'asmgcc'
gctransformer = 'framework'
gcremovetypeptr = False
gcdescr = get_description(config_)
self.gc_ll_descr = GcLLDescr_framework(gcdescr, None, None, None,
really_not_translated=True)
self.gc_ll_descr.write_barrier_descr.has_write_barrier_from_array = (
lambda cpu: True)
self.gc_ll_descr.malloc_zero_filled = False
#
class FakeCPU(BaseFakeCPU):
def sizeof(self, STRUCT, is_object):
descr = SizeDescr(104, gc_fielddescrs=[])
descr.tid = 9315
return descr
self.cpu = FakeCPU()
def test_rewrite_assembler_new_to_malloc(self):
self.check_rewrite("""
[p1]
p0 = new(descr=sdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(sdescr.size)d)
setfield_gc(p0, 1234, descr=tiddescr)
jump()
""")
def test_rewrite_assembler_new3_to_malloc(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new(descr=tdescr)
p2 = new(descr=sdescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(sdescr.size + tdescr.size + sdescr.size)d)
setfield_gc(p0, 1234, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(sdescr.size)d)
setfield_gc(p1, 5678, descr=tiddescr)
p2 = nursery_ptr_increment(p1, %(tdescr.size)d)
setfield_gc(p2, 1234, descr=tiddescr)
zero_ptr_field(p1, %(tdescr.gc_fielddescrs[0].offset)s)
jump()
""")
def test_rewrite_assembler_new_array_fixed_to_malloc(self):
self.check_rewrite("""
[]
p0 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(adescr.basesize + 10 * adescr.itemsize)d)
setfield_gc(p0, 4321, descr=tiddescr)
setfield_gc(p0, 10, descr=alendescr)
jump()
""")
def test_rewrite_assembler_new_and_new_array_fixed_to_malloc(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(sdescr.size + \
adescr.basesize + 10 * adescr.itemsize)d)
setfield_gc(p0, 1234, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(sdescr.size)d)
setfield_gc(p1, 4321, descr=tiddescr)
setfield_gc(p1, 10, descr=alendescr)
jump()
""")
def test_rewrite_assembler_round_up(self):
self.check_rewrite("""
[]
p0 = new_array(6, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(bdescr.basesize + 8)d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 6, descr=blendescr)
jump()
""")
def test_rewrite_assembler_round_up_always(self):
self.check_rewrite("""
[]
p0 = new_array(5, descr=bdescr)
p1 = new_array(5, descr=bdescr)
p2 = new_array(5, descr=bdescr)
p3 = new_array(5, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(4 * (bdescr.basesize + 8))d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 5, descr=blendescr)
p1 = nursery_ptr_increment(p0, %(bdescr.basesize + 8)d)
setfield_gc(p1, 8765, descr=tiddescr)
setfield_gc(p1, 5, descr=blendescr)
p2 = nursery_ptr_increment(p1, %(bdescr.basesize + 8)d)
setfield_gc(p2, 8765, descr=tiddescr)
setfield_gc(p2, 5, descr=blendescr)
p3 = nursery_ptr_increment(p2, %(bdescr.basesize + 8)d)
setfield_gc(p3, 8765, descr=tiddescr)
setfield_gc(p3, 5, descr=blendescr)
jump()
""")
def test_rewrite_assembler_minimal_size(self):
self.check_rewrite("""
[]
p0 = new(descr=edescr)
p1 = new(descr=edescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(4*WORD)d)
setfield_gc(p0, 9000, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(2*WORD)d)
setfield_gc(p1, 9000, descr=tiddescr)
jump()
""")
def test_rewrite_assembler_variable_size(self):
self.check_rewrite("""
[i0]
p0 = new_array(i0, descr=bdescr)
jump(i0)
""", """
[i0]
p0 = call_malloc_nursery_varsize(0, 1, i0, descr=bdescr)
setfield_gc(p0, i0, descr=blendescr)
jump(i0)
""")
def test_rewrite_new_string(self):
self.check_rewrite("""
[i0]
p0 = newstr(i0)
jump(i0)
""", """
[i0]
p0 = call_malloc_nursery_varsize(1, 1, i0, descr=strdescr)
setfield_gc(p0, i0, descr=strlendescr)
setfield_gc(p0, 0, descr=strhashdescr)
jump(i0)
""")
def test_rewrite_assembler_nonstandard_array(self):
# a non-standard array is a bit hard to get; e.g. GcArray(Float)
# is like that on Win32, but not on Linux. Build one manually...
NONSTD = lltype.GcArray(lltype.Float)
nonstd_descr = get_array_descr(self.gc_ll_descr, NONSTD)
nonstd_descr.tid = 6464
nonstd_descr.basesize = 64 # <= hacked
nonstd_descr.itemsize = 8
nonstd_descr_gcref = 123
self.check_rewrite("""
[i0, p1]
p0 = new_array(i0, descr=nonstd_descr)
setarrayitem_gc(p0, i0, p1)
jump(i0)
""", """
[i0, p1]
p0 = call_malloc_gc(ConstClass(malloc_array_nonstandard), \
64, 8, \
%(nonstd_descr.lendescr.offset)d, \
6464, i0, \
descr=malloc_array_nonstandard_descr)
cond_call_gc_wb_array(p0, i0, descr=wbdescr)
setarrayitem_gc(p0, i0, p1)
jump(i0)
""", nonstd_descr=nonstd_descr)
def test_rewrite_assembler_maximal_size_1(self):
self.gc_ll_descr.max_size_of_young_obj = 100
self.check_rewrite("""
[]
p0 = new_array(103, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), 1, \
%(bdescr.tid)d, 103, \
descr=malloc_array_descr)
jump()
""")
def test_rewrite_assembler_maximal_size_2(self):
self.gc_ll_descr.max_size_of_young_obj = 300
self.check_rewrite("""
[]
p0 = new_array(101, descr=bdescr)
p1 = new_array(102, descr=bdescr) # two new_arrays can be combined
p2 = new_array(103, descr=bdescr) # but not all three
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(2 * (bdescr.basesize + 104))d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 101, descr=blendescr)
p1 = nursery_ptr_increment(p0, %(bdescr.basesize + 104)d)
setfield_gc(p1, 8765, descr=tiddescr)
setfield_gc(p1, 102, descr=blendescr)
p2 = call_malloc_nursery( \
%(bdescr.basesize + 104)d)
setfield_gc(p2, 8765, descr=tiddescr)
setfield_gc(p2, 103, descr=blendescr)
jump()
""")
def test_rewrite_assembler_huge_size(self):
# "huge" is defined as "larger than 0xffffff bytes, or 16MB"
self.check_rewrite("""
[]
p0 = new_array(20000000, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), 1, \
%(bdescr.tid)d, 20000000, \
descr=malloc_array_descr)
jump()
""")
def test_new_with_vtable(self):
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(104) # rounded up
setfield_gc(p0, 9315, descr=tiddescr)
setfield_gc(p0, 0, descr=vtable_descr)
jump()
""")
def test_new_with_vtable_too_big(self):
self.gc_ll_descr.max_size_of_young_obj = 100
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_big_fixedsize), 104, 9315, \
descr=malloc_big_fixedsize_descr)
setfield_gc(p0, 0, descr=vtable_descr)
jump()
""")
def test_rewrite_assembler_newstr_newunicode(self):
self.check_rewrite("""
[i2]
p0 = newstr(14)
p1 = newunicode(10)
p2 = newunicode(i2)
p3 = newstr(i2)
jump()
""", """
[i2]
p0 = call_malloc_nursery( \
%(strdescr.basesize + 16 * strdescr.itemsize + \
unicodedescr.basesize + 10 * unicodedescr.itemsize)d)
setfield_gc(p0, %(strdescr.tid)d, descr=tiddescr)
setfield_gc(p0, 14, descr=strlendescr)
setfield_gc(p0, 0, descr=strhashdescr)
p1 = nursery_ptr_increment(p0, %(strdescr.basesize + 16 * strdescr.itemsize)d)
setfield_gc(p1, %(unicodedescr.tid)d, descr=tiddescr)
setfield_gc(p1, 10, descr=unicodelendescr)
setfield_gc(p1, 0, descr=unicodehashdescr)
p2 = call_malloc_nursery_varsize(2, %(unicodedescr.itemsize)d, i2,\
descr=unicodedescr)
setfield_gc(p2, i2, descr=unicodelendescr)
setfield_gc(p2, 0, descr=unicodehashdescr)
p3 = call_malloc_nursery_varsize(1, 1, i2, \
descr=strdescr)
setfield_gc(p3, i2, descr=strlendescr)
setfield_gc(p3, 0, descr=strhashdescr)
jump()
""")
def test_write_barrier_before_setfield_gc(self):
self.check_rewrite("""
[p1, p2]
setfield_gc(p1, p2, descr=tzdescr)
jump()
""", """
[p1, p2]
cond_call_gc_wb(p1, descr=wbdescr)
setfield_gc(p1, p2, descr=tzdescr)
jump()
""")
def test_write_barrier_before_array_without_from_array(self):
self.gc_ll_descr.write_barrier_descr.has_write_barrier_from_array = (
lambda cpu: False)
self.check_rewrite("""
[p1, i2, p3]
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[p1, i2, p3]
cond_call_gc_wb(p1, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_short_array(self):
self.gc_ll_descr.max_size_of_young_obj = 2000
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(129, descr=cdescr)
call_n(123456)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 129 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 129, descr=clendescr)
zero_array(p1, 0, 129, descr=cdescr)
call_n(123456)
cond_call_gc_wb(p1, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_long_array(self):
# the limit of "being too long" is fixed, arbitrarily, at 130
self.gc_ll_descr.max_size_of_young_obj = 2000
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(130, descr=cdescr)
call_n(123456)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 130 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 130, descr=clendescr)
zero_array(p1, 0, 130, descr=cdescr)
call_n(123456)
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_unknown_array(self):
self.check_rewrite("""
[p1, i2, p3]
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[p1, i2, p3]
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_label_makes_size_unknown(self):
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(5, descr=cdescr)
label(p1, i2, p3)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 5, descr=clendescr)
zero_array(p1, 0, 5, descr=cdescr)
label(p1, i2, p3)
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_setinteriorfield_gc(self):
S1 = lltype.GcStruct('S1')
INTERIOR = lltype.GcArray(('z', lltype.Ptr(S1)))
interiordescr = get_array_descr(self.gc_ll_descr, INTERIOR)
interiordescr.tid = 1291
interiorlendescr = interiordescr.lendescr
interiorzdescr = get_interiorfield_descr(self.gc_ll_descr,
INTERIOR, 'z')
self.check_rewrite("""
[p1, p2]
setinteriorfield_gc(p1, 0, p2, descr=interiorzdescr)
jump(p1, p2)
""", """
[p1, p2]
cond_call_gc_wb_array(p1, 0, descr=wbdescr)
setinteriorfield_gc(p1, 0, p2, descr=interiorzdescr)
jump(p1, p2)
""", interiorzdescr=interiorzdescr)
def test_initialization_store(self):
self.check_rewrite("""
[p1]
p0 = new(descr=tdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_initialization_store_2(self):
self.check_rewrite("""
[]
p0 = new(descr=tdescr)
p1 = new(descr=sdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(tdescr.size + sdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(tdescr.size)d)
setfield_gc(p1, 1234, descr=tiddescr)
# <<<no cond_call_gc_wb here>>>
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_initialization_store_array(self):
self.check_rewrite("""
[p1, i2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, i2, p1, descr=cdescr)
jump()
""", """
[p1, i2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 5, descr=cdescr)
setarrayitem_gc(p0, i2, p1, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 1, p1, descr=cdescr)
setarrayitem_gc(p0, 0, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 2, 3, descr=cdescr)
setarrayitem_gc(p0, 1, p1, descr=cdescr)
setarrayitem_gc(p0, 0, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_right(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 3, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
jump()
""")
def test_zero_array_not_reduced_at_all(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_completely(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 5, 0, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left_with_call(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
call_n(321321)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 1, 4, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
call_n(321321)
cond_call_gc_wb(p0, descr=wbdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left_with_label(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
label(p0, p2)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 1, 4, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
label(p0, p2)
cond_call_gc_wb_array(p0, 1, descr=wbdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_varsize(self):
self.check_rewrite("""
[p1, p2, i3]
p0 = new_array_clear(i3, descr=bdescr)
jump()
""", """
[p1, p2, i3]
p0 = call_malloc_nursery_varsize(0, 1, i3, descr=bdescr)
setfield_gc(p0, i3, descr=blendescr)
zero_array(p0, 0, i3, descr=bdescr)
jump()
""")
def test_zero_array_varsize_cannot_reduce(self):
self.check_rewrite("""
[p1, p2, i3]
p0 = new_array_clear(i3, descr=bdescr)
setarrayitem_gc(p0, 0, p1, descr=bdescr)
jump()
""", """
[p1, p2, i3]
p0 = call_malloc_nursery_varsize(0, 1, i3, descr=bdescr)
setfield_gc(p0, i3, descr=blendescr)
zero_array(p0, 0, i3, descr=bdescr)
cond_call_gc_wb_array(p0, 0, descr=wbdescr)
setarrayitem_gc(p0, 0, p1, descr=bdescr)
jump()
""")
def test_initialization_store_potentially_large_array(self):
# the write barrier cannot be omitted, because we might get
# an array with cards and the GC assumes that the write
# barrier is always called, even on young (but large) arrays
self.check_rewrite("""
[i0, p1, i2]
p0 = new_array(i0, descr=bdescr)
setarrayitem_gc(p0, i2, p1, descr=bdescr)
jump()
""", """
[i0, p1, i2]
p0 = call_malloc_nursery_varsize(0, 1, i0, descr=bdescr)
setfield_gc(p0, i0, descr=blendescr)
cond_call_gc_wb_array(p0, i2, descr=wbdescr)
setarrayitem_gc(p0, i2, p1, descr=bdescr)
jump()
""")
def test_non_initialization_store(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
p1 = newstr(i0)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
p1 = call_malloc_nursery_varsize(1, 1, i0, \
descr=strdescr)
setfield_gc(p1, i0, descr=strlendescr)
setfield_gc(p1, 0, descr=strhashdescr)
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_non_initialization_store_label(self):
self.check_rewrite("""
[p1]
p0 = new(descr=tdescr)
label(p0, p1)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
label(p0, p1)
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_multiple_writes(self):
self.check_rewrite("""
[p0, p1, p2]
setfield_gc(p0, p1, descr=tzdescr)
setfield_gc(p0, p2, descr=tzdescr)
jump(p1, p2, p0)
""", """
[p0, p1, p2]
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
setfield_gc(p0, p2, descr=tzdescr)
jump(p1, p2, p0)
""")
def test_rewrite_call_assembler(self):
self.check_rewrite("""
[i0, f0]
i2 = call_assembler_i(i0, f0, descr=casmdescr)
""", """
[i0, f0]
i1 = getfield_raw_i(ConstClass(frame_info), descr=jfi_frame_size)
p1 = call_malloc_nursery_varsize_frame(i1)
setfield_gc(p1, 0, descr=tiddescr)
i2 = getfield_raw_i(ConstClass(frame_info), descr=jfi_frame_depth)
setfield_gc(p1, 0, descr=jf_extra_stack_depth)
setfield_gc(p1, NULL, descr=jf_savedata)
setfield_gc(p1, NULL, descr=jf_force_descr)
setfield_gc(p1, NULL, descr=jf_descr)
setfield_gc(p1, NULL, descr=jf_guard_exc)
setfield_gc(p1, NULL, descr=jf_forward)
setfield_gc(p1, i2, descr=framelendescr)
setfield_gc(p1, ConstClass(frame_info), descr=jf_frame_info)
setarrayitem_gc(p1, 0, i0, descr=signedframedescr)
setarrayitem_gc(p1, 1, f0, descr=floatframedescr)
i3 = call_assembler_i(p1, descr=casmdescr)
""")
def test_int_add_ovf(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
i1 = int_add_ovf(i0, 123)
guard_overflow(descr=guarddescr) []
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
i1 = int_add_ovf(i0, 123)
guard_overflow(descr=guarddescr) []
jump()
""")
def test_int_gt(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
i1 = int_gt(i0, 123)
guard_false(i1, descr=guarddescr) []
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
i1 = int_gt(i0, 123)
guard_false(i1, descr=guarddescr) []
jump()
""")
def test_zero_ptr_field_before_getfield(self):
# This case may need to be fixed in the metainterp/optimizeopt
# already so that it no longer occurs for rewrite.py. But anyway
# it's a good idea to make sure rewrite.py is correct on its own.
self.check_rewrite("""
[]
p0 = new(descr=tdescr)
p1 = getfield_gc_r(p0, descr=tdescr)
jump(p1)
""", """
[]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
p1 = getfield_gc_r(p0, descr=tdescr)
jump(p1)
""")
| from rpython.jit.backend.llsupport.descr import get_size_descr,\
get_field_descr, get_array_descr, ArrayDescr, FieldDescr,\
SizeDescr, get_interiorfield_descr
from rpython.jit.backend.llsupport.gc import GcLLDescr_boehm,\
GcLLDescr_framework
from rpython.jit.backend.llsupport import jitframe
from rpython.jit.metainterp.gc import get_description
from rpython.jit.tool.oparser import parse
from rpython.jit.metainterp.optimizeopt.util import equaloplists
from rpython.jit.metainterp.history import JitCellToken, FLOAT
from rpython.jit.metainterp.history import AbstractFailDescr
from rpython.rtyper.lltypesystem import lltype, rffi
from rpython.rtyper import rclass
from rpython.jit.backend.x86.arch import WORD
class Evaluator(object):
def __init__(self, scope):
self.scope = scope
def __getitem__(self, key):
return eval(key, self.scope)
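# --- editor's sketch, not part of the original file --------------------------
# Evaluator is what lets the expected-operation templates below embed
# arithmetic such as "%(sdescr.size + 8)d": the "%" operator hands the raw
# text between the parentheses to __getitem__, which eval()s it in the
# captured namespace.  Tiny self-check (runs harmlessly at import time):
assert "%(a + 2 * b)d" % Evaluator({'a': 1, 'b': 3}) == '7'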
class FakeLoopToken(object):
pass
o_vtable = lltype.malloc(rclass.OBJECT_VTABLE, immortal=True)
class RewriteTests(object):
def check_rewrite(self, frm_operations, to_operations, **namespace):
S = lltype.GcStruct('S', ('x', lltype.Signed),
('y', lltype.Signed))
sdescr = get_size_descr(self.gc_ll_descr, S)
sdescr.tid = 1234
#
T = lltype.GcStruct('T', ('y', lltype.Signed),
('z', lltype.Ptr(S)),
('t', lltype.Signed))
tdescr = get_size_descr(self.gc_ll_descr, T)
tdescr.tid = 5678
tzdescr = get_field_descr(self.gc_ll_descr, T, 'z')
#
A = lltype.GcArray(lltype.Signed)
adescr = get_array_descr(self.gc_ll_descr, A)
adescr.tid = 4321
alendescr = adescr.lendescr
#
B = lltype.GcArray(lltype.Char)
bdescr = get_array_descr(self.gc_ll_descr, B)
bdescr.tid = 8765
blendescr = bdescr.lendescr
#
C = lltype.GcArray(lltype.Ptr(S))
cdescr = get_array_descr(self.gc_ll_descr, C)
cdescr.tid = 8111
clendescr = cdescr.lendescr
#
E = lltype.GcStruct('Empty')
edescr = get_size_descr(self.gc_ll_descr, E)
edescr.tid = 9000
#
vtable_descr = self.gc_ll_descr.fielddescr_vtable
O = lltype.GcStruct('O', ('parent', rclass.OBJECT),
('x', lltype.Signed))
o_descr = self.cpu.sizeof(O, True)
o_vtable = globals()['o_vtable']
#
tiddescr = self.gc_ll_descr.fielddescr_tid
wbdescr = self.gc_ll_descr.write_barrier_descr
WORD = globals()['WORD']
#
strdescr = self.gc_ll_descr.str_descr
unicodedescr = self.gc_ll_descr.unicode_descr
strlendescr = strdescr.lendescr
unicodelendescr = unicodedescr.lendescr
strhashdescr = self.gc_ll_descr.str_hash_descr
unicodehashdescr = self.gc_ll_descr.unicode_hash_descr
casmdescr = JitCellToken()
clt = FakeLoopToken()
clt._ll_initial_locs = [0, 8]
frame_info = lltype.malloc(jitframe.JITFRAMEINFO, flavor='raw')
clt.frame_info = frame_info
frame_info.jfi_frame_depth = 13
frame_info.jfi_frame_size = 255
framedescrs = self.gc_ll_descr.getframedescrs(self.cpu)
framelendescr = framedescrs.arraydescr.lendescr
jfi_frame_depth = framedescrs.jfi_frame_depth
jfi_frame_size = framedescrs.jfi_frame_size
jf_frame_info = framedescrs.jf_frame_info
jf_savedata = framedescrs.jf_savedata
jf_force_descr = framedescrs.jf_force_descr
jf_descr = framedescrs.jf_descr
jf_guard_exc = framedescrs.jf_guard_exc
jf_forward = framedescrs.jf_forward
jf_extra_stack_depth = framedescrs.jf_extra_stack_depth
signedframedescr = self.cpu.signedframedescr
floatframedescr = self.cpu.floatframedescr
casmdescr.compiled_loop_token = clt
#
guarddescr = AbstractFailDescr()
#
namespace.update(locals())
#
for funcname in self.gc_ll_descr._generated_functions:
namespace[funcname] = self.gc_ll_descr.get_malloc_fn(funcname)
namespace[funcname + '_descr'] = getattr(self.gc_ll_descr,
'%s_descr' % funcname)
#
ops = parse(frm_operations, namespace=namespace)
expected = parse(to_operations % Evaluator(namespace),
namespace=namespace)
operations = self.gc_ll_descr.rewrite_assembler(self.cpu,
ops.operations,
[])
remap = {}
for a, b in zip(ops.inputargs, expected.inputargs):
remap[b] = a
equaloplists(operations, expected.operations, remap=remap)
lltype.free(frame_info, flavor='raw')
class FakeTracker(object):
pass
class BaseFakeCPU(object):
JITFRAME_FIXED_SIZE = 0
def __init__(self):
self.tracker = FakeTracker()
self._cache = {}
self.signedframedescr = ArrayDescr(3, 8, FieldDescr('len', 0, 0, 0), 0)
self.floatframedescr = ArrayDescr(5, 8, FieldDescr('len', 0, 0, 0), 0)
def getarraydescr_for_frame(self, tp):
if tp == FLOAT:
return self.floatframedescr
return self.signedframedescr
def unpack_arraydescr_size(self, d):
return 0, d.itemsize, 0
def unpack_fielddescr(self, d):
return d.offset
def arraydescrof(self, ARRAY):
try:
return self._cache[ARRAY]
except KeyError:
r = ArrayDescr(1, 2, FieldDescr('len', 0, 0, 0), 0)
self._cache[ARRAY] = r
return r
def fielddescrof(self, STRUCT, fname):
key = (STRUCT, fname)
try:
return self._cache[key]
except KeyError:
r = FieldDescr(fname, 1, 1, 1)
self._cache[key] = r
return r
class TestBoehm(RewriteTests):
def setup_method(self, meth):
class FakeCPU(BaseFakeCPU):
def sizeof(self, STRUCT, is_object):
assert is_object
return SizeDescr(102, gc_fielddescrs=[],
vtable=o_vtable)
self.cpu = FakeCPU()
self.gc_ll_descr = GcLLDescr_boehm(None, None, None)
def test_new(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
jump()
""")
def test_no_collapsing(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new(descr=sdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
p1 = call_malloc_gc(ConstClass(malloc_fixedsize), %(sdescr.size)d,\
descr=malloc_fixedsize_descr)
jump()
""")
def test_new_array_fixed(self):
self.check_rewrite("""
[]
p0 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(adescr.basesize)d, \
10, \
%(adescr.itemsize)d, \
%(adescr.lendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
## should ideally be:
## p0 = call_malloc_gc(ConstClass(malloc_fixedsize), \
## %(adescr.basesize + 10 * adescr.itemsize)d, \
## descr=malloc_fixedsize_descr)
## setfield_gc(p0, 10, descr=alendescr)
def test_new_array_variable(self):
self.check_rewrite("""
[i1]
p0 = new_array(i1, descr=adescr)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(adescr.basesize)d, \
i1, \
%(adescr.itemsize)d, \
%(adescr.lendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
def test_new_with_vtable(self):
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_fixedsize), 102, \
descr=malloc_fixedsize_descr)
setfield_gc(p0, ConstClass(o_vtable), descr=vtable_descr)
jump()
""")
def test_newstr(self):
self.check_rewrite("""
[i1]
p0 = newstr(i1)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(strdescr.basesize)d, \
i1, \
%(strdescr.itemsize)d, \
%(strlendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
def test_newunicode(self):
self.check_rewrite("""
[i1]
p0 = newunicode(10)
jump()
""", """
[i1]
p0 = call_malloc_gc(ConstClass(malloc_array), \
%(unicodedescr.basesize)d, \
10, \
%(unicodedescr.itemsize)d, \
%(unicodelendescr.offset)d, \
descr=malloc_array_descr)
jump()
""")
## should ideally be:
## p0 = call_malloc_gc(ConstClass(malloc_fixedsize), \
## %(unicodedescr.basesize + \
## 10 * unicodedescr.itemsize)d, \
## descr=malloc_fixedsize_descr)
## setfield_gc(p0, 10, descr=unicodelendescr)
class TestFramework(RewriteTests):
def setup_method(self, meth):
class config_(object):
class translation(object):
gc = 'minimark'
gcrootfinder = 'asmgcc'
gctransformer = 'framework'
gcremovetypeptr = False
gcdescr = get_description(config_)
self.gc_ll_descr = GcLLDescr_framework(gcdescr, None, None, None,
really_not_translated=True)
self.gc_ll_descr.write_barrier_descr.has_write_barrier_from_array = (
lambda cpu: True)
self.gc_ll_descr.malloc_zero_filled = False
#
class FakeCPU(BaseFakeCPU):
def sizeof(self, STRUCT, is_object):
descr = SizeDescr(104, gc_fielddescrs=[])
descr.tid = 9315
return descr
self.cpu = FakeCPU()
def test_rewrite_assembler_new_to_malloc(self):
self.check_rewrite("""
[p1]
p0 = new(descr=sdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(sdescr.size)d)
setfield_gc(p0, 1234, descr=tiddescr)
jump()
""")
def test_rewrite_assembler_new3_to_malloc(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new(descr=tdescr)
p2 = new(descr=sdescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(sdescr.size + tdescr.size + sdescr.size)d)
setfield_gc(p0, 1234, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(sdescr.size)d)
setfield_gc(p1, 5678, descr=tiddescr)
p2 = nursery_ptr_increment(p1, %(tdescr.size)d)
setfield_gc(p2, 1234, descr=tiddescr)
zero_ptr_field(p1, %(tdescr.gc_fielddescrs[0].offset)s)
jump()
""")
def test_rewrite_assembler_new_array_fixed_to_malloc(self):
self.check_rewrite("""
[]
p0 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(adescr.basesize + 10 * adescr.itemsize)d)
setfield_gc(p0, 4321, descr=tiddescr)
setfield_gc(p0, 10, descr=alendescr)
jump()
""")
def test_rewrite_assembler_new_and_new_array_fixed_to_malloc(self):
self.check_rewrite("""
[]
p0 = new(descr=sdescr)
p1 = new_array(10, descr=adescr)
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(sdescr.size + \
adescr.basesize + 10 * adescr.itemsize)d)
setfield_gc(p0, 1234, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(sdescr.size)d)
setfield_gc(p1, 4321, descr=tiddescr)
setfield_gc(p1, 10, descr=alendescr)
jump()
""")
def test_rewrite_assembler_round_up(self):
self.check_rewrite("""
[]
p0 = new_array(6, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(bdescr.basesize + 8)d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 6, descr=blendescr)
jump()
""")
def test_rewrite_assembler_round_up_always(self):
self.check_rewrite("""
[]
p0 = new_array(5, descr=bdescr)
p1 = new_array(5, descr=bdescr)
p2 = new_array(5, descr=bdescr)
p3 = new_array(5, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(4 * (bdescr.basesize + 8))d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 5, descr=blendescr)
p1 = nursery_ptr_increment(p0, %(bdescr.basesize + 8)d)
setfield_gc(p1, 8765, descr=tiddescr)
setfield_gc(p1, 5, descr=blendescr)
p2 = nursery_ptr_increment(p1, %(bdescr.basesize + 8)d)
setfield_gc(p2, 8765, descr=tiddescr)
setfield_gc(p2, 5, descr=blendescr)
p3 = nursery_ptr_increment(p2, %(bdescr.basesize + 8)d)
setfield_gc(p3, 8765, descr=tiddescr)
setfield_gc(p3, 5, descr=blendescr)
jump()
""")
def test_rewrite_assembler_minimal_size(self):
self.check_rewrite("""
[]
p0 = new(descr=edescr)
p1 = new(descr=edescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(4*WORD)d)
setfield_gc(p0, 9000, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(2*WORD)d)
setfield_gc(p1, 9000, descr=tiddescr)
jump()
""")
def test_rewrite_assembler_variable_size(self):
self.check_rewrite("""
[i0]
p0 = new_array(i0, descr=bdescr)
jump(i0)
""", """
[i0]
p0 = call_malloc_nursery_varsize(0, 1, i0, descr=bdescr)
setfield_gc(p0, i0, descr=blendescr)
jump(i0)
""")
def test_rewrite_new_string(self):
self.check_rewrite("""
[i0]
p0 = newstr(i0)
jump(i0)
""", """
[i0]
p0 = call_malloc_nursery_varsize(1, 1, i0, descr=strdescr)
setfield_gc(p0, i0, descr=strlendescr)
setfield_gc(p0, 0, descr=strhashdescr)
jump(i0)
""")
def test_rewrite_assembler_nonstandard_array(self):
# a non-standard array is a bit hard to get; e.g. GcArray(Float)
# is like that on Win32, but not on Linux. Build one manually...
NONSTD = lltype.GcArray(lltype.Float)
nonstd_descr = get_array_descr(self.gc_ll_descr, NONSTD)
nonstd_descr.tid = 6464
nonstd_descr.basesize = 64 # <= hacked
nonstd_descr.itemsize = 8
nonstd_descr_gcref = 123
self.check_rewrite("""
[i0, p1]
p0 = new_array(i0, descr=nonstd_descr)
setarrayitem_gc(p0, i0, p1)
jump(i0)
""", """
[i0, p1]
p0 = call_malloc_gc(ConstClass(malloc_array_nonstandard), \
64, 8, \
%(nonstd_descr.lendescr.offset)d, \
6464, i0, \
descr=malloc_array_nonstandard_descr)
cond_call_gc_wb_array(p0, i0, descr=wbdescr)
setarrayitem_gc(p0, i0, p1)
jump(i0)
""", nonstd_descr=nonstd_descr)
def test_rewrite_assembler_maximal_size_1(self):
self.gc_ll_descr.max_size_of_young_obj = 100
self.check_rewrite("""
[]
p0 = new_array(103, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), 1, \
%(bdescr.tid)d, 103, \
descr=malloc_array_descr)
jump()
""")
def test_rewrite_assembler_maximal_size_2(self):
self.gc_ll_descr.max_size_of_young_obj = 300
self.check_rewrite("""
[]
p0 = new_array(101, descr=bdescr)
p1 = new_array(102, descr=bdescr) # two new_arrays can be combined
p2 = new_array(103, descr=bdescr) # but not all three
jump()
""", """
[]
p0 = call_malloc_nursery( \
%(2 * (bdescr.basesize + 104))d)
setfield_gc(p0, 8765, descr=tiddescr)
setfield_gc(p0, 101, descr=blendescr)
p1 = nursery_ptr_increment(p0, %(bdescr.basesize + 104)d)
setfield_gc(p1, 8765, descr=tiddescr)
setfield_gc(p1, 102, descr=blendescr)
p2 = call_malloc_nursery( \
%(bdescr.basesize + 104)d)
setfield_gc(p2, 8765, descr=tiddescr)
setfield_gc(p2, 103, descr=blendescr)
jump()
""")
def test_rewrite_assembler_huge_size(self):
# "huge" is defined as "larger than 0xffffff bytes, or 16MB"
self.check_rewrite("""
[]
p0 = new_array(20000000, descr=bdescr)
jump()
""", """
[]
p0 = call_malloc_gc(ConstClass(malloc_array), 1, \
%(bdescr.tid)d, 20000000, \
descr=malloc_array_descr)
jump()
""")
def test_new_with_vtable(self):
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(104) # rounded up
setfield_gc(p0, 9315, descr=tiddescr)
setfield_gc(p0, 0, descr=vtable_descr)
jump()
""")
def test_new_with_vtable_too_big(self):
self.gc_ll_descr.max_size_of_young_obj = 100
self.check_rewrite("""
[]
p0 = new_with_vtable(descr=o_descr)
jump()
""", """
[p1]
p0 = call_malloc_gc(ConstClass(malloc_big_fixedsize), 104, 9315, \
descr=malloc_big_fixedsize_descr)
setfield_gc(p0, 0, descr=vtable_descr)
jump()
""")
def test_rewrite_assembler_newstr_newunicode(self):
self.check_rewrite("""
[i2]
p0 = newstr(14)
p1 = newunicode(10)
p2 = newunicode(i2)
p3 = newstr(i2)
jump()
""", """
[i2]
p0 = call_malloc_nursery( \
%(strdescr.basesize + 16 * strdescr.itemsize + \
unicodedescr.basesize + 10 * unicodedescr.itemsize)d)
setfield_gc(p0, %(strdescr.tid)d, descr=tiddescr)
setfield_gc(p0, 14, descr=strlendescr)
setfield_gc(p0, 0, descr=strhashdescr)
p1 = nursery_ptr_increment(p0, %(strdescr.basesize + 16 * strdescr.itemsize)d)
setfield_gc(p1, %(unicodedescr.tid)d, descr=tiddescr)
setfield_gc(p1, 10, descr=unicodelendescr)
setfield_gc(p1, 0, descr=unicodehashdescr)
p2 = call_malloc_nursery_varsize(2, %(unicodedescr.itemsize)d, i2,\
descr=unicodedescr)
setfield_gc(p2, i2, descr=unicodelendescr)
setfield_gc(p2, 0, descr=unicodehashdescr)
p3 = call_malloc_nursery_varsize(1, 1, i2, \
descr=strdescr)
setfield_gc(p3, i2, descr=strlendescr)
setfield_gc(p3, 0, descr=strhashdescr)
jump()
""")
def test_write_barrier_before_setfield_gc(self):
self.check_rewrite("""
[p1, p2]
setfield_gc(p1, p2, descr=tzdescr)
jump()
""", """
[p1, p2]
cond_call_gc_wb(p1, descr=wbdescr)
setfield_gc(p1, p2, descr=tzdescr)
jump()
""")
def test_write_barrier_before_array_without_from_array(self):
self.gc_ll_descr.write_barrier_descr.has_write_barrier_from_array = (
lambda cpu: False)
self.check_rewrite("""
[p1, i2, p3]
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[p1, i2, p3]
cond_call_gc_wb(p1, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_short_array(self):
self.gc_ll_descr.max_size_of_young_obj = 2000
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(129, descr=cdescr)
call_n(123456)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 129 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 129, descr=clendescr)
zero_array(p1, 0, 129, descr=cdescr)
call_n(123456)
cond_call_gc_wb(p1, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_long_array(self):
# the limit of "being too long" is fixed, arbitrarily, at 130
self.gc_ll_descr.max_size_of_young_obj = 2000
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(130, descr=cdescr)
call_n(123456)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 130 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 130, descr=clendescr)
zero_array(p1, 0, 130, descr=cdescr)
call_n(123456)
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_unknown_array(self):
self.check_rewrite("""
[p1, i2, p3]
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[p1, i2, p3]
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_label_makes_size_unknown(self):
self.check_rewrite("""
[i2, p3]
p1 = new_array_clear(5, descr=cdescr)
label(p1, i2, p3)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""", """
[i2, p3]
p1 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p1, 8111, descr=tiddescr)
setfield_gc(p1, 5, descr=clendescr)
zero_array(p1, 0, 5, descr=cdescr)
label(p1, i2, p3)
cond_call_gc_wb_array(p1, i2, descr=wbdescr)
setarrayitem_gc(p1, i2, p3, descr=cdescr)
jump()
""")
def test_write_barrier_before_setinteriorfield_gc(self):
S1 = lltype.GcStruct('S1')
INTERIOR = lltype.GcArray(('z', lltype.Ptr(S1)))
interiordescr = get_array_descr(self.gc_ll_descr, INTERIOR)
interiordescr.tid = 1291
interiorlendescr = interiordescr.lendescr
interiorzdescr = get_interiorfield_descr(self.gc_ll_descr,
INTERIOR, 'z')
self.check_rewrite("""
[p1, p2]
setinteriorfield_gc(p1, 0, p2, descr=interiorzdescr)
jump(p1, p2)
""", """
[p1, p2]
cond_call_gc_wb_array(p1, 0, descr=wbdescr)
setinteriorfield_gc(p1, 0, p2, descr=interiorzdescr)
jump(p1, p2)
""", interiorzdescr=interiorzdescr)
def test_initialization_store(self):
self.check_rewrite("""
[p1]
p0 = new(descr=tdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_initialization_store_2(self):
self.check_rewrite("""
[]
p0 = new(descr=tdescr)
p1 = new(descr=sdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[]
p0 = call_malloc_nursery(%(tdescr.size + sdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
p1 = nursery_ptr_increment(p0, %(tdescr.size)d)
setfield_gc(p1, 1234, descr=tiddescr)
# <<<no cond_call_gc_wb here>>>
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_initialization_store_array(self):
self.check_rewrite("""
[p1, i2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, i2, p1, descr=cdescr)
jump()
""", """
[p1, i2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 5, descr=cdescr)
setarrayitem_gc(p0, i2, p1, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 1, p1, descr=cdescr)
setarrayitem_gc(p0, 0, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 2, 3, descr=cdescr)
setarrayitem_gc(p0, 1, p1, descr=cdescr)
setarrayitem_gc(p0, 0, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_right(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 3, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
jump()
""")
def test_zero_array_not_reduced_at_all(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 0, 5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_completely(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 5, 0, descr=cdescr)
setarrayitem_gc(p0, 3, p1, descr=cdescr)
setarrayitem_gc(p0, 4, p2, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
setarrayitem_gc(p0, 2, p2, descr=cdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left_with_call(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
call_n(321321)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 1, 4, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
call_n(321321)
cond_call_gc_wb(p0, descr=wbdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_reduced_left_with_label(self):
self.check_rewrite("""
[p1, p2]
p0 = new_array_clear(5, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
label(p0, p2)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""", """
[p1, p2]
p0 = call_malloc_nursery( \
%(cdescr.basesize + 5 * cdescr.itemsize)d)
setfield_gc(p0, 8111, descr=tiddescr)
setfield_gc(p0, 5, descr=clendescr)
zero_array(p0, 1, 4, descr=cdescr)
setarrayitem_gc(p0, 0, p1, descr=cdescr)
label(p0, p2)
cond_call_gc_wb_array(p0, 1, descr=wbdescr)
setarrayitem_gc(p0, 1, p2, descr=cdescr)
jump()
""")
def test_zero_array_varsize(self):
self.check_rewrite("""
[p1, p2, i3]
p0 = new_array_clear(i3, descr=bdescr)
jump()
""", """
[p1, p2, i3]
p0 = call_malloc_nursery_varsize(0, 1, i3, descr=bdescr)
setfield_gc(p0, i3, descr=blendescr)
zero_array(p0, 0, i3, descr=bdescr)
jump()
""")
def test_zero_array_varsize_cannot_reduce(self):
self.check_rewrite("""
[p1, p2, i3]
p0 = new_array_clear(i3, descr=bdescr)
setarrayitem_gc(p0, 0, p1, descr=bdescr)
jump()
""", """
[p1, p2, i3]
p0 = call_malloc_nursery_varsize(0, 1, i3, descr=bdescr)
setfield_gc(p0, i3, descr=blendescr)
zero_array(p0, 0, i3, descr=bdescr)
cond_call_gc_wb_array(p0, 0, descr=wbdescr)
setarrayitem_gc(p0, 0, p1, descr=bdescr)
jump()
""")
def test_initialization_store_potentially_large_array(self):
# the write barrier cannot be omitted, because we might get
# an array with cards and the GC assumes that the write
# barrier is always called, even on young (but large) arrays
self.check_rewrite("""
[i0, p1, i2]
p0 = new_array(i0, descr=bdescr)
setarrayitem_gc(p0, i2, p1, descr=bdescr)
jump()
""", """
[i0, p1, i2]
p0 = call_malloc_nursery_varsize(0, 1, i0, descr=bdescr)
setfield_gc(p0, i0, descr=blendescr)
cond_call_gc_wb_array(p0, i2, descr=wbdescr)
setarrayitem_gc(p0, i2, p1, descr=bdescr)
jump()
""")
def test_non_initialization_store(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
p1 = newstr(i0)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
p1 = call_malloc_nursery_varsize(1, 1, i0, \
descr=strdescr)
setfield_gc(p1, i0, descr=strlendescr)
setfield_gc(p1, 0, descr=strhashdescr)
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_non_initialization_store_label(self):
self.check_rewrite("""
[p1]
p0 = new(descr=tdescr)
label(p0, p1)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""", """
[p1]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
label(p0, p1)
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
jump()
""")
def test_multiple_writes(self):
self.check_rewrite("""
[p0, p1, p2]
setfield_gc(p0, p1, descr=tzdescr)
setfield_gc(p0, p2, descr=tzdescr)
jump(p1, p2, p0)
""", """
[p0, p1, p2]
cond_call_gc_wb(p0, descr=wbdescr)
setfield_gc(p0, p1, descr=tzdescr)
setfield_gc(p0, p2, descr=tzdescr)
jump(p1, p2, p0)
""")
def test_rewrite_call_assembler(self):
self.check_rewrite("""
[i0, f0]
i2 = call_assembler_i(i0, f0, descr=casmdescr)
""", """
[i0, f0]
i1 = getfield_raw_i(ConstClass(frame_info), descr=jfi_frame_size)
p1 = call_malloc_nursery_varsize_frame(i1)
setfield_gc(p1, 0, descr=tiddescr)
i2 = getfield_raw_i(ConstClass(frame_info), descr=jfi_frame_depth)
setfield_gc(p1, 0, descr=jf_extra_stack_depth)
setfield_gc(p1, NULL, descr=jf_savedata)
setfield_gc(p1, NULL, descr=jf_force_descr)
setfield_gc(p1, NULL, descr=jf_descr)
setfield_gc(p1, NULL, descr=jf_guard_exc)
setfield_gc(p1, NULL, descr=jf_forward)
setfield_gc(p1, i2, descr=framelendescr)
setfield_gc(p1, ConstClass(frame_info), descr=jf_frame_info)
setarrayitem_gc(p1, 0, i0, descr=signedframedescr)
setarrayitem_gc(p1, 1, f0, descr=floatframedescr)
i3 = call_assembler_i(p1, descr=casmdescr)
""")
def test_int_add_ovf(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
i1 = int_add_ovf(i0, 123)
guard_overflow(descr=guarddescr) []
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
i1 = int_add_ovf(i0, 123)
guard_overflow(descr=guarddescr) []
jump()
""")
def test_int_gt(self):
self.check_rewrite("""
[i0]
p0 = new(descr=tdescr)
i1 = int_gt(i0, 123)
guard_false(i1, descr=guarddescr) []
jump()
""", """
[i0]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
i1 = int_gt(i0, 123)
guard_false(i1, descr=guarddescr) []
jump()
""")
def test_zero_ptr_field_before_getfield(self):
# This case may need to be fixed in the metainterp/optimizeopt
# already so that it no longer occurs for rewrite.py. But anyway
# it's a good idea to make sure rewrite.py is correct on its own.
self.check_rewrite("""
[]
p0 = new(descr=tdescr)
p1 = getfield_gc_r(p0, descr=tdescr)
jump(p1)
""", """
[]
p0 = call_malloc_nursery(%(tdescr.size)d)
setfield_gc(p0, 5678, descr=tiddescr)
zero_ptr_field(p0, %(tdescr.gc_fielddescrs[0].offset)s)
p1 = getfield_gc_r(p0, descr=tdescr)
jump(p1)
""")
| pt | 0.203914 | (comments field omitted: verbatim duplicates of the inline comments and oparser strings in the code above) | 1.873418 | 2
hummingbot/client/command/history_command.py | sanchaymittal/hummingbot | 0 | 7579 | <filename>hummingbot/client/command/history_command.py
from decimal import Decimal
import pandas as pd
from typing import (
Any,
Dict,
Set,
Tuple,
TYPE_CHECKING)
from hummingbot.client.performance_analysis import PerformanceAnalysis
from hummingbot.core.utils.exchange_rate_conversion import ExchangeRateConversion
from hummingbot.market.market_base import MarketBase
from hummingbot.strategy.market_trading_pair_tuple import MarketTradingPairTuple
ERC = ExchangeRateConversion.get_instance()
s_float_0 = float(0)
if TYPE_CHECKING:
from hummingbot.client.hummingbot_application import HummingbotApplication
class HistoryCommand:
def history(self, # type: HummingbotApplication
):
if not all(market.ready for market in self.markets.values()):
self._notify(" History stats are not available before Markets are ready.")
return
self.list_trades()
self.trade_performance_report()
def balance_snapshot(self, # type: HummingbotApplication
) -> Dict[str, Dict[str, float]]:
snapshot: Dict[str, Any] = {}
for market_name in self.markets:
balance_dict = self.markets[market_name].get_all_balances()
balance_dict = {k.upper(): v for k, v in balance_dict.items()}
for asset in self.assets:
asset = asset.upper()
if asset not in snapshot:
snapshot[asset] = {}
if asset in balance_dict:
snapshot[asset][market_name] = balance_dict[asset]
else:
snapshot[asset][market_name] = 0.0
return snapshot
def balance_comparison_data_frame(self, # type: HummingbotApplication
market_trading_pair_stats: Dict[MarketTradingPairTuple, Any],
) -> pd.DataFrame:
if len(self.starting_balances) == 0:
self._notify(" Balance snapshots are not available before bot starts")
return
rows = []
for market_trading_pair_tuple in self.market_trading_pair_tuples:
market: MarketBase = market_trading_pair_tuple.market
for asset in set(a.upper() for a in self.assets):
asset_delta: Dict[str, float] = market_trading_pair_stats[market_trading_pair_tuple]["asset"].get(
asset, {"delta": s_float_0})
starting_balance = self.starting_balances.get(asset).get(market.name)
current_balance = self.balance_snapshot().get(asset).get(market.name)
rows.append([market.display_name,
asset,
float(starting_balance),
float(current_balance),
float(current_balance - starting_balance),
float(asset_delta["delta"]),
ERC.adjust_token_rate(asset, Decimal(1))])
df = pd.DataFrame(rows, index=None, columns=["Market", "Asset", "Starting", "Current", "Net_Delta",
"Trade_Delta", "Conversion_Rate"])
return df
def get_performance_analysis_with_updated_balance(self, # type: HummingbotApplication
) -> PerformanceAnalysis:
performance_analysis = PerformanceAnalysis()
dedup_set: Set[Tuple[str, str, bool]] = set()
for market_trading_pair_tuple in self.market_trading_pair_tuples:
for is_base in [True, False]:
for is_starting in [True, False]:
market_name = market_trading_pair_tuple.market.name
asset_name = market_trading_pair_tuple.base_asset if is_base else market_trading_pair_tuple.quote_asset
asset_name = asset_name.upper()
if len(self.assets) == 0 or len(self.markets) == 0:
# Prevent KeyError '***SYMBOL***'
amount = self.starting_balances[asset_name][market_name]
else:
amount = self.starting_balances[asset_name][market_name] if is_starting \
else self.balance_snapshot()[asset_name][market_name]
amount = float(amount)
# Adding this check to prevent assets in the same market to be added multiple times
if (market_name, asset_name, is_starting) not in dedup_set:
dedup_set.add((market_name, asset_name, is_starting))
performance_analysis.add_balances(asset_name, amount, is_base, is_starting)
return performance_analysis
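# Illustrative sketch of the dedup guard used above (the market and asset
# names below are made up): the (market, asset, is_starting) triple ensures
# each balance is added to the analysis only once, e.g.
#   dedup_set = set()
#   key = ("binance", "ETH", True)       # hypothetical market/asset
#   if key not in dedup_set:
#       dedup_set.add(key)               # first sighting -> balance recorded
#   # any later ("binance", "ETH", True) sighting is skipped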
def get_market_mid_price(self, # type: HummingbotApplication
) -> float:
# Compute the current exchange rate. We use the first market_symbol_pair because
# if the trading pairs are different, such as WETH-DAI and ETH-USD, the currency
# pairs above will contain the information in terms of the first trading pair.
market_pair_info = self.market_trading_pair_tuples[0]
market = market_pair_info.market
buy_price = market.get_price(market_pair_info.trading_pair, True)
sell_price = market.get_price(market_pair_info.trading_pair, False)
price = float((buy_price + sell_price) / 2)
return price
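# A minimal sketch of the mid-price formula above, with made-up numbers:
#   buy_price, sell_price = Decimal("101.0"), Decimal("99.0")
#   price = float((buy_price + sell_price) / 2)   # -> 100.0
# i.e. the reported price is the midpoint of the best ask and best bid of the
# first trading pair.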
def analyze_performance(self, # type: HummingbotApplication
):
""" Calculate bot profitability and print to output pane """
if len(self.starting_balances) == 0:
self._notify(" Performance analysis is not available before bot starts")
return
performance_analysis: PerformanceAnalysis = self.get_performance_analysis_with_updated_balance()
price: float = self.get_market_mid_price()
starting_token, starting_amount = performance_analysis.compute_starting(price)
current_token, current_amount = performance_analysis.compute_current(price)
delta_token, delta_amount = performance_analysis.compute_delta(price)
return_performance = performance_analysis.compute_return(price)
starting_amount = round(starting_amount, 3)
current_amount = round(current_amount, 3)
delta_amount = round(delta_amount, 3)
return_performance = round(return_performance, 3)
print_performance = "\n"
print_performance += " Performance:\n"
print_performance += " - Starting Inventory Value: " + str(starting_amount) + " " + starting_token + "\n"
print_performance += " - Current Inventory Value: " + str(current_amount) + " " + current_token + "\n"
print_performance += " - Delta: " + str(delta_amount) + " " + delta_token + "\n"
print_performance += " - Return: " + str(return_performance) + "%"
self._notify(print_performance)
def calculate_profitability(self) -> float:
""" Determine the profitability of the trading bot. """
performance_analysis: PerformanceAnalysis = self.get_performance_analysis_with_updated_balance()
price: float = self.get_market_mid_price()
return_performance = performance_analysis.compute_return(price)
return return_performance
def trade_performance_report(self, # type: HummingbotApplication
) -> pd.DataFrame:
if len(self.market_trading_pair_tuples) == 0:
self._notify(" Performance analysis is not available before bot starts")
return
try:
current_strategy_name: str = self.markets_recorder.strategy_name
analysis_start_time: int = self.init_time
primary_quote_asset: str = self.market_trading_pair_tuples[0].quote_asset.upper()
performance_analysis: PerformanceAnalysis = PerformanceAnalysis()
trade_performance_stats, market_trading_pair_stats = performance_analysis.calculate_trade_performance(
analysis_start_time,
current_strategy_name,
self.market_trading_pair_tuples
)
trade_performance_status_line = []
market_df_data: Set[Tuple[str, str, float, float, str, str]] = set()
market_df_columns = ["Market", "Trading_Pair", "Start_Price", "End_Price",
"Total_Value_Delta", "Profit"]
for market_trading_pair_tuple, trading_pair_stats in market_trading_pair_stats.items():
market_df_data.add((
market_trading_pair_tuple.market.display_name,
market_trading_pair_tuple.trading_pair.upper(),
float(trading_pair_stats["starting_quote_rate"]),
float(trading_pair_stats["end_quote_rate"]),
f"{trading_pair_stats['trading_pair_delta']:.8f} {primary_quote_asset}",
f"{trading_pair_stats['trading_pair_delta_percentage']:.3f} %"
))
inventory_df: pd.DataFrame = self.balance_comparison_data_frame(market_trading_pair_stats)
market_df: pd.DataFrame = pd.DataFrame(data=list(market_df_data), columns=market_df_columns)
portfolio_delta: Decimal = trade_performance_stats["portfolio_delta"]
portfolio_delta_percentage: Decimal = trade_performance_stats["portfolio_delta_percentage"]
trade_performance_status_line.extend(["", " Inventory:"] +
[" " + line for line in inventory_df.to_string().split("\n")])
trade_performance_status_line.extend(["", " Market Trading Pair Performance:"] +
[" " + line for line in market_df.to_string().split("\n")])
trade_performance_status_line.extend(
["", " Portfolio Performance:"] +
[f" Quote Value Delta: {portfolio_delta:.7g} {primary_quote_asset}"] +
[f" Delta Percentage: {portfolio_delta_percentage:.3f} %"])
self._notify("\n".join(trade_performance_status_line))
except Exception:
self.logger().error("Unexpected error running performance analysis.", exc_info=True)
self._notify("Error running performance analysis")
| <filename>hummingbot/client/command/history_command.py
from decimal import Decimal
import pandas as pd
from typing import (
Any,
Dict,
Set,
Tuple,
TYPE_CHECKING)
from hummingbot.client.performance_analysis import PerformanceAnalysis
from hummingbot.core.utils.exchange_rate_conversion import ExchangeRateConversion
from hummingbot.market.market_base import MarketBase
from hummingbot.strategy.market_trading_pair_tuple import MarketTradingPairTuple
ERC = ExchangeRateConversion.get_instance()
s_float_0 = float(0)
if TYPE_CHECKING:
from hummingbot.client.hummingbot_application import HummingbotApplication
class HistoryCommand:
def history(self, # type: HummingbotApplication
):
if not all(market.ready for market in self.markets.values()):
self._notify(" History stats are not available before Markets are ready.")
return
self.list_trades()
self.trade_performance_report()
def balance_snapshot(self, # type: HummingbotApplication
) -> Dict[str, Dict[str, float]]:
snapshot: Dict[str, Any] = {}
for market_name in self.markets:
balance_dict = self.markets[market_name].get_all_balances()
balance_dict = {k.upper(): v for k, v in balance_dict.items()}
for asset in self.assets:
asset = asset.upper()
if asset not in snapshot:
snapshot[asset] = {}
if asset in balance_dict:
snapshot[asset][market_name] = balance_dict[asset]
else:
snapshot[asset][market_name] = 0.0
return snapshot
def balance_comparison_data_frame(self, # type: HummingbotApplication
market_trading_pair_stats: Dict[MarketTradingPairTuple, Any],
) -> pd.DataFrame:
if len(self.starting_balances) == 0:
self._notify(" Balance snapshots are not available before bot starts")
return
rows = []
for market_trading_pair_tuple in self.market_trading_pair_tuples:
market: MarketBase = market_trading_pair_tuple.market
for asset in set(a.upper() for a in self.assets):
asset_delta: Dict[str, float] = market_trading_pair_stats[market_trading_pair_tuple]["asset"].get(
asset, {"delta": s_float_0})
starting_balance = self.starting_balances.get(asset).get(market.name)
current_balance = self.balance_snapshot().get(asset).get(market.name)
rows.append([market.display_name,
asset,
float(starting_balance),
float(current_balance),
float(current_balance - starting_balance),
float(asset_delta["delta"]),
ERC.adjust_token_rate(asset, Decimal(1))])
df = pd.DataFrame(rows, index=None, columns=["Market", "Asset", "Starting", "Current", "Net_Delta",
"Trade_Delta", "Conversion_Rate"])
return df
def get_performance_analysis_with_updated_balance(self, # type: HummingbotApplication
) -> PerformanceAnalysis:
performance_analysis = PerformanceAnalysis()
dedup_set: Set[Tuple[str, str, bool]] = set()
for market_trading_pair_tuple in self.market_trading_pair_tuples:
for is_base in [True, False]:
for is_starting in [True, False]:
market_name = market_trading_pair_tuple.market.name
asset_name = market_trading_pair_tuple.base_asset if is_base else market_trading_pair_tuple.quote_asset
asset_name = asset_name.upper()
if len(self.assets) == 0 or len(self.markets) == 0:
# Prevent KeyError '***SYMBOL***'
amount = self.starting_balances[asset_name][market_name]
else:
amount = self.starting_balances[asset_name][market_name] if is_starting \
else self.balance_snapshot()[asset_name][market_name]
amount = float(amount)
# Adding this check to prevent assets in the same market to be added multiple times
if (market_name, asset_name, is_starting) not in dedup_set:
dedup_set.add((market_name, asset_name, is_starting))
performance_analysis.add_balances(asset_name, amount, is_base, is_starting)
return performance_analysis
def get_market_mid_price(self, # type: HummingbotApplication
) -> float:
# Compute the current exchange rate. We use the first market_symbol_pair because
# if the trading pairs are different, such as WETH-DAI and ETH-USD, the currency
# pairs above will contain the information in terms of the first trading pair.
market_pair_info = self.market_trading_pair_tuples[0]
market = market_pair_info.market
buy_price = market.get_price(market_pair_info.trading_pair, True)
sell_price = market.get_price(market_pair_info.trading_pair, False)
price = float((buy_price + sell_price) / 2)
return price
def analyze_performance(self, # type: HummingbotApplication
):
""" Calculate bot profitability and print to output pane """
if len(self.starting_balances) == 0:
self._notify(" Performance analysis is not available before bot starts")
return
performance_analysis: PerformanceAnalysis = self.get_performance_analysis_with_updated_balance()
price: float = self.get_market_mid_price()
starting_token, starting_amount = performance_analysis.compute_starting(price)
current_token, current_amount = performance_analysis.compute_current(price)
delta_token, delta_amount = performance_analysis.compute_delta(price)
return_performance = performance_analysis.compute_return(price)
starting_amount = round(starting_amount, 3)
current_amount = round(current_amount, 3)
delta_amount = round(delta_amount, 3)
return_performance = round(return_performance, 3)
print_performance = "\n"
print_performance += " Performance:\n"
print_performance += " - Starting Inventory Value: " + str(starting_amount) + " " + starting_token + "\n"
print_performance += " - Current Inventory Value: " + str(current_amount) + " " + current_token + "\n"
print_performance += " - Delta: " + str(delta_amount) + " " + delta_token + "\n"
print_performance += " - Return: " + str(return_performance) + "%"
self._notify(print_performance)
def calculate_profitability(self) -> float:
""" Determine the profitability of the trading bot. """
performance_analysis: PerformanceAnalysis = self.get_performance_analysis_with_updated_balance()
price: float = self.get_market_mid_price()
return_performance = performance_analysis.compute_return(price)
return return_performance
def trade_performance_report(self, # type: HummingbotApplication
) -> pd.DataFrame:
if len(self.market_trading_pair_tuples) == 0:
self._notify(" Performance analysis is not available before bot starts")
return
try:
current_strategy_name: str = self.markets_recorder.strategy_name
analysis_start_time: int = self.init_time
primary_quote_asset: str = self.market_trading_pair_tuples[0].quote_asset.upper()
performance_analysis: PerformanceAnalysis = PerformanceAnalysis()
trade_performance_stats, market_trading_pair_stats = performance_analysis.calculate_trade_performance(
analysis_start_time,
current_strategy_name,
self.market_trading_pair_tuples
)
trade_performance_status_line = []
market_df_data: Set[Tuple[str, str, float, float, str, str]] = set()
market_df_columns = ["Market", "Trading_Pair", "Start_Price", "End_Price",
"Total_Value_Delta", "Profit"]
for market_trading_pair_tuple, trading_pair_stats in market_trading_pair_stats.items():
market_df_data.add((
market_trading_pair_tuple.market.display_name,
market_trading_pair_tuple.trading_pair.upper(),
float(trading_pair_stats["starting_quote_rate"]),
float(trading_pair_stats["end_quote_rate"]),
f"{trading_pair_stats['trading_pair_delta']:.8f} {primary_quote_asset}",
f"{trading_pair_stats['trading_pair_delta_percentage']:.3f} %"
))
inventory_df: pd.DataFrame = self.balance_comparison_data_frame(market_trading_pair_stats)
market_df: pd.DataFrame = pd.DataFrame(data=list(market_df_data), columns=market_df_columns)
portfolio_delta: Decimal = trade_performance_stats["portfolio_delta"]
portfolio_delta_percentage: Decimal = trade_performance_stats["portfolio_delta_percentage"]
trade_performance_status_line.extend(["", " Inventory:"] +
[" " + line for line in inventory_df.to_string().split("\n")])
trade_performance_status_line.extend(["", " Market Trading Pair Performance:"] +
[" " + line for line in market_df.to_string().split("\n")])
trade_performance_status_line.extend(
["", " Portfolio Performance:"] +
[f" Quote Value Delta: {portfolio_delta:.7g} {primary_quote_asset}"] +
[f" Delta Percentage: {portfolio_delta_percentage:.3f} %"])
self._notify("\n".join(trade_performance_status_line))
except Exception:
self.logger().error("Unexpected error running performance analysis.", exc_info=True)
self._notify("Error running performance analysis")
| en | 0.652395 | # type: HummingbotApplication # type: HummingbotApplication # type: HummingbotApplication # type: HummingbotApplication # Prevent KeyError '***SYMBOL***' # Adding this check to prevent assets in the same market to be added multiple times # type: HummingbotApplication # Compute the current exchange rate. We use the first market_symbol_pair because # if the trading pairs are different, such as WETH-DAI and ETH-USD, the currency # pairs above will contain the information in terms of the first trading pair. # type: HummingbotApplication Calculate bot profitability and print to output pane Determine the profitability of the trading bot. # type: HummingbotApplication | 2.515782 | 3 |
scripts/bin2asm.py | sami2316/asm2vec-pytorch | 0 | 7580 | <reponame>sami2316/asm2vec-pytorch
import re
import os
import click
import r2pipe
import hashlib
from pathlib import Path
import _pickle as cPickle
def sha3(data):
return hashlib.sha3_256(data.encode()).hexdigest()
def validEXE(filename):
magics = [bytes.fromhex('7f454c46')]
with open(filename, 'rb') as f:
header = f.read(4)
return header in magics
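# Illustrative check of the ELF magic used by validEXE() above; the sample
# path is an assumption and may not exist on every system. Never called here.
def _validEXE_demo():
    return validEXE('/bin/ls')  # expected True on a typical Linux host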
def normalize(opcode):
opcode = opcode.replace(' - ', ' + ')
opcode = re.sub(r'0x[0-9a-f]+', 'CONST', opcode)
opcode = re.sub(r'\*[0-9]', '*CONST', opcode)
opcode = re.sub(r' [0-9]', ' CONST', opcode)
return opcode
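# A small sketch of what normalize() produces; the opcodes below are made-up
# examples, not output from a real binary. Never called here.
def _normalize_demo():
    assert normalize('mov eax, 0x10') == 'mov eax, CONST'
    assert normalize('lea rdx, [rax + rcx*4]') == 'lea rdx, [rax + rcx*CONST]'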
def fn2asm(pdf, minlen):
# check
if pdf is None:
return
if len(pdf['ops']) < minlen:
return
if 'invalid' in [op['type'] for op in pdf['ops']]:
return
ops = pdf['ops']
# set label
labels, scope = {}, [op['offset'] for op in ops]
assert(None not in scope)
for i, op in enumerate(ops):
if op.get('jump') in scope:
labels.setdefault(op.get('jump'), i)
# dump output
output = ''
for op in ops:
# add label
if labels.get(op.get('offset')) is not None:
output += f'LABEL{labels[op["offset"]]}:\n'
# add instruction
if labels.get(op.get('jump')) is not None:
output += f' {op["type"]} LABEL{labels[op["jump"]]}\n'
else:
output += f' {normalize(op["opcode"])}\n'
return output
def bin2asm(filename, opath, minlen):
#
# Create directory where results will be written to.
#
results_dir = os.path.join(opath, os.path.basename(filename))
if not os.access(results_dir, os.F_OK):
os.makedirs(results_dir)
r = r2pipe.open(str(filename))
r.cmd('aaaa')
count = 0
fp = open("%s/fv.pcl" % (results_dir), 'wb')
for fn in r.cmdj('aflj'):
r.cmd(f's {fn["offset"]}')
asm = fn2asm(r.cmdj('pdfj'), minlen)
if asm:
fv = [
fn["name"],
asm
]
cPickle.dump(fv, fp)
count += 1
fp.close()
print(f'[+] {filename}')
return count
@click.command()
@click.option('-i', '--input', 'ipath', help='input directory / file', required=True)
@click.option('-o', '--output', 'opath', default='asm', help='output directory')
@click.option('-l', '--len', 'minlen', default=1, help='ignore assembly functions with fewer instructions than minlen')
def cli(ipath, opath, minlen):
'''
Extract assembly functions from binary executable
'''
ipath = Path(ipath)
opath = Path(opath)
# create output directory
if not os.path.exists(opath):
os.mkdir(opath)
fcount, bcount = 0, 0
# directory
if os.path.isdir(ipath):
for f in os.listdir(ipath):
if not os.path.islink(ipath / f) and not os.path.isdir(ipath / f):
fcount += bin2asm(ipath / f, opath, minlen)
bcount += 1
# file
elif os.path.exists(ipath):
fcount += bin2asm(ipath, opath, minlen)
bcount += 1
else:
print(f'[Error] No such file or directory: {ipath}')
print(f'[+] Total scan binary: {bcount} => Total generated assembly functions: {fcount}')
if __name__ == '__main__':
cli()
| import re
import os
import click
import r2pipe
import hashlib
from pathlib import Path
import _pickle as cPickle
def sha3(data):
return hashlib.sha3_256(data.encode()).hexdigest()
def validEXE(filename):
magics = [bytes.fromhex('7f454c46')]
with open(filename, 'rb') as f:
header = f.read(4)
return header in magics
def normalize(opcode):
opcode = opcode.replace(' - ', ' + ')
opcode = re.sub(r'0x[0-9a-f]+', 'CONST', opcode)
opcode = re.sub(r'\*[0-9]', '*CONST', opcode)
opcode = re.sub(r' [0-9]', ' CONST', opcode)
return opcode
def fn2asm(pdf, minlen):
# check
if pdf is None:
return
if len(pdf['ops']) < minlen:
return
if 'invalid' in [op['type'] for op in pdf['ops']]:
return
ops = pdf['ops']
# set label
labels, scope = {}, [op['offset'] for op in ops]
assert(None not in scope)
for i, op in enumerate(ops):
if op.get('jump') in scope:
labels.setdefault(op.get('jump'), i)
# dump output
output = ''
for op in ops:
# add label
if labels.get(op.get('offset')) is not None:
output += f'LABEL{labels[op["offset"]]}:\n'
# add instruction
if labels.get(op.get('jump')) is not None:
output += f' {op["type"]} LABEL{labels[op["jump"]]}\n'
else:
output += f' {normalize(op["opcode"])}\n'
return output
def bin2asm(filename, opath, minlen):
#
# Create directory where results will be written to.
#
results_dir = os.path.join(opath, os.path.basename(filename))
if not os.access(results_dir, os.F_OK):
os.makedirs(results_dir)
r = r2pipe.open(str(filename))
r.cmd('aaaa')
count = 0
fp = open("%s/fv.pcl" % (results_dir), 'wb')
for fn in r.cmdj('aflj'):
r.cmd(f's {fn["offset"]}')
asm = fn2asm(r.cmdj('pdfj'), minlen)
if asm:
fv = [
fn["name"],
asm
]
cPickle.dump(fv, fp)
count += 1
fp.close()
print(f'[+] {filename}')
return count
@click.command()
@click.option('-i', '--input', 'ipath', help='input directory / file', required=True)
@click.option('-o', '--output', 'opath', default='asm', help='output directory')
@click.option('-l', '--len', 'minlen', default=1, help='ignore assembly functions with fewer instructions than minlen')
def cli(ipath, opath, minlen):
'''
Extract assembly functions from binary executable
'''
ipath = Path(ipath)
opath = Path(opath)
# create output directory
if not os.path.exists(opath):
os.mkdir(opath)
fcount, bcount = 0, 0
# directory
if os.path.isdir(ipath):
for f in os.listdir(ipath):
if not os.path.islink(ipath / f) and not os.path.isdir(ipath / f):
fcount += bin2asm(ipath / f, opath, minlen)
bcount += 1
# file
elif os.path.exists(ipath):
fcount += bin2asm(ipath, opath, minlen)
bcount += 1
else:
print(f'[Error] No such file or directory: {ipath}')
print(f'[+] Total scan binary: {bcount} => Total generated assembly functions: {fcount}')
if __name__ == '__main__':
cli() | en | 0.620151 | # check # set label # dump output # add label # add instruction # # Create directory where results will be written to. # Extract assembly functions from binary executable # create output directory # directory # file | 2.516226 | 3 |
6/4.py | Chyroc/homework | 0 | 7581 | import re
def remove_not_alpha_num(string):
return re.sub('[^0-9a-zA-Z]+', '', string)
if __name__ == '__main__':
print(remove_not_alpha_num('a000 aa-b') == 'a000aab')
| import re
def remove_not_alpha_num(string):
return re.sub('[^0-9a-zA-Z]+', '', string)
if __name__ == '__main__':
print(remove_not_alpha_num('a000 aa-b') == 'a000aab')
| none | 1 | | 3.509137 | 4 |
LazyAngus/Assets/Extensions/IOSDeploy/Scripts/Editor/post_process.py | DougLazyAngus/lazyAngus | 0 | 7582 | <reponame>DougLazyAngus/lazyAngus<gh_stars>0
import os
from sys import argv
from mod_pbxproj import XcodeProject
#import appcontroller
path = argv[1]
frameworks = argv[2].split(' ')
libraries = argv[3].split(' ')
cflags = argv[4].split(' ')
ldflags = argv[5].split(' ')
folders = argv[6].split(' ')
print('Step 1: add system frameworks ')
#if framework is optional, add `weak=True`
project = XcodeProject.Load(path +'/Unity-iPhone.xcodeproj/project.pbxproj')
for frwrk in frameworks:
files = project.get_files_by_name(frwrk)
for f in files:
project.remove_file(f)
if len(frwrk) > 0:
fo = frwrk.split('|')
if int(fo[1]):
project.add_file('System/Library/Frameworks/' + fo[0], tree='SDKROOT', weak=True)
else:
project.add_file('System/Library/Frameworks/' + fo[0], tree='SDKROOT')
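# For reference, each entry above is expected to look like "Name|weak-flag",
# e.g. (hypothetical values) "Security.framework|0" for a required framework
# and "CoreTelephony.framework|1" for a weak-linked one; the libraries below
# follow the same convention (e.g. "libz.dylib|0").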
print('Step 2: add system libraries ')
for lib in libraries:
files = project.get_files_by_name(lib)
for f in files:
project.remove_file(f)
if len(lib) > 0:
lo = lib.split('|')
if int(lo[1]):
project.add_file('usr/lib/' + lo[0], tree='SDKROOT', weak=True)
else:
project.add_file('usr/lib/' + lo[0], tree='SDKROOT')
print('Step 3: add CFLAGS ')
for cf in cflags:
if len(cf) > 0:
project.add_other_cflags(cf)
print('Step 4: add LDFLAGS ')
for ldf in ldflags:
if len(ldf) > 0:
project.add_other_ldflags(ldf)
print('Step 5: add language folders')
for langFolder in folders:
if len(langFolder) > 0:
project.add_folder(path + '/' + langFolder + '.lproj')
print('Step 6: save our change to xcode project file')
if project.modified:
project.backup()
project.saveFormat3_2()
| import os
from sys import argv
from mod_pbxproj import XcodeProject
#import appcontroller
path = argv[1]
frameworks = argv[2].split(' ')
libraries = argv[3].split(' ')
cflags = argv[4].split(' ')
ldflags = argv[5].split(' ')
folders = argv[6].split(' ')
print('Step 1: add system frameworks ')
#if framework is optional, add `weak=True`
project = XcodeProject.Load(path +'/Unity-iPhone.xcodeproj/project.pbxproj')
for frwrk in frameworks:
files = project.get_files_by_name(frwrk)
for f in files:
project.remove_file(f)
if len(frwrk) > 0:
fo = frwrk.split('|')
if int(fo[1]):
project.add_file('System/Library/Frameworks/' + fo[0], tree='SDKROOT', weak=True)
else:
project.add_file('System/Library/Frameworks/' + fo[0], tree='SDKROOT')
print('Step 2: add system libraries ')
for lib in libraries:
files = project.get_files_by_name(lib)
for f in files:
project.remove_file(f)
if len(lib) > 0:
lo = lib.split('|')
if int(lo[1]):
project.add_file('usr/lib/' + lo[0], tree='SDKROOT', weak=True)
else:
project.add_file('usr/lib/' + lo[0], tree='SDKROOT')
print('Step 3: add CFLAGS ')
for cf in cflags:
if len(cf) > 0:
project.add_other_cflags(cf)
print('Step 4: add LDFLAGS ')
for ldf in ldflags:
if len(ldf) > 0:
project.add_other_ldflags(ldf)
print('Step 5: add language folders')
for langFolder in folders:
if len(langFolder) > 0:
project.add_folder(path + '/' + langFolder + '.lproj')
print('Step 6: save our change to xcode project file')
if project.modified:
project.backup()
project.saveFormat3_2() | en | 0.351301 | #import appcontroller #if framework is optional, add `weak=True` | 2.122999 | 2 |
judge/migrations/0024_auto_20200705_0246.py | TheAvidDev/pnoj-site | 2 | 7583 | <gh_stars>1-10
# Generated by Django 3.0.8 on 2020-07-05 02:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('judge', '0023_auto_20200704_2318'),
]
operations = [
migrations.AlterField(
model_name='submission',
name='language',
field=models.CharField(choices=[('python3', 'Python 3'), ('java8', 'Java 8'), ('cpp17', 'C++17'), ('haskell', 'Haskell'), ('brainfuck', 'Brainfuck'), ('c18', 'C18'), ('java11', 'Java 11'), ('scratch', 'Scratch'), ('text', 'Text')], max_length=10, null=True),
),
migrations.AlterField(
model_name='user',
name='main_language',
field=models.CharField(choices=[('python3', 'Python 3'), ('java8', 'Java 8'), ('cpp17', 'C++17'), ('haskell', 'Haskell'), ('brainfuck', 'Brainfuck'), ('c18', 'C18'), ('java11', 'Java 11'), ('scratch', 'Scratch'), ('text', 'Text')], default='python3', max_length=10),
),
]
| # Generated by Django 3.0.8 on 2020-07-05 02:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('judge', '0023_auto_20200704_2318'),
]
operations = [
migrations.AlterField(
model_name='submission',
name='language',
field=models.CharField(choices=[('python3', 'Python 3'), ('java8', 'Java 8'), ('cpp17', 'C++17'), ('haskell', 'Haskell'), ('brainfuck', 'Brainfuck'), ('c18', 'C18'), ('java11', 'Java 11'), ('scratch', 'Scratch'), ('text', 'Text')], max_length=10, null=True),
),
migrations.AlterField(
model_name='user',
name='main_language',
field=models.CharField(choices=[('python3', 'Python 3'), ('java8', 'Java 8'), ('cpp17', 'C++17'), ('haskell', 'Haskell'), ('brainfuck', 'Brainfuck'), ('c18', 'C18'), ('java11', 'Java 11'), ('scratch', 'Scratch'), ('text', 'Text')], default='python3', max_length=10),
),
] | en | 0.802684 | # Generated by Django 3.0.8 on 2020-07-05 02:46 | 1.784503 | 2 |
src/badge_hub.py | stottlerhenke-seattle/openbadge-hub-py | 0 | 7584 | #!/usr/bin/env python
from __future__ import absolute_import, division, print_function
import os
import re
import shlex
import subprocess
import signal
import csv
import logging
import json
import time
from datetime import datetime as dt
from requests.exceptions import RequestException
import glob
import traceback
import random
from badge import *
from badge_discoverer import BadgeDiscoverer, BeaconDiscoverer
from badge_manager_server import BadgeManagerServer
from beacon_manager_server import BeaconManagerServer
from badge_manager_standalone import BadgeManagerStandalone
from beacon_manager_standalone import BeaconManagerStandalone
import hub_manager
from settings import DATA_DIR, LOG_DIR
log_file_name = LOG_DIR + 'hub.log'
scans_file_name = DATA_DIR + 'scan.txt'
pending_file_prefix = DATA_DIR + 'pending_'
audio_archive_file_name = DATA_DIR + 'audio_archive.txt'
proximity_archive_file_name = DATA_DIR + 'proximity_archive.txt'
standalone_audio_file = DATA_DIR + 'audio_data.txt'
standalone_proximity_file = DATA_DIR + 'proximity_data.txt'
AUDIO = "audio"
PROXIMITY = "proximity"
SCAN_DURATION = 3 # seconds
#NOTE try to keep under 100MB or so due to memory constraints
MAX_PENDING_FILE_SIZE = 15000000 # in bytes, so 15MB
# create logger with 'badge_server'
logger = logging.getLogger('badge_server')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler(log_file_name)
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
# formatter = logging.Formatter('%(asctime)s - %(levelname)s - [%(mac)s] %(message)s')
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
def round_float_for_log(x):
return float("{0:.3f}".format(x))
def has_chunks(filename):
"""
Returns true if there is data in the file, and false otherwise
"""
return os.path.exists(filename) and os.path.getsize(filename) > 0
def offload_data():
"""
Send pending files to server and move pending to archive
Return True on success, False on failure
"""
#TODO test with standalone
#NOTE not currently doing anything with the True/False
# return values, might decide to do something later
pending_files = sorted(glob.glob(pending_file_prefix + "*"))
for pending_file_name in pending_files:
logger.debug("Sending {} to server".format(pending_file_name))
if not has_chunks(pending_file_name):
continue
chunks = []
with open(pending_file_name, "r") as pending_file:
for line in pending_file:
chunks.append(json.loads(line))
# real quick grab the data type from the first data entry
data_type = "audio" if "audio" in chunks[0]["type"] else "proximity"
# fire away!
try:
chunks_written = hub_manager.send_data_to_server(logger, data_type, chunks)
if chunks_written == len(chunks):
logger.debug("Successfully wrote {} data entries to server"
.format(len(chunks)))
else:
# this seems unlikely to happen but is good to keep track of i guess
logger.error("Data mismatch: {} data entries were not written to server"
.format(len(chunks) - chunks_written))
logger.error("Error sending data from file {} to server!"
.format(pending_file_name))
return False
# write to archive and erase pending file
with open(get_archive_name(data_type), "a") as archive_file:
for chunk in chunks:
archive_file.write(json.dumps(chunk) + "\n")
os.remove(pending_file_name)
except RequestException as e:
s = traceback.format_exc()
logger.error("Error sending data from file {} to server!"
.format(pending_file_name))
logger.error("{},{}".format(e,s))
return False
return True
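# Illustrative only (all values below are made up): each line of a pending
# file is one JSON object of the shape produced by dialogue() and consumed
# by offload_data() above. This helper is never called.
def _example_pending_line():
    return json.dumps({
        "type": "proximity received",
        "log_timestamp": 1500000000.000,
        "log_index": -1,
        "data": {"timestamp": 1500000000.000, "badge_address": "AA:BB:CC:DD:EE:FF"},
    })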
def get_archive_name(data_type):
"""
Return the name of the archive file for the passed data type
"""
if data_type == AUDIO:
return audio_archive_file_name
else:
return proximity_archive_file_name
def get_proximity_name(mode="server"):
"""
return the name of the existing pending proximity file,
or a new one if either one doesn't exist or if
the existing file is > MAX_PENDING_FILE_SIZE
"""
if mode == "server":
return _get_pending_file_name(PROXIMITY)
else:
return standalone_proximity_file
def get_audio_name(mode="server"):
if mode == "server":
return _get_pending_file_name(AUDIO)
else:
return standalone_audio_file
def _get_pending_file_name(data_type):
"""
If there are no current pending files < MAX_PENDING_FILE_SIZE in size,
return a new pending filename
Else, return an existing one.
"""
filenames = filter(
lambda x: os.path.getsize(x) < MAX_PENDING_FILE_SIZE,
glob.glob("{}*{}*".format(pending_file_prefix, data_type)))
if len(filenames) == 0:
return _create_pending_file_name(data_type)
else:
return filenames[0]
def _create_pending_file_name(data_type):
"""
Create a pending file name for the given data_type
Uses the current date/time to create a unique filename
"""
now = dt.now().strftime("%Y%m%d%H%M%S")
filename = "{}{}_{}.txt".format(pending_file_prefix, now, data_type)
if os.path.exists(filename):
# this seems unlikely to happen, but just in case :)
# get the number of pending files that match this time and add one
files = glob.glob("{}{}*{}*".format(pending_file_prefix, now, data_type))
now = '_'.join((now, str(len(files) + 1)))
filename = "{}{}_{}.txt".format(pending_file_prefix, now, data_type)
return filename
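# Illustrative only: for data_type AUDIO at 2017-07-01 12:00:00 the helper
# above yields something like pending_file_prefix + "20170701120000_audio.txt",
# with a "_2", "_3", ... suffix appended on (unlikely) name collisions.
def _example_pending_file_name():
    return _create_pending_file_name(AUDIO)  # never called; demo only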
def dialogue(bdg, activate_audio, activate_proximity, mode="server"):
"""
Attempts to read data from the device specified by the address. Reading is handled by gatttool.
:param bdg: the Badge object to pull audio chunks and proximity scans from
:param activate_audio: whether audio recording should be active on the badge
:param activate_proximity: whether proximity scanning should be active on the badge
:param mode: "server" or "standalone"; selects which data files are written to
:return: None; received chunks and scans are appended to the local data files
"""
ret = bdg.pull_data(activate_audio, activate_proximity)
addr = bdg.addr
if ret == 0:
logger.info("Successfully pulled data")
# if we were able to pull data, we saw the badge again
bdg.last_seen_ts = time.time()
else:
logger.info("Errors pulling data.")
if bdg.dlg.chunks:
logger.info("Chunks received: {}".format(len(bdg.dlg.chunks)))
logger.info("saving chunks to file")
# store in JSON file
with open(get_audio_name(mode), "a") as fout:
for chunk in bdg.dlg.chunks:
ts_with_ms = round_float_for_log(ts_and_fract_to_float(chunk.ts, chunk.fract))
log_line = {
'type': "audio received",
'log_timestamp': round_float_for_log(time.time()),
'log_index': -1, # need to find a good accumulator.
'data': {
'voltage': round_float_for_log(chunk.voltage),
'timestamp': ts_with_ms,
'sample_period': chunk.sampleDelay,
'num_samples': len(chunk.samples),
'samples': chunk.samples,
'badge_address': addr,
'member': bdg.key,
'member_id':bdg.badge_id
}
}
logger.debug("Chunk timestamp: {0:.3f}, Voltage: {1:.3f}, Delay: {2}, Samples in chunk: {3}".format(
ts_with_ms, chunk.voltage, chunk.sampleDelay, len(chunk.samples)))
#logger.debug(json.dumps(log_line))
json.dump(log_line, fout)
fout.write('\n')
logger.info("done writing")
# update badge object to hold latest timestamps
last_chunk = bdg.dlg.chunks[-1]
last_chunk_ts_pretty = dt.fromtimestamp(last_chunk.ts).strftime("%Y-%m-%d@%H:%M:%S UTC")
if bdg.is_newer_audio_ts(last_chunk.ts, last_chunk.fract):
logger.debug("Setting last badge audio timestamp to {} {} ({})".format(
last_chunk.ts, last_chunk.fract, last_chunk_ts_pretty))
bdg.set_audio_ts(last_chunk.ts, last_chunk.fract)
else:
logger.debug("Keeping existing timestamp ({}.{}) for {}. Last chunk timestamp was: {}.{} ({})"
.format(bdg.last_audio_ts_int,bdg.last_audio_ts_fract,bdg.addr,
last_chunk.ts, last_chunk.fract, last_chunk_ts_pretty))
else:
logger.info("No mic data ready")
if bdg.dlg.scans:
logger.info("Proximity scans received: {}".format(len(bdg.dlg.scans)))
logger.info("saving proximity scans to file")
with open(get_proximity_name(mode), "a") as fout:
for scan in bdg.dlg.scans:
ts_with_ms = round_float_for_log(scan.ts)
log_line = {
'type': "proximity received",
'log_timestamp': round_float_for_log(time.time()),
'log_index': -1, # need to find a good accumulator.
'data': {
'voltage': round_float_for_log(scan.voltage),
'timestamp': ts_with_ms,
'badge_address': addr,
'rssi_distances':
{
device.ID: {'rssi': device.rssi, 'count': device.count} for device in scan.devices
},
'member': bdg.key,
'member_id': bdg.badge_id
}
}
logger.debug("SCAN: scan timestamp: {0:.3f}, voltage: {1:.3f}, Devices in scan: {2}".format(
ts_with_ms, scan.voltage, scan.numDevices))
#logger.info(json.dumps(log_line))
json.dump(log_line, fout)
fout.write('\n')
# update badge object to hold latest timestamps
last_scan = bdg.dlg.scans[-1]
last_scan_ts_pretty = dt.fromtimestamp(last_scan.ts).strftime("%Y-%m-%d@%H:%M:%S UTC")
logger.debug("Setting last badge proximity timestamp to {} ([])".format(
last_scan.ts, last_scan_ts_pretty))
bdg.last_proximity_ts = last_scan.ts
else:
logger.info("No proximity scans ready")
def scan_for_devices(devices_whitelist, show_all=False):
bd = BadgeDiscoverer(logger)
try:
all_devices = bd.discover(scan_duration=SCAN_DURATION)
except Exception as e: # catch *all* exceptions
logger.error("[Badges] Scan failed,{}".format(e))
all_devices = {}
scanned_devices = []
for addr,device_info in all_devices.iteritems():
if addr in devices_whitelist:
logger.debug("\033[1;7m\033[1;32mFound {}, added. Device info: {}\033[0m".format(addr, device_info))
scanned_devices.append({'mac':addr,'device_info':device_info})
else:
if show_all:
logger.debug("Found {}, but not on whitelist. Device info: {}".format(addr, device_info))
pass
time.sleep(2) # sometimes required to prevent the connection from failing
return scanned_devices
def scan_for_bc_devices(devices_whitelist, show_all=False):
bc = BeaconDiscoverer(logger)
try:
all_bc_devices = bc.discover(scan_duration=SCAN_DURATION)
except Exception as e: # catch *all* exceptions
logger.error("[Beacons] Scan failed,{}".format(e))
all_bc_devices = {}
scanned_bc_devices = []
for addr,device_info in all_bc_devices.iteritems():
if addr in devices_whitelist:
logger.debug("\033[1;7m\033[1;32mFound {}, added. Device info: {}\033[0m".format(addr, device_info))
scanned_bc_devices.append({'mac':addr,'device_info':device_info})
else:
if show_all:
logger.debug("Found {}, but not on whitelist. Device info: {}".format(addr, device_info))
pass
time.sleep(2) # sometimes required to prevent the connection from failing
return scanned_bc_devices
def create_badge_manager_instance(mode,timestamp):
if mode == "server":
mgr = BadgeManagerServer(logger=logger)
else:
mgr = BadgeManagerStandalone(logger=logger,timestamp=timestamp)
return mgr
def create_beacon_manager_instance(mode,timestamp):
if mode == "server":
mgrb = BeaconManagerServer(logger=logger)
else:
mgrb = BeaconManagerStandalone(logger=logger,timestamp=timestamp)
return mgrb
def reset():
'''
Resets and reconfigures Bluetooth parameters. The specific parameters affect connection speed negotiation. It's
not pretty, but safer to change the conn params this way
:return:
'''
# Resets BLE hci
logger.info("Resetting bluetooth")
reset_command = "hciconfig hci0 reset"
args = shlex.split(reset_command)
p = subprocess.Popen(args)
# is this a Raspberry Pi?
logger.info("Setting bluetooth connection parameters")
if os.uname()[4][:3] == 'arm':
logger.info("Raspberry Pi detected, changing bluetooth connection parameters")
with open("/sys/kernel/debug/bluetooth/hci0/conn_min_interval", "w") as connparam:
connparam.write("16")
with open("/sys/kernel/debug/bluetooth/hci0/conn_max_interval", "w") as connparam:
connparam.write("17")
else:
logger.warn("Not a Raspberry Pi, Bluetooth connection parameters remain untouched (communication may be slower)")
time.sleep(2) # requires sleep after reset
logger.info("Done resetting bluetooth")
def kill_bluepy():
"""
Kill orphaned/leftover/defunct bluepy-helper processes
I'd like to move this to a separate utility file or something when
we refactor
"""
# get all the bluepy-helper processes
CMD="/bin/ps ax | grep bluepy-helper | grep -v grep | awk '{ print $1 }'"
p = subprocess.Popen(CMD, shell=True, stdout=subprocess.PIPE)
pidstr = p.communicate()[0]
pids = pidstr.split("\n")
pids = [int(pid) for pid in pids if pid.isdigit()]
mypid = os.getpid()
# dont wanna kill our process by accident :)
if mypid in pids:
pids.remove(mypid)
for pid in pids:
# KILL KILL KILL
try:
os.kill(int(pid), signal.SIGKILL)
# we waitpid to clean up defunct processes
os.waitpid(int(pid), 0)
logger.info("Process with PID {} killed".format(pid))
except OSError as err:
logger.error("Unable to kill process with pid {}".format(pid))
logger.error(err)
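# For reference, the ps pipeline above turns output such as (made-up PIDs)
#   1234 ?  S   0:00 bluepy-helper 0
#   5678 ?  Z   0:00 [bluepy-helper] <defunct>
# into pidstr "1234\n5678\n", which is then split, filtered to digits,
# and each surviving PID is killed and reaped.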
def pull_devices(mgr, mgrb, start_recording):
logger.info('Started pulling')
activate_audio = False
activate_proximity = False
if start_recording is None or start_recording == "both":
activate_audio = True
activate_proximity = True
elif start_recording == "audio":
activate_audio = True
elif start_recording == "proximity":
activate_proximity = True
elif start_recording == "none":
activate_audio = False
activate_proximity = False
logger.info("Start recording: Audio = {}, Proximity = {}".format(activate_audio,activate_proximity))
mode = "server" if isinstance(mgr, BadgeManagerServer) else "standalone"
while True:
mgr.pull_badges_list()
mgrb.pull_beacons_list()
# When we refactor we can change this, but for now:
if mode == "server":
logger.info("Attempting to offload data to server")
offload_data()
logger.info("Scanning for members...")
scanned_devices = scan_for_devices(mgr.badges.keys())
# Randomly shuffle devices
random.shuffle(scanned_devices)
# iterate before the actual data collection loop just to offload
# voltages to the server (and update heartbeat on server)
for device in scanned_devices:
b = mgr.badges.get(device['mac'])
# i don't think adv_payload is ever supposed to be empty,
# but sometimes it is. and when it is, it breaks
if device['device_info']['adv_payload'] is not None:
b.last_voltage = device['device_info']['adv_payload']['voltage']
b.observed_id = device['device_info']['adv_payload']['badge_id']
observed_project_id = device['device_info']['adv_payload']['project_id']
if b.observed_id != b.badge_id or b.project_id != observed_project_id:
logger.debug("Warning! Observed IDs do not match server settings. "
"Observed: member_id:{}, project_id:{}. Expected: member_id:{}. project_id: {}"
.format(b.observed_id,observed_project_id,b.badge_id,b.project_id))
b.last_seen_ts = time.time()
mgr.send_badge(device['mac'])
# now the actual data collection
for device in scanned_devices:
# try to update latest badge timestamps from the server
mac = device['mac']
pull_success = mgr.pull_badge(mac)
if not pull_success:
logger.warn("""Problem pulling badge from server\n
Skipping badge with mac {} until next full badge list refresh"""
.format(mac))
continue
b = mgr.badges.get(mac)
# pull data
dialogue(b, activate_audio, activate_proximity, mode)
# update timestamps on server
mgr.send_badge(mac)
time.sleep(2) # requires sleep between devices
logger.info("Scanning for beacons...")
scanned_beacons = scan_for_bc_devices(mgrb.beacons.keys())
# Randomly shuffle devices
random.shuffle(scanned_beacons)
# iterate before the actual data collection loop just to offload
# voltages to the server (and update heartbeat on server)
for device in scanned_beacons:
bcn = mgrb.beacons.get(device['mac'])
if device['device_info']['adv_payload'] is not None:
bcn.last_voltage = device['device_info']['adv_payload']['voltage']
bcn.observed_id = device['device_info']['adv_payload']['badge_id']
observed_project_id = device['device_info']['adv_payload']['project_id']
if bcn.observed_id != bcn.badge_id or bcn.project_id != observed_project_id:
logger.debug("Warning! Observed IDs do not match server settings. "
"Observed: beacon_id:{}, project_id:{}. Expected: beacon_id:{}. project_id: {}"
.format(bcn.observed_id,observed_project_id,bcn.badge_id,bcn.project_id))
bcn.last_seen_ts = time.time()
mgrb.send_beacon(device['mac'])
# Update beacons with wrong id or project id
for device in scanned_beacons:
bcn = mgrb.beacons.get(device['mac'])
if device['device_info']['adv_payload'] is not None:
observed_id = device['device_info']['adv_payload']['badge_id']
observed_project_id = device['device_info']['adv_payload']['project_id']
if bcn.badge_id != observed_id or bcn.project_id != observed_project_id:
bcn.sync_timestamp()
mgrb.send_beacon(device['mac'])
time.sleep(2)
time.sleep(2) # allow BLE time to disconnect
# clean up any leftover bluepy processes
kill_bluepy()
def sync_all_devices(mgr):
logger.info('Syncing all badges recording.')
mgr.pull_badges_list()
for mac in mgr.badges:
bdg = mgr.badges.get(mac)
bdg.sync_timestamp()
time.sleep(2) # requires sleep between devices
time.sleep(2) # allow BLE time to disconnect
def devices_scanner(mgr, mgrb, show_all=False):
logger.info('Scanning for badges')
mgr.pull_badges_list()
logger.info('Scanning for beacons')
mgrb.pull_beacons_list()
while True:
logger.info("Scanning for devices...")
scanned_devices = scan_for_devices(mgr.badges.keys(), show_all) + scan_for_bc_devices(mgrb.beacons.keys())
with open(scans_file_name, "a") as fout:
for device in scanned_devices:
mac = device['mac']
scan_date = device['device_info']['scan_date']
rssi = device['device_info']['rssi']
if device['device_info']['adv_payload']:
voltage = device['device_info']['adv_payload']['voltage']
observed_id = device['device_info']['adv_payload']['badge_id']
project_id = device['device_info']['adv_payload']['project_id']
else:
voltage = 0.0
observed_id = -1
project_id = -1
logger.debug("{},{},{:.2f},{:.2f},{},{}".
format(scan_date, mac, rssi, voltage, observed_id, project_id))
fout.write("{},{},{:.2f},{:.2f},{},{}\n".
format(scan_date, mac, rssi, voltage, observed_id, project_id))
time.sleep(5) # give time to Ctrl-C
def start_all_devices(mgr):
logger.info('Starting all badges recording.')
while True:
mgr.pull_badges_list()
logger.info("Scanning for devices...")
scanned_devices = scan_for_devices(mgr.badges.keys())
for device in scanned_devices:
dev_info = device['device_info']
if dev_info ['adv_payload']:
sync = dev_info ['adv_payload']['sync_status']
audio = dev_info ['adv_payload']['audio_status']
proximity = dev_info ['adv_payload']['proximity_status']
badge_id = dev_info ['adv_payload']['badge_id']
project_id = dev_info ['adv_payload']['project_id']
if sync == 0 or audio == 0 or proximity == 0:
if project_id == 0:
logger.info("changing project ids {}".format(device['mac']))
logger.info("Starting {}".format(device['mac']))
bdg = mgr.badges.get(device['mac'])
bdg.start_recording()
time.sleep(2) # requires sleep between devices
else:
logger.info("Starting {}".format(device['mac']))
bdg = mgr.badges.get(device['mac'])
bdg.start_recording()
time.sleep(2) # requires sleep between devices
else:
logger.info("No need to start {}".format(device['mac']))
time.sleep(2) # allow BLE time to disconnect
def print_badges(mgr, mgrb):
logger.info("Printing badges:")
mgr.pull_badges_list()
mgrb.pull_beacons_list()
badge_list = mgr.badges
beacon_list = mgrb.beacons
print("Members:")
for key, value in badge_list.iteritems():
print("{},{},{},{}".format(value.key,value.addr,value.badge_id,value.project_id))
print("\nBadges:")
for key, value in beacon_list.iteritems():
print("{},{},{},{}".format(value.key,value.addr,value.badge_id,value.project_id))
def add_pull_command_options(subparsers):
pull_parser = subparsers.add_parser('pull', help='Continuously pull data from badges')
pull_parser.add_argument('-r','--start_recording'
, choices=('audio', 'proximity', 'both','none'), required=False
, default='both'
, dest='start_recording',help='data recording option')
def add_scan_command_options(subparsers):
scan_parser = subparsers.add_parser('scan', help='Continuously scan for badges')
scan_parser.add_argument('-a','--show_all', action='store_true', default=False, help="Show all devices")
def add_sync_all_command_options(subparsers):
sa_parser = subparsers.add_parser('sync_all', help='Send date to all devices in whitelist')
def add_start_all_command_options(subparsers):
st_parser = subparsers.add_parser('start_all', help='Start recording on all devices in whitelist')
def add_print_badges_command_options(subparsers):
lb_parser = subparsers.add_parser('print_badges', help='print badges in a CSV format')
if __name__ == "__main__":
import time
import argparse
parser = argparse.ArgumentParser(description="Run scans, send dates, or continuously pull data")
parser.add_argument('-dr','--disable_reset_ble', action='store_true', default=False, help="Do not reset BLE")
parser.add_argument('-m','--hub_mode', choices=('server', 'standalone')
, default='standalone', dest='hub_mode'
, help="Operation mode - standalone (using a configuration file) or a server")
parser.add_argument('-t', '--timestamp'
, type=int, required=False
, dest='timestamp', help='UTC timestamp to start pulling data from (int)')
subparsers = parser.add_subparsers(help='Program mode (e.g. Scan, send dates, pull, scan etc.)', dest='mode')
add_pull_command_options(subparsers)
add_scan_command_options(subparsers)
add_sync_all_command_options(subparsers)
add_start_all_command_options(subparsers)
add_print_badges_command_options(subparsers)
args = parser.parse_args()
mgr = create_badge_manager_instance(args.hub_mode, args.timestamp)
mgrb = create_beacon_manager_instance(args.hub_mode, args.timestamp)
if not args.disable_reset_ble:
reset()
if args.mode == "sync_all":
sync_all_devices(mgr)
# scan for devices
if args.mode == "scan":
devices_scanner(mgr,mgrb, args.show_all)
# pull data from all devices
if args.mode == "pull":
pull_devices(mgr, mgrb, args.start_recording)
if args.mode == "start_all":
start_all_devices(mgr)
if args.mode == "print_badges":
print_badges(mgr, mgrb)
exit(0)
| #!/usr/bin/env python
from __future__ import absolute_import, division, print_function
import os
import re
import shlex
import subprocess
import signal
import csv
import logging
import json
import time
from datetime import datetime as dt
from requests.exceptions import RequestException
import glob
import traceback
import random
from badge import *
from badge_discoverer import BadgeDiscoverer, BeaconDiscoverer
from badge_manager_server import BadgeManagerServer
from beacon_manager_server import BeaconManagerServer
from badge_manager_standalone import BadgeManagerStandalone
from beacon_manager_standalone import BeaconManagerStandalone
import hub_manager
from settings import DATA_DIR, LOG_DIR
log_file_name = LOG_DIR + 'hub.log'
scans_file_name = DATA_DIR + 'scan.txt'
pending_file_prefix = DATA_DIR + 'pending_'
audio_archive_file_name = DATA_DIR + 'audio_archive.txt'
proximity_archive_file_name = DATA_DIR + 'proximity_archive.txt'
standalone_audio_file = DATA_DIR + 'audio_data.txt'
standalone_proximity_file = DATA_DIR + 'proximity_data.txt'
AUDIO = "audio"
PROXIMITY = "proximity"
SCAN_DURATION = 3 # seconds
#NOTE try to keep under 100MB or so due to memory constraints
MAX_PENDING_FILE_SIZE = 15000000 # in bytes, so 15MB
# create logger with 'badge_server'
logger = logging.getLogger('badge_server')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler(log_file_name)
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
# formatter = logging.Formatter('%(asctime)s - %(levelname)s - [%(mac)s] %(message)s')
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
def round_float_for_log(x):
return float("{0:.3f}".format(x))
def has_chunks(filename):
"""
Returns true if there is data in the file, and false otherwise
"""
return os.path.exists(filename) and os.path.getsize(filename) > 0
def offload_data():
"""
Send pending files to server and move pending to archive
Return True on success, False on failure
"""
#TODO test with standalone
#NOTE not currently doing anything with the True/False
# return values, might decide to do something later
pending_files = sorted(glob.glob(pending_file_prefix + "*"))
for pending_file_name in pending_files:
logger.debug("Sending {} to server".format(pending_file_name))
if not has_chunks(pending_file_name):
continue
chunks = []
with open(pending_file_name, "r") as pending_file:
for line in pending_file:
chunks.append(json.loads(line))
# real quick grab the data type from the first data entry
data_type = "audio" if "audio" in chunks[0]["type"] else "proximity"
# fire away!
try:
chunks_written = hub_manager.send_data_to_server(logger, data_type, chunks)
if chunks_written == len(chunks):
logger.debug("Successfully wrote {} data entries to server"
.format(len(chunks)))
else:
# this seems unlikely to happen but is good to keep track of i guess
logger.error("Data mismatch: {} data entries were not written to server"
.format(len(chunks) - chunks_written))
logger.error("Error sending data from file {} to server!"
.format(pending_file_name))
return False
# write to archive and erase pending file
with open(get_archive_name(data_type), "a") as archive_file:
for chunk in chunks:
archive_file.write(json.dumps(chunk) + "\n")
os.remove(pending_file_name)
except RequestException as e:
s = traceback.format_exc()
logger.error("Error sending data from file {} to server!"
.format(pending_file_name))
logger.error("{},{}".format(e,s))
return False
return True
def get_archive_name(data_type):
"""
Return the name of the archive file for the passed data type
"""
if data_type == AUDIO:
return audio_archive_file_name
else:
return proximity_archive_file_name
def get_proximity_name(mode="server"):
"""
return the name of the existing pending proximity file,
or a new one if either one doesn't exist or if
the existing file is > MAX_PENDING_FILE_SIZE
"""
if mode == "server":
return _get_pending_file_name(PROXIMITY)
else:
return standalone_proximity_file
def get_audio_name(mode="server"):
if mode == "server":
return _get_pending_file_name(AUDIO)
else:
return standalone_audio_file
def _get_pending_file_name(data_type):
"""
If there are no current pending files < MAX_PENDING_FILE_SIZE in size,
return a new pending filename
Else, return an existing one.
"""
filenames = filter(
lambda x: os.path.getsize(x) < MAX_PENDING_FILE_SIZE,
glob.glob("{}*{}*".format(pending_file_prefix, data_type)))
if len(filenames) == 0:
return _create_pending_file_name(data_type)
else:
return filenames[0]
def _create_pending_file_name(data_type):
"""
Create a pending file name for the given data_type
Uses the current date/time to create a unique filename
"""
now = dt.now().strftime("%Y%m%d%H%M%S")
filename = "{}{}_{}.txt".format(pending_file_prefix, now, data_type)
if os.path.exists(filename):
# this seems unlikely to happen, but just in case :)
# get the number of pending files that match this time and add one
files = glob.glob("{}{}*{}*".format(pending_file_prefix, now, data_type))
now = '_'.join((now, str(len(files) + 1)))
filename = "{}{}_{}.txt".format(pending_file_prefix, now, data_type)
return filename
def dialogue(bdg, activate_audio, activate_proximity, mode="server"):
"""
Attempts to read data from the device specified by the address. Reading is handled by gatttool.
:param bdg:
:return:
"""
ret = bdg.pull_data(activate_audio, activate_proximity)
addr = bdg.addr
if ret == 0:
logger.info("Successfully pulled data")
# if we were able to pull data, we saw the badge again
bdg.last_seen_ts = time.time()
else:
logger.info("Errors pulling data.")
if bdg.dlg.chunks:
logger.info("Chunks received: {}".format(len(bdg.dlg.chunks)))
logger.info("saving chunks to file")
# store in JSON file
with open(get_audio_name(mode), "a") as fout:
for chunk in bdg.dlg.chunks:
ts_with_ms = round_float_for_log(ts_and_fract_to_float(chunk.ts, chunk.fract))
log_line = {
'type': "audio received",
'log_timestamp': round_float_for_log(time.time()),
'log_index': -1, # need to find a good accumulator.
'data': {
'voltage': round_float_for_log(chunk.voltage),
'timestamp': ts_with_ms,
'sample_period': chunk.sampleDelay,
'num_samples': len(chunk.samples),
'samples': chunk.samples,
'badge_address': addr,
'member': bdg.key,
'member_id':bdg.badge_id
}
}
logger.debug("Chunk timestamp: {0:.3f}, Voltage: {1:.3f}, Delay: {2}, Samples in chunk: {3}".format(
ts_with_ms, chunk.voltage, chunk.sampleDelay, len(chunk.samples)))
#logger.debug(json.dumps(log_line))
json.dump(log_line, fout)
fout.write('\n')
logger.info("done writing")
# update badge object to hold latest timestamps
last_chunk = bdg.dlg.chunks[-1]
last_chunk_ts_pretty = dt.fromtimestamp(last_chunk.ts).strftime("%Y-%m-%d@%H:%M:%S UTC")
if bdg.is_newer_audio_ts(last_chunk.ts, last_chunk.fract):
logger.debug("Setting last badge audio timestamp to {} {} ({})".format(
last_chunk.ts, last_chunk.fract, last_chunk_ts_pretty))
bdg.set_audio_ts(last_chunk.ts, last_chunk.fract)
else:
logger.debug("Keeping existing timestamp ({}.{}) for {}. Last chunk timestamp was: {}.{} ({})"
.format(bdg.last_audio_ts_int,bdg.last_audio_ts_fract,bdg.addr,
                                     last_chunk.ts, last_chunk.fract, last_chunk_ts_pretty))
else:
logger.info("No mic data ready")
if bdg.dlg.scans:
logger.info("Proximity scans received: {}".format(len(bdg.dlg.scans)))
logger.info("saving proximity scans to file")
with open(get_proximity_name(mode), "a") as fout:
for scan in bdg.dlg.scans:
ts_with_ms = round_float_for_log(scan.ts)
log_line = {
'type': "proximity received",
'log_timestamp': round_float_for_log(time.time()),
'log_index': -1, # need to find a good accumulator.
'data': {
'voltage': round_float_for_log(scan.voltage),
'timestamp': ts_with_ms,
'badge_address': addr,
'rssi_distances':
{
device.ID: {'rssi': device.rssi, 'count': device.count} for device in scan.devices
},
'member': bdg.key,
'member_id': bdg.badge_id
}
}
logger.debug("SCAN: scan timestamp: {0:.3f}, voltage: {1:.3f}, Devices in scan: {2}".format(
ts_with_ms, scan.voltage, scan.numDevices))
#logger.info(json.dumps(log_line))
json.dump(log_line, fout)
fout.write('\n')
# update badge object to hold latest timestamps
last_scan = bdg.dlg.scans[-1]
last_scan_ts_pretty = dt.fromtimestamp(last_scan.ts).strftime("%Y-%m-%d@%H:%M:%S UTC")
logger.debug("Setting last badge proximity timestamp to {} ([])".format(
last_scan.ts, last_scan_ts_pretty))
bdg.last_proximity_ts = last_scan.ts
else:
logger.info("No proximity scans ready")
def scan_for_devices(devices_whitelist, show_all=False):
bd = BadgeDiscoverer(logger)
try:
all_devices = bd.discover(scan_duration=SCAN_DURATION)
except Exception as e: # catch *all* exceptions
logger.error("[Badges] Scan failed,{}".format(e))
all_devices = {}
scanned_devices = []
for addr,device_info in all_devices.iteritems():
if addr in devices_whitelist:
logger.debug("\033[1;7m\033[1;32mFound {}, added. Device info: {}\033[0m".format(addr, device_info))
scanned_devices.append({'mac':addr,'device_info':device_info})
else:
if show_all:
logger.debug("Found {}, but not on whitelist. Device info: {}".format(addr, device_info))
pass
    time.sleep(2)  # sometimes required to prevent the connection from failing
return scanned_devices
def scan_for_bc_devices(devices_whitelist, show_all=False):
bc = BeaconDiscoverer(logger)
try:
all_bc_devices = bc.discover(scan_duration=SCAN_DURATION)
except Exception as e: # catch *all* exceptions
logger.error("[Beacons] Scan failed,{}".format(e))
all_bc_devices = {}
scanned_bc_devices = []
for addr,device_info in all_bc_devices.iteritems():
if addr in devices_whitelist:
logger.debug("\033[1;7m\033[1;32mFound {}, added. Device info: {}\033[0m".format(addr, device_info))
scanned_bc_devices.append({'mac':addr,'device_info':device_info})
else:
if show_all:
logger.debug("Found {}, but not on whitelist. Device info: {}".format(addr, device_info))
pass
    time.sleep(2)  # sometimes required to prevent the connection from failing
return scanned_bc_devices
def create_badge_manager_instance(mode,timestamp):
if mode == "server":
mgr = BadgeManagerServer(logger=logger)
else:
mgr = BadgeManagerStandalone(logger=logger,timestamp=timestamp)
return mgr
def create_beacon_manager_instance(mode,timestamp):
if mode == "server":
mgrb = BeaconManagerServer(logger=logger)
else:
mgrb = BeaconManagerStandalone(logger=logger,timestamp=timestamp)
return mgrb
def reset():
'''
Resets and reconfigures Bluetooth parameters. The specific parameters affect connection speed negotiation. It's
not pretty, but safer to change the conn params this way
:return:
'''
# Resets BLE hci
logger.info("Resetting bluetooth")
reset_command = "hciconfig hci0 reset"
args = shlex.split(reset_command)
p = subprocess.Popen(args)
    # is this a Raspberry Pi?
logger.info("Setting bluetooth connection parameters")
if os.uname()[4][:3] == 'arm':
logger.info("Raspberry Pi detected, changing bluetooth connection parameters")
with open("/sys/kernel/debug/bluetooth/hci0/conn_min_interval", "w") as connparam:
connparam.write("16")
with open("/sys/kernel/debug/bluetooth/hci0/conn_max_interval", "w") as connparam:
connparam.write("17")
else:
logger.warn("Not a Raspberry Pi, Bluetooth connection parameters remain untouched (communication may be slower)")
time.sleep(2) # requires sleep after reset
logger.info("Done resetting bluetooth")
def kill_bluepy():
"""
Kill orphaned/leftover/defunct bluepy-helper processes
I'd like to move this to a separate utility file or something when
we refactor
"""
# get all the bluepy-helper processes
CMD="/bin/ps ax | grep bluepy-helper | grep -v grep | awk '{ print $1 }'"
p = subprocess.Popen(CMD, shell=True, stdout=subprocess.PIPE)
pidstr = p.communicate()[0]
pids = pidstr.split("\n")
pids = [int(pid) for pid in pids if pid.isdigit()]
mypid = os.getpid()
# dont wanna kill our process by accident :)
if mypid in pids:
pids.remove(mypid)
for pid in pids:
# KILL KILL KILL
try:
os.kill(int(pid), signal.SIGKILL)
# we waitpid to clean up defunct processes
os.waitpid(int(pid), 0)
logger.info("Process with PID {} killed".format(pid))
except OSError as err:
logger.error("Unable to kill process with pid {}".format(pid))
logger.error(err)
def pull_devices(mgr, mgrb, start_recording):
logger.info('Started pulling')
activate_audio = False
activate_proximity = False
if start_recording is None or start_recording == "both":
activate_audio = True
activate_proximity = True
elif start_recording == "audio":
activate_audio = True
elif start_recording == "proximity":
activate_proximity = True
elif start_recording == "none":
activate_audio = False
activate_proximity = False
logger.info("Start recording: Audio = {}, Proximity = {}".format(activate_audio,activate_proximity))
mode = "server" if isinstance(mgr, BadgeManagerServer) else "standalone"
while True:
mgr.pull_badges_list()
mgrb.pull_beacons_list()
# When we refactor we can change this, but for now:
if mode == "server":
logger.info("Attempting to offload data to server")
offload_data()
logger.info("Scanning for members...")
scanned_devices = scan_for_devices(mgr.badges.keys())
# Randomly shuffle devices
random.shuffle(scanned_devices)
# iterate before the actual data collection loop just to offload
# voltages to the server (and update heartbeat on server)
for device in scanned_devices:
b = mgr.badges.get(device['mac'])
# i don't think adv_payload is ever supposed to be empty,
# but sometimes it is. and when it is, it breaks
if device['device_info']['adv_payload'] is not None:
b.last_voltage = device['device_info']['adv_payload']['voltage']
b.observed_id = device['device_info']['adv_payload']['badge_id']
observed_project_id = device['device_info']['adv_payload']['project_id']
if b.observed_id != b.badge_id or b.project_id != observed_project_id:
logger.debug("Warning! Observed IDs do not match server settings. "
"Observed: member_id:{}, project_id:{}. Expected: member_id:{}. project_id: {}"
.format(b.observed_id,observed_project_id,b.badge_id,b.project_id))
b.last_seen_ts = time.time()
mgr.send_badge(device['mac'])
# now the actual data collection
for device in scanned_devices:
# try to update latest badge timestamps from the server
mac = device['mac']
pull_success = mgr.pull_badge(mac)
if not pull_success:
logger.warn("""Problem pulling badge from server\n
Skipping badge with mac {} until next full badge list refresh"""
.format(mac))
continue
b = mgr.badges.get(mac)
# pull data
dialogue(b, activate_audio, activate_proximity, mode)
# update timestamps on server
mgr.send_badge(mac)
time.sleep(2) # requires sleep between devices
logger.info("Scanning for beacons...")
scanned_beacons = scan_for_bc_devices(mgrb.beacons.keys())
# Randomly shuffle devices
random.shuffle(scanned_beacons)
# iterate before the actual data collection loop just to offload
# voltages to the server (and update heartbeat on server)
for device in scanned_beacons:
bcn = mgrb.beacons.get(device['mac'])
if device['device_info']['adv_payload'] is not None:
bcn.last_voltage = device['device_info']['adv_payload']['voltage']
bcn.observed_id = device['device_info']['adv_payload']['badge_id']
observed_project_id = device['device_info']['adv_payload']['project_id']
if bcn.observed_id != bcn.badge_id or bcn.project_id != observed_project_id:
logger.debug("Warning! Observed IDs do not match server settings. "
"Observed: beacon_id:{}, project_id:{}. Expected: beacon_id:{}. project_id: {}"
.format(bcn.observed_id,observed_project_id,bcn.badge_id,bcn.project_id))
bcn.last_seen_ts = time.time()
mgrb.send_beacon(device['mac'])
# Update beacons with wrong id or project id
for device in scanned_beacons:
bcn = mgrb.beacons.get(device['mac'])
if device['device_info']['adv_payload'] is not None:
observed_id = device['device_info']['adv_payload']['badge_id']
observed_project_id = device['device_info']['adv_payload']['project_id']
if bcn.badge_id != observed_id or bcn.project_id != observed_project_id:
bcn.sync_timestamp()
mgrb.send_beacon(device['mac'])
time.sleep(2)
time.sleep(2) # allow BLE time to disconnect
# clean up any leftover bluepy processes
kill_bluepy()
def sync_all_devices(mgr):
logger.info('Syncing all badges recording.')
mgr.pull_badges_list()
for mac in mgr.badges:
bdg = mgr.badges.get(mac)
bdg.sync_timestamp()
time.sleep(2) # requires sleep between devices
time.sleep(2) # allow BLE time to disconnect
def devices_scanner(mgr, mgrb, show_all=False):
logger.info('Scanning for badges')
mgr.pull_badges_list()
logger.info('Scanning for beacons')
mgrb.pull_beacons_list()
while True:
logger.info("Scanning for devices...")
scanned_devices = scan_for_devices(mgr.badges.keys(), show_all) + scan_for_bc_devices(mgrb.beacons.keys())
with open(scans_file_name, "a") as fout:
for device in scanned_devices:
mac = device['mac']
scan_date = device['device_info']['scan_date']
rssi = device['device_info']['rssi']
if device['device_info']['adv_payload']:
voltage = device['device_info']['adv_payload']['voltage']
observed_id = device['device_info']['adv_payload']['badge_id']
project_id = device['device_info']['adv_payload']['project_id']
else:
voltage = 0.0
observed_id = -1
project_id = -1
logger.debug("{},{},{:.2f},{:.2f},{},{}".
format(scan_date, mac, rssi, voltage, observed_id, project_id))
fout.write("{},{},{:.2f},{:.2f},{},{}\n".
format(scan_date, mac, rssi, voltage, observed_id, project_id))
time.sleep(5) # give time to Ctrl-C
def start_all_devices(mgr):
logger.info('Starting all badges recording.')
while True:
mgr.pull_badges_list()
logger.info("Scanning for devices...")
scanned_devices = scan_for_devices(mgr.badges.keys())
for device in scanned_devices:
dev_info = device['device_info']
            if dev_info['adv_payload']:
                sync = dev_info['adv_payload']['sync_status']
                audio = dev_info['adv_payload']['audio_status']
                proximity = dev_info['adv_payload']['proximity_status']
                badge_id = dev_info['adv_payload']['badge_id']
                project_id = dev_info['adv_payload']['project_id']
                if sync == 0 or audio == 0 or proximity == 0:
                    if project_id == 0:
logger.info("changing project ids {}".format(device['mac']))
logger.info("Starting {}".format(device['mac']))
bdg = mgr.badges.get(device['mac'])
bdg.start_recording()
time.sleep(2) # requires sleep between devices
else:
logger.info("Starting {}".format(device['mac']))
bdg = mgr.badges.get(device['mac'])
bdg.start_recording()
time.sleep(2) # requires sleep between devices
else:
logger.info("No need to start {}".format(device['mac']))
time.sleep(2) # allow BLE time to disconnect
def print_badges(mgr, mgrb):
logger.info("Printing badges:")
mgr.pull_badges_list()
mgrb.pull_beacons_list()
badge_list = mgr.badges
beacon_list = mgrb.beacons
print("Members:")
for key, value in badge_list.iteritems():
print("{},{},{},{}".format(value.key,value.addr,value.badge_id,value.project_id))
print("\nBadges:")
for key, value in beacon_list.iteritems():
print("{},{},{},{}".format(value.key,value.addr,value.badge_id,value.project_id))
def add_pull_command_options(subparsers):
pull_parser = subparsers.add_parser('pull', help='Continuously pull data from badges')
pull_parser.add_argument('-r','--start_recording'
, choices=('audio', 'proximity', 'both','none'), required=False
, default='both'
, dest='start_recording',help='data recording option')
def add_scan_command_options(subparsers):
scan_parser = subparsers.add_parser('scan', help='Continuously scan for badges')
scan_parser.add_argument('-a','--show_all', action='store_true', default=False, help="Show all devices")
def add_sync_all_command_options(subparsers):
sa_parser = subparsers.add_parser('sync_all', help='Send date to all devices in whitelist')
def add_start_all_command_options(subparsers):
st_parser = subparsers.add_parser('start_all', help='Start recording on all devices in whitelist')
def add_print_badges_command_options(subparsers):
lb_parser = subparsers.add_parser('print_badges', help='print badges in a CSV format')
if __name__ == "__main__":
import time
import argparse
parser = argparse.ArgumentParser(description="Run scans, send dates, or continuously pull data")
parser.add_argument('-dr','--disable_reset_ble', action='store_true', default=False, help="Do not reset BLE")
parser.add_argument('-m','--hub_mode', choices=('server', 'standalone')
, default='standalone', dest='hub_mode'
, help="Operation mode - standalone (using a configuration file) or a server")
parser.add_argument('-t', '--timestamp'
, type=int, required=False
, dest='timestamp', help='UTC timestamp to start pulling data from (int)')
    subparsers = parser.add_subparsers(help='Program mode (e.g. scan, send dates, pull, etc.)', dest='mode')
add_pull_command_options(subparsers)
add_scan_command_options(subparsers)
add_sync_all_command_options(subparsers)
add_start_all_command_options(subparsers)
add_print_badges_command_options(subparsers)
args = parser.parse_args()
mgr = create_badge_manager_instance(args.hub_mode, args.timestamp)
mgrb = create_beacon_manager_instance(args.hub_mode, args.timestamp)
if not args.disable_reset_ble:
reset()
if args.mode == "sync_all":
sync_all_devices(mgr)
# scan for devices
if args.mode == "scan":
devices_scanner(mgr,mgrb, args.show_all)
# pull data from all devices
if args.mode == "pull":
pull_devices(mgr, mgrb, args.start_recording)
if args.mode == "start_all":
start_all_devices(mgr)
if args.mode == "print_badges":
print_badges(mgr, mgrb)
exit(0)
| en | 0.824497 | 1.74356 | 2 |
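Two pieces of the hub above are worth a standalone illustration: the pending-file rotation (`_get_pending_file_name` / `_create_pending_file_name`) caps each JSON-lines file at `MAX_PENDING_FILE_SIZE` and timestamps new ones, and the argparse block at the bottom drives everything through the `pull`, `scan`, `sync_all`, `start_all`, and `print_badges` subcommands. A minimal sketch of the rotation logic, with an illustrative prefix in place of the hub's configured one:

import glob
import os
from datetime import datetime as dt

PENDING_PREFIX = "data/pending_"   # illustrative; the hub reads its prefix from config
MAX_PENDING_FILE_SIZE = 15000000   # 15 MB cap, as in the hub code

def pending_file_for(data_type):
    """Reuse the first pending file still under the size cap, else start a new one."""
    candidates = [f for f in glob.glob("{}*{}*".format(PENDING_PREFIX, data_type))
                  if os.path.getsize(f) < MAX_PENDING_FILE_SIZE]
    if candidates:
        return candidates[0]
    now = dt.now().strftime("%Y%m%d%H%M%S")
    return "{}{}_{}.txt".format(PENDING_PREFIX, now, data_type)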
python/compile.py | liamgam/gdkit | 1 | 7585 | import compileall
compileall.compile_dir(".",force=1) | none | 1 | 1.174575 | 1 |
|
saleor/product/migrations/0141_update_descritpion_fields.py | fairhopeweb/saleor | 15,337 | 7586 | # Generated by Django 3.1.5 on 2021-02-17 11:04
from django.db import migrations
import saleor.core.db.fields
import saleor.core.utils.editorjs
def update_empty_description_field(apps, schema_editor):
Category = apps.get_model("product", "Category")
CategoryTranslation = apps.get_model("product", "CategoryTranslation")
Collection = apps.get_model("product", "Collection")
CollectionTranslation = apps.get_model("product", "CollectionTranslation")
Product = apps.get_model("product", "Product")
ProductTranslation = apps.get_model("product", "ProductTranslation")
models = [
Category,
CategoryTranslation,
Collection,
CollectionTranslation,
Product,
ProductTranslation,
]
for model in models:
model.objects.filter(description={}).update(description=None)
class Migration(migrations.Migration):
dependencies = [
("product", "0140_auto_20210125_0905"),
]
operations = [
migrations.AlterField(
model_name="category",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.AlterField(
model_name="categorytranslation",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.AlterField(
model_name="collection",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.AlterField(
model_name="collectiontranslation",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.AlterField(
model_name="product",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.AlterField(
model_name="producttranslation",
name="description",
field=saleor.core.db.fields.SanitizedJSONField(
blank=True,
null=True,
sanitizer=saleor.core.utils.editorjs.clean_editor_js,
),
),
migrations.RunPython(
update_empty_description_field,
migrations.RunPython.noop,
),
]
| en | 0.813567 | # Generated by Django 3.1.5 on 2021-02-17 11:04 | 1.668498 | 2 |
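The migration pairs the data fix with `migrations.RunPython.noop`, which keeps the migration reversible even though the cleanup itself is one-way; a minimal sketch of the same pattern with hypothetical app and model names:

from django.db import migrations

def forwards(apps, schema_editor):
    # hypothetical model: normalize empty JSON descriptions to NULL
    Thing = apps.get_model("myapp", "Thing")
    Thing.objects.filter(description={}).update(description=None)

class Migration(migrations.Migration):
    dependencies = [("myapp", "0001_initial")]
    operations = [
        # noop reverse lets `manage.py migrate myapp 0001` unapply cleanly
        migrations.RunPython(forwards, migrations.RunPython.noop),
    ]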
local_search/sat_isfayer.py | arnaubena97/SatSolver-sat_isfayer | 0 | 7587 | #!/usr/bin/env python3
import sys
import random
def read_file(file_name):
"""File reader and parser the num of variables, num of clauses and put the clauses in a list"""
clauses =[]
with open(file_name) as all_file:
for line in all_file:
if line.startswith('c'): continue #ignore comments
if line.startswith('p'):
num_variables = int(line.split()[2]) # set num_variables
continue
if line.strip() == "": continue
clause = list(map(int, line.split()))
clause.pop()
clauses.append(clause)
return num_variables, clauses
def print_sol(solution):
"""Method to print the solution that satisfies all the clauses """
print("s SATISFIABLE")
print("v %s 0" %" ".join(map(str, solution)))
exit(0)
class walksat_solver():
def __init__(self, clauses, num_variables):
"""Constructor of the solver"""
self.clauses = clauses
self.num_variables = num_variables
self.formula=[]
self.list_positions = self.create_positions()
self.index_clauses_satisfied = []
def randomSolution(self):
"""Create a random solution of cnf formula. Ex: [-1, 2, 3, -4, ...]"""
random_formula = [x if random.random() < 0.5 else -x for x in range(self.num_variables + 1)]
return random_formula[1:]
def create_positions(self):
"""Return a list with the clause index that apear in the clauses.
First position is empty, and the index of list is the variable.
Ex: [ [], [2], [2, 3], ....] """
vars_positions = [[] for _ in range(self.num_variables * 2 + 1)]
for index, clause in enumerate(self.clauses):
for var in clause:
vars_positions[var].append(index)
return vars_positions
def calculate_all_clauses_satisfy(self):
"""Returns a list with the number of variables that
satisfy the clause with the same index.
Method for all clauses.
Ex: [1, 0, 2, 2] in test_0.cnf """
list_variables_satisfies = []
for clause in range(len(self.clauses)):
number_sat = self.clause_satisfy(clause)
list_variables_satisfies.append(number_sat)
return list_variables_satisfies
def clause_satisfy(self, index):
"""Returns an integer, which is the number of
variables in the formula that satisfy the
clause indicated by the index.
        Ex: index = 1 --> clause[1] = [1, -2, 3, ..] """
satisfy = 0
for variable in self.clauses[index]:
if variable in self.formula:
satisfy += 1
return satisfy
def select_all_unsatisfied(self):
"""Returns a list of indexes whose clause
is not satisfied."""
clauses_not_satisfied = []
for index, value in enumerate(self.index_clauses_satisfied):
if value == 0:
clauses_not_satisfied.append(index)
return clauses_not_satisfied
def get_clause_unsatisfied(self, list_all_unsatisfied):
"""Returns a randomly selected unsatisfied clause"""
return self.clauses[random.choice(list_all_unsatisfied)]
def update(self, variable, x):
"""It is responsible for updating the list of
the number of variables that satisfy the clause"""
for index in self.list_positions[x * variable]:
self.index_clauses_satisfied[index] += x
def change_variable(self, clause_to_review):
"""Is responsible for assessing which is
the best variable in the clause to change"""
worst_wrong = sys.maxsize
bests_variables = []
for variable in clause_to_review:
wrong = 0
for index in self.list_positions[-variable]:
if not self.index_clauses_satisfied[index] > 1:
wrong += 1
            if wrong < worst_wrong:
                worst_wrong = wrong
                bests_variables = [variable]  # strictly better: restart the candidate list
            elif wrong == worst_wrong:
                bests_variables.append(variable)
return random.choice(bests_variables)
def solve(self, max_tries=50000000, max_flips=3000):
"""Implementation of the solver"""
        # restart indefinitely; bound restarts by swapping in `for _ in range(max_tries):`
        while True:
self.formula = self.randomSolution()
self.index_clauses_satisfied = self.calculate_all_clauses_satisfy()
for _ in range(max_flips):
index_all_unsatisfied = self.select_all_unsatisfied()
if len(index_all_unsatisfied)==0:
print_sol(self.formula)
clause_to_review = self.get_clause_unsatisfied(index_all_unsatisfied)
variable = self.change_variable(clause_to_review)
self.update(variable, 1)
self.update(variable, -1)
self.formula[abs(variable)-1] *= -1
#Main
if __name__ == "__main__":
if len(sys.argv) == 2:
file_name = sys.argv[1]
else:
print("\n Command: python %s <file_name.cnf> \n" %sys.argv[0])
exit(0)
num_variables, clauses = read_file(file_name)
sat = walksat_solver(clauses, num_variables)
sat.solve()
exit(0)
| en | 0.839743 | 3.849441 | 4 |
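`read_file` above expects DIMACS CNF input: `c` lines are comments, the `p cnf <num_vars> <num_clauses>` header supplies the variable count, and each clause is a line of integers terminated by 0. A minimal satisfiable instance and invocation, for illustration:

# example.cnf -- a toy instance the parser above accepts:
#
#   c toy instance
#   p cnf 3 2
#   1 -3 0
#   2 3 -1 0
#
# running the solver prints "s SATISFIABLE" followed by a "v ..." model line:
#   python sat_isfayer.py example.cnf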
torch/_VF.py | Hacky-DH/pytorch | 60,067 | 7588 | """
This makes the functions in torch._C._VariableFunctions available as
torch._VF.<funcname>
without mypy being able to find them.
A subset of those functions are mapped to ATen functions in
torch/jit/_builtins.py
See https://github.com/pytorch/pytorch/issues/21478 for the reason for
introducing torch._VF
"""
import torch
import sys
import types
class VFModule(types.ModuleType):
vf: types.ModuleType
def __init__(self, name):
super(VFModule, self).__init__(name)
self.vf = torch._C._VariableFunctions
def __getattr__(self, attr):
return getattr(self.vf, attr)
sys.modules[__name__] = VFModule(__name__)
| """
This makes the functions in torch._C._VariableFunctions available as
torch._VF.<funcname>
without mypy being able to find them.
A subset of those functions are mapped to ATen functions in
torch/jit/_builtins.py
See https://github.com/pytorch/pytorch/issues/21478 for the reason for
introducing torch._VF
"""
import torch
import sys
import types
class VFModule(types.ModuleType):
vf: types.ModuleType
def __init__(self, name):
super(VFModule, self).__init__(name)
self.vf = torch._C._VariableFunctions
def __getattr__(self, attr):
return getattr(self.vf, attr)
sys.modules[__name__] = VFModule(__name__)
| en | 0.866004 | This makes the functions in torch._C._VariableFunctions available as torch._VF.<funcname> without mypy being able to find them. A subset of those functions are mapped to ATen functions in torch/jit/_builtins.py See https://github.com/pytorch/pytorch/issues/21478 for the reason for introducing torch._VF | 2.507393 | 3 |
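Because `VFModule.__getattr__` forwards every lookup to `torch._C._VariableFunctions`, any native variable function is reachable through `torch._VF` even though mypy cannot see it; a small sketch, assuming `stack` as one such function:

import torch
from torch import _VF

t = torch.ones(3)
# attribute access falls through VFModule.__getattr__ to
# torch._C._VariableFunctions.stack
s = _VF.stack([t, t], 0)
assert s.shape == (2, 3)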
sparse_causal_model_learner_rl/annealer/threshold_projection.py | sergeivolodin/causality-disentanglement-rl | 2 | 7589 |
import gin
import torch
import logging
from sparse_causal_model_learner_rl.metrics import find_value, find_key
@gin.configurable
def ProjectionThreshold(config, config_object, epoch_info, temp,
adjust_every=100, metric_threshold=0.5, delta=0.5, source_metric_key=None,
min_hyper=0, max_hyper=1000,
gin_variable=None, **kwargs):
try:
metric_val = find_value(epoch_info, source_metric_key)
except AssertionError as e:
return config
good = metric_val < metric_threshold
hyper = gin.query_parameter(gin_variable)
logging.info(f"Projection: metric={metric_val} threshold={metric_threshold} good={good} hyper={hyper}")
if 'last_hyper_adjustment' not in temp:
temp['last_hyper_adjustment'] = 0
i = epoch_info['epochs']
if good:
temp['suggested_hyper'] = hyper - delta
else:
temp['suggested_hyper'] = hyper + delta
if temp['suggested_hyper'] > max_hyper:
temp['suggested_hyper'] = max_hyper
if temp['suggested_hyper'] < min_hyper:
temp['suggested_hyper'] = min_hyper
if 'suggested_hyper' in temp and (i - temp['last_hyper_adjustment'] >= adjust_every):
temp['last_hyper_adjustment'] = i
with gin.unlock_config():
gin.bind_parameter(gin_variable, temp['suggested_hyper'])
del temp['suggested_hyper']
return config
| none | 1 | 1.980903 | 2 |
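Stripped of the gin plumbing, `ProjectionThreshold` implements a clamped bang-bang controller that fires once every `adjust_every` epochs; a pure-Python sketch of the adjustment rule (names illustrative):

def anneal_step(hyper, metric_val, metric_threshold=0.5, delta=0.5,
                min_hyper=0.0, max_hyper=1000.0):
    # metric below threshold counts as "good": lower the hyperparameter;
    # otherwise raise it, clamping to [min_hyper, max_hyper] either way
    hyper = hyper - delta if metric_val < metric_threshold else hyper + delta
    return min(max(hyper, min_hyper), max_hyper)

h = 10.0
for m in (0.7, 0.6, 0.4, 0.3):   # pretend metric trajectory
    h = anneal_step(h, m)        # +0.5, +0.5, -0.5, -0.5 -> back to 10.0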
numpyro/contrib/control_flow/scan.py | ucals/numpyro | 2 | 7590 | # Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
from collections import OrderedDict
from functools import partial
from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten
import jax.numpy as jnp
from jax.tree_util import register_pytree_node_class
from numpyro import handlers
from numpyro.primitives import _PYRO_STACK, Messenger, apply_stack
from numpyro.util import not_jax_tracer
@register_pytree_node_class
class PytreeTrace:
def __init__(self, trace):
self.trace = trace
def tree_flatten(self):
trace, aux_trace = {}, {}
for name, site in self.trace.items():
if site['type'] in ['sample', 'deterministic']:
trace[name], aux_trace[name] = {}, {'_control_flow_done': True}
for key in site:
if key in ['fn', 'args', 'value', 'intermediates']:
trace[name][key] = site[key]
# scanned sites have stop field because we trace them inside a block handler
elif key != 'stop':
aux_trace[name][key] = site[key]
return (trace,), aux_trace
@classmethod
def tree_unflatten(cls, aux_data, children):
trace, = children
for name, site in trace.items():
site.update(aux_data[name])
return cls(trace)
def _subs_wrapper(subs_map, i, length, site):
value = None
if isinstance(subs_map, dict) and site['name'] in subs_map:
value = subs_map[site['name']]
elif callable(subs_map):
rng_key = site['kwargs'].get('rng_key')
subs_map = handlers.seed(subs_map, rng_seed=rng_key) if rng_key is not None else subs_map
value = subs_map(site)
if value is not None:
value_ndim = jnp.ndim(value)
sample_shape = site['kwargs']['sample_shape']
fn_ndim = len(sample_shape + site['fn'].shape())
if value_ndim == fn_ndim:
# this branch happens when substitute_fn is init_strategy,
# where we apply init_strategy to each element in the scanned series
return value
elif value_ndim == fn_ndim + 1:
# this branch happens when we substitute a series of values
shape = jnp.shape(value)
if shape[0] == length:
return value[i]
elif shape[0] < length:
rng_key = site['kwargs']['rng_key']
assert rng_key is not None
# we use the substituted values if i < shape[0]
# and generate a new sample otherwise
return lax.cond(i < shape[0],
(value, i),
lambda val: val[0][val[1]],
rng_key,
lambda val: site['fn'](rng_key=val, sample_shape=sample_shape))
else:
raise RuntimeError(f"Substituted value for site {site['name']} "
"requires length less than or equal to scan length."
f" Expected length <= {length}, but got {shape[0]}.")
else:
raise RuntimeError(f"Something goes wrong. Expected ndim = {fn_ndim} or {fn_ndim+1},"
f" but got {value_ndim}. This might happen when you use nested scan,"
" which is currently not supported. Please report the issue to us!")
class promote_shapes(Messenger):
# a helper messenger to promote shapes of `fn` and `value`
# + msg: fn.batch_shape = (2, 3), value.shape = (3,) + fn.event_shape
# process_message(msg): promote value so that value.shape = (1, 3) + fn.event_shape
# + msg: fn.batch_shape = (3,), value.shape = (2, 3) + fn.event_shape
# process_message(msg): promote fn so that fn.batch_shape = (1, 3).
def process_message(self, msg):
if msg["type"] == "sample" and msg["value"] is not None:
fn, value = msg["fn"], msg["value"]
value_batch_ndims = jnp.ndim(value) - fn.event_dim
fn_batch_ndim = len(fn.batch_shape)
prepend_shapes = (1,) * abs(fn_batch_ndim - value_batch_ndims)
if fn_batch_ndim > value_batch_ndims:
msg["value"] = jnp.reshape(value, prepend_shapes + jnp.shape(value))
elif fn_batch_ndim < value_batch_ndims:
msg["fn"] = tree_map(lambda x: jnp.reshape(x, prepend_shapes + jnp.shape(x)), fn)
def scan_enum(f, init, xs, length, reverse, rng_key=None, substitute_stack=None):
from numpyro.contrib.funsor import enum, config_enumerate, markov, trace as packed_trace
# XXX: This implementation only works for history size=1 but can be
# extended to history size > 1 by running `f` `history_size` times
# for initialization. However, `sequential_sum_product` does not
# support history size > 1, so we skip supporting it here.
# Note that `funsor.sum_product.sarkka_bilmes_product` does support history > 1.
if reverse:
x0 = tree_map(lambda x: x[-1], xs)
xs_ = tree_map(lambda x: x[:-1], xs)
else:
x0 = tree_map(lambda x: x[0], xs)
xs_ = tree_map(lambda x: x[1:], xs)
carry_shape_at_t1 = None
def body_fn(wrapped_carry, x, prefix=None):
i, rng_key, carry = wrapped_carry
init = True if (not_jax_tracer(i) and i == 0) else False
rng_key, subkey = random.split(rng_key) if rng_key is not None else (None, None)
seeded_fn = handlers.seed(f, subkey) if subkey is not None else f
for subs_type, subs_map in substitute_stack:
subs_fn = partial(_subs_wrapper, subs_map, i, length)
if subs_type == 'condition':
seeded_fn = handlers.condition(seeded_fn, condition_fn=subs_fn)
elif subs_type == 'substitute':
seeded_fn = handlers.substitute(seeded_fn, substitute_fn=subs_fn)
if init:
with handlers.scope(prefix="_init"):
new_carry, y = seeded_fn(carry, x)
trace = {}
else:
with handlers.block(), packed_trace() as trace, promote_shapes(), enum(), markov():
# Like scan_wrapper, we collect the trace of scan's transition function
# `seeded_fn` here. To put time dimension to the correct position, we need to
# promote shapes to make `fn` and `value`
# at each site have the same batch dims (e.g. if `fn.batch_shape = (2, 3)`,
# and value's batch_shape is (3,), then we promote shape of
# value so that its batch shape is (1, 3)).
new_carry, y = config_enumerate(seeded_fn)(carry, x)
# store shape of new_carry at a global variable
nonlocal carry_shape_at_t1
carry_shape_at_t1 = [jnp.shape(x) for x in tree_flatten(new_carry)[0]]
# make new_carry have the same shape as carry
# FIXME: is this rigorous?
new_carry = tree_multimap(lambda a, b: jnp.reshape(a, jnp.shape(b)),
new_carry, carry)
return (i + jnp.array(1), rng_key, new_carry), (PytreeTrace(trace), y)
with markov():
wrapped_carry = (0, rng_key, init)
wrapped_carry, (_, y0) = body_fn(wrapped_carry, x0)
if length == 1:
ys = tree_map(lambda x: jnp.expand_dims(x, 0), y0)
return wrapped_carry, (PytreeTrace({}), ys)
wrapped_carry, (pytree_trace, ys) = lax.scan(body_fn, wrapped_carry, xs_, length - 1, reverse)
first_var = None
for name, site in pytree_trace.trace.items():
# add `time` dimension, the name will be '_time_{first variable in the trace}'
if first_var is None:
first_var = name
leftmost_dim = min(site['infer']['dim_to_name'])
site['infer']['dim_to_name'][leftmost_dim - 1] = '_time_{}'.format(first_var)
# similar to carry, we need to reshape due to shape alternating in markov
ys = tree_multimap(lambda z0, z: jnp.reshape(z, z.shape[:1] + jnp.shape(z0)), y0, ys)
# we also need to reshape `carry` to match sequential behavior
if length % 2 == 0:
t, rng_key, carry = wrapped_carry
flatten_carry, treedef = tree_flatten(carry)
flatten_carry = [jnp.reshape(x, t1_shape)
for x, t1_shape in zip(flatten_carry, carry_shape_at_t1)]
carry = tree_unflatten(treedef, flatten_carry)
wrapped_carry = (t, rng_key, carry)
return wrapped_carry, (pytree_trace, ys)
def scan_wrapper(f, init, xs, length, reverse, rng_key=None, substitute_stack=[], enum=False):
if length is None:
length = tree_flatten(xs)[0][0].shape[0]
if enum:
return scan_enum(f, init, xs, length, reverse, rng_key, substitute_stack)
def body_fn(wrapped_carry, x):
i, rng_key, carry = wrapped_carry
rng_key, subkey = random.split(rng_key) if rng_key is not None else (None, None)
with handlers.block():
seeded_fn = handlers.seed(f, subkey) if subkey is not None else f
for subs_type, subs_map in substitute_stack:
subs_fn = partial(_subs_wrapper, subs_map, i, length)
if subs_type == 'condition':
seeded_fn = handlers.condition(seeded_fn, condition_fn=subs_fn)
elif subs_type == 'substitute':
seeded_fn = handlers.substitute(seeded_fn, substitute_fn=subs_fn)
with handlers.trace() as trace:
carry, y = seeded_fn(carry, x)
return (i + 1, rng_key, carry), (PytreeTrace(trace), y)
return lax.scan(body_fn, (jnp.array(0), rng_key, init), xs, length=length, reverse=reverse)
def scan(f, init, xs, length=None, reverse=False):
"""
This primitive scans a function over the leading array axes of
`xs` while carrying along state. See :func:`jax.lax.scan` for more
information.
**Usage**:
.. doctest::
>>> import numpy as np
>>> import numpyro
>>> import numpyro.distributions as dist
>>> from numpyro.contrib.control_flow import scan
>>>
>>> def gaussian_hmm(y=None, T=10):
... def transition(x_prev, y_curr):
... x_curr = numpyro.sample('x', dist.Normal(x_prev, 1))
... y_curr = numpyro.sample('y', dist.Normal(x_curr, 1), obs=y_curr)
... return x_curr, (x_curr, y_curr)
...
... x0 = numpyro.sample('x_0', dist.Normal(0, 1))
... _, (x, y) = scan(transition, x0, y, length=T)
... return (x, y)
>>>
>>> # here we do some quick tests
>>> with numpyro.handlers.seed(rng_seed=0):
... x, y = gaussian_hmm(np.arange(10.))
>>> assert x.shape == (10,) and y.shape == (10,)
>>> assert np.all(y == np.arange(10))
>>>
>>> with numpyro.handlers.seed(rng_seed=0): # generative
... x, y = gaussian_hmm()
>>> assert x.shape == (10,) and y.shape == (10,)
.. warning:: This is an experimental utility function that allows users to use
JAX control flow with NumPyro's effect handlers. Currently, `sample` and
`deterministic` sites within the scan body `f` are supported. If you notice
that any effect handlers or distributions are unsupported, please file an issue.
.. note:: It is ambiguous to align `scan` dimension inside a `plate` context.
So the following pattern won't be supported
.. code-block:: python
with numpyro.plate('N', 10):
last, ys = scan(f, init, xs)
All `plate` statements should be put inside `f`. For example, the corresponding
working code is
.. code-block:: python
def g(*args, **kwargs):
with numpyro.plate('N', 10):
return f(*arg, **kwargs)
last, ys = scan(g, init, xs)
.. note:: Nested scan is currently not supported.
.. note:: We can scan over discrete latent variables in `f`. The joint density is
evaluated using parallel-scan (reference [1]) over time dimension, which
reduces parallel complexity to `O(log(length))`.
Currently, only the equivalence to
:class:`~numpyro.contrib.funsor.enum_messenger.markov(history_size=1)`
is supported. A :class:`~numpyro.handlers.trace` of `scan` with discrete latent
variables will contain the following sites:
+ init sites: those sites belong to the first trace of `f`. Each of
them will have name prefixed with `_init/`.
+ scanned sites: those sites collect the values of the remaining scan
loop over `f`. An addition time dimension `_time_foo` will be
added to those sites, where `foo` is the name of the first site
appeared in `f`.
Not all transition functions `f` are supported. All of the restrictions from
        Pyro's enumeration tutorial [2] still apply here. In addition, no site
        outside of `scan` should depend on the first output of `scan`
        (the last carry value).
** References **
1. *Temporal Parallelization of Bayesian Smoothers*,
<NAME>, <NAME>
(https://arxiv.org/abs/1905.13002)
2. *Inference with Discrete Latent Variables*
(http://pyro.ai/examples/enumeration.html#Dependencies-among-plates)
:param callable f: a function to be scanned.
:param init: the initial carrying state
:param xs: the values over which we scan along the leading axis. This can
be any JAX pytree (e.g. list/dict of arrays).
    :param length: optional value specifying the length of `xs`; it must be
        provided when `xs` is an empty pytree (e.g. None)
:param bool reverse: optional boolean specifying whether to run the scan iteration
forward (the default) or in reverse
:return: output of scan, quoted from :func:`jax.lax.scan` docs:
"pair of type (c, [b]) where the first element represents the final loop
carry value and the second element represents the stacked outputs of the
second output of f when scanned over the leading axis of the inputs".
"""
# if there are no active Messengers, we just run and return it as expected:
if not _PYRO_STACK:
(length, rng_key, carry), (pytree_trace, ys) = scan_wrapper(
f, init, xs, length=length, reverse=reverse)
else:
# Otherwise, we initialize a message...
initial_msg = {
'type': 'control_flow',
'fn': scan_wrapper,
'args': (f, init, xs, length, reverse),
'kwargs': {'rng_key': None,
'substitute_stack': []},
'value': None,
}
# ...and use apply_stack to send it to the Messengers
msg = apply_stack(initial_msg)
(length, rng_key, carry), (pytree_trace, ys) = msg['value']
if not msg["kwargs"].get("enum", False):
for msg in pytree_trace.trace.values():
apply_stack(msg)
else:
from numpyro.contrib.funsor import to_funsor
from numpyro.contrib.funsor.enum_messenger import LocalNamedMessenger
for msg in pytree_trace.trace.values():
with LocalNamedMessenger():
dim_to_name = msg["infer"].get("dim_to_name")
to_funsor(msg["value"], dim_to_name=OrderedDict([(k, dim_to_name[k]) for k in sorted(dim_to_name)]))
apply_stack(msg)
return carry, ys
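Beyond the doctest in the docstring, the series-substitution branch of `_subs_wrapper` can be exercised directly: substituting a `(length, ...)`-shaped array for a scanned site makes each step receive `value[i]`. A sketch under that assumption:

import jax.numpy as jnp
import numpyro
import numpyro.distributions as dist
from numpyro import handlers
from numpyro.contrib.control_flow import scan

def model(T=5):
    def transition(x_prev, _):
        x = numpyro.sample('x', dist.Normal(x_prev, 1.))
        return x, x
    _, xs = scan(transition, 0., None, length=T)
    return xs

series = jnp.arange(5.)
with handlers.seed(rng_seed=0):
    out = handlers.substitute(model, data={'x': series})()
# each step is forced to series[i], so `out` should equal `series`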
| # Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
from collections import OrderedDict
from functools import partial
from jax import lax, random, tree_flatten, tree_map, tree_multimap, tree_unflatten
import jax.numpy as jnp
from jax.tree_util import register_pytree_node_class
from numpyro import handlers
from numpyro.primitives import _PYRO_STACK, Messenger, apply_stack
from numpyro.util import not_jax_tracer
@register_pytree_node_class
class PytreeTrace:
def __init__(self, trace):
self.trace = trace
def tree_flatten(self):
trace, aux_trace = {}, {}
for name, site in self.trace.items():
if site['type'] in ['sample', 'deterministic']:
trace[name], aux_trace[name] = {}, {'_control_flow_done': True}
for key in site:
if key in ['fn', 'args', 'value', 'intermediates']:
trace[name][key] = site[key]
# scanned sites have stop field because we trace them inside a block handler
elif key != 'stop':
aux_trace[name][key] = site[key]
return (trace,), aux_trace
@classmethod
def tree_unflatten(cls, aux_data, children):
trace, = children
for name, site in trace.items():
site.update(aux_data[name])
return cls(trace)
def _subs_wrapper(subs_map, i, length, site):
value = None
if isinstance(subs_map, dict) and site['name'] in subs_map:
value = subs_map[site['name']]
elif callable(subs_map):
rng_key = site['kwargs'].get('rng_key')
subs_map = handlers.seed(subs_map, rng_seed=rng_key) if rng_key is not None else subs_map
value = subs_map(site)
if value is not None:
value_ndim = jnp.ndim(value)
sample_shape = site['kwargs']['sample_shape']
fn_ndim = len(sample_shape + site['fn'].shape())
if value_ndim == fn_ndim:
# this branch happens when substitute_fn is init_strategy,
# where we apply init_strategy to each element in the scanned series
return value
elif value_ndim == fn_ndim + 1:
# this branch happens when we substitute a series of values
shape = jnp.shape(value)
if shape[0] == length:
return value[i]
elif shape[0] < length:
rng_key = site['kwargs']['rng_key']
assert rng_key is not None
# we use the substituted values if i < shape[0]
# and generate a new sample otherwise
return lax.cond(i < shape[0],
(value, i),
lambda val: val[0][val[1]],
rng_key,
lambda val: site['fn'](rng_key=val, sample_shape=sample_shape))
else:
raise RuntimeError(f"Substituted value for site {site['name']} "
"requires length less than or equal to scan length."
f" Expected length <= {length}, but got {shape[0]}.")
else:
raise RuntimeError(f"Something goes wrong. Expected ndim = {fn_ndim} or {fn_ndim+1},"
f" but got {value_ndim}. This might happen when you use nested scan,"
" which is currently not supported. Please report the issue to us!")
class promote_shapes(Messenger):
# a helper messenger to promote shapes of `fn` and `value`
# + msg: fn.batch_shape = (2, 3), value.shape = (3,) + fn.event_shape
# process_message(msg): promote value so that value.shape = (1, 3) + fn.event_shape
# + msg: fn.batch_shape = (3,), value.shape = (2, 3) + fn.event_shape
# process_message(msg): promote fn so that fn.batch_shape = (1, 3).
def process_message(self, msg):
if msg["type"] == "sample" and msg["value"] is not None:
fn, value = msg["fn"], msg["value"]
value_batch_ndims = jnp.ndim(value) - fn.event_dim
fn_batch_ndim = len(fn.batch_shape)
prepend_shapes = (1,) * abs(fn_batch_ndim - value_batch_ndims)
if fn_batch_ndim > value_batch_ndims:
msg["value"] = jnp.reshape(value, prepend_shapes + jnp.shape(value))
elif fn_batch_ndim < value_batch_ndims:
msg["fn"] = tree_map(lambda x: jnp.reshape(x, prepend_shapes + jnp.shape(x)), fn)
def scan_enum(f, init, xs, length, reverse, rng_key=None, substitute_stack=None):
from numpyro.contrib.funsor import enum, config_enumerate, markov, trace as packed_trace
# XXX: This implementation only works for history size=1 but can be
# extended to history size > 1 by running `f` `history_size` times
# for initialization. However, `sequential_sum_product` does not
# support history size > 1, so we skip supporting it here.
# Note that `funsor.sum_product.sarkka_bilmes_product` does support history > 1.
if reverse:
x0 = tree_map(lambda x: x[-1], xs)
xs_ = tree_map(lambda x: x[:-1], xs)
else:
x0 = tree_map(lambda x: x[0], xs)
xs_ = tree_map(lambda x: x[1:], xs)
carry_shape_at_t1 = None
def body_fn(wrapped_carry, x, prefix=None):
i, rng_key, carry = wrapped_carry
init = True if (not_jax_tracer(i) and i == 0) else False
rng_key, subkey = random.split(rng_key) if rng_key is not None else (None, None)
seeded_fn = handlers.seed(f, subkey) if subkey is not None else f
for subs_type, subs_map in substitute_stack:
subs_fn = partial(_subs_wrapper, subs_map, i, length)
if subs_type == 'condition':
seeded_fn = handlers.condition(seeded_fn, condition_fn=subs_fn)
elif subs_type == 'substitute':
seeded_fn = handlers.substitute(seeded_fn, substitute_fn=subs_fn)
if init:
with handlers.scope(prefix="_init"):
new_carry, y = seeded_fn(carry, x)
trace = {}
else:
with handlers.block(), packed_trace() as trace, promote_shapes(), enum(), markov():
# Like scan_wrapper, we collect the trace of scan's transition function
# `seeded_fn` here. To put time dimension to the correct position, we need to
# promote shapes to make `fn` and `value`
# at each site have the same batch dims (e.g. if `fn.batch_shape = (2, 3)`,
# and value's batch_shape is (3,), then we promote shape of
# value so that its batch shape is (1, 3)).
new_carry, y = config_enumerate(seeded_fn)(carry, x)
            # store the shape of new_carry in a nonlocal variable
nonlocal carry_shape_at_t1
carry_shape_at_t1 = [jnp.shape(x) for x in tree_flatten(new_carry)[0]]
# make new_carry have the same shape as carry
# FIXME: is this rigorous?
new_carry = tree_multimap(lambda a, b: jnp.reshape(a, jnp.shape(b)),
new_carry, carry)
return (i + jnp.array(1), rng_key, new_carry), (PytreeTrace(trace), y)
with markov():
wrapped_carry = (0, rng_key, init)
wrapped_carry, (_, y0) = body_fn(wrapped_carry, x0)
if length == 1:
ys = tree_map(lambda x: jnp.expand_dims(x, 0), y0)
return wrapped_carry, (PytreeTrace({}), ys)
wrapped_carry, (pytree_trace, ys) = lax.scan(body_fn, wrapped_carry, xs_, length - 1, reverse)
first_var = None
for name, site in pytree_trace.trace.items():
# add `time` dimension, the name will be '_time_{first variable in the trace}'
if first_var is None:
first_var = name
leftmost_dim = min(site['infer']['dim_to_name'])
site['infer']['dim_to_name'][leftmost_dim - 1] = '_time_{}'.format(first_var)
    # similar to carry, we need to reshape because shapes alternate across markov steps
ys = tree_multimap(lambda z0, z: jnp.reshape(z, z.shape[:1] + jnp.shape(z0)), y0, ys)
# we also need to reshape `carry` to match sequential behavior
if length % 2 == 0:
t, rng_key, carry = wrapped_carry
flatten_carry, treedef = tree_flatten(carry)
flatten_carry = [jnp.reshape(x, t1_shape)
for x, t1_shape in zip(flatten_carry, carry_shape_at_t1)]
carry = tree_unflatten(treedef, flatten_carry)
wrapped_carry = (t, rng_key, carry)
return wrapped_carry, (pytree_trace, ys)
def scan_wrapper(f, init, xs, length, reverse, rng_key=None, substitute_stack=[], enum=False):
if length is None:
length = tree_flatten(xs)[0][0].shape[0]
if enum:
return scan_enum(f, init, xs, length, reverse, rng_key, substitute_stack)
def body_fn(wrapped_carry, x):
i, rng_key, carry = wrapped_carry
rng_key, subkey = random.split(rng_key) if rng_key is not None else (None, None)
with handlers.block():
seeded_fn = handlers.seed(f, subkey) if subkey is not None else f
for subs_type, subs_map in substitute_stack:
subs_fn = partial(_subs_wrapper, subs_map, i, length)
if subs_type == 'condition':
seeded_fn = handlers.condition(seeded_fn, condition_fn=subs_fn)
elif subs_type == 'substitute':
seeded_fn = handlers.substitute(seeded_fn, substitute_fn=subs_fn)
with handlers.trace() as trace:
carry, y = seeded_fn(carry, x)
return (i + 1, rng_key, carry), (PytreeTrace(trace), y)
return lax.scan(body_fn, (jnp.array(0), rng_key, init), xs, length=length, reverse=reverse)
def scan(f, init, xs, length=None, reverse=False):
"""
This primitive scans a function over the leading array axes of
`xs` while carrying along state. See :func:`jax.lax.scan` for more
information.
**Usage**:
.. doctest::
>>> import numpy as np
>>> import numpyro
>>> import numpyro.distributions as dist
>>> from numpyro.contrib.control_flow import scan
>>>
>>> def gaussian_hmm(y=None, T=10):
... def transition(x_prev, y_curr):
... x_curr = numpyro.sample('x', dist.Normal(x_prev, 1))
... y_curr = numpyro.sample('y', dist.Normal(x_curr, 1), obs=y_curr)
... return x_curr, (x_curr, y_curr)
...
... x0 = numpyro.sample('x_0', dist.Normal(0, 1))
... _, (x, y) = scan(transition, x0, y, length=T)
... return (x, y)
>>>
>>> # here we do some quick tests
>>> with numpyro.handlers.seed(rng_seed=0):
... x, y = gaussian_hmm(np.arange(10.))
>>> assert x.shape == (10,) and y.shape == (10,)
>>> assert np.all(y == np.arange(10))
>>>
>>> with numpyro.handlers.seed(rng_seed=0): # generative
... x, y = gaussian_hmm()
>>> assert x.shape == (10,) and y.shape == (10,)
.. warning:: This is an experimental utility function that allows users to use
JAX control flow with NumPyro's effect handlers. Currently, `sample` and
`deterministic` sites within the scan body `f` are supported. If you notice
that any effect handlers or distributions are unsupported, please file an issue.
    .. note:: Aligning the `scan` dimension inside a `plate` context is
        ambiguous, so the following pattern is not supported
.. code-block:: python
with numpyro.plate('N', 10):
last, ys = scan(f, init, xs)
All `plate` statements should be put inside `f`. For example, the corresponding
working code is
.. code-block:: python
def g(*args, **kwargs):
with numpyro.plate('N', 10):
                return f(*args, **kwargs)
last, ys = scan(g, init, xs)
.. note:: Nested scan is currently not supported.
.. note:: We can scan over discrete latent variables in `f`. The joint density is
evaluated using parallel-scan (reference [1]) over time dimension, which
reduces parallel complexity to `O(log(length))`.
Currently, only the equivalence to
:class:`~numpyro.contrib.funsor.enum_messenger.markov(history_size=1)`
is supported. A :class:`~numpyro.handlers.trace` of `scan` with discrete latent
variables will contain the following sites:
+ init sites: those sites belong to the first trace of `f`. Each of
        them will have a name prefixed with `_init/`.
+ scanned sites: those sites collect the values of the remaining scan
        loop over `f`. An additional time dimension `_time_foo` will be
        added to those sites, where `foo` is the name of the first site
        that appears in `f`.
Not all transition functions `f` are supported. All of the restrictions from
    Pyro's enumeration tutorial [2] still apply here. In addition, no site
    outside of `scan` should depend on the first output of `scan`
    (the last carry value).
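    For example, a minimal sketch of a transition function with a discrete
    latent state (the variable names and sizes here are illustrative):

    .. code-block:: python

        def transition(x_prev, y_curr):
            probs = transition_matrix[x_prev]  # row of a (K, K) matrix
            x_curr = numpyro.sample('x', dist.Categorical(probs))
            numpyro.sample('y', dist.Normal(locs[x_curr], 1.), obs=y_curr)
            return x_curr, None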
    **References**
1. *Temporal Parallelization of Bayesian Smoothers*,
<NAME>, <NAME>
(https://arxiv.org/abs/1905.13002)
2. *Inference with Discrete Latent Variables*
(http://pyro.ai/examples/enumeration.html#Dependencies-among-plates)
:param callable f: a function to be scanned.
:param init: the initial carrying state
:param xs: the values over which we scan along the leading axis. This can
be any JAX pytree (e.g. list/dict of arrays).
    :param length: optional value specifying the length of `xs`;
        it must be provided when `xs` is an empty pytree (e.g. None)
:param bool reverse: optional boolean specifying whether to run the scan iteration
forward (the default) or in reverse
:return: output of scan, quoted from :func:`jax.lax.scan` docs:
"pair of type (c, [b]) where the first element represents the final loop
carry value and the second element represents the stacked outputs of the
second output of f when scanned over the leading axis of the inputs".
"""
# if there are no active Messengers, we just run and return it as expected:
if not _PYRO_STACK:
(length, rng_key, carry), (pytree_trace, ys) = scan_wrapper(
f, init, xs, length=length, reverse=reverse)
else:
# Otherwise, we initialize a message...
initial_msg = {
'type': 'control_flow',
'fn': scan_wrapper,
'args': (f, init, xs, length, reverse),
'kwargs': {'rng_key': None,
'substitute_stack': []},
'value': None,
}
# ...and use apply_stack to send it to the Messengers
msg = apply_stack(initial_msg)
(length, rng_key, carry), (pytree_trace, ys) = msg['value']
if not msg["kwargs"].get("enum", False):
for msg in pytree_trace.trace.values():
apply_stack(msg)
else:
from numpyro.contrib.funsor import to_funsor
from numpyro.contrib.funsor.enum_messenger import LocalNamedMessenger
for msg in pytree_trace.trace.values():
with LocalNamedMessenger():
dim_to_name = msg["infer"].get("dim_to_name")
to_funsor(msg["value"], dim_to_name=OrderedDict([(k, dim_to_name[k]) for k in sorted(dim_to_name)]))
apply_stack(msg)
return carry, ys
| en | 0.773069 | # Copyright Contributors to the Pyro project. # SPDX-License-Identifier: Apache-2.0 # scanned sites have stop field because we trace them inside a block handler # this branch happens when substitute_fn is init_strategy, # where we apply init_strategy to each element in the scanned series # this branch happens when we substitute a series of values # we use the substituted values if i < shape[0] # and generate a new sample otherwise # a helper messenger to promote shapes of `fn` and `value` # + msg: fn.batch_shape = (2, 3), value.shape = (3,) + fn.event_shape # process_message(msg): promote value so that value.shape = (1, 3) + fn.event_shape # + msg: fn.batch_shape = (3,), value.shape = (2, 3) + fn.event_shape # process_message(msg): promote fn so that fn.batch_shape = (1, 3). # XXX: This implementation only works for history size=1 but can be # extended to history size > 1 by running `f` `history_size` times # for initialization. However, `sequential_sum_product` does not # support history size > 1, so we skip supporting it here. # Note that `funsor.sum_product.sarkka_bilmes_product` does support history > 1. # Like scan_wrapper, we collect the trace of scan's transition function # `seeded_fn` here. To put time dimension to the correct position, we need to # promote shapes to make `fn` and `value` # at each site have the same batch dims (e.g. if `fn.batch_shape = (2, 3)`, # and value's batch_shape is (3,), then we promote shape of # value so that its batch shape is (1, 3)). # store shape of new_carry at a global variable # make new_carry have the same shape as carry # FIXME: is this rigorous? # add `time` dimension, the name will be '_time_{first variable in the trace}' # similar to carry, we need to reshape due to shape alternating in markov # we also need to reshape `carry` to match sequential behavior This primitive scans a function over the leading array axes of `xs` while carrying along state. See :func:`jax.lax.scan` for more information. **Usage**: .. doctest:: >>> import numpy as np >>> import numpyro >>> import numpyro.distributions as dist >>> from numpyro.contrib.control_flow import scan >>> >>> def gaussian_hmm(y=None, T=10): ... def transition(x_prev, y_curr): ... x_curr = numpyro.sample('x', dist.Normal(x_prev, 1)) ... y_curr = numpyro.sample('y', dist.Normal(x_curr, 1), obs=y_curr) ... return x_curr, (x_curr, y_curr) ... ... x0 = numpyro.sample('x_0', dist.Normal(0, 1)) ... _, (x, y) = scan(transition, x0, y, length=T) ... return (x, y) >>> >>> # here we do some quick tests >>> with numpyro.handlers.seed(rng_seed=0): ... x, y = gaussian_hmm(np.arange(10.)) >>> assert x.shape == (10,) and y.shape == (10,) >>> assert np.all(y == np.arange(10)) >>> >>> with numpyro.handlers.seed(rng_seed=0): # generative ... x, y = gaussian_hmm() >>> assert x.shape == (10,) and y.shape == (10,) .. warning:: This is an experimental utility function that allows users to use JAX control flow with NumPyro's effect handlers. Currently, `sample` and `deterministic` sites within the scan body `f` are supported. If you notice that any effect handlers or distributions are unsupported, please file an issue. .. note:: It is ambiguous to align `scan` dimension inside a `plate` context. So the following pattern won't be supported .. code-block:: python with numpyro.plate('N', 10): last, ys = scan(f, init, xs) All `plate` statements should be put inside `f`. For example, the corresponding working code is .. 
code-block:: python def g(*args, **kwargs): with numpyro.plate('N', 10): return f(*arg, **kwargs) last, ys = scan(g, init, xs) .. note:: Nested scan is currently not supported. .. note:: We can scan over discrete latent variables in `f`. The joint density is evaluated using parallel-scan (reference [1]) over time dimension, which reduces parallel complexity to `O(log(length))`. Currently, only the equivalence to :class:`~numpyro.contrib.funsor.enum_messenger.markov(history_size=1)` is supported. A :class:`~numpyro.handlers.trace` of `scan` with discrete latent variables will contain the following sites: + init sites: those sites belong to the first trace of `f`. Each of them will have name prefixed with `_init/`. + scanned sites: those sites collect the values of the remaining scan loop over `f`. An addition time dimension `_time_foo` will be added to those sites, where `foo` is the name of the first site appeared in `f`. Not all transition functions `f` are supported. All of the restrictions from Pyro's enumeration tutorial [2] still apply here. In addition, there should not have any site outside of `scan` depend on the first output of `scan` (the last carry value). ** References ** 1. *Temporal Parallelization of Bayesian Smoothers*, <NAME>, <NAME> (https://arxiv.org/abs/1905.13002) 2. *Inference with Discrete Latent Variables* (http://pyro.ai/examples/enumeration.html#Dependencies-among-plates) :param callable f: a function to be scanned. :param init: the initial carrying state :param xs: the values over which we scan along the leading axis. This can be any JAX pytree (e.g. list/dict of arrays). :param length: optional value specifying the length of `xs` but can be used when `xs` is an empty pytree (e.g. None) :param bool reverse: optional boolean specifying whether to run the scan iteration forward (the default) or in reverse :return: output of scan, quoted from :func:`jax.lax.scan` docs: "pair of type (c, [b]) where the first element represents the final loop carry value and the second element represents the stacked outputs of the second output of f when scanned over the leading axis of the inputs". # if there are no active Messengers, we just run and return it as expected: # Otherwise, we initialize a message... # ...and use apply_stack to send it to the Messengers | 1.980766 | 2 |
src/catalog/migrations/0003_remove_productattributevalue_name.py | earth-emoji/dennea | 0 | 7591 | <filename>src/catalog/migrations/0003_remove_productattributevalue_name.py
# Generated by Django 2.2.12 on 2020-06-10 01:11
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('catalog', '0002_auto_20200610_0019'),
]
operations = [
migrations.RemoveField(
model_name='productattributevalue',
name='name',
),
]
| <filename>src/catalog/migrations/0003_remove_productattributevalue_name.py
# Generated by Django 2.2.12 on 2020-06-10 01:11
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('catalog', '0002_auto_20200610_0019'),
]
operations = [
migrations.RemoveField(
model_name='productattributevalue',
name='name',
),
]
| en | 0.737844 | # Generated by Django 2.2.12 on 2020-06-10 01:11 | 1.529524 | 2 |
e2xgrader/preprocessors/overwritecells.py | divindevaiah/e2xgrader | 2 | 7592 | import json
from nbformat.notebooknode import NotebookNode
from nbconvert.exporters.exporter import ResourcesDict
from typing import Tuple
from nbgrader.api import MissingEntry
from nbgrader.preprocessors import OverwriteCells as NbgraderOverwriteCells
from ..utils.extra_cells import is_singlechoice, is_multiplechoice
class OverwriteCells(NbgraderOverwriteCells):
def preprocess_cell(
self, cell: NotebookNode, resources: ResourcesDict, cell_index: int
) -> Tuple[NotebookNode, ResourcesDict]:
if not (is_singlechoice(cell) or is_multiplechoice(cell)):
return super().preprocess_cell(cell, resources, cell_index)
grade_id = cell.metadata.get("nbgrader", {}).get("grade_id", None)
if grade_id is None:
return cell, resources
try:
source_cell = self.gradebook.find_source_cell(
grade_id, self.notebook_id, self.assignment_id
)
except MissingEntry:
self.log.warning(f"Cell {grade_id} does not exist in database")
del cell.metadata.nbgrader["grade_id"]
return cell, resources
cell.metadata.extended_cell.source = json.loads(source_cell.source)
return cell, resources
| import json
from nbformat.notebooknode import NotebookNode
from nbconvert.exporters.exporter import ResourcesDict
from typing import Tuple
from nbgrader.api import MissingEntry
from nbgrader.preprocessors import OverwriteCells as NbgraderOverwriteCells
from ..utils.extra_cells import is_singlechoice, is_multiplechoice
class OverwriteCells(NbgraderOverwriteCells):
def preprocess_cell(
self, cell: NotebookNode, resources: ResourcesDict, cell_index: int
) -> Tuple[NotebookNode, ResourcesDict]:
if not (is_singlechoice(cell) or is_multiplechoice(cell)):
return super().preprocess_cell(cell, resources, cell_index)
grade_id = cell.metadata.get("nbgrader", {}).get("grade_id", None)
if grade_id is None:
return cell, resources
try:
source_cell = self.gradebook.find_source_cell(
grade_id, self.notebook_id, self.assignment_id
)
except MissingEntry:
self.log.warning(f"Cell {grade_id} does not exist in database")
del cell.metadata.nbgrader["grade_id"]
return cell, resources
cell.metadata.extended_cell.source = json.loads(source_cell.source)
return cell, resources
| none | 1 | 2.132648 | 2 |
|
tools/pdf2txt.py | ehtec/pdfminer.six | 0 | 7593 | <reponame>ehtec/pdfminer.six
#!/usr/bin/env python3
"""A command line tool for extracting text and images from PDF and
outputting it to plain text, html, xml or tags."""
import argparse
import logging
import sys
from typing import Any, Container, Iterable, List, Optional
import pdfminer.high_level
from pdfminer.layout import LAParams
from pdfminer.utils import AnyIO
logging.basicConfig()
OUTPUT_TYPES = ((".htm", "html"),
(".html", "html"),
(".xml", "xml"),
(".tag", "tag"))
def float_or_disabled(x: str) -> Optional[float]:
if x.lower().strip() == "disabled":
return None
try:
return float(x)
except ValueError:
raise argparse.ArgumentTypeError("invalid float value: {}".format(x))
def extract_text(
files: Iterable[str] = [],
outfile: str = '-',
laparams: Optional[LAParams] = None,
output_type: str = 'text',
codec: str = 'utf-8',
strip_control: bool = False,
maxpages: int = 0,
page_numbers: Optional[Container[int]] = None,
password: str = "",
scale: float = 1.0,
rotation: int = 0,
layoutmode: str = 'normal',
output_dir: Optional[str] = None,
debug: bool = False,
disable_caching: bool = False,
**kwargs: Any
) -> AnyIO:
if not files:
raise ValueError("Must provide files to work upon!")
if output_type == "text" and outfile != "-":
for override, alttype in OUTPUT_TYPES:
if outfile.endswith(override):
output_type = alttype
if outfile == "-":
outfp: AnyIO = sys.stdout
if sys.stdout.encoding is not None:
codec = 'utf-8'
else:
outfp = open(outfile, "wb")
for fname in files:
with open(fname, "rb") as fp:
pdfminer.high_level.extract_text_to_fp(fp, **locals())
return outfp
def parse_args(args: Optional[List[str]]) -> argparse.Namespace:
parser = argparse.ArgumentParser(description=__doc__, add_help=True)
parser.add_argument(
"files", type=str, default=None, nargs="+",
help="One or more paths to PDF files.")
parser.add_argument(
"--version", "-v", action="version",
version="pdfminer.six v{}".format(pdfminer.__version__))
parser.add_argument(
"--debug", "-d", default=False, action="store_true",
help="Use debug logging level.")
parser.add_argument(
"--disable-caching", "-C", default=False, action="store_true",
help="If caching or resources, such as fonts, should be disabled.")
parse_params = parser.add_argument_group(
'Parser', description='Used during PDF parsing')
parse_params.add_argument(
"--page-numbers", type=int, default=None, nargs="+",
help="A space-seperated list of page numbers to parse.")
parse_params.add_argument(
"--pagenos", "-p", type=str,
help="A comma-separated list of page numbers to parse. "
"Included for legacy applications, use --page-numbers "
"for more idiomatic argument entry.")
parse_params.add_argument(
"--maxpages", "-m", type=int, default=0,
help="The maximum number of pages to parse.")
parse_params.add_argument(
"--password", "-P", type=str, default="",
help="The password to use for decrypting PDF file.")
parse_params.add_argument(
"--rotation", "-R", default=0, type=int,
help="The number of degrees to rotate the PDF "
"before other types of processing.")
la_params = LAParams() # will be used for defaults
la_param_group = parser.add_argument_group(
'Layout analysis', description='Used during layout analysis.')
la_param_group.add_argument(
"--no-laparams", "-n", default=False, action="store_true",
help="If layout analysis parameters should be ignored.")
la_param_group.add_argument(
"--detect-vertical", "-V", default=la_params.detect_vertical,
action="store_true",
help="If vertical text should be considered during layout analysis")
la_param_group.add_argument(
"--line-overlap", type=float, default=la_params.line_overlap,
help='If two characters have more overlap than this they '
'are considered to be on the same line. The overlap is specified '
'relative to the minimum height of both characters.')
la_param_group.add_argument(
"--char-margin", "-M", type=float, default=la_params.char_margin,
help="If two characters are closer together than this margin they "
"are considered to be part of the same line. The margin is "
"specified relative to the width of the character.")
la_param_group.add_argument(
"--word-margin", "-W", type=float, default=la_params.word_margin,
help="If two characters on the same line are further apart than this "
"margin then they are considered to be two separate words, and "
"an intermediate space will be added for readability. The margin "
"is specified relative to the width of the character.")
la_param_group.add_argument(
"--line-margin", "-L", type=float, default=la_params.line_margin,
help="If two lines are close together they are considered to "
"be part of the same paragraph. The margin is specified "
"relative to the height of a line.")
la_param_group.add_argument(
"--boxes-flow", "-F", type=float_or_disabled,
default=la_params.boxes_flow,
help="Specifies how much a horizontal and vertical position of a "
"text matters when determining the order of lines. The value "
"should be within the range of -1.0 (only horizontal position "
"matters) to +1.0 (only vertical position matters). You can also "
"pass `disabled` to disable advanced layout analysis, and "
"instead return text based on the position of the bottom left "
"corner of the text box.")
la_param_group.add_argument(
"--all-texts", "-A", default=la_params.all_texts, action="store_true",
help="If layout analysis should be performed on text in figures.")
output_params = parser.add_argument_group(
'Output', description='Used during output generation.')
output_params.add_argument(
"--outfile", "-o", type=str, default="-",
help="Path to file where output is written. "
"Or \"-\" (default) to write to stdout.")
output_params.add_argument(
"--output_type", "-t", type=str, default="text",
help="Type of output to generate {text,html,xml,tag}.")
output_params.add_argument(
"--codec", "-c", type=str, default="utf-8",
help="Text encoding to use in output file.")
output_params.add_argument(
"--output-dir", "-O", default=None,
help="The output directory to put extracted images in. If not given, "
"images are not extracted.")
output_params.add_argument(
"--layoutmode", "-Y", default="normal",
type=str, help="Type of layout to use when generating html "
"{normal,exact,loose}. If normal,each line is"
" positioned separately in the html. If exact"
", each character is positioned separately in"
" the html. If loose, same result as normal "
"but with an additional newline after each "
"text line. Only used when output_type is html.")
output_params.add_argument(
"--scale", "-s", type=float, default=1.0,
help="The amount of zoom to use when generating html file. "
"Only used when output_type is html.")
output_params.add_argument(
"--strip-control", "-S", default=False, action="store_true",
help="Remove control statement from text. "
"Only used when output_type is xml.")
parsed_args = parser.parse_args(args=args)
# Propagate parsed layout parameters to LAParams object
if parsed_args.no_laparams:
parsed_args.laparams = None
else:
parsed_args.laparams = LAParams(
line_overlap=parsed_args.line_overlap,
char_margin=parsed_args.char_margin,
line_margin=parsed_args.line_margin,
word_margin=parsed_args.word_margin,
boxes_flow=parsed_args.boxes_flow,
detect_vertical=parsed_args.detect_vertical,
all_texts=parsed_args.all_texts,
)
if parsed_args.page_numbers:
parsed_args.page_numbers = {x-1 for x in parsed_args.page_numbers}
if parsed_args.pagenos:
parsed_args.page_numbers = {int(x)-1 for x in parsed_args.pagenos.split(",")}
if parsed_args.output_type == "text" and parsed_args.outfile != "-":
for override, alttype in OUTPUT_TYPES:
if parsed_args.outfile.endswith(override):
parsed_args.output_type = alttype
return parsed_args
def main(args: Optional[List[str]] = None) -> int:
parsed_args = parse_args(args)
outfp = extract_text(**vars(parsed_args))
outfp.close()
return 0
if __name__ == '__main__':
sys.exit(main())
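# Example invocations (illustrative):
#   python pdf2txt.py input.pdf
#   python pdf2txt.py -t html -o out.html input.pdf
#   python pdf2txt.py -t xml --page-numbers 1 2 input.pdf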
| #!/usr/bin/env python3
"""A command line tool for extracting text and images from PDF and
outputting it to plain text, html, xml or tags."""
import argparse
import logging
import sys
from typing import Any, Container, Iterable, List, Optional
import pdfminer.high_level
from pdfminer.layout import LAParams
from pdfminer.utils import AnyIO
logging.basicConfig()
OUTPUT_TYPES = ((".htm", "html"),
(".html", "html"),
(".xml", "xml"),
(".tag", "tag"))
def float_or_disabled(x: str) -> Optional[float]:
if x.lower().strip() == "disabled":
return None
try:
return float(x)
except ValueError:
raise argparse.ArgumentTypeError("invalid float value: {}".format(x))
def extract_text(
files: Iterable[str] = [],
outfile: str = '-',
laparams: Optional[LAParams] = None,
output_type: str = 'text',
codec: str = 'utf-8',
strip_control: bool = False,
maxpages: int = 0,
page_numbers: Optional[Container[int]] = None,
password: str = "",
scale: float = 1.0,
rotation: int = 0,
layoutmode: str = 'normal',
output_dir: Optional[str] = None,
debug: bool = False,
disable_caching: bool = False,
**kwargs: Any
) -> AnyIO:
if not files:
raise ValueError("Must provide files to work upon!")
if output_type == "text" and outfile != "-":
for override, alttype in OUTPUT_TYPES:
if outfile.endswith(override):
output_type = alttype
if outfile == "-":
outfp: AnyIO = sys.stdout
if sys.stdout.encoding is not None:
codec = 'utf-8'
else:
outfp = open(outfile, "wb")
for fname in files:
with open(fname, "rb") as fp:
pdfminer.high_level.extract_text_to_fp(fp, **locals())
return outfp
def parse_args(args: Optional[List[str]]) -> argparse.Namespace:
parser = argparse.ArgumentParser(description=__doc__, add_help=True)
parser.add_argument(
"files", type=str, default=None, nargs="+",
help="One or more paths to PDF files.")
parser.add_argument(
"--version", "-v", action="version",
version="pdfminer.six v{}".format(pdfminer.__version__))
parser.add_argument(
"--debug", "-d", default=False, action="store_true",
help="Use debug logging level.")
parser.add_argument(
"--disable-caching", "-C", default=False, action="store_true",
help="If caching or resources, such as fonts, should be disabled.")
parse_params = parser.add_argument_group(
'Parser', description='Used during PDF parsing')
parse_params.add_argument(
"--page-numbers", type=int, default=None, nargs="+",
help="A space-seperated list of page numbers to parse.")
parse_params.add_argument(
"--pagenos", "-p", type=str,
help="A comma-separated list of page numbers to parse. "
"Included for legacy applications, use --page-numbers "
"for more idiomatic argument entry.")
parse_params.add_argument(
"--maxpages", "-m", type=int, default=0,
help="The maximum number of pages to parse.")
parse_params.add_argument(
"--password", "-P", type=str, default="",
help="The password to use for decrypting PDF file.")
parse_params.add_argument(
"--rotation", "-R", default=0, type=int,
help="The number of degrees to rotate the PDF "
"before other types of processing.")
la_params = LAParams() # will be used for defaults
la_param_group = parser.add_argument_group(
'Layout analysis', description='Used during layout analysis.')
la_param_group.add_argument(
"--no-laparams", "-n", default=False, action="store_true",
help="If layout analysis parameters should be ignored.")
la_param_group.add_argument(
"--detect-vertical", "-V", default=la_params.detect_vertical,
action="store_true",
help="If vertical text should be considered during layout analysis")
la_param_group.add_argument(
"--line-overlap", type=float, default=la_params.line_overlap,
help='If two characters have more overlap than this they '
'are considered to be on the same line. The overlap is specified '
'relative to the minimum height of both characters.')
la_param_group.add_argument(
"--char-margin", "-M", type=float, default=la_params.char_margin,
help="If two characters are closer together than this margin they "
"are considered to be part of the same line. The margin is "
"specified relative to the width of the character.")
la_param_group.add_argument(
"--word-margin", "-W", type=float, default=la_params.word_margin,
help="If two characters on the same line are further apart than this "
"margin then they are considered to be two separate words, and "
"an intermediate space will be added for readability. The margin "
"is specified relative to the width of the character.")
la_param_group.add_argument(
"--line-margin", "-L", type=float, default=la_params.line_margin,
help="If two lines are close together they are considered to "
"be part of the same paragraph. The margin is specified "
"relative to the height of a line.")
la_param_group.add_argument(
"--boxes-flow", "-F", type=float_or_disabled,
default=la_params.boxes_flow,
help="Specifies how much a horizontal and vertical position of a "
"text matters when determining the order of lines. The value "
"should be within the range of -1.0 (only horizontal position "
"matters) to +1.0 (only vertical position matters). You can also "
"pass `disabled` to disable advanced layout analysis, and "
"instead return text based on the position of the bottom left "
"corner of the text box.")
la_param_group.add_argument(
"--all-texts", "-A", default=la_params.all_texts, action="store_true",
help="If layout analysis should be performed on text in figures.")
output_params = parser.add_argument_group(
'Output', description='Used during output generation.')
output_params.add_argument(
"--outfile", "-o", type=str, default="-",
help="Path to file where output is written. "
"Or \"-\" (default) to write to stdout.")
output_params.add_argument(
"--output_type", "-t", type=str, default="text",
help="Type of output to generate {text,html,xml,tag}.")
output_params.add_argument(
"--codec", "-c", type=str, default="utf-8",
help="Text encoding to use in output file.")
output_params.add_argument(
"--output-dir", "-O", default=None,
help="The output directory to put extracted images in. If not given, "
"images are not extracted.")
output_params.add_argument(
"--layoutmode", "-Y", default="normal",
type=str, help="Type of layout to use when generating html "
"{normal,exact,loose}. If normal,each line is"
" positioned separately in the html. If exact"
", each character is positioned separately in"
" the html. If loose, same result as normal "
"but with an additional newline after each "
"text line. Only used when output_type is html.")
output_params.add_argument(
"--scale", "-s", type=float, default=1.0,
help="The amount of zoom to use when generating html file. "
"Only used when output_type is html.")
output_params.add_argument(
"--strip-control", "-S", default=False, action="store_true",
help="Remove control statement from text. "
"Only used when output_type is xml.")
parsed_args = parser.parse_args(args=args)
# Propagate parsed layout parameters to LAParams object
if parsed_args.no_laparams:
parsed_args.laparams = None
else:
parsed_args.laparams = LAParams(
line_overlap=parsed_args.line_overlap,
char_margin=parsed_args.char_margin,
line_margin=parsed_args.line_margin,
word_margin=parsed_args.word_margin,
boxes_flow=parsed_args.boxes_flow,
detect_vertical=parsed_args.detect_vertical,
all_texts=parsed_args.all_texts,
)
if parsed_args.page_numbers:
parsed_args.page_numbers = {x-1 for x in parsed_args.page_numbers}
if parsed_args.pagenos:
parsed_args.page_numbers = {int(x)-1 for x in parsed_args.pagenos.split(",")}
if parsed_args.output_type == "text" and parsed_args.outfile != "-":
for override, alttype in OUTPUT_TYPES:
if parsed_args.outfile.endswith(override):
parsed_args.output_type = alttype
return parsed_args
def main(args: Optional[List[str]] = None) -> int:
parsed_args = parse_args(args)
outfp = extract_text(**vars(parsed_args))
outfp.close()
return 0
if __name__ == '__main__':
sys.exit(main()) | en | 0.42331 | #!/usr/bin/env python3 A command line tool for extracting text and images from PDF and output it to plain text, html, xml or tags. # will be used for defaults # Propagate parsed layout parameters to LAParams object | 3.088829 | 3 |
nython/nythonize.py | agungnasik57/nython | 53 | 7594 | """Compile Nim libraries as Python Extension Modules.
If you want your namespace to coexist with your python code, name this ponim.nim
and then your import will look like `from ponim.nim import adder` and
`from ponim import subtractor`. There must be a way to smooth that out in the
__init__.py file somehow.
Note that the file must be in the included source code dir. Currently it is
easiest to just put this in with your python code.
"""
from os import listdir, mkdir
from os.path import join, expanduser
from setuptools import Extension
from shutil import copyfile, rmtree
from typing import Sequence, Dict, List
import subprocess
import sys
import pathlib
# class NimLib(TypedDict):
#     """Wrapper around a lib name and path for nim code"""
#     name: str
#     path: str
def nythonize(nimbase: str, modules: Sequence[Dict[str, str]]) -> List[Extension]:
"""Compile a Nim library as a Python Extension Module.
`nimbase` is the path to `nimbase.h` on your system, which is needed for
    Python to compile the Nim-generated C code.
This builds a set of Extenstions, which are then passed back to setuptools.
"""
extensions = []
# Create a top level working dir
rmtree(join("build", "nim_build"), ignore_errors=True)
pathlib.Path(join("build", "nim_build")).mkdir(parents=True)
for module in modules:
module_dir = join("build", "nim_build", f"{module['name']}_build")
rmtree(module_dir, ignore_errors=True)
mkdir(module_dir)
subprocess.run(
[
"nim",
"compileToC",
"--compileOnly",
"-d:release",
"-d:ssl",
"--app:lib",
"--opt:speed",
"--gc:markAndSweep",
f"--nimcache:{module_dir}",
module["path"],
],
check=True,
stderr=sys.stdout.buffer,
)
copyfile(
nimbase, join(module_dir, "nimbase.h"),
)
sources = []
for c_source_file in listdir(module_dir):
if c_source_file.endswith(".c"):
sources.append(join(module_dir, c_source_file))
extensions.append(
Extension(
name=module["name"],
sources=sources,
extra_compile_args=[
"-flto",
"-ffast-math",
"-march=native",
"-mtune=native",
"-O3",
"-fno-ident",
"-fsingle-precision-constant",
],
extra_link_args=["-s"],
include_dirs=[module_dir],
)
)
return extensions
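# A minimal usage sketch in a setup.py (module names and the nimbase.h path
# are hypothetical; point it at your own Nim installation):
#
#     from setuptools import setup
#     from nython import nythonize
#
#     setup(
#         name="ponim",
#         packages=["ponim"],
#         ext_modules=nythonize(
#             "/path/to/nim/lib/nimbase.h",
#             [{"name": "adder", "path": "ponim/adder.nim"}],
#         ),
#     )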
| """Compile Nim libraries as Python Extension Modules.
If you want your namespace to coexist with your python code, name this ponim.nim
and then your import will look like `from ponim.nim import adder` and
`from ponim import subtractor`. There must be a way to smooth that out in the
__init__.py file somehow.
Note that the file must be in the included source code dir. Currently it is
easiest to just put this in with your python code.
"""
from os import listdir, mkdir
from os.path import join, expanduser
from setuptools import Extension
from shutil import copyfile, rmtree
from typing import Sequence, Dict, List
import subprocess
import sys
import pathlib
# class NimLib(TypedDict):
#     """Wrapper around a lib name and path for nim code"""
#     name: str
#     path: str
def nythonize(nimbase: str, modules: Sequence[Dict[str, str]]) -> List[Extension]:
"""Compile a Nim library as a Python Extension Module.
`nimbase` is the path to `nimbase.h` on your system, which is needed for
    Python to compile the Nim-generated C code.
This builds a set of Extenstions, which are then passed back to setuptools.
"""
extensions = []
# Create a top level working dir
rmtree(join("build", "nim_build"), ignore_errors=True)
pathlib.Path(join("build", "nim_build")).mkdir(parents=True)
for module in modules:
module_dir = join("build", "nim_build", f"{module['name']}_build")
rmtree(module_dir, ignore_errors=True)
mkdir(module_dir)
subprocess.run(
[
"nim",
"compileToC",
"--compileOnly",
"-d:release",
"-d:ssl",
"--app:lib",
"--opt:speed",
"--gc:markAndSweep",
f"--nimcache:{module_dir}",
module["path"],
],
check=True,
stderr=sys.stdout.buffer,
)
copyfile(
nimbase, join(module_dir, "nimbase.h"),
)
sources = []
for c_source_file in listdir(module_dir):
if c_source_file.endswith(".c"):
sources.append(join(module_dir, c_source_file))
extensions.append(
Extension(
name=module["name"],
sources=sources,
extra_compile_args=[
"-flto",
"-ffast-math",
"-march=native",
"-mtune=native",
"-O3",
"-fno-ident",
"-fsingle-precision-constant",
],
extra_link_args=["-s"],
include_dirs=[module_dir],
)
)
return extensions
| en | 0.842822 | Compile Nim libraries as Python Extension Modules. If you want your namespace to coexist with your pthon code, name this ponim.nim and then your import will look like `from ponim.nim import adder` and `from ponim import subtractor`. There must be a way to smooth that out in the __init__.py file somehow. Note that the file must be in the included source code dir. Currently it is easiest to just put this in with your python code. # class NimLib(TypedDict): # """Wrapper around a lib name and path for nim cdoe""" # name: str # path: str Compile a Nim library as a Python Extension Module. `nimbase` is the path to `nimbase.h` on your system, which is needed for Python to compile gene Nim generated C code. This builds a set of Extenstions, which are then passed back to setuptools. # Create a top level working dir | 2.387884 | 2 |
tests/contrib/test_util.py | lixinso/pyro | 10 | 7595 | <filename>tests/contrib/test_util.py
from collections import OrderedDict
import pytest
import torch
import pyro.distributions as dist
from pyro.contrib.util import (
get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian
)
from tests.common import assert_equal
def test_get_indices_sizes():
sizes = OrderedDict([("a", 2), ("b", 2), ("c", 2)])
assert_equal(get_indices(["b"], sizes=sizes), torch.tensor([2, 3]))
assert_equal(get_indices(["b", "c"], sizes=sizes), torch.tensor([2, 3, 4, 5]))
tensors = OrderedDict([("a", torch.ones(2)), ("b", torch.ones(2)), ("c", torch.ones(2))])
assert_equal(get_indices(["b"], tensors=tensors), torch.tensor([2, 3]))
assert_equal(get_indices(["b", "c"], tensors=tensors), torch.tensor([2, 3, 4, 5]))
def test_tensor_to_dict():
sizes = OrderedDict([("a", 2), ("b", 2), ("c", 2)])
vector = torch.tensor([1., 2, 3, 4, 5, 6])
assert_equal(tensor_to_dict(sizes, vector), {"a": torch.tensor([1., 2.]),
"b": torch.tensor([3., 4.]),
"c": torch.tensor([5., 6.])})
assert_equal(tensor_to_dict(sizes, vector, subset=["b"]),
{"b": torch.tensor([3., 4.])})
@pytest.mark.parametrize("A,b", [
(torch.tensor([[1., 2.], [2., -3.]]), torch.tensor([-1., 2.]))
])
def test_rmv(A, b):
assert_equal(rmv(A, b), A.mv(b), prec=1e-8)
batched_A = lexpand(A, 5, 4)
batched_b = lexpand(b, 5, 4)
expected_Ab = lexpand(A.mv(b), 5, 4)
assert_equal(rmv(batched_A, batched_b), expected_Ab, prec=1e-8)
@pytest.mark.parametrize("a,b", [
(torch.tensor([1., 2.]), torch.tensor([-1., 2.]))
])
def test_rvv(a, b):
assert_equal(rvv(a, b), torch.dot(a, b), prec=1e-8)
batched_a = lexpand(a, 5, 4)
batched_b = lexpand(b, 5, 4)
expected_ab = lexpand(torch.dot(a, b), 5, 4)
assert_equal(rvv(batched_a, batched_b), expected_ab, prec=1e-8)
def test_lexpand():
A = torch.tensor([[1., 2.], [-2., 0]])
assert_equal(lexpand(A), A, prec=1e-8)
assert_equal(lexpand(A, 4), A.expand(4, 2, 2), prec=1e-8)
assert_equal(lexpand(A, 4, 2), A.expand(4, 2, 2, 2), prec=1e-8)
def test_rexpand():
A = torch.tensor([[1., 2.], [-2., 0]])
assert_equal(rexpand(A), A, prec=1e-8)
assert_equal(rexpand(A, 4), A.unsqueeze(-1).expand(2, 2, 4), prec=1e-8)
assert_equal(rexpand(A, 4, 2), A.unsqueeze(-1).unsqueeze(-1).expand(2, 2, 4, 2), prec=1e-8)
def test_rtril():
A = torch.tensor([[1., 2.], [-2., 0]])
assert_equal(rtril(A), torch.tril(A), prec=1e-8)
expanded = lexpand(A, 5, 4)
expected = lexpand(torch.tril(A), 5, 4)
assert_equal(rtril(expanded), expected, prec=1e-8)
def test_rdiag():
v = torch.tensor([1., 2., -1.])
assert_equal(rdiag(v), torch.diag(v), prec=1e-8)
expanded = lexpand(v, 5, 4)
    expected = lexpand(torch.diag(v), 5, 4)
    assert_equal(rdiag(expanded), expected, prec=1e-8)
def test_hessian_mvn():
tmp = torch.randn(3, 10)
cov = torch.matmul(tmp, tmp.t())
mvn = dist.MultivariateNormal(cov.new_zeros(3), cov)
x = torch.randn(3, requires_grad=True)
y = mvn.log_prob(x)
assert_equal(hessian(y, x), -mvn.precision_matrix)
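    # sanity of the target: log N(x; 0, Sigma) = -0.5 * x^T Sigma^{-1} x + const,
    # so the Hessian w.r.t. x is -Sigma^{-1}, i.e. minus the precision matrix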
def test_hessian_multi_variables():
x = torch.randn(3, requires_grad=True)
z = torch.randn(3, requires_grad=True)
y = (x ** 2 * z + z ** 3).sum()
H = hessian(y, (x, z))
Hxx = (2 * z).diag()
Hxz = (2 * x).diag()
Hzz = (6 * z).diag()
target_H = torch.cat([torch.cat([Hxx, Hxz]), torch.cat([Hxz, Hzz])], dim=1)
assert_equal(H, target_H)
| <filename>tests/contrib/test_util.py
from collections import OrderedDict
import pytest
import torch
import pyro.distributions as dist
from pyro.contrib.util import (
get_indices, tensor_to_dict, rmv, rvv, lexpand, rexpand, rdiag, rtril, hessian
)
from tests.common import assert_equal
def test_get_indices_sizes():
sizes = OrderedDict([("a", 2), ("b", 2), ("c", 2)])
assert_equal(get_indices(["b"], sizes=sizes), torch.tensor([2, 3]))
assert_equal(get_indices(["b", "c"], sizes=sizes), torch.tensor([2, 3, 4, 5]))
tensors = OrderedDict([("a", torch.ones(2)), ("b", torch.ones(2)), ("c", torch.ones(2))])
assert_equal(get_indices(["b"], tensors=tensors), torch.tensor([2, 3]))
assert_equal(get_indices(["b", "c"], tensors=tensors), torch.tensor([2, 3, 4, 5]))
def test_tensor_to_dict():
sizes = OrderedDict([("a", 2), ("b", 2), ("c", 2)])
vector = torch.tensor([1., 2, 3, 4, 5, 6])
assert_equal(tensor_to_dict(sizes, vector), {"a": torch.tensor([1., 2.]),
"b": torch.tensor([3., 4.]),
"c": torch.tensor([5., 6.])})
assert_equal(tensor_to_dict(sizes, vector, subset=["b"]),
{"b": torch.tensor([3., 4.])})
@pytest.mark.parametrize("A,b", [
(torch.tensor([[1., 2.], [2., -3.]]), torch.tensor([-1., 2.]))
])
def test_rmv(A, b):
assert_equal(rmv(A, b), A.mv(b), prec=1e-8)
batched_A = lexpand(A, 5, 4)
batched_b = lexpand(b, 5, 4)
expected_Ab = lexpand(A.mv(b), 5, 4)
assert_equal(rmv(batched_A, batched_b), expected_Ab, prec=1e-8)
@pytest.mark.parametrize("a,b", [
(torch.tensor([1., 2.]), torch.tensor([-1., 2.]))
])
def test_rvv(a, b):
assert_equal(rvv(a, b), torch.dot(a, b), prec=1e-8)
batched_a = lexpand(a, 5, 4)
batched_b = lexpand(b, 5, 4)
expected_ab = lexpand(torch.dot(a, b), 5, 4)
assert_equal(rvv(batched_a, batched_b), expected_ab, prec=1e-8)
def test_lexpand():
A = torch.tensor([[1., 2.], [-2., 0]])
assert_equal(lexpand(A), A, prec=1e-8)
assert_equal(lexpand(A, 4), A.expand(4, 2, 2), prec=1e-8)
assert_equal(lexpand(A, 4, 2), A.expand(4, 2, 2, 2), prec=1e-8)
def test_rexpand():
A = torch.tensor([[1., 2.], [-2., 0]])
assert_equal(rexpand(A), A, prec=1e-8)
assert_equal(rexpand(A, 4), A.unsqueeze(-1).expand(2, 2, 4), prec=1e-8)
assert_equal(rexpand(A, 4, 2), A.unsqueeze(-1).unsqueeze(-1).expand(2, 2, 4, 2), prec=1e-8)
def test_rtril():
A = torch.tensor([[1., 2.], [-2., 0]])
assert_equal(rtril(A), torch.tril(A), prec=1e-8)
expanded = lexpand(A, 5, 4)
expected = lexpand(torch.tril(A), 5, 4)
assert_equal(rtril(expanded), expected, prec=1e-8)
def test_rdiag():
v = torch.tensor([1., 2., -1.])
assert_equal(rdiag(v), torch.diag(v), prec=1e-8)
expanded = lexpand(v, 5, 4)
expeceted = lexpand(torch.diag(v), 5, 4)
assert_equal(rdiag(expanded), expeceted, prec=1e-8)
def test_hessian_mvn():
tmp = torch.randn(3, 10)
cov = torch.matmul(tmp, tmp.t())
mvn = dist.MultivariateNormal(cov.new_zeros(3), cov)
x = torch.randn(3, requires_grad=True)
y = mvn.log_prob(x)
assert_equal(hessian(y, x), -mvn.precision_matrix)
def test_hessian_multi_variables():
x = torch.randn(3, requires_grad=True)
z = torch.randn(3, requires_grad=True)
y = (x ** 2 * z + z ** 3).sum()
H = hessian(y, (x, z))
Hxx = (2 * z).diag()
Hxz = (2 * x).diag()
Hzz = (6 * z).diag()
target_H = torch.cat([torch.cat([Hxx, Hxz]), torch.cat([Hxz, Hzz])], dim=1)
assert_equal(H, target_H)
| none | 1 | 2.106208 | 2 |
|
emodul/apps.py | HarisHijazi/mojarnik-server | 0 | 7596 | from django.apps import AppConfig
class EmodulConfig(AppConfig):
name = 'emodul'
| from django.apps import AppConfig
class EmodulConfig(AppConfig):
name = 'emodul'
| none | 1 | 1.059093 | 1 |
|
Diffnet++/class/DataModule.py | mIXs222/diffnet | 0 | 7597 | from __future__ import division
from collections import defaultdict
import numpy as np
from time import time
import random
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
# import tensorflow as tf
class DataModule():
def __init__(self, conf, filename):
self.conf = conf
self.data_dict = {}
self.terminal_flag = 1
self.filename = filename
self.index = 0
    ####### Initialize Procedures #######
def prepareModelSupplement(self, model):
data_dict = {}
if 'CONSUMED_ITEMS_SPARSE_MATRIX' in model.supply_set:
self.generateConsumedItemsSparseMatrix()
#self.arrangePositiveData()
data_dict['CONSUMED_ITEMS_INDICES_INPUT'] = self.consumed_items_indices_list
data_dict['CONSUMED_ITEMS_VALUES_INPUT'] = self.consumed_items_values_list
data_dict['CONSUMED_ITEMS_VALUES_WEIGHT_AVG_INPUT'] = self.consumed_items_values_weight_avg_list
data_dict['CONSUMED_ITEMS_NUM_INPUT'] = self.consumed_item_num_list
data_dict['CONSUMED_ITEMS_NUM_DICT_INPUT'] = self.user_item_num_dict
data_dict['USER_ITEM_SPARSITY_DICT'] = self.user_item_sparsity_dict
if 'SOCIAL_NEIGHBORS_SPARSE_MATRIX' in model.supply_set:
self.readSocialNeighbors()
self.generateSocialNeighborsSparseMatrix()
data_dict['SOCIAL_NEIGHBORS_INDICES_INPUT'] = self.social_neighbors_indices_list
data_dict['SOCIAL_NEIGHBORS_VALUES_INPUT'] = self.social_neighbors_values_list
data_dict['SOCIAL_NEIGHBORS_VALUES_WEIGHT_AVG_INPUT'] = self.social_neighbors_values_weight_avg_list
data_dict['SOCIAL_NEIGHBORS_NUM_INPUT'] = self.social_neighbor_num_list
data_dict['SOCIAL_NEIGHBORS_NUM_DICT_INPUT'] = self.social_neighbors_num_dict
data_dict['USER_USER_SPARSITY_DICT']= self.user_user_sparsity_dict
if 'ITEM_CUSTOMER_SPARSE_MATRIX' in model.supply_set:
self.generateConsumedItemsSparseMatrixForItemUser()
data_dict['ITEM_CUSTOMER_INDICES_INPUT'] = self.item_customer_indices_list
data_dict['ITEM_CUSTOMER_VALUES_INPUT'] = self.item_customer_values_list
data_dict['ITEM_CUSTOMER_VALUES_WEIGHT_AVG_INPUT'] = self.item_customer_values_weight_avg_list
data_dict['ITEM_CUSTOMER_NUM_INPUT'] = self.item_customer_num_list
data_dict['ITEM_USER_NUM_DICT_INPUT'] = self.item_user_num_dict
return data_dict
def initializeRankingTrain(self):
self.readData()
self.arrangePositiveData()
self.arrangePositiveDataForItemUser()
self.generateTrainNegative()
def initializeRankingVT(self):
self.readData()
self.arrangePositiveData()
self.arrangePositiveDataForItemUser()
self.generateTrainNegative()
def initalizeRankingEva(self):
self.readData()
self.getEvaPositiveBatch()
self.generateEvaNegative()
def linkedMap(self):
self.data_dict['USER_LIST'] = self.user_list
self.data_dict['ITEM_LIST'] = self.item_list
self.data_dict['LABEL_LIST'] = self.labels_list
def linkedRankingEvaMap(self):
self.data_dict['EVA_USER_LIST'] = self.eva_user_list
self.data_dict['EVA_ITEM_LIST'] = self.eva_item_list
####### Data Loading #######
def readData(self):
f = open(self.filename)
total_user_list = set()
hash_data = defaultdict(int)
for _, line in enumerate(f):
arr = line.split("\t")
hash_data[(int(arr[0]), int(arr[1]))] = 1
total_user_list.add(int(arr[0]))
self.total_user_list = list(total_user_list)
self.hash_data = hash_data
def arrangePositiveData(self):
positive_data = defaultdict(set)
user_item_num_dict = defaultdict(set)
total_data = set()
hash_data = self.hash_data
for (u, i) in hash_data:
total_data.add((u, i))
positive_data[u].add(i)
user_list = sorted(list(positive_data.keys()))
for u in range(self.conf.num_users):
user_item_num_dict[u] = len(positive_data[u])+1
self.positive_data = positive_data
self.user_item_num_dict = user_item_num_dict
self.total_data = len(total_data)
    def Sparsity_analysis_for_user_item_network(self):
        # incomplete stub: user-item sparsity buckets are actually computed
        # inside generateConsumedItemsSparseMatrix below
        hash_data_for_user_item = self.hash_data
        sparsity_user_item_dict = {}
def arrangePositiveDataForItemUser(self):
positive_data_for_item_user = defaultdict(set)
item_user_num_dict = defaultdict(set)
total_data_for_item_user = set()
hash_data_for_item_user = self.hash_data
for (u, i) in hash_data_for_item_user:
total_data_for_item_user.add((i, u))
positive_data_for_item_user[i].add(u)
item_list = sorted(list(positive_data_for_item_user.keys()))
for i in range(self.conf.num_items):
item_user_num_dict[i] = len(positive_data_for_item_user[i])+1
self.item_user_num_dict = item_user_num_dict
self.positive_data_for_item_user = positive_data_for_item_user
self.total_data_for_item_user = len(total_data_for_item_user)
# ----------------------
    # This function is designed for generating train/val/test negatives
def generateTrainNegative(self):
num_items = self.conf.num_items
num_negatives = self.conf.num_negatives
negative_data = defaultdict(set)
total_data = set()
hash_data = self.hash_data
for (u, i) in hash_data:
total_data.add((u, i))
for _ in range(num_negatives):
j = np.random.randint(num_items)
while (u, j) in hash_data:
j = np.random.randint(num_items)
negative_data[u].add(j)
total_data.add((u, j))
self.negative_data = negative_data
self.terminal_flag = 1
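        # e.g. with num_negatives = 8, every observed pair (u, i) triggers eight
        # rejection-sampled items j with (u, j) unobserved, so negative_data[u]
        # accumulates up to 8 * |items(u)| distinct negatives per user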
# ----------------------
    # This function is designed for the val/test sets to compute loss
def getVTRankingOneBatch(self):
positive_data = self.positive_data
negative_data = self.negative_data
total_user_list = self.total_user_list
user_list = []
item_list = []
labels_list = []
for u in total_user_list:
user_list.extend([u] * len(positive_data[u]))
item_list.extend(positive_data[u])
labels_list.extend([1] * len(positive_data[u]))
user_list.extend([u] * len(negative_data[u]))
item_list.extend(negative_data[u])
labels_list.extend([0] * len(negative_data[u]))
self.user_list = np.reshape(user_list, [-1, 1])
self.item_list = np.reshape(item_list, [-1, 1])
self.labels_list = np.reshape(labels_list, [-1, 1])
# ----------------------
    # This function is designed for the training process
def getTrainRankingBatch(self):
positive_data = self.positive_data
negative_data = self.negative_data
total_user_list = self.total_user_list
index = self.index
batch_size = self.conf.training_batch_size
user_list, item_list, labels_list = [], [], []
if index + batch_size < len(total_user_list):
target_user_list = total_user_list[index:index+batch_size]
self.index = index + batch_size
else:
target_user_list = total_user_list[index:len(total_user_list)]
self.index = 0
self.terminal_flag = 0
for u in target_user_list:
user_list.extend([u] * len(positive_data[u]))
item_list.extend(list(positive_data[u]))
labels_list.extend([1] * len(positive_data[u]))
user_list.extend([u] * len(negative_data[u]))
item_list.extend(list(negative_data[u]))
labels_list.extend([0] * len(negative_data[u]))
self.user_list = np.reshape(user_list, [-1, 1])
self.item_list = np.reshape(item_list, [-1, 1])
self.labels_list = np.reshape(labels_list, [-1, 1])
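        # Sketch of the driver loop this batching protocol assumes (the real
        # trainer lives outside this class; names below are illustrative):
        #
        #     data.terminal_flag = 1
        #     while data.terminal_flag:
        #         data.getTrainRankingBatch()
        #         data.linkedMap()
        #         ...feed data.data_dict['USER_LIST'] / ['ITEM_LIST'] / ['LABEL_LIST']...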
# ----------------------
# This function is designed for the positive data
def getEvaPositiveBatch(self):
hash_data = self.hash_data
user_list = []
item_list = []
index_dict = defaultdict(list)
index = 0
for (u, i) in hash_data:
user_list.append(u)
item_list.append(i)
index_dict[u].append(index)
index = index + 1
self.eva_user_list = np.reshape(user_list, [-1, 1])
self.eva_item_list = np.reshape(item_list, [-1, 1])
self.eva_index_dict = index_dict
# ----------------------
#This function is designed for generating negative data
def generateEvaNegative(self):
hash_data = self.hash_data
total_user_list = self.total_user_list
num_evaluate = self.conf.num_evaluate
num_items = self.conf.num_items
eva_negative_data = defaultdict(list)
for u in total_user_list:
for _ in range(num_evaluate):
j = np.random.randint(num_items)
while (u, j) in hash_data:
j = np.random.randint(num_items)
eva_negative_data[u].append(j)
self.eva_negative_data = eva_negative_data
# ----------------------
    # This function is designed for generating negative batches in ranking evaluation
def getEvaRankingBatch(self):
batch_size = self.conf.evaluate_batch_size
num_evaluate = self.conf.num_evaluate
eva_negative_data = self.eva_negative_data
total_user_list = self.total_user_list
index = self.index
terminal_flag = 1
total_users = len(total_user_list)
user_list = []
item_list = []
if index + batch_size < total_users:
batch_user_list = total_user_list[index:index+batch_size]
self.index = index + batch_size
else:
terminal_flag = 0
batch_user_list = total_user_list[index:total_users]
self.index = 0
for u in batch_user_list:
user_list.extend([u]*num_evaluate)
item_list.extend(eva_negative_data[u])
self.eva_user_list = np.reshape(user_list, [-1, 1])
self.eva_item_list = np.reshape(item_list, [-1, 1])
return batch_user_list, terminal_flag
# ----------------------
# Read social network information
def readSocialNeighbors(self, friends_flag=1):
social_neighbors = defaultdict(set)
social_neighbors_num_dict = defaultdict(set)
links_file = open(self.conf.links_filename)
for _, line in enumerate(links_file):
tmp = line.split('\t')
u1, u2 = int(tmp[0]), int(tmp[1])
social_neighbors[u1].add(u2)
if friends_flag == 1:
social_neighbors[u2].add(u1)
user_list = sorted(list(social_neighbors.keys()))
for u in range(self.conf.num_users):
social_neighbors_num_dict[u] = len(social_neighbors[u])+1
self.social_neighbors_num_dict = social_neighbors_num_dict
self.social_neighbors = social_neighbors
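        # Expected layout of the links file (illustrative): one tab-separated
        # edge per line, e.g. "0\t17", with friends_flag == 1 adding the reverse
        # edge so the social graph is treated as undirected.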
# ----------------------
#Generate Social Neighbors Sparse Matrix Indices and Values
def generateSocialNeighborsSparseMatrix(self):
social_neighbors = self.social_neighbors
social_neighbors_num_dict = self.social_neighbors_num_dict #weight avg
social_neighbors_indices_list = []
social_neighbors_values_list = []
social_neighbors_values_weight_avg_list = []
social_neighbor_num_list = []
social_neighbors_dict = defaultdict(list)
user_user_num_for_sparsity_dict = defaultdict(set)
user_user_sparsity_dict = {}
user_user_sparsity_dict['0-4'] = []
user_user_sparsity_dict['4-8'] = []
user_user_sparsity_dict['8-16'] = []
user_user_sparsity_dict['16-32'] = []
user_user_sparsity_dict['32-64'] = []
user_user_sparsity_dict['64-'] = []
for u in range(self.conf.num_users):
user_user_num_for_sparsity_dict[u] = len(social_neighbors[u])
for u in social_neighbors:
social_neighbors_dict[u] = sorted(social_neighbors[u])
user_list = sorted(list(social_neighbors.keys()))
        # node attribute: neighbor count per user
for user in range(self.conf.num_users):
if user in social_neighbors_dict:
social_neighbor_num_list.append(len(social_neighbors_dict[user]))
else:
social_neighbor_num_list.append(1)
for user in user_list:
for friend in social_neighbors_dict[user]:
social_neighbors_indices_list.append([user, friend])
social_neighbors_values_list.append(1.0/len(social_neighbors_dict[user]))
social_neighbors_values_weight_avg_list.append(1.0/(np.sqrt(social_neighbors_num_dict[user])*np.sqrt(social_neighbors_num_dict[friend]))) #weight avg
for u in range(self.conf.num_users):
cur_user_neighbors_num = user_user_num_for_sparsity_dict[u]
if( (cur_user_neighbors_num >=0) & (cur_user_neighbors_num<4) ):
user_user_sparsity_dict['0-4'].append(u)
elif( (cur_user_neighbors_num >=4) & (cur_user_neighbors_num<8) ):
user_user_sparsity_dict['4-8'].append(u)
elif( (cur_user_neighbors_num >=8) & (cur_user_neighbors_num<16) ):
user_user_sparsity_dict['8-16'].append(u)
elif( (cur_user_neighbors_num >=16) & (cur_user_neighbors_num<32) ):
user_user_sparsity_dict['16-32'].append(u)
elif( (cur_user_neighbors_num >=32) & (cur_user_neighbors_num<64) ):
user_user_sparsity_dict['32-64'].append(u)
elif( cur_user_neighbors_num >=64):
user_user_sparsity_dict['64-'].append(u)
self.user_user_sparsity_dict = user_user_sparsity_dict
self.social_neighbors_indices_list = np.array(social_neighbors_indices_list).astype(np.int64)
self.social_neighbors_values_list = np.array(social_neighbors_values_list).astype(np.float32)
self.social_neighbors_values_weight_avg_list = np.array(social_neighbors_values_weight_avg_list).astype(np.float32) # weight avg
self.social_neighbor_num_list = np.array(social_neighbor_num_list).astype(np.int64)
#self.social_neighbors_values_list = tf.Variable(tf.random_normal([len(self.social_neighbors_indices_list)], stddev=0.01))
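    # Illustrative sketch (an assumption: a TF1-style graph, matching the
    # commented-out tf.Variable above). The index/value lists built here are
    # meant to feed a sparse user-user adjacency, e.g.:
    #   social_adj = tf.SparseTensor(
    #       indices=data.social_neighbors_indices_list,
    #       values=data.social_neighbors_values_list,
    #       dense_shape=[data.conf.num_users, data.conf.num_users])
    #   agg = tf.sparse_tensor_dense_matmul(social_adj, user_embeddings)
    # where `data` is a prepared DataModule and `user_embeddings` is a
    # hypothetical [num_users, dim] dense tensor.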
# ----------------------
    # Generate Consumed Items Sparse Matrix Indices and Values
def generateConsumedItemsSparseMatrix(self):
positive_data = self.positive_data
consumed_items_indices_list = []
consumed_items_values_list = []
consumed_items_values_weight_avg_list = []
consumed_item_num_list = []
consumed_items_dict = defaultdict(list)
        user_item_num_for_sparsity_dict = defaultdict(int)
user_item_sparsity_dict = {}
user_item_sparsity_dict['0-4'] = []
user_item_sparsity_dict['4-8'] = []
user_item_sparsity_dict['8-16'] = []
user_item_sparsity_dict['16-32'] = []
user_item_sparsity_dict['32-64'] = []
user_item_sparsity_dict['64-'] = []
consumed_items_num_dict = self.user_item_num_dict #weight avg
#social_neighbors_num_dict = self.social_neighbors_num_dict #weight avg
item_user_num_dict = self.item_user_num_dict #weight avg
for u in positive_data:
consumed_items_dict[u] = sorted(positive_data[u])
user_list = sorted(list(positive_data.keys()))
for u in range(self.conf.num_users):
user_item_num_for_sparsity_dict[u] = len(positive_data[u])
for user in range(self.conf.num_users):
if user in consumed_items_dict:
consumed_item_num_list.append(len(consumed_items_dict[user]))
else:
consumed_item_num_list.append(1)
for u in user_list:
for i in consumed_items_dict[u]:
consumed_items_indices_list.append([u, i])
consumed_items_values_list.append(1.0/len(consumed_items_dict[u]))
consumed_items_values_weight_avg_list.append(1.0/( np.sqrt(consumed_items_num_dict[u]) * np.sqrt(item_user_num_dict[i]) )) #weight avg
        for u in range(self.conf.num_users):
            cur_user_consumed_item_num = user_item_num_for_sparsity_dict[u]
            if 0 <= cur_user_consumed_item_num < 4:
                user_item_sparsity_dict['0-4'].append(u)
            elif 4 <= cur_user_consumed_item_num < 8:
                user_item_sparsity_dict['4-8'].append(u)
            elif 8 <= cur_user_consumed_item_num < 16:
                user_item_sparsity_dict['8-16'].append(u)
            elif 16 <= cur_user_consumed_item_num < 32:
                user_item_sparsity_dict['16-32'].append(u)
            elif 32 <= cur_user_consumed_item_num < 64:
                user_item_sparsity_dict['32-64'].append(u)
            elif cur_user_consumed_item_num >= 64:
                user_item_sparsity_dict['64-'].append(u)
self.user_item_sparsity_dict = user_item_sparsity_dict
self.consumed_items_indices_list = np.array(consumed_items_indices_list).astype(np.int64)
self.consumed_items_values_list = np.array(consumed_items_values_list).astype(np.float32)
self.consumed_items_values_weight_avg_list = np.array(consumed_items_values_weight_avg_list).astype(np.float32) #weight avg
self.consumed_item_num_list = np.array(consumed_item_num_list).astype(np.int64)
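    # The "weight avg" values above implement a symmetric, GCN-style
    # normalization of the user-item graph:
    #   w(u, i) = 1 / (sqrt(|I_u| + 1) * sqrt(|U_i| + 1)),
    # where |I_u| is user u's item count and |U_i| is item i's user count
    # (the +1 comes from arrangePositiveData / arrangePositiveDataForItemUser).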
def generateConsumedItemsSparseMatrixForItemUser(self):
positive_data_for_item_user = self.positive_data_for_item_user
item_customer_indices_list = []
item_customer_values_list = []
item_customer_values_weight_avg_list = []
item_customer_num_list = []
item_customer_dict = defaultdict(list)
consumed_items_num_dict = self.user_item_num_dict #weight avg
#social_neighbors_num_dict = self.social_neighbors_num_dict #weight avg
item_user_num_dict = self.item_user_num_dict #weight avg
for i in positive_data_for_item_user:
item_customer_dict[i] = sorted(positive_data_for_item_user[i])
item_list = sorted(list(positive_data_for_item_user.keys()))
for item in range(self.conf.num_items):
if item in item_customer_dict:
item_customer_num_list.append(len(item_customer_dict[item]))
else:
item_customer_num_list.append(1)
for i in item_list:
for u in item_customer_dict[i]:
item_customer_indices_list.append([i, u])
item_customer_values_list.append(1.0/len(item_customer_dict[i]))
item_customer_values_weight_avg_list.append(1.0/( np.sqrt(consumed_items_num_dict[u]) * np.sqrt(item_user_num_dict[i]) ))
self.item_customer_indices_list = np.array(item_customer_indices_list).astype(np.int64)
self.item_customer_values_list = np.array(item_customer_values_list).astype(np.float32)
self.item_customer_num_list = np.array(item_customer_num_list).astype(np.int64)
self.item_customer_values_weight_avg_list = np.array(item_customer_values_weight_avg_list).astype(np.float32)
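        # Note: this item->user matrix is the transpose of the user->item
        # matrix from generateConsumedItemsSparseMatrix and reuses the same
        # symmetric weights, so the two support item-side and user-side
        # aggregation respectively.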
| from __future__ import division
from collections import defaultdict
import numpy as np
from time import time
import random
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
# import tensorflow as tf
class DataModule():
def __init__(self, conf, filename):
self.conf = conf
self.data_dict = {}
self.terminal_flag = 1
self.filename = filename
self.index = 0
    ####### Initialize Procedures #######
def prepareModelSupplement(self, model):
data_dict = {}
if 'CONSUMED_ITEMS_SPARSE_MATRIX' in model.supply_set:
self.generateConsumedItemsSparseMatrix()
#self.arrangePositiveData()
data_dict['CONSUMED_ITEMS_INDICES_INPUT'] = self.consumed_items_indices_list
data_dict['CONSUMED_ITEMS_VALUES_INPUT'] = self.consumed_items_values_list
data_dict['CONSUMED_ITEMS_VALUES_WEIGHT_AVG_INPUT'] = self.consumed_items_values_weight_avg_list
data_dict['CONSUMED_ITEMS_NUM_INPUT'] = self.consumed_item_num_list
data_dict['CONSUMED_ITEMS_NUM_DICT_INPUT'] = self.user_item_num_dict
data_dict['USER_ITEM_SPARSITY_DICT'] = self.user_item_sparsity_dict
if 'SOCIAL_NEIGHBORS_SPARSE_MATRIX' in model.supply_set:
self.readSocialNeighbors()
self.generateSocialNeighborsSparseMatrix()
data_dict['SOCIAL_NEIGHBORS_INDICES_INPUT'] = self.social_neighbors_indices_list
data_dict['SOCIAL_NEIGHBORS_VALUES_INPUT'] = self.social_neighbors_values_list
data_dict['SOCIAL_NEIGHBORS_VALUES_WEIGHT_AVG_INPUT'] = self.social_neighbors_values_weight_avg_list
data_dict['SOCIAL_NEIGHBORS_NUM_INPUT'] = self.social_neighbor_num_list
data_dict['SOCIAL_NEIGHBORS_NUM_DICT_INPUT'] = self.social_neighbors_num_dict
data_dict['USER_USER_SPARSITY_DICT']= self.user_user_sparsity_dict
if 'ITEM_CUSTOMER_SPARSE_MATRIX' in model.supply_set:
self.generateConsumedItemsSparseMatrixForItemUser()
data_dict['ITEM_CUSTOMER_INDICES_INPUT'] = self.item_customer_indices_list
data_dict['ITEM_CUSTOMER_VALUES_INPUT'] = self.item_customer_values_list
data_dict['ITEM_CUSTOMER_VALUES_WEIGHT_AVG_INPUT'] = self.item_customer_values_weight_avg_list
data_dict['ITEM_CUSTOMER_NUM_INPUT'] = self.item_customer_num_list
data_dict['ITEM_USER_NUM_DICT_INPUT'] = self.item_user_num_dict
return data_dict
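    # Illustrative sketch (hypothetical model class, not defined in this file):
    # a model advertises its required inputs through `supply_set`, e.g.
    #   class MyModel:
    #       supply_set = {'CONSUMED_ITEMS_SPARSE_MATRIX',
    #                     'SOCIAL_NEIGHBORS_SPARSE_MATRIX'}
    #   data_dict = data_module.prepareModelSupplement(MyModel())
    # which populates the CONSUMED_ITEMS_* and SOCIAL_NEIGHBORS_* keys above.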
def initializeRankingTrain(self):
self.readData()
self.arrangePositiveData()
self.arrangePositiveDataForItemUser()
self.generateTrainNegative()
def initializeRankingVT(self):
self.readData()
self.arrangePositiveData()
self.arrangePositiveDataForItemUser()
self.generateTrainNegative()
def initalizeRankingEva(self):
self.readData()
self.getEvaPositiveBatch()
self.generateEvaNegative()
def linkedMap(self):
self.data_dict['USER_LIST'] = self.user_list
self.data_dict['ITEM_LIST'] = self.item_list
self.data_dict['LABEL_LIST'] = self.labels_list
def linkedRankingEvaMap(self):
self.data_dict['EVA_USER_LIST'] = self.eva_user_list
self.data_dict['EVA_ITEM_LIST'] = self.eva_item_list
####### Data Loading #######
    def readData(self):
        total_user_list = set()
        hash_data = defaultdict(int)
        # Use a context manager so the ratings file is closed after reading.
        with open(self.filename) as f:
            for line in f:
                arr = line.split("\t")
                hash_data[(int(arr[0]), int(arr[1]))] = 1
                total_user_list.add(int(arr[0]))
        self.total_user_list = list(total_user_list)
        self.hash_data = hash_data
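    # The ratings file holds one tab-separated "user<TAB>item" pair per line,
    # e.g. "0\t12" marks item 12 as a positive interaction for user 0; every
    # observed pair is stored with label 1 in hash_data.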
    def arrangePositiveData(self):
        positive_data = defaultdict(set)
        user_item_num_dict = defaultdict(int)
        total_data = set()
        hash_data = self.hash_data
        for (u, i) in hash_data:
            total_data.add((u, i))
            positive_data[u].add(i)
        for u in range(self.conf.num_users):
            user_item_num_dict[u] = len(positive_data[u]) + 1
        self.positive_data = positive_data
        self.user_item_num_dict = user_item_num_dict
        self.total_data = len(total_data)
    def Sparsity_analysis_for_user_item_network(self):
        # Placeholder: sparsity analysis of the user-item network; the actual
        # bucketing happens in generateConsumedItemsSparseMatrix.
        hash_data_for_user_item = self.hash_data
        sparsity_user_item_dict = {}
def arrangePositiveDataForItemUser(self):
positive_data_for_item_user = defaultdict(set)
        item_user_num_dict = defaultdict(int)
total_data_for_item_user = set()
hash_data_for_item_user = self.hash_data
for (u, i) in hash_data_for_item_user:
total_data_for_item_user.add((i, u))
positive_data_for_item_user[i].add(u)
item_list = sorted(list(positive_data_for_item_user.keys()))
for i in range(self.conf.num_items):
item_user_num_dict[i] = len(positive_data_for_item_user[i])+1
self.item_user_num_dict = item_user_num_dict
self.positive_data_for_item_user = positive_data_for_item_user
self.total_data_for_item_user = len(total_data_for_item_user)
# ----------------------
    # This function is designed for generating train/val/test negatives
def generateTrainNegative(self):
num_items = self.conf.num_items
num_negatives = self.conf.num_negatives
negative_data = defaultdict(set)
total_data = set()
hash_data = self.hash_data
for (u, i) in hash_data:
total_data.add((u, i))
for _ in range(num_negatives):
j = np.random.randint(num_items)
while (u, j) in hash_data:
j = np.random.randint(num_items)
negative_data[u].add(j)
total_data.add((u, j))
self.negative_data = negative_data
self.terminal_flag = 1
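    # Note: negatives are drawn by rejection sampling over [0, num_items).
    # For reproducible epochs, seeding NumPy first is one option (assumption,
    # not something the original training driver necessarily does):
    #   np.random.seed(42)
    #   data_module.generateTrainNegative()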
# ----------------------
    # This function is designed for the val/test set, computing the loss
def getVTRankingOneBatch(self):
positive_data = self.positive_data
negative_data = self.negative_data
total_user_list = self.total_user_list
user_list = []
item_list = []
labels_list = []
for u in total_user_list:
user_list.extend([u] * len(positive_data[u]))
item_list.extend(positive_data[u])
labels_list.extend([1] * len(positive_data[u]))
user_list.extend([u] * len(negative_data[u]))
item_list.extend(negative_data[u])
labels_list.extend([0] * len(negative_data[u]))
self.user_list = np.reshape(user_list, [-1, 1])
self.item_list = np.reshape(item_list, [-1, 1])
self.labels_list = np.reshape(labels_list, [-1, 1])
# ----------------------
    # This function is designed for the training process
def getTrainRankingBatch(self):
positive_data = self.positive_data
negative_data = self.negative_data
total_user_list = self.total_user_list
index = self.index
batch_size = self.conf.training_batch_size
user_list, item_list, labels_list = [], [], []
if index + batch_size < len(total_user_list):
target_user_list = total_user_list[index:index+batch_size]
self.index = index + batch_size
else:
target_user_list = total_user_list[index:len(total_user_list)]
self.index = 0
self.terminal_flag = 0
for u in target_user_list:
user_list.extend([u] * len(positive_data[u]))
item_list.extend(list(positive_data[u]))
labels_list.extend([1] * len(positive_data[u]))
user_list.extend([u] * len(negative_data[u]))
item_list.extend(list(negative_data[u]))
labels_list.extend([0] * len(negative_data[u]))
self.user_list = np.reshape(user_list, [-1, 1])
self.item_list = np.reshape(item_list, [-1, 1])
self.labels_list = np.reshape(labels_list, [-1, 1])
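    # Illustrative epoch loop over this batcher (assumed driver code):
    #   data.generateTrainNegative()        # resets terminal_flag to 1
    #   while data.terminal_flag:
    #       data.getTrainRankingBatch()
    #       data.linkedMap()                # fills USER/ITEM/LABEL lists
    #       feed = data.data_dict           # consume one training batch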
# ----------------------
    # This function is designed for building the positive evaluation batch
def getEvaPositiveBatch(self):
hash_data = self.hash_data
user_list = []
item_list = []
index_dict = defaultdict(list)
index = 0
for (u, i) in hash_data:
user_list.append(u)
item_list.append(i)
index_dict[u].append(index)
index = index + 1
self.eva_user_list = np.reshape(user_list, [-1, 1])
self.eva_item_list = np.reshape(item_list, [-1, 1])
self.eva_index_dict = index_dict
# ----------------------
    # This function is designed for generating negative evaluation data
def generateEvaNegative(self):
hash_data = self.hash_data
total_user_list = self.total_user_list
num_evaluate = self.conf.num_evaluate
num_items = self.conf.num_items
eva_negative_data = defaultdict(list)
for u in total_user_list:
for _ in range(num_evaluate):
j = np.random.randint(num_items)
while (u, j) in hash_data:
j = np.random.randint(num_items)
eva_negative_data[u].append(j)
self.eva_negative_data = eva_negative_data
# ----------------------
    # This function is designed for generating negative batches in ranking evaluation
def getEvaRankingBatch(self):
batch_size = self.conf.evaluate_batch_size
num_evaluate = self.conf.num_evaluate
eva_negative_data = self.eva_negative_data
total_user_list = self.total_user_list
index = self.index
terminal_flag = 1
total_users = len(total_user_list)
user_list = []
item_list = []
if index + batch_size < total_users:
batch_user_list = total_user_list[index:index+batch_size]
self.index = index + batch_size
else:
terminal_flag = 0
batch_user_list = total_user_list[index:total_users]
self.index = 0
for u in batch_user_list:
user_list.extend([u]*num_evaluate)
item_list.extend(eva_negative_data[u])
self.eva_user_list = np.reshape(user_list, [-1, 1])
self.eva_item_list = np.reshape(item_list, [-1, 1])
return batch_user_list, terminal_flag
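    # Illustrative ranking-evaluation loop (assumed driver code):
    #   data.getEvaPositiveBatch()
    #   data.generateEvaNegative()
    #   terminal_flag = 1
    #   while terminal_flag:
    #       batch_users, terminal_flag = data.getEvaRankingBatch()
    #       data.linkedRankingEvaMap()      # fills EVA_USER/EVA_ITEM lists
    #       # ...score the candidate items for each user in batch_users...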
# ----------------------
# Read social network information
    def readSocialNeighbors(self, friends_flag=1):
        social_neighbors = defaultdict(set)
        social_neighbors_num_dict = defaultdict(int)
        # Use a context manager so the links file is closed after reading.
        with open(self.conf.links_filename) as links_file:
            for line in links_file:
                tmp = line.split('\t')
                u1, u2 = int(tmp[0]), int(tmp[1])
                social_neighbors[u1].add(u2)
                if friends_flag == 1:
                    social_neighbors[u2].add(u1)
        for u in range(self.conf.num_users):
            social_neighbors_num_dict[u] = len(social_neighbors[u]) + 1
        self.social_neighbors_num_dict = social_neighbors_num_dict
        self.social_neighbors = social_neighbors
    def arrangePositiveData(self):
        positive_data = defaultdict(set)
        user_item_num_dict = defaultdict(int)
        total_data = set()
        hash_data = self.hash_data
        for (u, i) in hash_data:
            total_data.add((u, i))
            positive_data[u].add(i)
        for u in range(self.conf.num_users):
            user_item_num_dict[u] = len(positive_data[u]) + 1
        self.positive_data = positive_data
        self.user_item_num_dict = user_item_num_dict
        self.total_data = len(total_data)
# ----------------------
    # Generate Social Neighbors Sparse Matrix Indices and Values
def generateSocialNeighborsSparseMatrix(self):
social_neighbors = self.social_neighbors
social_neighbors_num_dict = self.social_neighbors_num_dict #weight avg
social_neighbors_indices_list = []
social_neighbors_values_list = []
social_neighbors_values_weight_avg_list = []
social_neighbor_num_list = []
social_neighbors_dict = defaultdict(list)
        user_user_num_for_sparsity_dict = defaultdict(int)
user_user_sparsity_dict = {}
user_user_sparsity_dict['0-4'] = []
user_user_sparsity_dict['4-8'] = []
user_user_sparsity_dict['8-16'] = []
user_user_sparsity_dict['16-32'] = []
user_user_sparsity_dict['32-64'] = []
user_user_sparsity_dict['64-'] = []
for u in range(self.conf.num_users):
user_user_num_for_sparsity_dict[u] = len(social_neighbors[u])
for u in social_neighbors:
social_neighbors_dict[u] = sorted(social_neighbors[u])
user_list = sorted(list(social_neighbors.keys()))
        # node attribute: per-user neighbor count
for user in range(self.conf.num_users):
if user in social_neighbors_dict:
social_neighbor_num_list.append(len(social_neighbors_dict[user]))
else:
social_neighbor_num_list.append(1)
for user in user_list:
for friend in social_neighbors_dict[user]:
social_neighbors_indices_list.append([user, friend])
social_neighbors_values_list.append(1.0/len(social_neighbors_dict[user]))
social_neighbors_values_weight_avg_list.append(1.0/(np.sqrt(social_neighbors_num_dict[user])*np.sqrt(social_neighbors_num_dict[friend]))) #weight avg
        for u in range(self.conf.num_users):
            cur_user_neighbors_num = user_user_num_for_sparsity_dict[u]
            if 0 <= cur_user_neighbors_num < 4:
                user_user_sparsity_dict['0-4'].append(u)
            elif 4 <= cur_user_neighbors_num < 8:
                user_user_sparsity_dict['4-8'].append(u)
            elif 8 <= cur_user_neighbors_num < 16:
                user_user_sparsity_dict['8-16'].append(u)
            elif 16 <= cur_user_neighbors_num < 32:
                user_user_sparsity_dict['16-32'].append(u)
            elif 32 <= cur_user_neighbors_num < 64:
                user_user_sparsity_dict['32-64'].append(u)
            elif cur_user_neighbors_num >= 64:
                user_user_sparsity_dict['64-'].append(u)
self.user_user_sparsity_dict = user_user_sparsity_dict
self.social_neighbors_indices_list = np.array(social_neighbors_indices_list).astype(np.int64)
self.social_neighbors_values_list = np.array(social_neighbors_values_list).astype(np.float32)
self.social_neighbors_values_weight_avg_list = np.array(social_neighbors_values_weight_avg_list).astype(np.float32) # weight avg
self.social_neighbor_num_list = np.array(social_neighbor_num_list).astype(np.int64)
#self.social_neighbors_values_list = tf.Variable(tf.random_normal([len(self.social_neighbors_indices_list)], stddev=0.01))
# ----------------------
    # Generate Consumed Items Sparse Matrix Indices and Values
def generateConsumedItemsSparseMatrix(self):
positive_data = self.positive_data
consumed_items_indices_list = []
consumed_items_values_list = []
consumed_items_values_weight_avg_list = []
consumed_item_num_list = []
consumed_items_dict = defaultdict(list)
        user_item_num_for_sparsity_dict = defaultdict(int)
user_item_sparsity_dict = {}
user_item_sparsity_dict['0-4'] = []
user_item_sparsity_dict['4-8'] = []
user_item_sparsity_dict['8-16'] = []
user_item_sparsity_dict['16-32'] = []
user_item_sparsity_dict['32-64'] = []
user_item_sparsity_dict['64-'] = []
consumed_items_num_dict = self.user_item_num_dict #weight avg
#social_neighbors_num_dict = self.social_neighbors_num_dict #weight avg
item_user_num_dict = self.item_user_num_dict #weight avg
for u in positive_data:
consumed_items_dict[u] = sorted(positive_data[u])
user_list = sorted(list(positive_data.keys()))
for u in range(self.conf.num_users):
user_item_num_for_sparsity_dict[u] = len(positive_data[u])
for user in range(self.conf.num_users):
if user in consumed_items_dict:
consumed_item_num_list.append(len(consumed_items_dict[user]))
else:
consumed_item_num_list.append(1)
for u in user_list:
for i in consumed_items_dict[u]:
consumed_items_indices_list.append([u, i])
consumed_items_values_list.append(1.0/len(consumed_items_dict[u]))
consumed_items_values_weight_avg_list.append(1.0/( np.sqrt(consumed_items_num_dict[u]) * np.sqrt(item_user_num_dict[i]) )) #weight avg
        for u in range(self.conf.num_users):
            cur_user_consumed_item_num = user_item_num_for_sparsity_dict[u]
            if 0 <= cur_user_consumed_item_num < 4:
                user_item_sparsity_dict['0-4'].append(u)
            elif 4 <= cur_user_consumed_item_num < 8:
                user_item_sparsity_dict['4-8'].append(u)
            elif 8 <= cur_user_consumed_item_num < 16:
                user_item_sparsity_dict['8-16'].append(u)
            elif 16 <= cur_user_consumed_item_num < 32:
                user_item_sparsity_dict['16-32'].append(u)
            elif 32 <= cur_user_consumed_item_num < 64:
                user_item_sparsity_dict['32-64'].append(u)
            elif cur_user_consumed_item_num >= 64:
                user_item_sparsity_dict['64-'].append(u)
self.user_item_sparsity_dict = user_item_sparsity_dict
self.consumed_items_indices_list = np.array(consumed_items_indices_list).astype(np.int64)
self.consumed_items_values_list = np.array(consumed_items_values_list).astype(np.float32)
self.consumed_items_values_weight_avg_list = np.array(consumed_items_values_weight_avg_list).astype(np.float32) #weight avg
self.consumed_item_num_list = np.array(consumed_item_num_list).astype(np.int64)
def generateConsumedItemsSparseMatrixForItemUser(self):
positive_data_for_item_user = self.positive_data_for_item_user
item_customer_indices_list = []
item_customer_values_list = []
item_customer_values_weight_avg_list = []
item_customer_num_list = []
item_customer_dict = defaultdict(list)
consumed_items_num_dict = self.user_item_num_dict #weight avg
#social_neighbors_num_dict = self.social_neighbors_num_dict #weight avg
item_user_num_dict = self.item_user_num_dict #weight avg
for i in positive_data_for_item_user:
item_customer_dict[i] = sorted(positive_data_for_item_user[i])
item_list = sorted(list(positive_data_for_item_user.keys()))
for item in range(self.conf.num_items):
if item in item_customer_dict:
item_customer_num_list.append(len(item_customer_dict[item]))
else:
item_customer_num_list.append(1)
for i in item_list:
for u in item_customer_dict[i]:
item_customer_indices_list.append([i, u])
item_customer_values_list.append(1.0/len(item_customer_dict[i]))
item_customer_values_weight_avg_list.append(1.0/( np.sqrt(consumed_items_num_dict[u]) * np.sqrt(item_user_num_dict[i]) ))
self.item_customer_indices_list = np.array(item_customer_indices_list).astype(np.int64)
self.item_customer_values_list = np.array(item_customer_values_list).astype(np.float32)
self.item_customer_num_list = np.array(item_customer_num_list).astype(np.int64)
self.item_customer_values_weight_avg_list = np.array(item_customer_values_weight_avg_list).astype(np.float32)
| en | 0.485936 | # import tensorflow as tf ####### Initialize Procedures ####### #self.arrangePositiveData() ####### Data Loading ####### # ---------------------- # This function is designed for generating train/val/test negatives # ---------------------- # This function is designed for the val/test set, computing the loss # ---------------------- # This function is designed for the training process # ---------------------- # This function is designed for building the positive evaluation batch # ---------------------- # This function is designed for generating negative evaluation data # ---------------------- # This function is designed for generating negative batches in ranking evaluation # ---------------------- # Read social network information # ---------------------- # Generate Social Neighbors Sparse Matrix Indices and Values #weight avg # node attribute: per-user neighbor count #weight avg # weight avg #self.social_neighbors_values_list = tf.Variable(tf.random_normal([len(self.social_neighbors_indices_list)], stddev=0.01)) # ---------------------- # Generate Consumed Items Sparse Matrix Indices and Values #weight avg #social_neighbors_num_dict = self.social_neighbors_num_dict #weight avg #weight avg #weight avg #weight avg #weight avg #social_neighbors_num_dict = self.social_neighbors_num_dict #weight avg #weight avg | 2.365766 | 2 |
src/models/VanillaTransformer.py | iosurodri/annotated-transformer | 0 | 7598 | import torch.nn as nn
import torch.nn.functional as F
import copy
from src.layers.layers import Encoder, EncoderLayer, Decoder, DecoderLayer, PositionwiseFeedForward
from src.layers.preprocessing import Embeddings, PositionalEncoding
from src.layers.attention import MultiHeadedAttention
### Generic EncoderDecoder structure:
class EncoderDecoder(nn.Module):
"""
A standard Encoder-Decoder architecture. Base for this and many
other models.
"""
def __init__(self, encoder, decoder, src_embed, tgt_embed, generator):
super(EncoderDecoder, self).__init__()
self.encoder = encoder
self.decoder = decoder
self.src_embed = src_embed
self.tgt_embed = tgt_embed
self.generator = generator
def forward(self, src, tgt, src_mask, tgt_mask):
"Take in and process masked src and target sequences."
encoded_src = self.encode(src, src_mask)
return self.decode(encoded_src, src_mask, tgt, tgt_mask)
def encode(self, src, src_mask):
embedded_src = self.src_embed(src)
return self.encoder(embedded_src, src_mask)
def decode(self, memory, src_mask, tgt, tgt_mask):
embedded_tgt = self.tgt_embed(tgt)
return self.decoder(embedded_tgt, memory, src_mask, tgt_mask)
class Generator(nn.Module):
"Define standard linear + softmax generation step."
def __init__(self, d_model, vocab):
super(Generator, self).__init__()
self.proj = nn.Linear(d_model, vocab)
def forward(self, x):
return F.log_softmax(self.proj(x), dim=-1)
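# Note: log_softmax pairs with an NLL/KL-divergence loss downstream; the
# Generator is applied to decoder states outside of EncoderDecoder.forward.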
def make_model(src_vocab, tgt_vocab, N=6, d_model=512, d_ff=2048, h=8, dropout=0.1, alpha=0.5):
"Helper: Construct a model from hyperparameters."
c = copy.deepcopy
attn = MultiHeadedAttention(h, d_model, alpha=alpha)
ff = PositionwiseFeedForward(d_model, d_ff, dropout)
position = PositionalEncoding(d_model, dropout)
model = EncoderDecoder(
Encoder(EncoderLayer(d_model, c(attn), c(ff), dropout), N),
Decoder(DecoderLayer(d_model, c(attn), c(attn), c(ff), dropout), N),
nn.Sequential(Embeddings(d_model, src_vocab), c(position)),
nn.Sequential(Embeddings(d_model, tgt_vocab), c(position)),
Generator(d_model, tgt_vocab)
)
# This was important from their code.
# Initialize parameters with Glorot / fan_avg.
for p in model.parameters():
if p.dim() > 1:
            nn.init.xavier_uniform_(p)
return model
if __name__ == '__main__':
# Small example model
tmp_model = make_model(10, 10, 2)
print(tmp_model)
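    # Illustrative smoke test (an assumption: masks follow the
    # annotated-transformer convention, 1 = attend, 0 = block).
    import torch
    src = torch.randint(1, 10, (2, 7))             # (batch, src_len) token ids
    tgt = torch.randint(1, 10, (2, 6))             # (batch, tgt_len) token ids
    src_mask = torch.ones(2, 1, 7)                 # no source padding
    tgt_mask = torch.tril(torch.ones(2, 6, 6))     # causal target mask
    out = tmp_model(src, tgt, src_mask, tgt_mask)  # decoder states (2, 6, 512)
    log_probs = tmp_model.generator(out)           # (2, 6, 10) log-probabilities
    print(log_probs.shape)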
| import torch.nn as nn
import torch.nn.functional as F
import copy
from src.layers.layers import Encoder, EncoderLayer, Decoder, DecoderLayer, PositionwiseFeedForward
from src.layers.preprocessing import Embeddings, PositionalEncoding
from src.layers.attention import MultiHeadedAttention
### Generic EncoderDecoder structure:
class EncoderDecoder(nn.Module):
"""
A standard Encoder-Decoder architecture. Base for this and many
other models.
"""
def __init__(self, encoder, decoder, src_embed, tgt_embed, generator):
super(EncoderDecoder, self).__init__()
self.encoder = encoder
self.decoder = decoder
self.src_embed = src_embed
self.tgt_embed = tgt_embed
self.generator = generator
def forward(self, src, tgt, src_mask, tgt_mask):
"Take in and process masked src and target sequences."
encoded_src = self.encode(src, src_mask)
return self.decode(encoded_src, src_mask, tgt, tgt_mask)
def encode(self, src, src_mask):
embedded_src = self.src_embed(src)
return self.encoder(embedded_src, src_mask)
def decode(self, memory, src_mask, tgt, tgt_mask):
embedded_tgt = self.tgt_embed(tgt)
return self.decoder(embedded_tgt, memory, src_mask, tgt_mask)
class Generator(nn.Module):
"Define standard linear + softmax generation step."
def __init__(self, d_model, vocab):
super(Generator, self).__init__()
self.proj = nn.Linear(d_model, vocab)
def forward(self, x):
return F.log_softmax(self.proj(x), dim=-1)
def make_model(src_vocab, tgt_vocab, N=6, d_model=512, d_ff=2048, h=8, dropout=0.1, alpha=0.5):
"Helper: Construct a model from hyperparameters."
c = copy.deepcopy
attn = MultiHeadedAttention(h, d_model, alpha=alpha)
ff = PositionwiseFeedForward(d_model, d_ff, dropout)
position = PositionalEncoding(d_model, dropout)
model = EncoderDecoder(
Encoder(EncoderLayer(d_model, c(attn), c(ff), dropout), N),
Decoder(DecoderLayer(d_model, c(attn), c(attn), c(ff), dropout), N),
nn.Sequential(Embeddings(d_model, src_vocab), c(position)),
nn.Sequential(Embeddings(d_model, tgt_vocab), c(position)),
Generator(d_model, tgt_vocab)
)
# This was important from their code.
# Initialize parameters with Glorot / fan_avg.
for p in model.parameters():
if p.dim() > 1:
            nn.init.xavier_uniform_(p)
return model
if __name__ == '__main__':
# Small example model
tmp_model = make_model(10, 10, 2)
print(tmp_model)
| en | 0.862204 | ### Generic EncoderDecoder structure: A standard Encoder-Decoder architecture. Base for this and many other models. # This was important from their code. # Initialize parameters with Glorot / fan_avg. # Small example model | 2.243912 | 2 |
venv/lib/python3.8/site-packages/arch/tests/univariate/test_recursions.py | YileC928/finm-portfolio-2021 | 0 | 7599 | import os
import timeit
from typing import List
import numpy as np
from numpy.random import RandomState
from numpy.testing import assert_allclose, assert_almost_equal
import pytest
from scipy.special import gamma
import arch.univariate.recursions_python as recpy
CYTHON_COVERAGE = os.environ.get("ARCH_CYTHON_COVERAGE", "0") in ("true", "1", "True")
try:
import arch.univariate.recursions as rec_cython
missing_extension = False
except ImportError:
missing_extension = True
if missing_extension:
rec = recpy
else:
rec = rec_cython
try:
import numba # noqa
missing_numba = False
except ImportError:
missing_numba = True
pytestmark = pytest.mark.filterwarnings("ignore::arch.compat.numba.PerformanceWarning")
class Timer(object):
def __init__(
self,
first,
first_name,
second,
second_name,
model_name,
setup,
repeat=5,
number=10,
) -> None:
self.first_code = first
self.second_code = second
self.setup = setup
self.first_name = first_name
self.second_name = second_name
self.model_name = model_name
self.repeat = repeat
self.number = number
self._run = False
self.times: List[float] = []
self._codes = [first, second]
self.ratio = np.inf
def display(self):
if not self._run:
self.time()
self.ratio = self.times[0] / self.times[1]
title = self.model_name + " timing"
print("\n" + title)
print("-" * len(title))
print(self.first_name + ": " + "{:0.3f} ms".format(1000 * self.times[0]))
print(self.second_name + ": " + "{:0.3f} ms".format(1000 * self.times[1]))
if self.ratio < 1:
print(
"{0} is {1:0.1f}% faster".format(
self.first_name, 100 * (1 / self.ratio - 1)
)
)
else:
print(
"{0} is {1:0.1f}% faster".format(
self.second_name, 100 * (self.ratio - 1)
)
)
print(
self.first_name
+ "/"
+ self.second_name
+ " Ratio: {:0.3f}\n".format(self.ratio)
)
def time(self):
self.times = []
for code in self._codes:
timer = timeit.Timer(code, setup=self.setup)
self.times.append(min(timer.repeat(self.repeat, self.number)))
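# Illustrative standalone use of Timer (mirrors the performance tests below):
#   t = Timer("sum(x)", "baseline", "sum(x)", "candidate", "demo",
#             setup="x = list(range(100))", repeat=2, number=5)
#   t.display()  # prints both timings and the first/second ratio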
class TestRecursions(object):
@classmethod
def setup_class(cls):
cls.nobs = 1000
cls.rng = RandomState(12345)
cls.resids = cls.rng.standard_normal(cls.nobs)
cls.sigma2 = np.zeros_like(cls.resids)
var = cls.resids.var()
var_bounds = np.array([var / 1000000.0, var * 1000000.0])
cls.var_bounds = np.ones((cls.nobs, 2)) * var_bounds
cls.backcast = 1.0
cls.timer_setup = """
import numpy as np
import arch.univariate.recursions as rec
import arch.univariate.recursions_python as recpy
nobs = 10000
resids = np.random.standard_normal(nobs)
sigma2 = np.zeros_like(resids)
var = resids.var()
backcast = 1.0
var_bounds = np.array([var / 1000000.0, var * 1000000.0])
var_bounds = np.ones((nobs, 2)) * var_bounds
"""
def test_garch(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_numba = sigma2.copy()
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([0.1, -0.4, 0.3, 0.2])
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 0.4, 3, 2])
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 0.4, 0.3, 0.2])
mod_fresids = fresids.copy()
mod_fresids[:1] = np.inf
recpy.garch_recursion_python(
parameters,
mod_fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.garch_recursion(
parameters,
mod_fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_harch(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
lags = np.array([1, 5, 22], dtype=np.int32)
recpy.harch_recursion_python(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
recpy.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_numba = sigma2.copy()
rec.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([-0.1, -0.4, 0.3, 0.2])
recpy.harch_recursion_python(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 4e8, 3, 2])
recpy.harch_recursion_python(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 4e8, 3, 2])
mod_resids = resids.copy()
mod_resids[:10] = np.inf
recpy.harch_recursion_python(
parameters, mod_resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.harch_recursion(
parameters, mod_resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_arch(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
p = 3
recpy.arch_recursion_python(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
recpy.arch_recursion(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
sigma2_numba = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([-0.1, -0.4, 0.3, 0.2])
recpy.arch_recursion_python(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 4e8, 3, 2])
recpy.arch_recursion_python(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
mod_resids = resids.copy()
mod_resids[:10] = np.inf
recpy.arch_recursion_python(
parameters, mod_resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.arch_recursion(
parameters, mod_resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_garch_power_1(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = np.abs(resids) ** 1.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_direct(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = np.abs(resids) ** 2.0
sresids = np.sign(resids)
for t in range(nobs):
if t == 0:
sigma2[t] = parameters.dot(
np.array([1.0, backcast, 0.5 * backcast, backcast])
)
else:
var = np.array(
[
1.0,
resids[t - 1] ** 2.0,
resids[t - 1] ** 2.0 * (resids[t - 1] < 0),
sigma2[t - 1],
]
)
sigma2[t] = parameters.dot(var)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_no_q(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
0,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
0,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_no_p(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
0,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
0,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_no_o(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
0,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
0,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_arch(self):
backcast = self.backcast
nobs, resids, sigma2 = self.nobs, self.resids, self.sigma2
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
3,
0,
0,
nobs,
backcast,
self.var_bounds,
)
sigma2_garch = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_garch, sigma2)
def test_bounds(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([1e100, 0.4, 0.3, 0.2])
lags = np.array([1, 5, 22], dtype=np.int32)
recpy.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([-1e100, 0.4, 0.3, 0.2])
recpy.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert_almost_equal(sigma2, self.var_bounds[:, 0])
parameters = np.array([1e100, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([-1e100, 0.4, 0.3, 0.2])
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
assert_almost_equal(sigma2, self.var_bounds[:, 0])
parameters = np.array([1e100, 0.4, 0.3, 0.2])
recpy.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([-1e100, 0.4, 0.3, 0.2])
recpy.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert_almost_equal(sigma2, self.var_bounds[:, 0])
def test_egarch(self):
nobs = self.nobs
parameters = np.array([0.0, 0.1, -0.1, 0.95])
resids, sigma2 = self.resids, self.sigma2
p = o = q = 1
backcast = 0.0
var_bounds = self.var_bounds
lnsigma2 = np.empty_like(sigma2)
std_resids = np.empty_like(sigma2)
abs_std_resids = np.empty_like(sigma2)
recpy.egarch_recursion(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
sigma2_numba = sigma2.copy()
recpy.egarch_recursion_python(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
sigma2_python = sigma2.copy()
rec.egarch_recursion(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
norm_const = np.sqrt(2 / np.pi)
for t in range(nobs):
lnsigma2[t] = parameters[0]
if t == 0:
lnsigma2[t] += parameters[3] * backcast
else:
stdresid = resids[t - 1] / np.sqrt(sigma2[t - 1])
lnsigma2[t] += parameters[1] * (np.abs(stdresid) - norm_const)
lnsigma2[t] += parameters[2] * stdresid
lnsigma2[t] += parameters[3] * lnsigma2[t - 1]
sigma2[t] = np.exp(lnsigma2[t])
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([-100.0, 0.1, -0.1, 0.95])
recpy.egarch_recursion_python(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.0, 0.1, -0.1, 9.5])
recpy.egarch_recursion_python(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.0, 0.1, -0.1, 0.95])
mod_resids = resids.copy()
mod_resids[:1] = np.inf
recpy.egarch_recursion_python(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_midas_hyperbolic(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.8, 0])
j = np.arange(1, 22 + 1)
weights = gamma(j + 0.6) / (gamma(j + 1) * gamma(0.6))
weights = weights / weights.sum()
recpy.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
sigma2_numba = sigma2.copy()
recpy.midas_recursion_python(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
mod_resids = resids.copy()
mod_resids[:10] = np.inf
recpy.midas_recursion_python(
parameters, weights, mod_resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 10e10, 0])
j = np.arange(1, 22 + 1)
weights = gamma(j + 0.6) / (gamma(j + 1) * gamma(0.6))
weights = weights / weights.sum()
recpy.midas_recursion_python(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, -0.4, 0])
recpy.midas_recursion_python(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_figarch_recursion(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([1.0, 0.2, 0.4, 0.3])
fresids = resids ** 2
p = q = 1
trunc_lag = 1000
rec.figarch_recursion(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
lam = rec.figarch_weights(parameters[1:], p, q, trunc_lag=trunc_lag)
lam_rev = lam[::-1]
omega_tilde = parameters[0] / (1 - parameters[-1])
sigma2_direct = np.empty_like(sigma2)
for t in range(nobs):
backcasts = trunc_lag - t
sigma2_direct[t] = omega_tilde
if backcasts:
sigma2_direct[t] += backcast * lam_rev[:backcasts].sum()
if t:
sigma2_direct[t] += np.sum(lam_rev[-t:] * fresids[max(0, t - 1000) : t])
assert_almost_equal(sigma2_direct, sigma2)
recpy.figarch_recursion(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
sigma2_numba = sigma2.copy()
recpy.figarch_recursion_python(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.figarch_recursion(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
def test_figarch_weights(self):
parameters = np.array([1.0, 0.4])
lam = rec.figarch_weights(parameters[1:], 0, 0, trunc_lag=1000)
lam_direct = np.empty_like(lam)
lam_direct[0] = parameters[-1]
for i in range(1, 1000):
lam_direct[i] = (i - parameters[-1]) / (i + 1) * lam_direct[i - 1]
assert_almost_equal(lam, lam_direct)
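    # The direct recursion above is the fractional-differencing expansion of
    # (1 - L)**d with d = parameters[-1]: lam[0] = d and
    # lam[i] = ((i - d) / (i + 1)) * lam[i - 1], which is what
    # figarch_weights reduces to when p = q = 0.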
@pytest.mark.skipif(
        missing_numba or missing_extension, reason="numba or Cython extension not installed"
)
def test_garch_performance(self):
garch_setup = """
parameters = np.array([.1, .4, .3, .2])
fresids = resids ** 2.0
sresids = np.sign(resids)
"""
garch_first = """
recpy.garch_recursion(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs,
backcast, var_bounds)
"""
garch_second = """
rec.garch_recursion(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs, backcast,
var_bounds)
"""
timer = Timer(
garch_first,
"Numba",
garch_second,
"Cython",
"GARCH",
self.timer_setup + garch_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
@pytest.mark.skipif(
        missing_numba or missing_extension, reason="numba or Cython extension not installed"
)
def test_harch_performance(self):
harch_setup = """
parameters = np.array([.1, .4, .3, .2])
lags = np.array([1, 5, 22], dtype=np.int32)
"""
harch_first = """
recpy.harch_recursion(parameters, resids, sigma2, lags, nobs, backcast,
var_bounds)
"""
harch_second = """
rec.harch_recursion(parameters, resids, sigma2, lags, nobs, backcast, var_bounds)
"""
timer = Timer(
harch_first,
"Numba",
harch_second,
"Cython",
"HARCH",
self.timer_setup + harch_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
@pytest.mark.skipif(
        missing_numba or missing_extension, reason="numba or Cython extension not installed"
)
def test_egarch_performance(self):
egarch_setup = """
parameters = np.array([0.0, 0.1, -0.1, 0.95])
p = o = q = 1
backcast = 0.0
lnsigma2 = np.empty_like(sigma2)
std_resids = np.empty_like(sigma2)
abs_std_resids = np.empty_like(sigma2)
"""
egarch_first = """
recpy.egarch_recursion(parameters, resids, sigma2, p, o, q, nobs, backcast,
var_bounds, lnsigma2, std_resids, abs_std_resids)
"""
egarch_second = """
rec.egarch_recursion(parameters, resids, sigma2, p, o, q, nobs, backcast,
var_bounds, lnsigma2, std_resids, abs_std_resids)
"""
timer = Timer(
egarch_first,
"Numba",
egarch_second,
"Cython",
"EGARCH",
self.timer_setup + egarch_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
@pytest.mark.skipif(
        missing_numba or missing_extension, reason="numba or Cython extension not installed"
)
def test_midas_performance(self):
midas_setup = """
from scipy.special import gamma
parameters = np.array([.1, 0.8, 0])
j = np.arange(1,22+1)
weights = gamma(j+0.6) / (gamma(j+1) * gamma(0.6))
weights = weights / weights.sum()
"""
midas_first = """
recpy.midas_recursion(parameters, weights, resids, sigma2, nobs, backcast, var_bounds)
"""
midas_second = """
rec.midas_recursion(parameters, weights, resids, sigma2, nobs, backcast, var_bounds)
"""
timer = Timer(
midas_first,
"Numba",
midas_second,
"Cython",
"MIDAS",
self.timer_setup + midas_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
@pytest.mark.skipif(
        missing_numba or missing_extension, reason="numba or Cython extension not installed"
)
def test_figarch_performance(self):
midas_setup = """
p = q = 1
trunc_lag = 1000
parameters = np.array([1.0, 0.2, 0.2, 0.04])
fresids = resids ** 2.0
"""
midas_first = """
recpy.figarch_recursion(parameters, fresids, sigma2, p, q, nobs, trunc_lag, backcast, var_bounds)
"""
midas_second = """
rec.figarch_recursion(parameters, fresids, sigma2, p, q, nobs, trunc_lag, backcast, var_bounds)
"""
timer = Timer(
midas_first,
"Numba",
midas_second,
"Cython",
"FIGARCH",
self.timer_setup + midas_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
def test_garch_aparch_equiv(self):
parameters = np.array([0.1, 0.1, 0.8])
fresids = self.resids ** 2
sresids = np.sign(self.resids)
sigma2 = np.empty(1000)
p = q = 1
o = 0
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
sigma2_garch = sigma2.copy()
parameters = np.array([0.1, 0.1, 0.8, 2])
sigma2[:] = np.nan
sigma2_delta = np.empty_like(sigma2)
recpy.aparch_recursion_python(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert_allclose(sigma2_garch, sigma2, atol=1e-6)
sigma2[:] = np.nan
recpy.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert_allclose(sigma2_garch, sigma2, atol=1e-6)
sigma2[:] = np.nan
rec.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert_allclose(sigma2_garch, sigma2, atol=1e-6)
def test_asym_aparch_smoke(self):
sigma2 = np.empty(1000)
p = o = q = 1
parameters = np.array([0.1, 0.1, 0.1, 0.8, 1.3])
sigma2[:] = np.nan
sigma2_delta = np.empty_like(sigma2)
recpy.aparch_recursion_python(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert np.all(np.isfinite(sigma2))
sigma2_py = sigma2.copy()
sigma2[:] = np.nan
recpy.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert np.all(np.isfinite(sigma2))
assert_allclose(sigma2_py, sigma2)
sigma2[:] = np.nan
rec.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert np.all(np.isfinite(sigma2))
assert_allclose(sigma2_py, sigma2)
def test_bounds_check():
var_bounds = np.array([0.1, 10])
assert_almost_equal(recpy.bounds_check_python(-1.0, var_bounds), 0.1)
assert_almost_equal(
recpy.bounds_check_python(20.0, var_bounds), 10 + np.log(20.0 / 10.0)
)
assert_almost_equal(recpy.bounds_check_python(np.inf, var_bounds), 1010.0)
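# Illustrative invocation (assumes pytest is available on the PATH):
#   pytest test_recursions.py -k "garch or figarch" -v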
| import os
import timeit
from typing import List
import numpy as np
from numpy.random import RandomState
from numpy.testing import assert_allclose, assert_almost_equal
import pytest
from scipy.special import gamma
import arch.univariate.recursions_python as recpy
CYTHON_COVERAGE = os.environ.get("ARCH_CYTHON_COVERAGE", "0") in ("true", "1", "True")
try:
import arch.univariate.recursions as rec_cython
missing_extension = False
except ImportError:
missing_extension = True
if missing_extension:
rec = recpy
else:
rec = rec_cython
try:
import numba # noqa
missing_numba = False
except ImportError:
missing_numba = True
pytestmark = pytest.mark.filterwarnings("ignore::arch.compat.numba.PerformanceWarning")
class Timer(object):
def __init__(
self,
first,
first_name,
second,
second_name,
model_name,
setup,
repeat=5,
number=10,
) -> None:
self.first_code = first
self.second_code = second
self.setup = setup
self.first_name = first_name
self.second_name = second_name
self.model_name = model_name
self.repeat = repeat
self.number = number
self._run = False
self.times: List[float] = []
self._codes = [first, second]
self.ratio = np.inf
def display(self):
if not self._run:
self.time()
self.ratio = self.times[0] / self.times[1]
title = self.model_name + " timing"
print("\n" + title)
print("-" * len(title))
print(self.first_name + ": " + "{:0.3f} ms".format(1000 * self.times[0]))
print(self.second_name + ": " + "{:0.3f} ms".format(1000 * self.times[1]))
if self.ratio < 1:
print(
"{0} is {1:0.1f}% faster".format(
self.first_name, 100 * (1 / self.ratio - 1)
)
)
else:
print(
"{0} is {1:0.1f}% faster".format(
self.second_name, 100 * (self.ratio - 1)
)
)
print(
self.first_name
+ "/"
+ self.second_name
+ " Ratio: {:0.3f}\n".format(self.ratio)
)
def time(self):
self.times = []
for code in self._codes:
timer = timeit.Timer(code, setup=self.setup)
self.times.append(min(timer.repeat(self.repeat, self.number)))
class TestRecursions(object):
@classmethod
def setup_class(cls):
cls.nobs = 1000
cls.rng = RandomState(12345)
cls.resids = cls.rng.standard_normal(cls.nobs)
cls.sigma2 = np.zeros_like(cls.resids)
var = cls.resids.var()
var_bounds = np.array([var / 1000000.0, var * 1000000.0])
cls.var_bounds = np.ones((cls.nobs, 2)) * var_bounds
cls.backcast = 1.0
cls.timer_setup = """
import numpy as np
import arch.univariate.recursions as rec
import arch.univariate.recursions_python as recpy
nobs = 10000
resids = np.random.standard_normal(nobs)
sigma2 = np.zeros_like(resids)
var = resids.var()
backcast = 1.0
var_bounds = np.array([var / 1000000.0, var * 1000000.0])
var_bounds = np.ones((nobs, 2)) * var_bounds
"""
def test_garch(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_numba = sigma2.copy()
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([0.1, -0.4, 0.3, 0.2])
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 0.4, 3, 2])
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 0.4, 0.3, 0.2])
mod_fresids = fresids.copy()
mod_fresids[:1] = np.inf
recpy.garch_recursion_python(
parameters,
mod_fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.garch_recursion(
parameters,
mod_fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_harch(self):
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
lags = np.array([1, 5, 22], dtype=np.int32)
recpy.harch_recursion_python(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
recpy.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_numba = sigma2.copy()
rec.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([-0.1, -0.4, 0.3, 0.2])
recpy.harch_recursion_python(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 4e8, 3, 2])
recpy.harch_recursion_python(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 4e8, 3, 2])
mod_resids = resids.copy()
mod_resids[:10] = np.inf
recpy.harch_recursion_python(
parameters, mod_resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.harch_recursion(
parameters, mod_resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_arch(self):
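        # ARCH(3): same agreement and bounds checks as the GARCH/HARCH tests.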
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
p = 3
recpy.arch_recursion_python(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
recpy.arch_recursion(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
sigma2_numba = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([-0.1, -0.4, 0.3, 0.2])
recpy.arch_recursion_python(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 4e8, 3, 2])
recpy.arch_recursion_python(
parameters, resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
mod_resids = resids.copy()
mod_resids[:10] = np.inf
recpy.arch_recursion_python(
parameters, mod_resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.arch_recursion(
parameters, mod_resids, sigma2, p, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_garch_power_1(self):
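        # GARCH recursion fed |resids|**1 instead of squared residuals; the
        # recpy and Cython paths must still produce identical output.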
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = np.abs(resids) ** 1.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_direct(self):
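        # Rebuild GJR-GARCH(1,1,1) variances with an explicit loop and check
        # that the Cython recursion reproduces them.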
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = np.abs(resids) ** 2.0
sresids = np.sign(resids)
for t in range(nobs):
if t == 0:
sigma2[t] = parameters.dot(
np.array([1.0, backcast, 0.5 * backcast, backcast])
)
else:
var = np.array(
[
1.0,
resids[t - 1] ** 2.0,
resids[t - 1] ** 2.0 * (resids[t - 1] < 0),
sigma2[t - 1],
]
)
sigma2[t] = parameters.dot(var)
        sigma2_direct = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
        assert_almost_equal(sigma2_direct, sigma2)
def test_garch_no_q(self):
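        # q=0: no lagged-variance term; recpy and Cython must agree.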
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
0,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
0,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_no_p(self):
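        # p=0: no symmetric ARCH term; recpy and Cython must agree.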
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
0,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
0,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_no_o(self):
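        # o=0: no asymmetric term; recpy and Cython must agree.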
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
0,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
0,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
def test_garch_arch(self):
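        # With p=3 and o=q=0 the GARCH recursion collapses to ARCH(3), so it
        # must match arch_recursion on the same parameters.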
backcast = self.backcast
nobs, resids, sigma2 = self.nobs, self.resids, self.sigma2
parameters = np.array([0.1, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
3,
0,
0,
nobs,
backcast,
self.var_bounds,
)
sigma2_garch = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_garch, sigma2)
def test_bounds(self):
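        # Intercepts of +/-1e100 exercise the variance-bounds logic in the
        # HARCH, GARCH and ARCH recursions; recpy and Cython must agree.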
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([1e100, 0.4, 0.3, 0.2])
lags = np.array([1, 5, 22], dtype=np.int32)
recpy.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([-1e100, 0.4, 0.3, 0.2])
recpy.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.harch_recursion(
parameters, resids, sigma2, lags, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert_almost_equal(sigma2, self.var_bounds[:, 0])
parameters = np.array([1e100, 0.4, 0.3, 0.2])
fresids = resids ** 2.0
sresids = np.sign(resids)
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([-1e100, 0.4, 0.3, 0.2])
recpy.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.garch_recursion(
parameters,
fresids,
sresids,
sigma2,
1,
1,
1,
nobs,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_python, sigma2)
assert_almost_equal(sigma2, self.var_bounds[:, 0])
parameters = np.array([1e100, 0.4, 0.3, 0.2])
recpy.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([-1e100, 0.4, 0.3, 0.2])
recpy.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.arch_recursion(
parameters, resids, sigma2, 3, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_python, sigma2)
assert_almost_equal(sigma2, self.var_bounds[:, 0])
def test_egarch(self):
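        # EGARCH(1,1,1): compare all three implementations, replicate the
        # log-variance recursion directly, then stress the bounds handling.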
nobs = self.nobs
parameters = np.array([0.0, 0.1, -0.1, 0.95])
resids, sigma2 = self.resids, self.sigma2
p = o = q = 1
backcast = 0.0
var_bounds = self.var_bounds
lnsigma2 = np.empty_like(sigma2)
std_resids = np.empty_like(sigma2)
abs_std_resids = np.empty_like(sigma2)
recpy.egarch_recursion(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
sigma2_numba = sigma2.copy()
recpy.egarch_recursion_python(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
sigma2_python = sigma2.copy()
rec.egarch_recursion(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
norm_const = np.sqrt(2 / np.pi)
for t in range(nobs):
lnsigma2[t] = parameters[0]
if t == 0:
lnsigma2[t] += parameters[3] * backcast
else:
stdresid = resids[t - 1] / np.sqrt(sigma2[t - 1])
lnsigma2[t] += parameters[1] * (np.abs(stdresid) - norm_const)
lnsigma2[t] += parameters[2] * stdresid
lnsigma2[t] += parameters[3] * lnsigma2[t - 1]
sigma2[t] = np.exp(lnsigma2[t])
assert_almost_equal(sigma2_python, sigma2)
parameters = np.array([-100.0, 0.1, -0.1, 0.95])
recpy.egarch_recursion_python(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.0, 0.1, -0.1, 9.5])
recpy.egarch_recursion_python(
parameters,
resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.0, 0.1, -0.1, 0.95])
mod_resids = resids.copy()
mod_resids[:1] = np.inf
recpy.egarch_recursion_python(
parameters,
            mod_resids,
sigma2,
p,
o,
q,
nobs,
backcast,
var_bounds,
lnsigma2,
std_resids,
abs_std_resids,
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_midas_hyperbolic(self):
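        # MIDAS with hyperbolic (gamma-ratio) weights: implementations must
        # agree, and bad parameters or inf residuals must respect the bounds.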
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([0.1, 0.8, 0])
j = np.arange(1, 22 + 1)
weights = gamma(j + 0.6) / (gamma(j + 1) * gamma(0.6))
weights = weights / weights.sum()
recpy.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
sigma2_numba = sigma2.copy()
recpy.midas_recursion_python(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
sigma2_python = sigma2.copy()
rec.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
mod_resids = resids.copy()
mod_resids[:10] = np.inf
recpy.midas_recursion_python(
parameters, weights, mod_resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, 10e10, 0])
j = np.arange(1, 22 + 1)
weights = gamma(j + 0.6) / (gamma(j + 1) * gamma(0.6))
weights = weights / weights.sum()
recpy.midas_recursion_python(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
parameters = np.array([0.1, -0.4, 0])
recpy.midas_recursion_python(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
rec.midas_recursion(
parameters, weights, resids, sigma2, nobs, backcast, self.var_bounds
)
assert np.all(sigma2 >= self.var_bounds[:, 0])
assert np.all(sigma2 <= 2 * self.var_bounds[:, 1])
def test_figarch_recursion(self):
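        # FIGARCH(1,d,1) truncated at 1000 lags: verify against a direct
        # ARCH(inf) computation, then check all three implementations agree.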
nobs, resids = self.nobs, self.resids
sigma2, backcast = self.sigma2, self.backcast
parameters = np.array([1.0, 0.2, 0.4, 0.3])
fresids = resids ** 2
p = q = 1
trunc_lag = 1000
rec.figarch_recursion(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
lam = rec.figarch_weights(parameters[1:], p, q, trunc_lag=trunc_lag)
lam_rev = lam[::-1]
omega_tilde = parameters[0] / (1 - parameters[-1])
sigma2_direct = np.empty_like(sigma2)
for t in range(nobs):
backcasts = trunc_lag - t
sigma2_direct[t] = omega_tilde
if backcasts:
sigma2_direct[t] += backcast * lam_rev[:backcasts].sum()
if t:
                sigma2_direct[t] += np.sum(lam_rev[-t:] * fresids[max(0, t - trunc_lag) : t])
assert_almost_equal(sigma2_direct, sigma2)
recpy.figarch_recursion(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
sigma2_numba = sigma2.copy()
recpy.figarch_recursion_python(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
sigma2_python = sigma2.copy()
rec.figarch_recursion(
parameters,
fresids,
sigma2,
p,
q,
nobs,
trunc_lag,
backcast,
self.var_bounds,
)
assert_almost_equal(sigma2_numba, sigma2)
assert_almost_equal(sigma2_python, sigma2)
def test_figarch_weights(self):
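        # With p=q=0 the FIGARCH weights reduce to the fractional-difference
        # expansion, which the loop below reproduces term by term.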
parameters = np.array([1.0, 0.4])
lam = rec.figarch_weights(parameters[1:], 0, 0, trunc_lag=1000)
lam_direct = np.empty_like(lam)
lam_direct[0] = parameters[-1]
for i in range(1, 1000):
lam_direct[i] = (i - parameters[-1]) / (i + 1) * lam_direct[i - 1]
assert_almost_equal(lam, lam_direct)
    @pytest.mark.skipif(
        missing_numba or missing_extension, reason="numba or extension not installed"
    )
def test_garch_performance(self):
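        # Benchmark Numba against Cython; only a loose 10x/0.1x band is
        # asserted since absolute timings are machine-dependent. The other
        # *_performance tests below follow the same pattern.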
garch_setup = """
parameters = np.array([.1, .4, .3, .2])
fresids = resids ** 2.0
sresids = np.sign(resids)
"""
garch_first = """
recpy.garch_recursion(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs,
backcast, var_bounds)
"""
garch_second = """
rec.garch_recursion(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs, backcast,
var_bounds)
"""
timer = Timer(
garch_first,
"Numba",
garch_second,
"Cython",
"GARCH",
self.timer_setup + garch_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
    @pytest.mark.skipif(
        missing_numba or missing_extension, reason="numba or extension not installed"
    )
def test_harch_performance(self):
harch_setup = """
parameters = np.array([.1, .4, .3, .2])
lags = np.array([1, 5, 22], dtype=np.int32)
"""
harch_first = """
recpy.harch_recursion(parameters, resids, sigma2, lags, nobs, backcast,
var_bounds)
"""
harch_second = """
rec.harch_recursion(parameters, resids, sigma2, lags, nobs, backcast, var_bounds)
"""
timer = Timer(
harch_first,
"Numba",
harch_second,
"Cython",
"HARCH",
self.timer_setup + harch_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
    @pytest.mark.skipif(
        missing_numba or missing_extension, reason="numba or extension not installed"
    )
def test_egarch_performance(self):
egarch_setup = """
parameters = np.array([0.0, 0.1, -0.1, 0.95])
p = o = q = 1
backcast = 0.0
lnsigma2 = np.empty_like(sigma2)
std_resids = np.empty_like(sigma2)
abs_std_resids = np.empty_like(sigma2)
"""
egarch_first = """
recpy.egarch_recursion(parameters, resids, sigma2, p, o, q, nobs, backcast,
var_bounds, lnsigma2, std_resids, abs_std_resids)
"""
egarch_second = """
rec.egarch_recursion(parameters, resids, sigma2, p, o, q, nobs, backcast,
var_bounds, lnsigma2, std_resids, abs_std_resids)
"""
timer = Timer(
egarch_first,
"Numba",
egarch_second,
"Cython",
"EGARCH",
self.timer_setup + egarch_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
    @pytest.mark.skipif(
        missing_numba or missing_extension, reason="numba or extension not installed"
    )
def test_midas_performance(self):
midas_setup = """
from scipy.special import gamma
parameters = np.array([.1, 0.8, 0])
j = np.arange(1,22+1)
weights = gamma(j+0.6) / (gamma(j+1) * gamma(0.6))
weights = weights / weights.sum()
"""
midas_first = """
recpy.midas_recursion(parameters, weights, resids, sigma2, nobs, backcast, var_bounds)
"""
midas_second = """
rec.midas_recursion(parameters, weights, resids, sigma2, nobs, backcast, var_bounds)
"""
timer = Timer(
midas_first,
"Numba",
midas_second,
"Cython",
"MIDAS",
self.timer_setup + midas_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
    @pytest.mark.skipif(
        missing_numba or missing_extension, reason="numba or extension not installed"
    )
def test_figarch_performance(self):
midas_setup = """
p = q = 1
trunc_lag = 1000
parameters = np.array([1.0, 0.2, 0.2, 0.04])
fresids = resids ** 2.0
"""
midas_first = """
recpy.figarch_recursion(parameters, fresids, sigma2, p, q, nobs, trunc_lag, backcast, var_bounds)
"""
midas_second = """
rec.figarch_recursion(parameters, fresids, sigma2, p, q, nobs, trunc_lag, backcast, var_bounds)
"""
timer = Timer(
            figarch_first,
"Numba",
            figarch_second,
"Cython",
"FIGARCH",
            self.timer_setup + figarch_setup,
)
timer.display()
assert timer.ratio < 10.0
if not (missing_numba or CYTHON_COVERAGE):
assert 0.1 < timer.ratio
def test_garch_aparch_equiv(self):
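        # APARCH with delta=2 and no asymmetry is algebraically a GARCH(1,1);
        # all three APARCH implementations must reproduce the GARCH variances.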
parameters = np.array([0.1, 0.1, 0.8])
fresids = self.resids ** 2
sresids = np.sign(self.resids)
        sigma2 = np.empty(self.nobs)
p = q = 1
o = 0
recpy.garch_recursion_python(
parameters,
fresids,
sresids,
sigma2,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
sigma2_garch = sigma2.copy()
parameters = np.array([0.1, 0.1, 0.8, 2])
sigma2[:] = np.nan
sigma2_delta = np.empty_like(sigma2)
recpy.aparch_recursion_python(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert_allclose(sigma2_garch, sigma2, atol=1e-6)
sigma2[:] = np.nan
recpy.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert_allclose(sigma2_garch, sigma2, atol=1e-6)
sigma2[:] = np.nan
rec.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert_allclose(sigma2_garch, sigma2, atol=1e-6)
def test_asym_aparch_smoke(self):
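        # Smoke test: asymmetric APARCH (delta=1.3) should produce finite
        # variances, with pure-Python, Numba and Cython outputs matching.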
        sigma2 = np.empty(self.nobs)
p = o = q = 1
parameters = np.array([0.1, 0.1, 0.1, 0.8, 1.3])
sigma2[:] = np.nan
sigma2_delta = np.empty_like(sigma2)
recpy.aparch_recursion_python(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert np.all(np.isfinite(sigma2))
sigma2_py = sigma2.copy()
sigma2[:] = np.nan
recpy.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert np.all(np.isfinite(sigma2))
assert_allclose(sigma2_py, sigma2)
sigma2[:] = np.nan
rec.aparch_recursion(
parameters,
self.resids,
np.abs(self.resids),
sigma2,
sigma2_delta,
p,
o,
q,
self.nobs,
self.backcast,
self.var_bounds,
)
assert np.all(np.isfinite(sigma2))
assert_allclose(sigma2_py, sigma2)
def test_bounds_check():
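    # Assumed bounds-check rule (inferred from the assertions below, not from
    # the library source): values under the lower bound clip to it; values
    # above the upper bound map to upper + log(value / upper); inf maps to
    # upper + 1000, giving the 1010.0 expectation.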
var_bounds = np.array([0.1, 10])
assert_almost_equal(recpy.bounds_check_python(-1.0, var_bounds), 0.1)
assert_almost_equal(
recpy.bounds_check_python(20.0, var_bounds), 10 + np.log(20.0 / 10.0)
)
assert_almost_equal(recpy.bounds_check_python(np.inf, var_bounds), 1010.0)
| en | 0.335827 | # noqa import numpy as np import arch.univariate.recursions as rec import arch.univariate.recursions_python as recpy nobs = 10000 resids = np.random.standard_normal(nobs) sigma2 = np.zeros_like(resids) var = resids.var() backcast = 1.0 var_bounds = np.array([var / 1000000.0, var * 1000000.0]) var_bounds = np.ones((nobs, 2)) * var_bounds parameters = np.array([.1, .4, .3, .2]) fresids = resids ** 2.0 sresids = np.sign(resids) recpy.garch_recursion(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs, backcast, var_bounds) rec.garch_recursion(parameters, fresids, sresids, sigma2, 1, 1, 1, nobs, backcast, var_bounds) parameters = np.array([.1, .4, .3, .2]) lags = np.array([1, 5, 22], dtype=np.int32) recpy.harch_recursion(parameters, resids, sigma2, lags, nobs, backcast, var_bounds) rec.harch_recursion(parameters, resids, sigma2, lags, nobs, backcast, var_bounds) parameters = np.array([0.0, 0.1, -0.1, 0.95]) p = o = q = 1 backcast = 0.0 lnsigma2 = np.empty_like(sigma2) std_resids = np.empty_like(sigma2) abs_std_resids = np.empty_like(sigma2) recpy.egarch_recursion(parameters, resids, sigma2, p, o, q, nobs, backcast, var_bounds, lnsigma2, std_resids, abs_std_resids) rec.egarch_recursion(parameters, resids, sigma2, p, o, q, nobs, backcast, var_bounds, lnsigma2, std_resids, abs_std_resids) from scipy.special import gamma parameters = np.array([.1, 0.8, 0]) j = np.arange(1,22+1) weights = gamma(j+0.6) / (gamma(j+1) * gamma(0.6)) weights = weights / weights.sum() recpy.midas_recursion(parameters, weights, resids, sigma2, nobs, backcast, var_bounds) rec.midas_recursion(parameters, weights, resids, sigma2, nobs, backcast, var_bounds) p = q = 1 trunc_lag = 1000 parameters = np.array([1.0, 0.2, 0.2, 0.04]) fresids = resids ** 2.0 recpy.figarch_recursion(parameters, fresids, sigma2, p, q, nobs, trunc_lag, backcast, var_bounds) rec.figarch_recursion(parameters, fresids, sigma2, p, q, nobs, trunc_lag, backcast, var_bounds) | 2.242768 | 2 |