pmkhanh7890 committed
Commit 6610027 · 1 Parent(s): 78a90b5
.DS_Store ADDED
Binary file (8.2 kB).
 
.gitignore ADDED
@@ -0,0 +1,113 @@
+ # virtual environment
+ .venv
+ common/cpp
+ common/cpp_gapi
+ sample_videos/*
+ models/vietocr/*
+ models/soundbar/*
+ results.json
+ output/*
+ models/soundbar_detection_yolov7/*
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+ # C extensions
+ *.so
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+ # source/yolov7/*
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ .hypothesis/
+ .pytest_cache/
+ # Translations
+ *.mo
+ *.pot
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ # Flask stuff:
+ instance/
+ .webassets-cache
+ # Scrapy stuff:
+ .scrapy
+ # Sphinx documentation
+ docs/_build/
+ # PyBuilder
+ target/
+ # Jupyter Notebook
+ .ipynb_checkpoints
+ # IPython
+ profile_default/
+ ipython_config.py
+ # pyenv
+ .python-version
+ # celery beat schedule file
+ celerybeat-schedule
+ # SageMath parsed files
+ *.sage.py
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+ # Rope project settings
+ .ropeproject
+ # mkdocs documentation
+ /site
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+ # Pyre type checker
+ .pyre/
+ # custom
+ old/
+ notebook/
+ static/images/face
+ static/images/celeba
+ celeba_pics.gz
+ celeba_vecs.gz
+ face_pics.gz
+ face_vecs.gz
.pre-commit-config.yaml ADDED
@@ -0,0 +1,53 @@
+ # See https://pre-commit.com for more information
+ # See https://pre-commit.com/hooks.html for more hooks
+ repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+   rev: v4.2.0
+   hooks:
+   #- id: check-added-large-files
+   - id: fix-byte-order-marker
+   - id: check-case-conflict
+   - id: check-json
+   - id: check-yaml
+     args: ['--unsafe']
+   - id: detect-aws-credentials
+     args: [--allow-missing-credentials]
+   - id: detect-private-key
+   - id: end-of-file-fixer
+   - id: mixed-line-ending
+   - id: trailing-whitespace
+ - repo: https://github.com/asottile/add-trailing-comma
+   rev: v2.2.2
+   hooks:
+   - id: add-trailing-comma
+ - repo: https://github.com/pycqa/isort
+   rev: 5.10.1
+   hooks:
+   - id: isort
+     name: isort (python)
+     args: [--settings-path=pyproject.toml]
+   - id: isort
+     name: isort (cython)
+     types: [cython]
+   - id: isort
+     name: isort (pyi)
+     types: [pyi]
+ - repo: https://github.com/psf/black
+   rev: 22.3.0
+   hooks:
+   - id: black
+     args: [--config=pyproject.toml]
+ - repo: https://gitlab.com/pycqa/flake8
+   rev: 4.0.1
+   hooks:
+   - id: flake8
+     args: [--ignore, "E203", --max-line-length, "79"]
+ - repo: https://github.com/kynan/nbstripout
+   rev: 0.5.0
+   hooks:
+   - id: nbstripout
+ - repo: https://github.com/asottile/pyupgrade
+   rev: v2.32.0
+   hooks:
+   - id: pyupgrade
+     args: [--py36-plus]
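(With this config checked in, `pre-commit install` registers the hooks in .git/hooks and `pre-commit run --all-files` applies them to the whole tree. Note that the isort and black hooks above read their settings from pyproject.toml via --settings-path / --config.)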
.pre-commit-setting.toml ADDED
@@ -0,0 +1,22 @@
+ # See https://pre-commit.com for more information
+ # See https://pre-commit.com/hooks.html for more hooks
+ [tool.black]
+ line-length = 79
+ include = '\.pyi?$'
+ exclude = '''
+ /(
+     \.git
+   | \.idea
+   | \.pytest_cache
+   | \.tox
+   | \.venv
+   | _build
+   | buck-out
+   | build
+   | dist
+ )/
+ '''
+
+ [flake8]
+ ignore = E203
+ max-line-length = 79
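(Note: the hooks in .pre-commit-config.yaml point at pyproject.toml rather than at this file, and flake8 reads INI-style configs (setup.cfg, tox.ini, .flake8) rather than TOML — so these [tool.black] and [flake8] tables presumably serve as a template to be copied into those files.)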
_config.yml ADDED
@@ -0,0 +1 @@
+ theme: jekyll-theme-modernist
app.py ADDED
@@ -0,0 +1,68 @@
+ import gradio as gr
+ import openvino as ov
+ from pathlib import Path
+ import numpy as np
+ from PIL import Image
+ import cv2
+
+ from src.config import (
+     DICT_DIR,
+     IMAGE_TYPES,
+     IMAGE_EXAMPLE,
+     MODEL_DIR,
+     DEVICE,
+ )
+
+ from src.image_processing import recognize
+
+ # Load models
+ core = ov.Core()
+ model = core.read_model(model=Path(MODEL_DIR))
+ print("[INFO] Loaded recognition model")
+
+ # Select device (CPU or GPU)
+ compiled_model = core.compile_model(model=model, device_name=DEVICE)
+
+ # Fetch information about the input and output layers
+ recognition_input_layer = compiled_model.input(0)
+ recognition_output_layer = compiled_model.output(0)
+
+ print("[INFO] Fetched recognition model")
+
+ # In the JA model, a blank symbol must be added at index 0 of the charlist.
+ blank_char = "~"
+
+ with Path(DICT_DIR).open(mode="r", encoding="utf-8") as charlist:
+     letters = blank_char + "".join(line.strip() for line in charlist)
+ print("[INFO] Loaded dictionary")
+
+
+
+ def do_ocr(inp):
+     # img = Image.open(inp).convert('L')
+     # img = np.array(img)
+     print(f"input: {inp}")
+     print(type(inp))
+     # img = cv2.imread(inp, cv2.IMREAD_GRAYSCALE)
+     img = cv2.cvtColor(inp, cv2.COLOR_BGR2GRAY)
+     recognized_text = recognize(img,
+                                 compiled_model,
+                                 recognition_input_layer,
+                                 recognition_output_layer,
+                                 letters,
+                                 )
+     return "".join(recognized_text)
+
+ input = gr.Image()
+ output = gr.Textbox()
+
+ title = "日本語手書き認識"
+ description = "DEMO by Tokyo Teachies (注意:画像には1行のテキストしか含まれていません。)"
+ examples = [['data/in_1.png'], ['data/sample_1_1.png']]
+
+ gr.Interface(fn=do_ocr,
+              inputs=input,
+              outputs=output,
+              title=title,
+              description=description,
+              examples=examples).launch()
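(Note: the commit imports src/config.py and src/image_processing.py but does not include them, so recognize() itself is not shown. As a hypothetical sketch only — not the committed implementation — a typical pipeline for this model scales the line image to the IR's 1×1×96×2000 input, runs inference, and CTC-greedy-decodes the output against `letters`, whose index 0 is the blank symbol:)

import cv2
import numpy as np


def recognize(img, compiled_model, input_layer, output_layer, letters):
    # Scale the grayscale line image to the model's input height,
    # then crop/pad on the right to the input width (96x2000 here).
    _, _, h, w = list(input_layer.shape)
    scale = h / img.shape[0]
    resized = cv2.resize(img, None, fx=scale, fy=scale, interpolation=cv2.INTER_AREA)
    resized = resized[:, :w]
    padded = np.pad(resized, ((0, 0), (0, w - resized.shape[1])), mode="edge")

    # Run inference; the output holds per-timestep scores over the charlist.
    batch = padded[None, None, :, :].astype(np.float32)
    scores = compiled_model([batch])[output_layer].squeeze()

    # CTC greedy decoding: argmax per timestep, collapse repeats, drop blanks.
    indices = scores.argmax(axis=-1)
    decoded = []
    prev = -1
    for idx in indices:
        if idx != prev and idx != 0:  # index 0 is the blank symbol
            decoded.append(letters[idx])
        prev = idx
    return "".join(decoded)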
app_streamlit.py ADDED
@@ -0,0 +1,104 @@
+ import streamlit as st
+ import openvino as ov
+ from pathlib import Path
+ import numpy as np
+ from PIL import Image
+
+ from src.config import (
+     DICT_DIR,
+     IMAGE_TYPES,
+     IMAGE_EXAMPLE,
+     MODEL_DIR,
+     DEVICE,
+ )
+
+ from src.image_processing import recognize
+
+ # Hide deprecation warnings that do not directly affect the application
+ import warnings
+ warnings.filterwarnings("ignore")
+
+ # Set a custom page title and icon, plus the layout and sidebar state
+ st.set_page_config(
+     page_title="日本語手書き認識",
+     page_icon="",
+     layout="centered",
+     initial_sidebar_state="auto",
+ )
+
+ # Custom CSS styling only (not part of the main logic): hide Streamlit's menu and footer
+ hide_streamlit_style = """
+     <style>
+     #MainMenu {visibility: hidden;}
+     footer {visibility: hidden;}
+     </style>
+ """
+ st.markdown(hide_streamlit_style, unsafe_allow_html=True)  # unsafe_allow_html lets Streamlit render the embedded HTML/CSS
+
+
+ @st.cache_resource
+ def init():
+     # Load models
+     core = ov.Core()
+     model = core.read_model(model=Path(MODEL_DIR))
+     print("[INFO] Loaded recognition model")
+
+     # Select device (CPU or GPU)
+     compiled_model = core.compile_model(model=model, device_name=DEVICE)
+
+     # Fetch information about the input and output layers
+     recognition_input_layer = compiled_model.input(0)
+     recognition_output_layer = compiled_model.output(0)
+
+     print("[INFO] Fetched recognition model")
+
+     # In the JA model, a blank symbol must be added at index 0 of the charlist.
+     blank_char = "~"
+
+     with Path(DICT_DIR).open(mode="r", encoding="utf-8") as charlist:
+         letters = blank_char + "".join(line.strip() for line in charlist)
+     print("[INFO] Loaded dictionary")
+
+
+     return [compiled_model, recognition_input_layer, recognition_output_layer, letters]
+
+ def display_text(bounds):
+     text = []
+     for x in bounds:
+         t = x[1]
+         text.append(t)
+     text = ' '.join(text)
+     return text
+
+ HWRmodel = init()
+ st.set_option('deprecation.showfileUploaderEncoding', False)
+ st.title('日本語手書き認識')
+ st.subheader('Tokyo Teachies (DEMO)')
+ st.subheader('注意:画像には1行のテキストしか含まれていません。')
+ #st.text('Select source Language from the Sidebar.')
+
+ image_file = st.file_uploader("画像をアップロード…", type=IMAGE_TYPES)
+ if image_file is not None:
+     st.subheader('アップロードした画像')
+     st.image(image_file, width=450)
+ else:
+     st.subheader('例:')
+     image_file = IMAGE_EXAMPLE
+     st.image(image_file, use_column_width=450)
+
+
+ if st.button("Recognize"):
+     if image_file is not None:
+         img = Image.open(image_file).convert('L')
+         img = np.array(img)
+
+         with st.spinner('テキストの抽出...'):
+             recognized_text = recognize(img, HWRmodel[0], HWRmodel[1], HWRmodel[2], HWRmodel[3])
+         #st.subheader('Extracted text is ...')
+         #text = display_text(recognized_text)
+         st.write("".join(recognized_text))
+
+
+
+
+
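(Note on the two entry points: app.py serves the Gradio UI and runs with `python app.py`, while this file serves the same pipeline through Streamlit and runs with `streamlit run app_streamlit.py`. The @st.cache_resource decorator caches the compiled model and charlist across script reruns, so init() executes only once per process.)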
data/in_1.png ADDED
data/sample_1_1.png ADDED
dict/japanese_charlist.txt ADDED
@@ -0,0 +1,4441 @@
+ [4,441-line character list, one character per line. Most glyphs did not survive page extraction; recoverable entries include the digits 0–9, Greek letters (α β γ δ ε ζ η θ ι κ λ μ ν ξ ο π ρ σ τ υ φ χ ψ ω Δ Σ Φ Ω), the symbols _ ± × ÷ ° §, and kanji such as 使 便 退 湿 西 沿 調 綿 姿 宿 殿 稿 寿 鹿 椿 竿 簿 貿 婿 漿 穿 尿 廿 禿 輿 駿 歿 麿 榿 橿 耀 覿 祿 辿.]
models/handwritten-japanese-recognition-0001.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:734967ddeb1037045be348bc6fce8ddf0bfcc601fe2de2d8e8aceeae6239158a
+ size 33984614
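(This is a Git LFS pointer file rather than the binary itself: `oid` is the SHA-256 of the actual payload and `size` its byte count (~34 MB); Git LFS downloads the real weights on checkout.)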
models/handwritten-japanese-recognition-0001.xml ADDED
@@ -0,0 +1,2252 @@
+ <?xml version="1.0"?>
+ <net name="torch-jit-export" version="11">
+   <layers>
+     <layer id="0" name="actual_input" type="Parameter" version="opset1">
+       <data shape="1,1,96,2000" element_type="f32" />
+       <rt_info>
+         <attribute name="old_api_map_element_type" version="0" value="f16" />
+       </rt_info>
+       <output>
+         <port id="0" precision="FP32" names="actual_input">
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>96</dim>
+           <dim>2000</dim>
+           <rt_info>
+             <attribute name="layout" version="0" layout="[N,C,H,W]" />
+           </rt_info>
+         </port>
+       </output>
+     </layer>
+     <layer id="1" name="Constant_2751_compressed" type="Const" version="opset1">
+       <data element_type="f16" shape="1, 1, 1, 1" offset="0" size="2" />
+       <output>
+         <port id="0" precision="FP16">
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+         </port>
+       </output>
+     </layer>
+     <layer id="2" name="Constant_2751" type="Convert" version="opset1">
+       <data destination_type="f32" />
+       <rt_info>
+         <attribute name="decompression" version="0" />
+       </rt_info>
+       <input>
+         <port id="0" precision="FP16">
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+         </port>
+       </input>
+       <output>
+         <port id="1" precision="FP32">
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+         </port>
+       </output>
+     </layer>
+     <layer id="3" name="Subtract_351" type="Subtract" version="opset1">
+       <data auto_broadcast="numpy" />
+       <rt_info>
+         <attribute name="preprocessing" version="0" />
+       </rt_info>
+       <input>
+         <port id="0" precision="FP32">
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>96</dim>
+           <dim>2000</dim>
+         </port>
+         <port id="1" precision="FP32">
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+         </port>
+       </input>
+       <output>
+         <port id="2" precision="FP32">
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>96</dim>
+           <dim>2000</dim>
+         </port>
+       </output>
+     </layer>
+     <layer id="4" name="Multiply_2600_compressed" type="Const" version="opset1">
+       <data element_type="f16" shape="64, 1, 3, 3" offset="2" size="1152" />
+       <rt_info>
+         <attribute name="preprocessing" version="0" />
+       </rt_info>
+       <output>
+         <port id="0" precision="FP16">
+           <dim>64</dim>
+           <dim>1</dim>
+           <dim>3</dim>
+           <dim>3</dim>
+         </port>
+       </output>
+     </layer>
+     <layer id="5" name="Multiply_2600" type="Convert" version="opset1">
+       <data destination_type="f32" />
+       <rt_info>
+         <attribute name="decompression" version="0" />
+         <attribute name="preprocessing" version="0" />
+       </rt_info>
+       <input>
+         <port id="0" precision="FP16">
+           <dim>64</dim>
+           <dim>1</dim>
+           <dim>3</dim>
+           <dim>3</dim>
+         </port>
+       </input>
+       <output>
+         <port id="1" precision="FP32">
+           <dim>64</dim>
+           <dim>1</dim>
+           <dim>3</dim>
+           <dim>3</dim>
+         </port>
+       </output>
+     </layer>
+     <layer id="6" name="Multiply_2466" type="Convolution" version="opset1">
+       <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
+       <rt_info>
+         <attribute name="preprocessing" version="0" />
+       </rt_info>
+       <input>
+         <port id="0" precision="FP32">
+           <dim>1</dim>
+           <dim>1</dim>
+           <dim>96</dim>
+           <dim>2000</dim>
+         </port>
+         <port id="1" precision="FP32">
+           <dim>64</dim>
+           <dim>1</dim>
+           <dim>3</dim>
+           <dim>3</dim>
+         </port>
+       </input>
+       <output>
+         <port id="2" precision="FP32">
+           <dim>1</dim>
+           <dim>64</dim>
+           <dim>96</dim>
+           <dim>2000</dim>
+         </port>
+       </output>
+     </layer>
+     <layer id="7" name="Constant_2471_compressed" type="Const" version="opset1">
+       <data element_type="f16" shape="1, 64, 1, 1" offset="1154" size="128" />
+       <output>
+         <port id="0" precision="FP16">
+           <dim>1</dim>
+           <dim>64</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+         </port>
+       </output>
+     </layer>
+     <layer id="8" name="Constant_2471" type="Convert" version="opset1">
+       <data destination_type="f32" />
+       <rt_info>
+         <attribute name="decompression" version="0" />
+       </rt_info>
+       <input>
+         <port id="0" precision="FP16">
+           <dim>1</dim>
+           <dim>64</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+         </port>
+       </input>
+       <output>
+         <port id="1" precision="FP32">
+           <dim>1</dim>
+           <dim>64</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+         </port>
+       </output>
+     </layer>
+     <layer id="9" name="95" type="Add" version="opset1">
+       <data auto_broadcast="numpy" />
+       <input>
+         <port id="0" precision="FP32">
+           <dim>1</dim>
+           <dim>64</dim>
+           <dim>96</dim>
+           <dim>2000</dim>
+         </port>
+         <port id="1" precision="FP32">
+           <dim>1</dim>
+           <dim>64</dim>
+           <dim>1</dim>
+           <dim>1</dim>
+         </port>
+       </input>
+       <output>
+         <port id="2" precision="FP32" names="95">
+           <dim>1</dim>
+           <dim>64</dim>
+           <dim>96</dim>
+           <dim>2000</dim>
+         </port>
+       </output>
+     </layer>
+     <layer id="10" name="96" type="ReLU" version="opset1">
+       <input>
+         <port id="0" precision="FP32">
+           <dim>1</dim>
+           <dim>64</dim>
+           <dim>96</dim>
+           <dim>2000</dim>
+         </port>
+       </input>
+       <output>
+         <port id="1" precision="FP32" names="96">
+           <dim>1</dim>
+           <dim>64</dim>
+           <dim>96</dim>
+           <dim>2000</dim>
+         </port>
+       </output>
+     </layer>
+     <layer id="11" name="Multiply_2606_compressed" type="Const" version="opset1">
+       <data element_type="f16" shape="64, 64, 3, 3" offset="1282" size="73728" />
+       <output>
+         <port id="0" precision="FP16">
+           <dim>64</dim>
+           <dim>64</dim>
+           <dim>3</dim>
+           <dim>3</dim>
+         </port>
+       </output>
+     </layer>
+     <layer id="12" name="Multiply_2606" type="Convert" version="opset1">
+       <data destination_type="f32" />
+       <rt_info>
+         <attribute name="decompression" version="0" />
+       </rt_info>
+       <input>
+         <port id="0" precision="FP16">
+           <dim>64</dim>
+           <dim>64</dim>
+           <dim>3</dim>
244
+ <dim>3</dim>
245
+ </port>
246
+ </input>
247
+ <output>
248
+ <port id="1" precision="FP32">
249
+ <dim>64</dim>
250
+ <dim>64</dim>
251
+ <dim>3</dim>
252
+ <dim>3</dim>
253
+ </port>
254
+ </output>
255
+ </layer>
256
+ <layer id="13" name="Multiply_2476" type="Convolution" version="opset1">
257
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
258
+ <input>
259
+ <port id="0" precision="FP32">
260
+ <dim>1</dim>
261
+ <dim>64</dim>
262
+ <dim>96</dim>
263
+ <dim>2000</dim>
264
+ </port>
265
+ <port id="1" precision="FP32">
266
+ <dim>64</dim>
267
+ <dim>64</dim>
268
+ <dim>3</dim>
269
+ <dim>3</dim>
270
+ </port>
271
+ </input>
272
+ <output>
273
+ <port id="2" precision="FP32">
274
+ <dim>1</dim>
275
+ <dim>64</dim>
276
+ <dim>96</dim>
277
+ <dim>2000</dim>
278
+ </port>
279
+ </output>
280
+ </layer>
281
+ <layer id="14" name="Constant_2481_compressed" type="Const" version="opset1">
282
+ <data element_type="f16" shape="1, 64, 1, 1" offset="75010" size="128" />
283
+ <output>
284
+ <port id="0" precision="FP16">
285
+ <dim>1</dim>
286
+ <dim>64</dim>
287
+ <dim>1</dim>
288
+ <dim>1</dim>
289
+ </port>
290
+ </output>
291
+ </layer>
292
+ <layer id="15" name="Constant_2481" type="Convert" version="opset1">
293
+ <data destination_type="f32" />
294
+ <rt_info>
295
+ <attribute name="decompression" version="0" />
296
+ </rt_info>
297
+ <input>
298
+ <port id="0" precision="FP16">
299
+ <dim>1</dim>
300
+ <dim>64</dim>
301
+ <dim>1</dim>
302
+ <dim>1</dim>
303
+ </port>
304
+ </input>
305
+ <output>
306
+ <port id="1" precision="FP32">
307
+ <dim>1</dim>
308
+ <dim>64</dim>
309
+ <dim>1</dim>
310
+ <dim>1</dim>
311
+ </port>
312
+ </output>
313
+ </layer>
314
+ <layer id="16" name="98" type="Add" version="opset1">
315
+ <data auto_broadcast="numpy" />
316
+ <input>
317
+ <port id="0" precision="FP32">
318
+ <dim>1</dim>
319
+ <dim>64</dim>
320
+ <dim>96</dim>
321
+ <dim>2000</dim>
322
+ </port>
323
+ <port id="1" precision="FP32">
324
+ <dim>1</dim>
325
+ <dim>64</dim>
326
+ <dim>1</dim>
327
+ <dim>1</dim>
328
+ </port>
329
+ </input>
330
+ <output>
331
+ <port id="2" precision="FP32" names="98">
332
+ <dim>1</dim>
333
+ <dim>64</dim>
334
+ <dim>96</dim>
335
+ <dim>2000</dim>
336
+ </port>
337
+ </output>
338
+ </layer>
339
+ <layer id="17" name="99" type="ReLU" version="opset1">
340
+ <input>
341
+ <port id="0" precision="FP32">
342
+ <dim>1</dim>
343
+ <dim>64</dim>
344
+ <dim>96</dim>
345
+ <dim>2000</dim>
346
+ </port>
347
+ </input>
348
+ <output>
349
+ <port id="1" precision="FP32" names="99">
350
+ <dim>1</dim>
351
+ <dim>64</dim>
352
+ <dim>96</dim>
353
+ <dim>2000</dim>
354
+ </port>
355
+ </output>
356
+ </layer>
357
+ <layer id="18" name="100" type="MaxPool" version="opset8">
358
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0" />
359
+ <input>
360
+ <port id="0" precision="FP32">
361
+ <dim>1</dim>
362
+ <dim>64</dim>
363
+ <dim>96</dim>
364
+ <dim>2000</dim>
365
+ </port>
366
+ </input>
367
+ <output>
368
+ <port id="1" precision="FP32" names="100">
369
+ <dim>1</dim>
370
+ <dim>64</dim>
371
+ <dim>48</dim>
372
+ <dim>1000</dim>
373
+ </port>
374
+ <port id="2" precision="I64">
375
+ <dim>1</dim>
376
+ <dim>64</dim>
377
+ <dim>48</dim>
378
+ <dim>1000</dim>
379
+ </port>
380
+ </output>
381
+ </layer>
382
+ <layer id="19" name="Multiply_2612_compressed" type="Const" version="opset1">
383
+ <data element_type="f16" shape="128, 64, 3, 3" offset="75138" size="147456" />
384
+ <output>
385
+ <port id="0" precision="FP16">
386
+ <dim>128</dim>
387
+ <dim>64</dim>
388
+ <dim>3</dim>
389
+ <dim>3</dim>
390
+ </port>
391
+ </output>
392
+ </layer>
393
+ <layer id="20" name="Multiply_2612" type="Convert" version="opset1">
394
+ <data destination_type="f32" />
395
+ <rt_info>
396
+ <attribute name="decompression" version="0" />
397
+ </rt_info>
398
+ <input>
399
+ <port id="0" precision="FP16">
400
+ <dim>128</dim>
401
+ <dim>64</dim>
402
+ <dim>3</dim>
403
+ <dim>3</dim>
404
+ </port>
405
+ </input>
406
+ <output>
407
+ <port id="1" precision="FP32">
408
+ <dim>128</dim>
409
+ <dim>64</dim>
410
+ <dim>3</dim>
411
+ <dim>3</dim>
412
+ </port>
413
+ </output>
414
+ </layer>
415
+ <layer id="21" name="Multiply_2486" type="Convolution" version="opset1">
416
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
417
+ <input>
418
+ <port id="0" precision="FP32">
419
+ <dim>1</dim>
420
+ <dim>64</dim>
421
+ <dim>48</dim>
422
+ <dim>1000</dim>
423
+ </port>
424
+ <port id="1" precision="FP32">
425
+ <dim>128</dim>
426
+ <dim>64</dim>
427
+ <dim>3</dim>
428
+ <dim>3</dim>
429
+ </port>
430
+ </input>
431
+ <output>
432
+ <port id="2" precision="FP32">
433
+ <dim>1</dim>
434
+ <dim>128</dim>
435
+ <dim>48</dim>
436
+ <dim>1000</dim>
437
+ </port>
438
+ </output>
439
+ </layer>
440
+ <layer id="22" name="Constant_2491_compressed" type="Const" version="opset1">
441
+ <data element_type="f16" shape="1, 128, 1, 1" offset="222594" size="256" />
442
+ <output>
443
+ <port id="0" precision="FP16">
444
+ <dim>1</dim>
445
+ <dim>128</dim>
446
+ <dim>1</dim>
447
+ <dim>1</dim>
448
+ </port>
449
+ </output>
450
+ </layer>
451
+ <layer id="23" name="Constant_2491" type="Convert" version="opset1">
452
+ <data destination_type="f32" />
453
+ <rt_info>
454
+ <attribute name="decompression" version="0" />
455
+ </rt_info>
456
+ <input>
457
+ <port id="0" precision="FP16">
458
+ <dim>1</dim>
459
+ <dim>128</dim>
460
+ <dim>1</dim>
461
+ <dim>1</dim>
462
+ </port>
463
+ </input>
464
+ <output>
465
+ <port id="1" precision="FP32">
466
+ <dim>1</dim>
467
+ <dim>128</dim>
468
+ <dim>1</dim>
469
+ <dim>1</dim>
470
+ </port>
471
+ </output>
472
+ </layer>
473
+ <layer id="24" name="102" type="Add" version="opset1">
474
+ <data auto_broadcast="numpy" />
475
+ <input>
476
+ <port id="0" precision="FP32">
477
+ <dim>1</dim>
478
+ <dim>128</dim>
479
+ <dim>48</dim>
480
+ <dim>1000</dim>
481
+ </port>
482
+ <port id="1" precision="FP32">
483
+ <dim>1</dim>
484
+ <dim>128</dim>
485
+ <dim>1</dim>
486
+ <dim>1</dim>
487
+ </port>
488
+ </input>
489
+ <output>
490
+ <port id="2" precision="FP32" names="102">
491
+ <dim>1</dim>
492
+ <dim>128</dim>
493
+ <dim>48</dim>
494
+ <dim>1000</dim>
495
+ </port>
496
+ </output>
497
+ </layer>
498
+ <layer id="25" name="103" type="ReLU" version="opset1">
499
+ <input>
500
+ <port id="0" precision="FP32">
501
+ <dim>1</dim>
502
+ <dim>128</dim>
503
+ <dim>48</dim>
504
+ <dim>1000</dim>
505
+ </port>
506
+ </input>
507
+ <output>
508
+ <port id="1" precision="FP32" names="103">
509
+ <dim>1</dim>
510
+ <dim>128</dim>
511
+ <dim>48</dim>
512
+ <dim>1000</dim>
513
+ </port>
514
+ </output>
515
+ </layer>
516
+ <layer id="26" name="Multiply_2618_compressed" type="Const" version="opset1">
517
+ <data element_type="f16" shape="128, 128, 3, 3" offset="222850" size="294912" />
518
+ <output>
519
+ <port id="0" precision="FP16">
520
+ <dim>128</dim>
521
+ <dim>128</dim>
522
+ <dim>3</dim>
523
+ <dim>3</dim>
524
+ </port>
525
+ </output>
526
+ </layer>
527
+ <layer id="27" name="Multiply_2618" type="Convert" version="opset1">
528
+ <data destination_type="f32" />
529
+ <rt_info>
530
+ <attribute name="decompression" version="0" />
531
+ </rt_info>
532
+ <input>
533
+ <port id="0" precision="FP16">
534
+ <dim>128</dim>
535
+ <dim>128</dim>
536
+ <dim>3</dim>
537
+ <dim>3</dim>
538
+ </port>
539
+ </input>
540
+ <output>
541
+ <port id="1" precision="FP32">
542
+ <dim>128</dim>
543
+ <dim>128</dim>
544
+ <dim>3</dim>
545
+ <dim>3</dim>
546
+ </port>
547
+ </output>
548
+ </layer>
549
+ <layer id="28" name="Multiply_2496" type="Convolution" version="opset1">
550
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
551
+ <input>
552
+ <port id="0" precision="FP32">
553
+ <dim>1</dim>
554
+ <dim>128</dim>
555
+ <dim>48</dim>
556
+ <dim>1000</dim>
557
+ </port>
558
+ <port id="1" precision="FP32">
559
+ <dim>128</dim>
560
+ <dim>128</dim>
561
+ <dim>3</dim>
562
+ <dim>3</dim>
563
+ </port>
564
+ </input>
565
+ <output>
566
+ <port id="2" precision="FP32">
567
+ <dim>1</dim>
568
+ <dim>128</dim>
569
+ <dim>48</dim>
570
+ <dim>1000</dim>
571
+ </port>
572
+ </output>
573
+ </layer>
574
+ <layer id="29" name="Constant_2501_compressed" type="Const" version="opset1">
575
+ <data element_type="f16" shape="1, 128, 1, 1" offset="517762" size="256" />
576
+ <output>
577
+ <port id="0" precision="FP16">
578
+ <dim>1</dim>
579
+ <dim>128</dim>
580
+ <dim>1</dim>
581
+ <dim>1</dim>
582
+ </port>
583
+ </output>
584
+ </layer>
585
+ <layer id="30" name="Constant_2501" type="Convert" version="opset1">
586
+ <data destination_type="f32" />
587
+ <rt_info>
588
+ <attribute name="decompression" version="0" />
589
+ </rt_info>
590
+ <input>
591
+ <port id="0" precision="FP16">
592
+ <dim>1</dim>
593
+ <dim>128</dim>
594
+ <dim>1</dim>
595
+ <dim>1</dim>
596
+ </port>
597
+ </input>
598
+ <output>
599
+ <port id="1" precision="FP32">
600
+ <dim>1</dim>
601
+ <dim>128</dim>
602
+ <dim>1</dim>
603
+ <dim>1</dim>
604
+ </port>
605
+ </output>
606
+ </layer>
607
+ <layer id="31" name="105" type="Add" version="opset1">
608
+ <data auto_broadcast="numpy" />
609
+ <input>
610
+ <port id="0" precision="FP32">
611
+ <dim>1</dim>
612
+ <dim>128</dim>
613
+ <dim>48</dim>
614
+ <dim>1000</dim>
615
+ </port>
616
+ <port id="1" precision="FP32">
617
+ <dim>1</dim>
618
+ <dim>128</dim>
619
+ <dim>1</dim>
620
+ <dim>1</dim>
621
+ </port>
622
+ </input>
623
+ <output>
624
+ <port id="2" precision="FP32" names="105">
625
+ <dim>1</dim>
626
+ <dim>128</dim>
627
+ <dim>48</dim>
628
+ <dim>1000</dim>
629
+ </port>
630
+ </output>
631
+ </layer>
632
+ <layer id="32" name="106" type="ReLU" version="opset1">
633
+ <input>
634
+ <port id="0" precision="FP32">
635
+ <dim>1</dim>
636
+ <dim>128</dim>
637
+ <dim>48</dim>
638
+ <dim>1000</dim>
639
+ </port>
640
+ </input>
641
+ <output>
642
+ <port id="1" precision="FP32" names="106">
643
+ <dim>1</dim>
644
+ <dim>128</dim>
645
+ <dim>48</dim>
646
+ <dim>1000</dim>
647
+ </port>
648
+ </output>
649
+ </layer>
650
+ <layer id="33" name="107" type="MaxPool" version="opset8">
651
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0" />
652
+ <input>
653
+ <port id="0" precision="FP32">
654
+ <dim>1</dim>
655
+ <dim>128</dim>
656
+ <dim>48</dim>
657
+ <dim>1000</dim>
658
+ </port>
659
+ </input>
660
+ <output>
661
+ <port id="1" precision="FP32" names="107">
662
+ <dim>1</dim>
663
+ <dim>128</dim>
664
+ <dim>24</dim>
665
+ <dim>500</dim>
666
+ </port>
667
+ <port id="2" precision="I64">
668
+ <dim>1</dim>
669
+ <dim>128</dim>
670
+ <dim>24</dim>
671
+ <dim>500</dim>
672
+ </port>
673
+ </output>
674
+ </layer>
675
+ <layer id="34" name="Multiply_2624_compressed" type="Const" version="opset1">
676
+ <data element_type="f16" shape="256, 128, 3, 3" offset="518018" size="589824" />
677
+ <output>
678
+ <port id="0" precision="FP16">
679
+ <dim>256</dim>
680
+ <dim>128</dim>
681
+ <dim>3</dim>
682
+ <dim>3</dim>
683
+ </port>
684
+ </output>
685
+ </layer>
686
+ <layer id="35" name="Multiply_2624" type="Convert" version="opset1">
687
+ <data destination_type="f32" />
688
+ <rt_info>
689
+ <attribute name="decompression" version="0" />
690
+ </rt_info>
691
+ <input>
692
+ <port id="0" precision="FP16">
693
+ <dim>256</dim>
694
+ <dim>128</dim>
695
+ <dim>3</dim>
696
+ <dim>3</dim>
697
+ </port>
698
+ </input>
699
+ <output>
700
+ <port id="1" precision="FP32">
701
+ <dim>256</dim>
702
+ <dim>128</dim>
703
+ <dim>3</dim>
704
+ <dim>3</dim>
705
+ </port>
706
+ </output>
707
+ </layer>
708
+ <layer id="36" name="Multiply_2506" type="Convolution" version="opset1">
709
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
710
+ <input>
711
+ <port id="0" precision="FP32">
712
+ <dim>1</dim>
713
+ <dim>128</dim>
714
+ <dim>24</dim>
715
+ <dim>500</dim>
716
+ </port>
717
+ <port id="1" precision="FP32">
718
+ <dim>256</dim>
719
+ <dim>128</dim>
720
+ <dim>3</dim>
721
+ <dim>3</dim>
722
+ </port>
723
+ </input>
724
+ <output>
725
+ <port id="2" precision="FP32">
726
+ <dim>1</dim>
727
+ <dim>256</dim>
728
+ <dim>24</dim>
729
+ <dim>500</dim>
730
+ </port>
731
+ </output>
732
+ </layer>
733
+ <layer id="37" name="Constant_2511_compressed" type="Const" version="opset1">
734
+ <data element_type="f16" shape="1, 256, 1, 1" offset="1107842" size="512" />
735
+ <output>
736
+ <port id="0" precision="FP16">
737
+ <dim>1</dim>
738
+ <dim>256</dim>
739
+ <dim>1</dim>
740
+ <dim>1</dim>
741
+ </port>
742
+ </output>
743
+ </layer>
744
+ <layer id="38" name="Constant_2511" type="Convert" version="opset1">
745
+ <data destination_type="f32" />
746
+ <rt_info>
747
+ <attribute name="decompression" version="0" />
748
+ </rt_info>
749
+ <input>
750
+ <port id="0" precision="FP16">
751
+ <dim>1</dim>
752
+ <dim>256</dim>
753
+ <dim>1</dim>
754
+ <dim>1</dim>
755
+ </port>
756
+ </input>
757
+ <output>
758
+ <port id="1" precision="FP32">
759
+ <dim>1</dim>
760
+ <dim>256</dim>
761
+ <dim>1</dim>
762
+ <dim>1</dim>
763
+ </port>
764
+ </output>
765
+ </layer>
766
+ <layer id="39" name="109" type="Add" version="opset1">
767
+ <data auto_broadcast="numpy" />
768
+ <input>
769
+ <port id="0" precision="FP32">
770
+ <dim>1</dim>
771
+ <dim>256</dim>
772
+ <dim>24</dim>
773
+ <dim>500</dim>
774
+ </port>
775
+ <port id="1" precision="FP32">
776
+ <dim>1</dim>
777
+ <dim>256</dim>
778
+ <dim>1</dim>
779
+ <dim>1</dim>
780
+ </port>
781
+ </input>
782
+ <output>
783
+ <port id="2" precision="FP32" names="109">
784
+ <dim>1</dim>
785
+ <dim>256</dim>
786
+ <dim>24</dim>
787
+ <dim>500</dim>
788
+ </port>
789
+ </output>
790
+ </layer>
791
+ <layer id="40" name="110" type="ReLU" version="opset1">
792
+ <input>
793
+ <port id="0" precision="FP32">
794
+ <dim>1</dim>
795
+ <dim>256</dim>
796
+ <dim>24</dim>
797
+ <dim>500</dim>
798
+ </port>
799
+ </input>
800
+ <output>
801
+ <port id="1" precision="FP32" names="110">
802
+ <dim>1</dim>
803
+ <dim>256</dim>
804
+ <dim>24</dim>
805
+ <dim>500</dim>
806
+ </port>
807
+ </output>
808
+ </layer>
809
+ <layer id="41" name="Multiply_2630_compressed" type="Const" version="opset1">
810
+ <data element_type="f16" shape="256, 256, 3, 3" offset="1108354" size="1179648" />
811
+ <output>
812
+ <port id="0" precision="FP16">
813
+ <dim>256</dim>
814
+ <dim>256</dim>
815
+ <dim>3</dim>
816
+ <dim>3</dim>
817
+ </port>
818
+ </output>
819
+ </layer>
820
+ <layer id="42" name="Multiply_2630" type="Convert" version="opset1">
821
+ <data destination_type="f32" />
822
+ <rt_info>
823
+ <attribute name="decompression" version="0" />
824
+ </rt_info>
825
+ <input>
826
+ <port id="0" precision="FP16">
827
+ <dim>256</dim>
828
+ <dim>256</dim>
829
+ <dim>3</dim>
830
+ <dim>3</dim>
831
+ </port>
832
+ </input>
833
+ <output>
834
+ <port id="1" precision="FP32">
835
+ <dim>256</dim>
836
+ <dim>256</dim>
837
+ <dim>3</dim>
838
+ <dim>3</dim>
839
+ </port>
840
+ </output>
841
+ </layer>
842
+ <layer id="43" name="Multiply_2516" type="Convolution" version="opset1">
843
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
844
+ <input>
845
+ <port id="0" precision="FP32">
846
+ <dim>1</dim>
847
+ <dim>256</dim>
848
+ <dim>24</dim>
849
+ <dim>500</dim>
850
+ </port>
851
+ <port id="1" precision="FP32">
852
+ <dim>256</dim>
853
+ <dim>256</dim>
854
+ <dim>3</dim>
855
+ <dim>3</dim>
856
+ </port>
857
+ </input>
858
+ <output>
859
+ <port id="2" precision="FP32">
860
+ <dim>1</dim>
861
+ <dim>256</dim>
862
+ <dim>24</dim>
863
+ <dim>500</dim>
864
+ </port>
865
+ </output>
866
+ </layer>
867
+ <layer id="44" name="Constant_2521_compressed" type="Const" version="opset1">
868
+ <data element_type="f16" shape="1, 256, 1, 1" offset="2288002" size="512" />
869
+ <output>
870
+ <port id="0" precision="FP16">
871
+ <dim>1</dim>
872
+ <dim>256</dim>
873
+ <dim>1</dim>
874
+ <dim>1</dim>
875
+ </port>
876
+ </output>
877
+ </layer>
878
+ <layer id="45" name="Constant_2521" type="Convert" version="opset1">
879
+ <data destination_type="f32" />
880
+ <rt_info>
881
+ <attribute name="decompression" version="0" />
882
+ </rt_info>
883
+ <input>
884
+ <port id="0" precision="FP16">
885
+ <dim>1</dim>
886
+ <dim>256</dim>
887
+ <dim>1</dim>
888
+ <dim>1</dim>
889
+ </port>
890
+ </input>
891
+ <output>
892
+ <port id="1" precision="FP32">
893
+ <dim>1</dim>
894
+ <dim>256</dim>
895
+ <dim>1</dim>
896
+ <dim>1</dim>
897
+ </port>
898
+ </output>
899
+ </layer>
900
+ <layer id="46" name="112" type="Add" version="opset1">
901
+ <data auto_broadcast="numpy" />
902
+ <input>
903
+ <port id="0" precision="FP32">
904
+ <dim>1</dim>
905
+ <dim>256</dim>
906
+ <dim>24</dim>
907
+ <dim>500</dim>
908
+ </port>
909
+ <port id="1" precision="FP32">
910
+ <dim>1</dim>
911
+ <dim>256</dim>
912
+ <dim>1</dim>
913
+ <dim>1</dim>
914
+ </port>
915
+ </input>
916
+ <output>
917
+ <port id="2" precision="FP32" names="112">
918
+ <dim>1</dim>
919
+ <dim>256</dim>
920
+ <dim>24</dim>
921
+ <dim>500</dim>
922
+ </port>
923
+ </output>
924
+ </layer>
925
+ <layer id="47" name="113" type="ReLU" version="opset1">
926
+ <input>
927
+ <port id="0" precision="FP32">
928
+ <dim>1</dim>
929
+ <dim>256</dim>
930
+ <dim>24</dim>
931
+ <dim>500</dim>
932
+ </port>
933
+ </input>
934
+ <output>
935
+ <port id="1" precision="FP32" names="113">
936
+ <dim>1</dim>
937
+ <dim>256</dim>
938
+ <dim>24</dim>
939
+ <dim>500</dim>
940
+ </port>
941
+ </output>
942
+ </layer>
943
+ <layer id="48" name="Multiply_2636_compressed" type="Const" version="opset1">
944
+ <data element_type="f16" shape="256, 256, 3, 3" offset="2288514" size="1179648" />
945
+ <output>
946
+ <port id="0" precision="FP16">
947
+ <dim>256</dim>
948
+ <dim>256</dim>
949
+ <dim>3</dim>
950
+ <dim>3</dim>
951
+ </port>
952
+ </output>
953
+ </layer>
954
+ <layer id="49" name="Multiply_2636" type="Convert" version="opset1">
955
+ <data destination_type="f32" />
956
+ <rt_info>
957
+ <attribute name="decompression" version="0" />
958
+ </rt_info>
959
+ <input>
960
+ <port id="0" precision="FP16">
961
+ <dim>256</dim>
962
+ <dim>256</dim>
963
+ <dim>3</dim>
964
+ <dim>3</dim>
965
+ </port>
966
+ </input>
967
+ <output>
968
+ <port id="1" precision="FP32">
969
+ <dim>256</dim>
970
+ <dim>256</dim>
971
+ <dim>3</dim>
972
+ <dim>3</dim>
973
+ </port>
974
+ </output>
975
+ </layer>
976
+ <layer id="50" name="Multiply_2526" type="Convolution" version="opset1">
977
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
978
+ <input>
979
+ <port id="0" precision="FP32">
980
+ <dim>1</dim>
981
+ <dim>256</dim>
982
+ <dim>24</dim>
983
+ <dim>500</dim>
984
+ </port>
985
+ <port id="1" precision="FP32">
986
+ <dim>256</dim>
987
+ <dim>256</dim>
988
+ <dim>3</dim>
989
+ <dim>3</dim>
990
+ </port>
991
+ </input>
992
+ <output>
993
+ <port id="2" precision="FP32">
994
+ <dim>1</dim>
995
+ <dim>256</dim>
996
+ <dim>24</dim>
997
+ <dim>500</dim>
998
+ </port>
999
+ </output>
1000
+ </layer>
1001
+ <layer id="51" name="Constant_2531_compressed" type="Const" version="opset1">
1002
+ <data element_type="f16" shape="1, 256, 1, 1" offset="3468162" size="512" />
1003
+ <output>
1004
+ <port id="0" precision="FP16">
1005
+ <dim>1</dim>
1006
+ <dim>256</dim>
1007
+ <dim>1</dim>
1008
+ <dim>1</dim>
1009
+ </port>
1010
+ </output>
1011
+ </layer>
1012
+ <layer id="52" name="Constant_2531" type="Convert" version="opset1">
1013
+ <data destination_type="f32" />
1014
+ <rt_info>
1015
+ <attribute name="decompression" version="0" />
1016
+ </rt_info>
1017
+ <input>
1018
+ <port id="0" precision="FP16">
1019
+ <dim>1</dim>
1020
+ <dim>256</dim>
1021
+ <dim>1</dim>
1022
+ <dim>1</dim>
1023
+ </port>
1024
+ </input>
1025
+ <output>
1026
+ <port id="1" precision="FP32">
1027
+ <dim>1</dim>
1028
+ <dim>256</dim>
1029
+ <dim>1</dim>
1030
+ <dim>1</dim>
1031
+ </port>
1032
+ </output>
1033
+ </layer>
1034
+ <layer id="53" name="115" type="Add" version="opset1">
1035
+ <data auto_broadcast="numpy" />
1036
+ <input>
1037
+ <port id="0" precision="FP32">
1038
+ <dim>1</dim>
1039
+ <dim>256</dim>
1040
+ <dim>24</dim>
1041
+ <dim>500</dim>
1042
+ </port>
1043
+ <port id="1" precision="FP32">
1044
+ <dim>1</dim>
1045
+ <dim>256</dim>
1046
+ <dim>1</dim>
1047
+ <dim>1</dim>
1048
+ </port>
1049
+ </input>
1050
+ <output>
1051
+ <port id="2" precision="FP32" names="115">
1052
+ <dim>1</dim>
1053
+ <dim>256</dim>
1054
+ <dim>24</dim>
1055
+ <dim>500</dim>
1056
+ </port>
1057
+ </output>
1058
+ </layer>
1059
+ <layer id="54" name="116" type="ReLU" version="opset1">
1060
+ <input>
1061
+ <port id="0" precision="FP32">
1062
+ <dim>1</dim>
1063
+ <dim>256</dim>
1064
+ <dim>24</dim>
1065
+ <dim>500</dim>
1066
+ </port>
1067
+ </input>
1068
+ <output>
1069
+ <port id="1" precision="FP32" names="116">
1070
+ <dim>1</dim>
1071
+ <dim>256</dim>
1072
+ <dim>24</dim>
1073
+ <dim>500</dim>
1074
+ </port>
1075
+ </output>
1076
+ </layer>
1077
+ <layer id="55" name="118" type="MaxPool" version="opset8">
1078
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0" />
1079
+ <input>
1080
+ <port id="0" precision="FP32">
1081
+ <dim>1</dim>
1082
+ <dim>256</dim>
1083
+ <dim>24</dim>
1084
+ <dim>500</dim>
1085
+ </port>
1086
+ </input>
1087
+ <output>
1088
+ <port id="1" precision="FP32" names="118">
1089
+ <dim>1</dim>
1090
+ <dim>256</dim>
1091
+ <dim>12</dim>
1092
+ <dim>250</dim>
1093
+ </port>
1094
+ <port id="2" precision="I64">
1095
+ <dim>1</dim>
1096
+ <dim>256</dim>
1097
+ <dim>12</dim>
1098
+ <dim>250</dim>
1099
+ </port>
1100
+ </output>
1101
+ </layer>
1102
+ <layer id="56" name="Multiply_2642_compressed" type="Const" version="opset1">
1103
+ <data element_type="f16" shape="512, 256, 3, 3" offset="3468674" size="2359296" />
1104
+ <output>
1105
+ <port id="0" precision="FP16">
1106
+ <dim>512</dim>
1107
+ <dim>256</dim>
1108
+ <dim>3</dim>
1109
+ <dim>3</dim>
1110
+ </port>
1111
+ </output>
1112
+ </layer>
1113
+ <layer id="57" name="Multiply_2642" type="Convert" version="opset1">
1114
+ <data destination_type="f32" />
1115
+ <rt_info>
1116
+ <attribute name="decompression" version="0" />
1117
+ </rt_info>
1118
+ <input>
1119
+ <port id="0" precision="FP16">
1120
+ <dim>512</dim>
1121
+ <dim>256</dim>
1122
+ <dim>3</dim>
1123
+ <dim>3</dim>
1124
+ </port>
1125
+ </input>
1126
+ <output>
1127
+ <port id="1" precision="FP32">
1128
+ <dim>512</dim>
1129
+ <dim>256</dim>
1130
+ <dim>3</dim>
1131
+ <dim>3</dim>
1132
+ </port>
1133
+ </output>
1134
+ </layer>
1135
+ <layer id="58" name="Multiply_2536" type="Convolution" version="opset1">
1136
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1137
+ <input>
1138
+ <port id="0" precision="FP32">
1139
+ <dim>1</dim>
1140
+ <dim>256</dim>
1141
+ <dim>12</dim>
1142
+ <dim>250</dim>
1143
+ </port>
1144
+ <port id="1" precision="FP32">
1145
+ <dim>512</dim>
1146
+ <dim>256</dim>
1147
+ <dim>3</dim>
1148
+ <dim>3</dim>
1149
+ </port>
1150
+ </input>
1151
+ <output>
1152
+ <port id="2" precision="FP32">
1153
+ <dim>1</dim>
1154
+ <dim>512</dim>
1155
+ <dim>12</dim>
1156
+ <dim>250</dim>
1157
+ </port>
1158
+ </output>
1159
+ </layer>
1160
+ <layer id="59" name="Constant_2541_compressed" type="Const" version="opset1">
1161
+ <data element_type="f16" shape="1, 512, 1, 1" offset="5827970" size="1024" />
1162
+ <output>
1163
+ <port id="0" precision="FP16">
1164
+ <dim>1</dim>
1165
+ <dim>512</dim>
1166
+ <dim>1</dim>
1167
+ <dim>1</dim>
1168
+ </port>
1169
+ </output>
1170
+ </layer>
1171
+ <layer id="60" name="Constant_2541" type="Convert" version="opset1">
1172
+ <data destination_type="f32" />
1173
+ <rt_info>
1174
+ <attribute name="decompression" version="0" />
1175
+ </rt_info>
1176
+ <input>
1177
+ <port id="0" precision="FP16">
1178
+ <dim>1</dim>
1179
+ <dim>512</dim>
1180
+ <dim>1</dim>
1181
+ <dim>1</dim>
1182
+ </port>
1183
+ </input>
1184
+ <output>
1185
+ <port id="1" precision="FP32">
1186
+ <dim>1</dim>
1187
+ <dim>512</dim>
1188
+ <dim>1</dim>
1189
+ <dim>1</dim>
1190
+ </port>
1191
+ </output>
1192
+ </layer>
1193
+ <layer id="61" name="121" type="Add" version="opset1">
1194
+ <data auto_broadcast="numpy" />
1195
+ <input>
1196
+ <port id="0" precision="FP32">
1197
+ <dim>1</dim>
1198
+ <dim>512</dim>
1199
+ <dim>12</dim>
1200
+ <dim>250</dim>
1201
+ </port>
1202
+ <port id="1" precision="FP32">
1203
+ <dim>1</dim>
1204
+ <dim>512</dim>
1205
+ <dim>1</dim>
1206
+ <dim>1</dim>
1207
+ </port>
1208
+ </input>
1209
+ <output>
1210
+ <port id="2" precision="FP32" names="121">
1211
+ <dim>1</dim>
1212
+ <dim>512</dim>
1213
+ <dim>12</dim>
1214
+ <dim>250</dim>
1215
+ </port>
1216
+ </output>
1217
+ </layer>
1218
+ <layer id="62" name="122" type="ReLU" version="opset1">
1219
+ <input>
1220
+ <port id="0" precision="FP32">
1221
+ <dim>1</dim>
1222
+ <dim>512</dim>
1223
+ <dim>12</dim>
1224
+ <dim>250</dim>
1225
+ </port>
1226
+ </input>
1227
+ <output>
1228
+ <port id="1" precision="FP32" names="122">
1229
+ <dim>1</dim>
1230
+ <dim>512</dim>
1231
+ <dim>12</dim>
1232
+ <dim>250</dim>
1233
+ </port>
1234
+ </output>
1235
+ </layer>
1236
+ <layer id="63" name="Multiply_2648_compressed" type="Const" version="opset1">
1237
+ <data element_type="f16" shape="512, 512, 3, 3" offset="5828994" size="4718592" />
1238
+ <output>
1239
+ <port id="0" precision="FP16">
1240
+ <dim>512</dim>
1241
+ <dim>512</dim>
1242
+ <dim>3</dim>
1243
+ <dim>3</dim>
1244
+ </port>
1245
+ </output>
1246
+ </layer>
1247
+ <layer id="64" name="Multiply_2648" type="Convert" version="opset1">
1248
+ <data destination_type="f32" />
1249
+ <rt_info>
1250
+ <attribute name="decompression" version="0" />
1251
+ </rt_info>
1252
+ <input>
1253
+ <port id="0" precision="FP16">
1254
+ <dim>512</dim>
1255
+ <dim>512</dim>
1256
+ <dim>3</dim>
1257
+ <dim>3</dim>
1258
+ </port>
1259
+ </input>
1260
+ <output>
1261
+ <port id="1" precision="FP32">
1262
+ <dim>512</dim>
1263
+ <dim>512</dim>
1264
+ <dim>3</dim>
1265
+ <dim>3</dim>
1266
+ </port>
1267
+ </output>
1268
+ </layer>
1269
+ <layer id="65" name="Multiply_2546" type="Convolution" version="opset1">
1270
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1271
+ <input>
1272
+ <port id="0" precision="FP32">
1273
+ <dim>1</dim>
1274
+ <dim>512</dim>
1275
+ <dim>12</dim>
1276
+ <dim>250</dim>
1277
+ </port>
1278
+ <port id="1" precision="FP32">
1279
+ <dim>512</dim>
1280
+ <dim>512</dim>
1281
+ <dim>3</dim>
1282
+ <dim>3</dim>
1283
+ </port>
1284
+ </input>
1285
+ <output>
1286
+ <port id="2" precision="FP32">
1287
+ <dim>1</dim>
1288
+ <dim>512</dim>
1289
+ <dim>12</dim>
1290
+ <dim>250</dim>
1291
+ </port>
1292
+ </output>
1293
+ </layer>
1294
+ <layer id="66" name="Constant_2551_compressed" type="Const" version="opset1">
1295
+ <data element_type="f16" shape="1, 512, 1, 1" offset="10547586" size="1024" />
1296
+ <output>
1297
+ <port id="0" precision="FP16">
1298
+ <dim>1</dim>
1299
+ <dim>512</dim>
1300
+ <dim>1</dim>
1301
+ <dim>1</dim>
1302
+ </port>
1303
+ </output>
1304
+ </layer>
1305
+ <layer id="67" name="Constant_2551" type="Convert" version="opset1">
1306
+ <data destination_type="f32" />
1307
+ <rt_info>
1308
+ <attribute name="decompression" version="0" />
1309
+ </rt_info>
1310
+ <input>
1311
+ <port id="0" precision="FP16">
1312
+ <dim>1</dim>
1313
+ <dim>512</dim>
1314
+ <dim>1</dim>
1315
+ <dim>1</dim>
1316
+ </port>
1317
+ </input>
1318
+ <output>
1319
+ <port id="1" precision="FP32">
1320
+ <dim>1</dim>
1321
+ <dim>512</dim>
1322
+ <dim>1</dim>
1323
+ <dim>1</dim>
1324
+ </port>
1325
+ </output>
1326
+ </layer>
1327
+ <layer id="68" name="124" type="Add" version="opset1">
1328
+ <data auto_broadcast="numpy" />
1329
+ <input>
1330
+ <port id="0" precision="FP32">
1331
+ <dim>1</dim>
1332
+ <dim>512</dim>
1333
+ <dim>12</dim>
1334
+ <dim>250</dim>
1335
+ </port>
1336
+ <port id="1" precision="FP32">
1337
+ <dim>1</dim>
1338
+ <dim>512</dim>
1339
+ <dim>1</dim>
1340
+ <dim>1</dim>
1341
+ </port>
1342
+ </input>
1343
+ <output>
1344
+ <port id="2" precision="FP32" names="124">
1345
+ <dim>1</dim>
1346
+ <dim>512</dim>
1347
+ <dim>12</dim>
1348
+ <dim>250</dim>
1349
+ </port>
1350
+ </output>
1351
+ </layer>
1352
+ <layer id="69" name="125" type="ReLU" version="opset1">
1353
+ <input>
1354
+ <port id="0" precision="FP32">
1355
+ <dim>1</dim>
1356
+ <dim>512</dim>
1357
+ <dim>12</dim>
1358
+ <dim>250</dim>
1359
+ </port>
1360
+ </input>
1361
+ <output>
1362
+ <port id="1" precision="FP32" names="125">
1363
+ <dim>1</dim>
1364
+ <dim>512</dim>
1365
+ <dim>12</dim>
1366
+ <dim>250</dim>
1367
+ </port>
1368
+ </output>
1369
+ </layer>
1370
+ <layer id="70" name="Multiply_2654_compressed" type="Const" version="opset1">
1371
+ <data element_type="f16" shape="512, 512, 3, 3" offset="10548610" size="4718592" />
1372
+ <output>
1373
+ <port id="0" precision="FP16">
1374
+ <dim>512</dim>
1375
+ <dim>512</dim>
1376
+ <dim>3</dim>
1377
+ <dim>3</dim>
1378
+ </port>
1379
+ </output>
1380
+ </layer>
1381
+ <layer id="71" name="Multiply_2654" type="Convert" version="opset1">
1382
+ <data destination_type="f32" />
1383
+ <rt_info>
1384
+ <attribute name="decompression" version="0" />
1385
+ </rt_info>
1386
+ <input>
1387
+ <port id="0" precision="FP16">
1388
+ <dim>512</dim>
1389
+ <dim>512</dim>
1390
+ <dim>3</dim>
1391
+ <dim>3</dim>
1392
+ </port>
1393
+ </input>
1394
+ <output>
1395
+ <port id="1" precision="FP32">
1396
+ <dim>512</dim>
1397
+ <dim>512</dim>
1398
+ <dim>3</dim>
1399
+ <dim>3</dim>
1400
+ </port>
1401
+ </output>
1402
+ </layer>
1403
+ <layer id="72" name="Multiply_2556" type="Convolution" version="opset1">
1404
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1405
+ <input>
1406
+ <port id="0" precision="FP32">
1407
+ <dim>1</dim>
1408
+ <dim>512</dim>
1409
+ <dim>12</dim>
1410
+ <dim>250</dim>
1411
+ </port>
1412
+ <port id="1" precision="FP32">
1413
+ <dim>512</dim>
1414
+ <dim>512</dim>
1415
+ <dim>3</dim>
1416
+ <dim>3</dim>
1417
+ </port>
1418
+ </input>
1419
+ <output>
1420
+ <port id="2" precision="FP32">
1421
+ <dim>1</dim>
1422
+ <dim>512</dim>
1423
+ <dim>12</dim>
1424
+ <dim>250</dim>
1425
+ </port>
1426
+ </output>
1427
+ </layer>
1428
+ <layer id="73" name="Constant_2561_compressed" type="Const" version="opset1">
1429
+ <data element_type="f16" shape="1, 512, 1, 1" offset="15267202" size="1024" />
1430
+ <output>
1431
+ <port id="0" precision="FP16">
1432
+ <dim>1</dim>
1433
+ <dim>512</dim>
1434
+ <dim>1</dim>
1435
+ <dim>1</dim>
1436
+ </port>
1437
+ </output>
1438
+ </layer>
1439
+ <layer id="74" name="Constant_2561" type="Convert" version="opset1">
1440
+ <data destination_type="f32" />
1441
+ <rt_info>
1442
+ <attribute name="decompression" version="0" />
1443
+ </rt_info>
1444
+ <input>
1445
+ <port id="0" precision="FP16">
1446
+ <dim>1</dim>
1447
+ <dim>512</dim>
1448
+ <dim>1</dim>
1449
+ <dim>1</dim>
1450
+ </port>
1451
+ </input>
1452
+ <output>
1453
+ <port id="1" precision="FP32">
1454
+ <dim>1</dim>
1455
+ <dim>512</dim>
1456
+ <dim>1</dim>
1457
+ <dim>1</dim>
1458
+ </port>
1459
+ </output>
1460
+ </layer>
1461
+ <layer id="75" name="127" type="Add" version="opset1">
1462
+ <data auto_broadcast="numpy" />
1463
+ <input>
1464
+ <port id="0" precision="FP32">
1465
+ <dim>1</dim>
1466
+ <dim>512</dim>
1467
+ <dim>12</dim>
1468
+ <dim>250</dim>
1469
+ </port>
1470
+ <port id="1" precision="FP32">
1471
+ <dim>1</dim>
1472
+ <dim>512</dim>
1473
+ <dim>1</dim>
1474
+ <dim>1</dim>
1475
+ </port>
1476
+ </input>
1477
+ <output>
1478
+ <port id="2" precision="FP32" names="127">
1479
+ <dim>1</dim>
1480
+ <dim>512</dim>
1481
+ <dim>12</dim>
1482
+ <dim>250</dim>
1483
+ </port>
1484
+ </output>
1485
+ </layer>
1486
+ <layer id="76" name="128" type="ReLU" version="opset1">
1487
+ <input>
1488
+ <port id="0" precision="FP32">
1489
+ <dim>1</dim>
1490
+ <dim>512</dim>
1491
+ <dim>12</dim>
1492
+ <dim>250</dim>
1493
+ </port>
1494
+ </input>
1495
+ <output>
1496
+ <port id="1" precision="FP32" names="128">
1497
+ <dim>1</dim>
1498
+ <dim>512</dim>
1499
+ <dim>12</dim>
1500
+ <dim>250</dim>
1501
+ </port>
1502
+ </output>
1503
+ </layer>
1504
+ <layer id="77" name="130" type="MaxPool" version="opset8">
1505
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0" />
1506
+ <input>
1507
+ <port id="0" precision="FP32">
1508
+ <dim>1</dim>
1509
+ <dim>512</dim>
1510
+ <dim>12</dim>
1511
+ <dim>250</dim>
1512
+ </port>
1513
+ </input>
1514
+ <output>
1515
+ <port id="1" precision="FP32" names="130">
1516
+ <dim>1</dim>
1517
+ <dim>512</dim>
1518
+ <dim>6</dim>
1519
+ <dim>125</dim>
1520
+ </port>
1521
+ <port id="2" precision="I64">
1522
+ <dim>1</dim>
1523
+ <dim>512</dim>
1524
+ <dim>6</dim>
1525
+ <dim>125</dim>
1526
+ </port>
1527
+ </output>
1528
+ </layer>
1529
+ <layer id="78" name="Multiply_2660_compressed" type="Const" version="opset1">
1530
+ <data element_type="f16" shape="512, 512, 3, 3" offset="15268226" size="4718592" />
1531
+ <output>
1532
+ <port id="0" precision="FP16">
1533
+ <dim>512</dim>
1534
+ <dim>512</dim>
1535
+ <dim>3</dim>
1536
+ <dim>3</dim>
1537
+ </port>
1538
+ </output>
1539
+ </layer>
1540
+ <layer id="79" name="Multiply_2660" type="Convert" version="opset1">
1541
+ <data destination_type="f32" />
1542
+ <rt_info>
1543
+ <attribute name="decompression" version="0" />
1544
+ </rt_info>
1545
+ <input>
1546
+ <port id="0" precision="FP16">
1547
+ <dim>512</dim>
1548
+ <dim>512</dim>
1549
+ <dim>3</dim>
1550
+ <dim>3</dim>
1551
+ </port>
1552
+ </input>
1553
+ <output>
1554
+ <port id="1" precision="FP32">
1555
+ <dim>512</dim>
1556
+ <dim>512</dim>
1557
+ <dim>3</dim>
1558
+ <dim>3</dim>
1559
+ </port>
1560
+ </output>
1561
+ </layer>
1562
+ <layer id="80" name="Multiply_2566" type="Convolution" version="opset1">
1563
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1564
+ <input>
1565
+ <port id="0" precision="FP32">
1566
+ <dim>1</dim>
1567
+ <dim>512</dim>
1568
+ <dim>6</dim>
1569
+ <dim>125</dim>
1570
+ </port>
1571
+ <port id="1" precision="FP32">
1572
+ <dim>512</dim>
1573
+ <dim>512</dim>
1574
+ <dim>3</dim>
1575
+ <dim>3</dim>
1576
+ </port>
1577
+ </input>
1578
+ <output>
1579
+ <port id="2" precision="FP32">
1580
+ <dim>1</dim>
1581
+ <dim>512</dim>
1582
+ <dim>6</dim>
1583
+ <dim>125</dim>
1584
+ </port>
1585
+ </output>
1586
+ </layer>
1587
+ <layer id="81" name="Constant_2571_compressed" type="Const" version="opset1">
1588
+ <data element_type="f16" shape="1, 512, 1, 1" offset="19986818" size="1024" />
1589
+ <output>
1590
+ <port id="0" precision="FP16">
1591
+ <dim>1</dim>
1592
+ <dim>512</dim>
1593
+ <dim>1</dim>
1594
+ <dim>1</dim>
1595
+ </port>
1596
+ </output>
1597
+ </layer>
1598
+ <layer id="82" name="Constant_2571" type="Convert" version="opset1">
1599
+ <data destination_type="f32" />
1600
+ <rt_info>
1601
+ <attribute name="decompression" version="0" />
1602
+ </rt_info>
1603
+ <input>
1604
+ <port id="0" precision="FP16">
1605
+ <dim>1</dim>
1606
+ <dim>512</dim>
1607
+ <dim>1</dim>
1608
+ <dim>1</dim>
1609
+ </port>
1610
+ </input>
1611
+ <output>
1612
+ <port id="1" precision="FP32">
1613
+ <dim>1</dim>
1614
+ <dim>512</dim>
1615
+ <dim>1</dim>
1616
+ <dim>1</dim>
1617
+ </port>
1618
+ </output>
1619
+ </layer>
1620
+ <layer id="83" name="133" type="Add" version="opset1">
1621
+ <data auto_broadcast="numpy" />
1622
+ <input>
1623
+ <port id="0" precision="FP32">
1624
+ <dim>1</dim>
1625
+ <dim>512</dim>
1626
+ <dim>6</dim>
1627
+ <dim>125</dim>
1628
+ </port>
1629
+ <port id="1" precision="FP32">
1630
+ <dim>1</dim>
1631
+ <dim>512</dim>
1632
+ <dim>1</dim>
1633
+ <dim>1</dim>
1634
+ </port>
1635
+ </input>
1636
+ <output>
1637
+ <port id="2" precision="FP32" names="133">
1638
+ <dim>1</dim>
1639
+ <dim>512</dim>
1640
+ <dim>6</dim>
1641
+ <dim>125</dim>
1642
+ </port>
1643
+ </output>
1644
+ </layer>
1645
+ <layer id="84" name="134" type="ReLU" version="opset1">
1646
+ <input>
1647
+ <port id="0" precision="FP32">
1648
+ <dim>1</dim>
1649
+ <dim>512</dim>
1650
+ <dim>6</dim>
1651
+ <dim>125</dim>
1652
+ </port>
1653
+ </input>
1654
+ <output>
1655
+ <port id="1" precision="FP32" names="134">
1656
+ <dim>1</dim>
1657
+ <dim>512</dim>
1658
+ <dim>6</dim>
1659
+ <dim>125</dim>
1660
+ </port>
1661
+ </output>
1662
+ </layer>
1663
+ <layer id="85" name="Multiply_2666_compressed" type="Const" version="opset1">
1664
+ <data element_type="f16" shape="512, 512, 3, 3" offset="19987842" size="4718592" />
1665
+ <output>
1666
+ <port id="0" precision="FP16">
1667
+ <dim>512</dim>
1668
+ <dim>512</dim>
1669
+ <dim>3</dim>
1670
+ <dim>3</dim>
1671
+ </port>
1672
+ </output>
1673
+ </layer>
1674
+ <layer id="86" name="Multiply_2666" type="Convert" version="opset1">
1675
+ <data destination_type="f32" />
1676
+ <rt_info>
1677
+ <attribute name="decompression" version="0" />
1678
+ </rt_info>
1679
+ <input>
1680
+ <port id="0" precision="FP16">
1681
+ <dim>512</dim>
1682
+ <dim>512</dim>
1683
+ <dim>3</dim>
1684
+ <dim>3</dim>
1685
+ </port>
1686
+ </input>
1687
+ <output>
1688
+ <port id="1" precision="FP32">
1689
+ <dim>512</dim>
1690
+ <dim>512</dim>
1691
+ <dim>3</dim>
1692
+ <dim>3</dim>
1693
+ </port>
1694
+ </output>
1695
+ </layer>
1696
+ <layer id="87" name="Multiply_2576" type="Convolution" version="opset1">
1697
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1698
+ <input>
1699
+ <port id="0" precision="FP32">
1700
+ <dim>1</dim>
1701
+ <dim>512</dim>
1702
+ <dim>6</dim>
1703
+ <dim>125</dim>
1704
+ </port>
1705
+ <port id="1" precision="FP32">
1706
+ <dim>512</dim>
1707
+ <dim>512</dim>
1708
+ <dim>3</dim>
1709
+ <dim>3</dim>
1710
+ </port>
1711
+ </input>
1712
+ <output>
1713
+ <port id="2" precision="FP32">
1714
+ <dim>1</dim>
1715
+ <dim>512</dim>
1716
+ <dim>6</dim>
1717
+ <dim>125</dim>
1718
+ </port>
1719
+ </output>
1720
+ </layer>
1721
+ <layer id="88" name="Constant_2581_compressed" type="Const" version="opset1">
1722
+ <data element_type="f16" shape="1, 512, 1, 1" offset="24706434" size="1024" />
1723
+ <output>
1724
+ <port id="0" precision="FP16">
1725
+ <dim>1</dim>
1726
+ <dim>512</dim>
1727
+ <dim>1</dim>
1728
+ <dim>1</dim>
1729
+ </port>
1730
+ </output>
1731
+ </layer>
1732
+ <layer id="89" name="Constant_2581" type="Convert" version="opset1">
1733
+ <data destination_type="f32" />
1734
+ <rt_info>
1735
+ <attribute name="decompression" version="0" />
1736
+ </rt_info>
1737
+ <input>
1738
+ <port id="0" precision="FP16">
1739
+ <dim>1</dim>
1740
+ <dim>512</dim>
1741
+ <dim>1</dim>
1742
+ <dim>1</dim>
1743
+ </port>
1744
+ </input>
1745
+ <output>
1746
+ <port id="1" precision="FP32">
1747
+ <dim>1</dim>
1748
+ <dim>512</dim>
1749
+ <dim>1</dim>
1750
+ <dim>1</dim>
1751
+ </port>
1752
+ </output>
1753
+ </layer>
1754
+ <layer id="90" name="136" type="Add" version="opset1">
1755
+ <data auto_broadcast="numpy" />
1756
+ <input>
1757
+ <port id="0" precision="FP32">
1758
+ <dim>1</dim>
1759
+ <dim>512</dim>
1760
+ <dim>6</dim>
1761
+ <dim>125</dim>
1762
+ </port>
1763
+ <port id="1" precision="FP32">
1764
+ <dim>1</dim>
1765
+ <dim>512</dim>
1766
+ <dim>1</dim>
1767
+ <dim>1</dim>
1768
+ </port>
1769
+ </input>
1770
+ <output>
1771
+ <port id="2" precision="FP32" names="136">
1772
+ <dim>1</dim>
1773
+ <dim>512</dim>
1774
+ <dim>6</dim>
1775
+ <dim>125</dim>
1776
+ </port>
1777
+ </output>
1778
+ </layer>
1779
+ <layer id="91" name="137" type="ReLU" version="opset1">
1780
+ <input>
1781
+ <port id="0" precision="FP32">
1782
+ <dim>1</dim>
1783
+ <dim>512</dim>
1784
+ <dim>6</dim>
1785
+ <dim>125</dim>
1786
+ </port>
1787
+ </input>
1788
+ <output>
1789
+ <port id="1" precision="FP32" names="137">
1790
+ <dim>1</dim>
1791
+ <dim>512</dim>
1792
+ <dim>6</dim>
1793
+ <dim>125</dim>
1794
+ </port>
1795
+ </output>
1796
+ </layer>
1797
+ <layer id="92" name="Multiply_2672_compressed" type="Const" version="opset1">
1798
+ <data element_type="f16" shape="512, 512, 3, 3" offset="24707458" size="4718592" />
1799
+ <output>
1800
+ <port id="0" precision="FP16">
1801
+ <dim>512</dim>
1802
+ <dim>512</dim>
1803
+ <dim>3</dim>
1804
+ <dim>3</dim>
1805
+ </port>
1806
+ </output>
1807
+ </layer>
1808
+ <layer id="93" name="Multiply_2672" type="Convert" version="opset1">
1809
+ <data destination_type="f32" />
1810
+ <rt_info>
1811
+ <attribute name="decompression" version="0" />
1812
+ </rt_info>
1813
+ <input>
1814
+ <port id="0" precision="FP16">
1815
+ <dim>512</dim>
1816
+ <dim>512</dim>
1817
+ <dim>3</dim>
1818
+ <dim>3</dim>
1819
+ </port>
1820
+ </input>
1821
+ <output>
1822
+ <port id="1" precision="FP32">
1823
+ <dim>512</dim>
1824
+ <dim>512</dim>
1825
+ <dim>3</dim>
1826
+ <dim>3</dim>
1827
+ </port>
1828
+ </output>
1829
+ </layer>
1830
+ <layer id="94" name="Multiply_2586" type="Convolution" version="opset1">
1831
+ <data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
1832
+ <input>
1833
+ <port id="0" precision="FP32">
1834
+ <dim>1</dim>
1835
+ <dim>512</dim>
1836
+ <dim>6</dim>
1837
+ <dim>125</dim>
1838
+ </port>
1839
+ <port id="1" precision="FP32">
1840
+ <dim>512</dim>
1841
+ <dim>512</dim>
1842
+ <dim>3</dim>
1843
+ <dim>3</dim>
1844
+ </port>
1845
+ </input>
1846
+ <output>
1847
+ <port id="2" precision="FP32">
1848
+ <dim>1</dim>
1849
+ <dim>512</dim>
1850
+ <dim>6</dim>
1851
+ <dim>125</dim>
1852
+ </port>
1853
+ </output>
1854
+ </layer>
1855
+ <layer id="95" name="Constant_2591_compressed" type="Const" version="opset1">
1856
+ <data element_type="f16" shape="1, 512, 1, 1" offset="29426050" size="1024" />
1857
+ <output>
1858
+ <port id="0" precision="FP16">
1859
+ <dim>1</dim>
1860
+ <dim>512</dim>
1861
+ <dim>1</dim>
1862
+ <dim>1</dim>
1863
+ </port>
1864
+ </output>
1865
+ </layer>
1866
+ <layer id="96" name="Constant_2591" type="Convert" version="opset1">
1867
+ <data destination_type="f32" />
1868
+ <rt_info>
1869
+ <attribute name="decompression" version="0" />
1870
+ </rt_info>
1871
+ <input>
1872
+ <port id="0" precision="FP16">
1873
+ <dim>1</dim>
1874
+ <dim>512</dim>
1875
+ <dim>1</dim>
1876
+ <dim>1</dim>
1877
+ </port>
1878
+ </input>
1879
+ <output>
1880
+ <port id="1" precision="FP32">
1881
+ <dim>1</dim>
1882
+ <dim>512</dim>
1883
+ <dim>1</dim>
1884
+ <dim>1</dim>
1885
+ </port>
1886
+ </output>
1887
+ </layer>
1888
+ <layer id="97" name="139" type="Add" version="opset1">
1889
+ <data auto_broadcast="numpy" />
1890
+ <input>
1891
+ <port id="0" precision="FP32">
1892
+ <dim>1</dim>
1893
+ <dim>512</dim>
1894
+ <dim>6</dim>
1895
+ <dim>125</dim>
1896
+ </port>
1897
+ <port id="1" precision="FP32">
1898
+ <dim>1</dim>
1899
+ <dim>512</dim>
1900
+ <dim>1</dim>
1901
+ <dim>1</dim>
1902
+ </port>
1903
+ </input>
1904
+ <output>
1905
+ <port id="2" precision="FP32" names="139">
1906
+ <dim>1</dim>
1907
+ <dim>512</dim>
1908
+ <dim>6</dim>
1909
+ <dim>125</dim>
1910
+ </port>
1911
+ </output>
1912
+ </layer>
1913
+ <layer id="98" name="140" type="ReLU" version="opset1">
1914
+ <input>
1915
+ <port id="0" precision="FP32">
1916
+ <dim>1</dim>
1917
+ <dim>512</dim>
1918
+ <dim>6</dim>
1919
+ <dim>125</dim>
1920
+ </port>
1921
+ </input>
1922
+ <output>
1923
+ <port id="1" precision="FP32" names="140">
1924
+ <dim>1</dim>
1925
+ <dim>512</dim>
1926
+ <dim>6</dim>
1927
+ <dim>125</dim>
1928
+ </port>
1929
+ </output>
1930
+ </layer>
1931
+ <layer id="99" name="142" type="MaxPool" version="opset8">
1932
+ <data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" kernel="2, 2" rounding_type="floor" auto_pad="explicit" index_element_type="i64" axis="0" />
1933
+ <input>
1934
+ <port id="0" precision="FP32">
1935
+ <dim>1</dim>
1936
+ <dim>512</dim>
1937
+ <dim>6</dim>
1938
+ <dim>125</dim>
1939
+ </port>
1940
+ </input>
1941
+ <output>
1942
+ <port id="1" precision="FP32" names="142">
1943
+ <dim>1</dim>
1944
+ <dim>512</dim>
1945
+ <dim>3</dim>
1946
+ <dim>62</dim>
1947
+ </port>
1948
+ <port id="2" precision="I64">
1949
+ <dim>1</dim>
1950
+ <dim>512</dim>
1951
+ <dim>3</dim>
1952
+ <dim>62</dim>
1953
+ </port>
1954
+ </output>
1955
+ </layer>
1956
+ <layer id="100" name="144" type="Const" version="opset1">
1957
+ <data element_type="i64" shape="3" offset="29427074" size="24" />
1958
+ <output>
1959
+ <port id="0" precision="I64" names="144">
1960
+ <dim>3</dim>
1961
+ </port>
1962
+ </output>
1963
+ </layer>
1964
+ <layer id="101" name="145" type="Reshape" version="opset1">
1965
+ <data special_zero="true" />
1966
+ <input>
1967
+ <port id="0" precision="FP32">
1968
+ <dim>1</dim>
1969
+ <dim>512</dim>
1970
+ <dim>3</dim>
1971
+ <dim>62</dim>
1972
+ </port>
1973
+ <port id="1" precision="I64">
1974
+ <dim>3</dim>
1975
+ </port>
1976
+ </input>
1977
+ <output>
1978
+ <port id="2" precision="FP32" names="145">
1979
+ <dim>1</dim>
1980
+ <dim>512</dim>
1981
+ <dim>186</dim>
1982
+ </port>
1983
+ </output>
1984
+ </layer>
1985
+ <layer id="102" name="92_compressed" type="Const" version="opset1">
1986
+ <data element_type="f16" shape="4442, 512" offset="29427098" size="4548608" />
1987
+ <output>
1988
+ <port id="0" precision="FP16">
1989
+ <dim>4442</dim>
1990
+ <dim>512</dim>
1991
+ </port>
1992
+ </output>
1993
+ </layer>
1994
+ <layer id="103" name="92" type="Convert" version="opset1">
1995
+ <data destination_type="f32" />
1996
+ <rt_info>
1997
+ <attribute name="decompression" version="0" />
1998
+ </rt_info>
1999
+ <input>
2000
+ <port id="0" precision="FP16">
2001
+ <dim>4442</dim>
2002
+ <dim>512</dim>
2003
+ </port>
2004
+ </input>
2005
+ <output>
2006
+ <port id="1" precision="FP32" names="92">
2007
+ <dim>4442</dim>
2008
+ <dim>512</dim>
2009
+ </port>
2010
+ </output>
2011
+ </layer>
2012
+ <layer id="104" name="148" type="MatMul" version="opset1">
2013
+ <data transpose_a="true" transpose_b="true" />
2014
+ <input>
2015
+ <port id="0" precision="FP32">
2016
+ <dim>1</dim>
2017
+ <dim>512</dim>
2018
+ <dim>186</dim>
2019
+ </port>
2020
+ <port id="1" precision="FP32">
2021
+ <dim>4442</dim>
2022
+ <dim>512</dim>
2023
+ </port>
2024
+ </input>
2025
+ <output>
2026
+ <port id="2" precision="FP32" names="148">
2027
+ <dim>1</dim>
2028
+ <dim>186</dim>
2029
+ <dim>4442</dim>
2030
+ </port>
2031
+ </output>
2032
+ </layer>
2033
+ <layer id="105" name="Constant_2752_compressed" type="Const" version="opset1">
2034
+ <data element_type="f16" shape="1, 1, 4442" offset="33975706" size="8884" />
2035
+ <output>
2036
+ <port id="0" precision="FP16">
2037
+ <dim>1</dim>
2038
+ <dim>1</dim>
2039
+ <dim>4442</dim>
2040
+ </port>
2041
+ </output>
2042
+ </layer>
2043
+ <layer id="106" name="Constant_2752" type="Convert" version="opset1">
2044
+ <data destination_type="f32" />
2045
+ <rt_info>
2046
+ <attribute name="decompression" version="0" />
2047
+ </rt_info>
2048
+ <input>
2049
+ <port id="0" precision="FP16">
2050
+ <dim>1</dim>
2051
+ <dim>1</dim>
2052
+ <dim>4442</dim>
2053
+ </port>
2054
+ </input>
2055
+ <output>
2056
+ <port id="1" precision="FP32">
2057
+ <dim>1</dim>
2058
+ <dim>1</dim>
2059
+ <dim>4442</dim>
2060
+ </port>
2061
+ </output>
2062
+ </layer>
2063
+ <layer id="107" name="149" type="Add" version="opset1">
2064
+ <data auto_broadcast="numpy" />
2065
+ <input>
2066
+ <port id="0" precision="FP32">
2067
+ <dim>1</dim>
2068
+ <dim>186</dim>
2069
+ <dim>4442</dim>
2070
+ </port>
2071
+ <port id="1" precision="FP32">
2072
+ <dim>1</dim>
2073
+ <dim>1</dim>
2074
+ <dim>4442</dim>
2075
+ </port>
2076
+ </input>
2077
+ <output>
2078
+ <port id="2" precision="FP32" names="149">
2079
+ <dim>1</dim>
2080
+ <dim>186</dim>
2081
+ <dim>4442</dim>
2082
+ </port>
2083
+ </output>
2084
+ </layer>
2085
+ <layer id="108" name="Constant_1597" type="Const" version="opset1">
2086
+ <data element_type="i64" shape="3" offset="33984590" size="24" />
2087
+ <output>
2088
+ <port id="0" precision="I64">
2089
+ <dim>3</dim>
2090
+ </port>
2091
+ </output>
2092
+ </layer>
2093
+ <layer id="109" name="output" type="Reshape" version="opset1">
2094
+ <data special_zero="true" />
2095
+ <input>
2096
+ <port id="0" precision="FP32">
2097
+ <dim>1</dim>
2098
+ <dim>186</dim>
2099
+ <dim>4442</dim>
2100
+ </port>
2101
+ <port id="1" precision="I64">
2102
+ <dim>3</dim>
2103
+ </port>
2104
+ </input>
2105
+ <output>
2106
+ <port id="2" precision="FP32" names="output">
2107
+ <dim>186</dim>
2108
+ <dim>1</dim>
2109
+ <dim>4442</dim>
2110
+ </port>
2111
+ </output>
2112
+ </layer>
2113
+ <layer id="110" name="output/sink_port_0" type="Result" version="opset1">
2114
+ <input>
2115
+ <port id="0" precision="FP32">
2116
+ <dim>186</dim>
2117
+ <dim>1</dim>
2118
+ <dim>4442</dim>
2119
+ </port>
2120
+ </input>
2121
+ </layer>
2122
+ </layers>
2123
+ <edges>
2124
+ <edge from-layer="0" from-port="0" to-layer="3" to-port="0" />
2125
+ <edge from-layer="1" from-port="0" to-layer="2" to-port="0" />
2126
+ <edge from-layer="2" from-port="1" to-layer="3" to-port="1" />
2127
+ <edge from-layer="3" from-port="2" to-layer="6" to-port="0" />
2128
+ <edge from-layer="4" from-port="0" to-layer="5" to-port="0" />
2129
+ <edge from-layer="5" from-port="1" to-layer="6" to-port="1" />
2130
+ <edge from-layer="6" from-port="2" to-layer="9" to-port="0" />
2131
+ <edge from-layer="7" from-port="0" to-layer="8" to-port="0" />
2132
+ <edge from-layer="8" from-port="1" to-layer="9" to-port="1" />
2133
+ <edge from-layer="9" from-port="2" to-layer="10" to-port="0" />
2134
+ <edge from-layer="10" from-port="1" to-layer="13" to-port="0" />
2135
+ <edge from-layer="11" from-port="0" to-layer="12" to-port="0" />
2136
+ <edge from-layer="12" from-port="1" to-layer="13" to-port="1" />
2137
+ <edge from-layer="13" from-port="2" to-layer="16" to-port="0" />
2138
+ <edge from-layer="14" from-port="0" to-layer="15" to-port="0" />
2139
+ <edge from-layer="15" from-port="1" to-layer="16" to-port="1" />
2140
+ <edge from-layer="16" from-port="2" to-layer="17" to-port="0" />
2141
+ <edge from-layer="17" from-port="1" to-layer="18" to-port="0" />
2142
+ <edge from-layer="18" from-port="1" to-layer="21" to-port="0" />
2143
+ <edge from-layer="19" from-port="0" to-layer="20" to-port="0" />
2144
+ <edge from-layer="20" from-port="1" to-layer="21" to-port="1" />
2145
+ <edge from-layer="21" from-port="2" to-layer="24" to-port="0" />
2146
+ <edge from-layer="22" from-port="0" to-layer="23" to-port="0" />
2147
+ <edge from-layer="23" from-port="1" to-layer="24" to-port="1" />
2148
+ <edge from-layer="24" from-port="2" to-layer="25" to-port="0" />
2149
+ <edge from-layer="25" from-port="1" to-layer="28" to-port="0" />
2150
+ <edge from-layer="26" from-port="0" to-layer="27" to-port="0" />
2151
+ <edge from-layer="27" from-port="1" to-layer="28" to-port="1" />
2152
+ <edge from-layer="28" from-port="2" to-layer="31" to-port="0" />
2153
+ <edge from-layer="29" from-port="0" to-layer="30" to-port="0" />
2154
+ <edge from-layer="30" from-port="1" to-layer="31" to-port="1" />
2155
+ <edge from-layer="31" from-port="2" to-layer="32" to-port="0" />
2156
+ <edge from-layer="32" from-port="1" to-layer="33" to-port="0" />
2157
+ <edge from-layer="33" from-port="1" to-layer="36" to-port="0" />
2158
+ <edge from-layer="34" from-port="0" to-layer="35" to-port="0" />
2159
+ <edge from-layer="35" from-port="1" to-layer="36" to-port="1" />
2160
+ <edge from-layer="36" from-port="2" to-layer="39" to-port="0" />
2161
+ <edge from-layer="37" from-port="0" to-layer="38" to-port="0" />
2162
+ <edge from-layer="38" from-port="1" to-layer="39" to-port="1" />
2163
+ <edge from-layer="39" from-port="2" to-layer="40" to-port="0" />
2164
+ <edge from-layer="40" from-port="1" to-layer="43" to-port="0" />
2165
+ <edge from-layer="41" from-port="0" to-layer="42" to-port="0" />
2166
+ <edge from-layer="42" from-port="1" to-layer="43" to-port="1" />
2167
+ <edge from-layer="43" from-port="2" to-layer="46" to-port="0" />
2168
+ <edge from-layer="44" from-port="0" to-layer="45" to-port="0" />
2169
+ <edge from-layer="45" from-port="1" to-layer="46" to-port="1" />
2170
+ <edge from-layer="46" from-port="2" to-layer="47" to-port="0" />
2171
+ <edge from-layer="47" from-port="1" to-layer="50" to-port="0" />
2172
+ <edge from-layer="48" from-port="0" to-layer="49" to-port="0" />
2173
+ <edge from-layer="49" from-port="1" to-layer="50" to-port="1" />
2174
+ <edge from-layer="50" from-port="2" to-layer="53" to-port="0" />
2175
+ <edge from-layer="51" from-port="0" to-layer="52" to-port="0" />
2176
+ <edge from-layer="52" from-port="1" to-layer="53" to-port="1" />
2177
+ <edge from-layer="53" from-port="2" to-layer="54" to-port="0" />
2178
+ <edge from-layer="54" from-port="1" to-layer="55" to-port="0" />
2179
+ <edge from-layer="55" from-port="1" to-layer="58" to-port="0" />
2180
+ <edge from-layer="56" from-port="0" to-layer="57" to-port="0" />
2181
+ <edge from-layer="57" from-port="1" to-layer="58" to-port="1" />
2182
+ <edge from-layer="58" from-port="2" to-layer="61" to-port="0" />
2183
+ <edge from-layer="59" from-port="0" to-layer="60" to-port="0" />
2184
+ <edge from-layer="60" from-port="1" to-layer="61" to-port="1" />
2185
+ <edge from-layer="61" from-port="2" to-layer="62" to-port="0" />
2186
+ <edge from-layer="62" from-port="1" to-layer="65" to-port="0" />
2187
+ <edge from-layer="63" from-port="0" to-layer="64" to-port="0" />
2188
+ <edge from-layer="64" from-port="1" to-layer="65" to-port="1" />
2189
+ <edge from-layer="65" from-port="2" to-layer="68" to-port="0" />
2190
+ <edge from-layer="66" from-port="0" to-layer="67" to-port="0" />
2191
+ <edge from-layer="67" from-port="1" to-layer="68" to-port="1" />
2192
+ <edge from-layer="68" from-port="2" to-layer="69" to-port="0" />
2193
+ <edge from-layer="69" from-port="1" to-layer="72" to-port="0" />
2194
+ <edge from-layer="70" from-port="0" to-layer="71" to-port="0" />
2195
+ <edge from-layer="71" from-port="1" to-layer="72" to-port="1" />
2196
+ <edge from-layer="72" from-port="2" to-layer="75" to-port="0" />
2197
+ <edge from-layer="73" from-port="0" to-layer="74" to-port="0" />
2198
+ <edge from-layer="74" from-port="1" to-layer="75" to-port="1" />
2199
+ <edge from-layer="75" from-port="2" to-layer="76" to-port="0" />
2200
+ <edge from-layer="76" from-port="1" to-layer="77" to-port="0" />
2201
+ <edge from-layer="77" from-port="1" to-layer="80" to-port="0" />
2202
+ <edge from-layer="78" from-port="0" to-layer="79" to-port="0" />
2203
+ <edge from-layer="79" from-port="1" to-layer="80" to-port="1" />
2204
+ <edge from-layer="80" from-port="2" to-layer="83" to-port="0" />
2205
+ <edge from-layer="81" from-port="0" to-layer="82" to-port="0" />
2206
+ <edge from-layer="82" from-port="1" to-layer="83" to-port="1" />
2207
+ <edge from-layer="83" from-port="2" to-layer="84" to-port="0" />
2208
+ <edge from-layer="84" from-port="1" to-layer="87" to-port="0" />
2209
+ <edge from-layer="85" from-port="0" to-layer="86" to-port="0" />
2210
+ <edge from-layer="86" from-port="1" to-layer="87" to-port="1" />
2211
+ <edge from-layer="87" from-port="2" to-layer="90" to-port="0" />
2212
+ <edge from-layer="88" from-port="0" to-layer="89" to-port="0" />
2213
+ <edge from-layer="89" from-port="1" to-layer="90" to-port="1" />
2214
+ <edge from-layer="90" from-port="2" to-layer="91" to-port="0" />
2215
+ <edge from-layer="91" from-port="1" to-layer="94" to-port="0" />
2216
+ <edge from-layer="92" from-port="0" to-layer="93" to-port="0" />
2217
+ <edge from-layer="93" from-port="1" to-layer="94" to-port="1" />
2218
+ <edge from-layer="94" from-port="2" to-layer="97" to-port="0" />
2219
+ <edge from-layer="95" from-port="0" to-layer="96" to-port="0" />
2220
+ <edge from-layer="96" from-port="1" to-layer="97" to-port="1" />
2221
+ <edge from-layer="97" from-port="2" to-layer="98" to-port="0" />
2222
+ <edge from-layer="98" from-port="1" to-layer="99" to-port="0" />
2223
+ <edge from-layer="99" from-port="1" to-layer="101" to-port="0" />
2224
+ <edge from-layer="100" from-port="0" to-layer="101" to-port="1" />
2225
+ <edge from-layer="101" from-port="2" to-layer="104" to-port="0" />
2226
+ <edge from-layer="102" from-port="0" to-layer="103" to-port="0" />
2227
+ <edge from-layer="103" from-port="1" to-layer="104" to-port="1" />
2228
+ <edge from-layer="104" from-port="2" to-layer="107" to-port="0" />
2229
+ <edge from-layer="105" from-port="0" to-layer="106" to-port="0" />
2230
+ <edge from-layer="106" from-port="1" to-layer="107" to-port="1" />
2231
+ <edge from-layer="107" from-port="2" to-layer="109" to-port="0" />
2232
+ <edge from-layer="108" from-port="0" to-layer="109" to-port="1" />
2233
+ <edge from-layer="109" from-port="2" to-layer="110" to-port="0" />
2234
+ </edges>
2235
+ <rt_info>
2236
+ <MO_version value="custom_HEAD_f6ee6e92f846a8c665e4a7089c51481f9689a3b5" />
2237
+ <Runtime_version value="2023.0.0-10521-f6ee6e92f84-HEAD" />
2238
+ <conversion_parameters>
2239
+ <framework value="onnx" />
2240
+ <input value="actual_input" />
2241
+ <input_model value="DIR/handwritten-japanese-recognition-0001.onnx" />
2242
+ <input_shape value="[1,1,96,2000]" />
2243
+ <layout value="actual_input(nchw)" />
2244
+ <mean_values value="actual_input[127.5]" />
2245
+ <model_name value="handwritten-japanese-recognition-0001" />
2246
+ <output value="output" />
2247
+ <output_dir value="DIR" />
2248
+ <scale_values value="actual_input[127.5]" />
2249
+ </conversion_parameters>
2250
+ <legacy_frontend value="False" />
2251
+ </rt_info>
2252
+ </net>
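
The IR above is the Model Optimizer output for handwritten-japanese-recognition-0001 (converted from ONNX with input shape [1,1,96,2000]; mean and scale values of 127.5 are baked in, per the conversion_parameters). As a minimal sketch, assuming OpenVINO 2023.0 is installed and the paths match src/config.py below, loading and compiling this IR looks like:

from openvino.runtime import Core

core = Core()
# read_model picks up the .bin weights sitting next to the .xml topology
model = core.read_model("models/handwritten-japanese-recognition-0001.xml")
compiled = core.compile_model(model, "CPU")

# one input (N,C,H,W = 1,1,96,2000) and one output, per the rt_info above
input_layer = compiled.input(0)
output_layer = compiled.output(0)
print(input_layer.shape, output_layer.get_any_name())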
pyproject.toml ADDED
@@ -0,0 +1,21 @@
+ [tool.black]
+ line-length = 79
+ include = '\.pyi?$'
+ exclude = '''
+ /(
+     \.git
+   | \.idea
+   | \.pytest_cache
+   | \.tox
+   | \.venv
+   | _build
+   | buck-out
+   | build
+   | dist
+ )/
+ '''
+
+ [tool.isort]
+ profile = "black"
+ force_grid_wrap = 2
+ multi_line_output = 3
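
For reference, multi_line_output = 3 is isort's vertical-hanging-indent style and force_grid_wrap = 2 wraps any from-import with two or more names, which matches black's trailing-comma output. A quick, hypothetical way to exercise the black half of this config programmatically (assuming black is installed as a library):

import black

# format a snippet under the same 79-character limit the project configures
mode = black.Mode(line_length=79)
print(black.format_str("x = {  'a':37,'b':42 }", mode=mode))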
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ streamlit
+ openvino
+ opencv-python
+ gradio
src/__Init__.py ADDED
File without changes
src/config.py ADDED
@@ -0,0 +1,26 @@
+ # from streamlit_webrtc import RTCConfiguration
+ from collections import namedtuple
+
+ # MODELS
+ MODEL_DIR = "models/handwritten-japanese-recognition-0001.xml"
+ # MODEL_NAME = "handwritten-japanese-recognition-0001"
+ DATA_DIR = "data"
+ DICT_DIR = "dict/japanese_charlist.txt"
+ DEVICE = "CPU"
+
+ # Precision used by the model.
+ precision = "FP16"
+
+ # Config server for webcam
+ # RTC_CONFIGURATION = RTCConfiguration(
+ #     {"iceServers": [{"urls": ["stun:stun4.l.google.com:19302"]}]},
+ # )
+
+ # IMAGE
+ IMAGE_EXAMPLE = "data/in_1.png"
+ IMAGE_TYPES = ["jpg", "png", "jpeg", "JPG"]
+
+
+ Language = namedtuple(
+     typename="Language", field_names=["model_name", "dict_name", "demo_image_name"]
+ )
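
The Language namedtuple groups the per-language assets the app needs. A hypothetical instantiation, reusing the Japanese file names this config already points at (the variable name and field values are illustrative, not part of the commit):

japanese = Language(
    model_name="handwritten-japanese-recognition-0001",
    dict_name="japanese_charlist.txt",
    demo_image_name="in_1.png",
)
print(japanese.model_name)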
src/detect_mask_image.py ADDED
@@ -0,0 +1,225 @@
+ # USAGE
+ # python detect_mask_image.py --image images/pic1.jpeg
+
+ import argparse
+ import os
+
+ import cv2
+ import numpy as np
+
+ # import the necessary packages
+ from tensorflow.keras.applications.mobilenet_v2 import preprocess_input
+ from tensorflow.keras.models import load_model
+ from tensorflow.keras.preprocessing.image import img_to_array
+
+
+ def mask_image():
+     # construct the argument parser and parse the arguments
+     ap = argparse.ArgumentParser()
+     ap.add_argument(
+         "-i",
+         "--image",
+         required=True,
+         help="path to input image",
+     )
+     ap.add_argument(
+         "-f",
+         "--face",
+         type=str,
+         default="face_detector",
+         help="path to face detector model directory",
+     )
+     ap.add_argument(
+         "-m",
+         "--model",
+         type=str,
+         default="mask_detector.model",
+         help="path to trained face mask detector model",
+     )
+     ap.add_argument(
+         "-c",
+         "--confidence",
+         type=float,
+         default=0.5,
+         help="minimum probability to filter weak detections",
+     )
+     args = vars(ap.parse_args())
+
+     # load our serialized face detector model from disk
+     print("[INFO] loading face detector model...")
+     prototxtPath = os.path.sep.join([args["face"], "deploy.prototxt"])
+     weightsPath = os.path.sep.join(
+         [
+             args["face"],
+             "res10_300x300_ssd_iter_140000.caffemodel",
+         ],
+     )
+     net = cv2.dnn.readNet(prototxtPath, weightsPath)
+
+     # load the face mask detector model from disk
+     print("[INFO] loading face mask detector model...")
+     model = load_model(args["model"])
+
+     # load the input image from disk, clone it, and grab the image spatial
+     # dimensions
+     image = cv2.imread(args["image"])
+     (h, w) = image.shape[:2]
+
+     # construct a blob from the image
+     blob = cv2.dnn.blobFromImage(
+         image,
+         1.0,
+         (300, 300),
+         (104.0, 177.0, 123.0),
+     )
+
+     # pass the blob through the network and obtain the face detections
+     print("[INFO] computing face detections...")
+     net.setInput(blob)
+     detections = net.forward()
+
+     # loop over the detections
+     for i in range(0, detections.shape[2]):
+         # extract the confidence (i.e., probability) associated with
+         # the detection
+         confidence = detections[0, 0, i, 2]
+
+         # filter out weak detections by ensuring the confidence is
+         # greater than the minimum confidence
+         if confidence > args["confidence"]:
+             # compute the (x, y)-coordinates of the bounding box for
+             # the object
+             box = detections[0, 0, i, 3:7] * np.array([w, h, w, h])
+             (startX, startY, endX, endY) = box.astype("int")
+
+             # ensure the bounding boxes fall within the dimensions of
+             # the frame
+             (startX, startY) = (max(0, startX), max(0, startY))
+             (endX, endY) = (min(w - 1, endX), min(h - 1, endY))
+
+             # extract the face ROI, convert it from BGR to RGB channel
+             # ordering, resize it to 224x224, and preprocess it
+             face = image[startY:endY, startX:endX]
+             face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB)
+             face = cv2.resize(face, (224, 224))
+             face = img_to_array(face)
+             face = preprocess_input(face)
+             face = np.expand_dims(face, axis=0)
+
+             # pass the face through the model to determine if the face
+             # has a mask or not
+             (mask, withoutMask) = model.predict(face)[0]
+
+             # determine the class label and color we'll use to draw
+             # the bounding box and text
+             label = "Mask" if mask > withoutMask else "No Mask"
+             color = (0, 255, 0) if label == "Mask" else (0, 0, 255)
+
+             # include the probability in the label
+             label = f"{label}: {max(mask, withoutMask) * 100:.2f}%"
+
+             # display the label and bounding box rectangle on the output
+             # frame
+             cv2.putText(
+                 image,
+                 label,
+                 (startX, startY - 10),
+                 cv2.FONT_HERSHEY_SIMPLEX,
+                 0.45,
+                 color,
+                 2,
+             )
+             cv2.rectangle(image, (startX, startY), (endX, endY), color, 2)
+
+     # show the output image
+     cv2.imshow("Output", image)
+     cv2.waitKey(0)
+
+
+ def detect_mask_in_image(image, faceNet, maskNet):
+     # grab the image spatial dimensions
+     (h, w) = image.shape[:2]
+
+     # construct a blob from the image
+     blob = cv2.dnn.blobFromImage(
+         image,
+         1.0,
+         (300, 300),
+         (104.0, 177.0, 123.0),
+     )  # TODO: add to config
+
+     # pass the blob through the network and obtain the face detections
+     print("[INFO] computing face detections...")
+     faceNet.setInput(blob)
+     detections = faceNet.forward()
+
+     face_count = 0
+     # image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
+     # loop over the detections
+     for i in range(0, detections.shape[2]):
+         # extract the confidence associated with the detection
+         confidence = detections[0, 0, i, 2]
+         # print(f"[INFO] face {i}: {confidence}")
+         # filter out weak detections by ensuring the confidence is
+         # greater than the minimum confidence
+         if confidence > 0.5:
+             face_count += 1
+             # compute the (x, y)-coordinates of the object's bbox
+             box = detections[0, 0, i, 3:7] * np.array([w, h, w, h])
+             (startX, startY, endX, endY) = box.astype("int")
+
+             # ensure the bounding boxes fall within the dimensions of
+             # the frame
+             (startX, startY) = (max(0, startX), max(0, startY))
+             (endX, endY) = (min(w - 1, endX), min(h - 1, endY))
+
+             # extract the face ROI, convert it from BGR to RGB channel
+             # ordering, resize it to 224x224, and preprocess it
+             face = image[startY:endY, startX:endX]
+             face = cv2.cvtColor(face, cv2.COLOR_BGR2RGB)
+             face = cv2.resize(face, (224, 224))
+             face = img_to_array(face)
+             face = preprocess_input(face)
+             face = np.expand_dims(face, axis=0)
+
+             # pass the face through the model to determine if the face
+             # has a mask or not
+             (mask, withoutMask) = maskNet.predict(face)[0]
+
+             # determine the class label and color we'll use to draw
+             # the bounding box and text
+             label = "Mask" if mask > withoutMask else "No Mask"
+             color = (0, 255, 0) if label == "Mask" else (255, 0, 0)
+
+             # include the probability in the label
+             label = f"{label}: {max(mask, withoutMask) * 100:.2f}%"
+
+             # display the label & bbox rectangle on the output frame
+             cv2.putText(
+                 image,
+                 label,
+                 (startX, startY - 10),
+                 cv2.FONT_HERSHEY_SIMPLEX,
+                 0.45,
+                 color,
+                 2,
+             )
+             cv2.rectangle(image, (startX, startY), (endX, endY), color, 2)
+         else:
+             break
+     text = f"[INFO] Detect {face_count} face(s)."
+     print(text)
+     cv2.putText(
+         image,
+         text,
+         (10, 30),
+         cv2.FONT_HERSHEY_SIMPLEX,
+         0.70,
+         (0, 255, 0),
+         2,
+     )
+     return image
+
+
+ if __name__ == "__main__":
+     mask_image()
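
detect_mask_in_image is the reusable entry point for the UI code. A minimal sketch of calling it directly, using the script's own argparse defaults for the model paths (the input image path is hypothetical):

import cv2
from tensorflow.keras.models import load_model

# the face detector is the OpenCV DNN SSD pair the script expects on disk
faceNet = cv2.dnn.readNet(
    "face_detector/deploy.prototxt",
    "face_detector/res10_300x300_ssd_iter_140000.caffemodel",
)
maskNet = load_model("mask_detector.model")

frame = cv2.imread("images/pic1.jpeg")
annotated = detect_mask_in_image(frame, faceNet, maskNet)
cv2.imwrite("output.jpg", annotated)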
src/image_processing.py ADDED
@@ -0,0 +1,60 @@
+ from itertools import groupby
+
+ import cv2
+ import numpy as np
+
+
+ def process_image(image, recognition_input_layer):
+     # Text recognition models expect an image in grayscale format.
+     # IMPORTANT! This model enables reading only one line of text at a time.
+     # image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
+
+     # Fetch the shape.
+     image_height, _ = image.shape
+
+     # B,C,H,W = batch size, number of channels, height, width.
+     _, _, H, W = recognition_input_layer.shape
+
+     # Calculate the scale ratio between the input shape height and image height to resize the image.
+     scale_ratio = H / image_height
+
+     # Resize the image to the expected input size.
+     resized_image = cv2.resize(
+         image, None, fx=scale_ratio, fy=scale_ratio, interpolation=cv2.INTER_AREA
+     )
+
+     # Pad the image to match the input size, without changing the aspect ratio.
+     resized_image = np.pad(
+         resized_image, ((0, 0), (0, W - resized_image.shape[1])), mode="edge"
+     )
+
+     # Reshape to the network input shape.
+     input_image = resized_image[None, None, :, :]
+
+     return input_image
+
+
+ def recognize(image, compiled_model, recognition_input_layer, recognition_output_layer, letters):
+     input_image = process_image(image, recognition_input_layer)
+     # Run inference on the model.
+     predictions = compiled_model([input_image])[recognition_output_layer]
+
+     # Remove the batch dimension.
+     predictions = np.squeeze(predictions)
+
+     # Run the `argmax` function to pick the symbols with the highest probability.
+     predictions_indexes = np.argmax(predictions, axis=1)
+
+     # Use the `groupby` function to remove concurrent letters, as required by CTC greedy decoding.
+     output_text_indexes = list(groupby(predictions_indexes))
+
+     # Remove grouper objects.
+     output_text_indexes, _ = np.transpose(output_text_indexes, (1, 0))
+
+     # Remove blank symbols.
+     output_text_indexes = output_text_indexes[output_text_indexes != 0]
+
+     # Assign letters to indexes from the output array.
+     output_text = [letters[letter_index] for letter_index in output_text_indexes]
+
+     return output_text
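
recognize() implements CTC greedy decoding: argmax per time step, collapse repeated symbols with groupby, then drop index 0 as the blank. A sketch of wiring it up end to end with the IR and paths from src/config.py; the character-list handling is an assumption (a placeholder is prepended so that blank index 0 never maps to a real character):

import cv2
from openvino.runtime import Core

core = Core()
model = core.read_model("models/handwritten-japanese-recognition-0001.xml")
compiled = core.compile_model(model, "CPU")
in_layer = compiled.input(0)
out_layer = compiled.output(0)

# assumption: index 0 is the CTC blank, so prepend a placeholder symbol
with open("dict/japanese_charlist.txt", encoding="utf-8") as f:
    letters = ["~"] + [line.strip() for line in f]

img = cv2.imread("data/in_1.png", cv2.IMREAD_GRAYSCALE)
print("".join(recognize(img, compiled, in_layer, out_layer, letters)))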
src/model2onnx.py ADDED
@@ -0,0 +1,41 @@
+ # import the necessary packages
+ import argparse
+
+ import onnx
+ import tf2onnx
+ from tensorflow.keras.models import load_model
+
+
+ def model2onnx():
+     # construct the argument parser and parse the arguments
+     ap = argparse.ArgumentParser()
+     ap.add_argument(
+         "-m",
+         "--model",
+         type=str,
+         default="mask_detector.model",
+         help="path to trained face mask detector model",
+     )
+     ap.add_argument(
+         "-o",
+         "--output",
+         type=str,
+         default="mask_detector.onnx",
+         help="path to output ONNX model",
+     )
+
+     args = vars(ap.parse_args())
+
+     # load the face mask detector model from disk
+     print("[INFO] loading face mask detector model...")
+     model = load_model(args["model"])
+     onnx_model, _ = tf2onnx.convert.from_keras(model, opset=13)
+
+     # mark the batch dimension as dynamic on both the input and the output
+     onnx_model.graph.input[0].type.tensor_type.shape.dim[0].dim_param = "?"
+     onnx_model.graph.output[0].type.tensor_type.shape.dim[0].dim_param = "?"
+
+     onnx.save(onnx_model, args["output"])
+
+
+ if __name__ == "__main__":
+     model2onnx()
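
The exported ONNX file can then be converted to OpenVINO IR, which is the same pipeline that produced the handwriting IR committed above. A sketch using OpenVINO's Python conversion API; the calls and the output path are assumptions, since this step is not part of the commit:

from openvino.runtime import serialize
from openvino.tools.mo import convert_model

# convert the ONNX graph in memory, then write the .xml/.bin IR pair
ov_model = convert_model("mask_detector.onnx")
serialize(ov_model, "models/mask_detector.xml")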
src/search.py ADDED
@@ -0,0 +1,94 @@
+ import argparse
+ import os
+
+ import cv2
+ import requests
+ from requests import exceptions
+
+ ap = argparse.ArgumentParser()
+ ap.add_argument(
+     "-q",
+     "--query",
+     required=True,
+     help="search query to search Bing Image API for",
+ )
+ ap.add_argument(
+     "-o",
+     "--output",
+     required=True,
+     help="path to output directory of images",
+ )
+ args = vars(ap.parse_args())
+ API_KEY = "d8982f9e69a4437fa6e10715d1ed691d"
+ MAX_RESULTS = 500
+ GROUP_SIZE = 50
+ URL = "https://api.cognitive.microsoft.com/bing/v7.0/images/search"
+ EXCEPTIONS = {
+     IOError,
+     FileNotFoundError,
+     exceptions.RequestException,
+     exceptions.HTTPError,
+     exceptions.ConnectionError,
+     exceptions.Timeout,
+ }
+ term = args["query"]
+ headers = {"Ocp-Apim-Subscription-Key": API_KEY}
+ params = {"q": term, "offset": 0, "count": GROUP_SIZE}
+ print(f"[INFO] searching Bing API for '{term}'")
+ search = requests.get(URL, headers=headers, params=params)
+ search.raise_for_status()
+ results = search.json()
+ estNumResults = min(results["totalEstimatedMatches"], MAX_RESULTS)
+ print(
+     "[INFO] {} total results for '{}'".format(
+         estNumResults,
+         term,
+     ),
+ )
+ total = 0
+ for offset in range(0, estNumResults, GROUP_SIZE):
+     print(
+         "[INFO] making request for group {}-{} of {}...".format(
+             offset,
+             offset + GROUP_SIZE,
+             estNumResults,
+         ),
+     )
+     params["offset"] = offset
+     search = requests.get(URL, headers=headers, params=params)
+     search.raise_for_status()
+     results = search.json()
+     print(
+         "[INFO] saving images for group {}-{} of {}...".format(
+             offset,
+             offset + GROUP_SIZE,
+             estNumResults,
+         ),
+     )
+     for v in results["value"]:
+         try:
+             print("[INFO] fetching: {}".format(v["contentUrl"]))
+             r = requests.get(v["contentUrl"], timeout=30)
+             ext = v["contentUrl"][v["contentUrl"].rfind(".") :]
+             p = os.path.sep.join(
+                 [
+                     args["output"],
+                     "{}{}".format(
+                         str(total).zfill(8),
+                         ext,
+                     ),
+                 ],
+             )
+             with open(p, "wb") as f:
+                 f.write(r.content)
+         except Exception as e:
+             if type(e) in EXCEPTIONS:
+                 print("[INFO] skipping: {}".format(v["contentUrl"]))
+                 continue
+         image = cv2.imread(p)
+         if image is None:
+             print(f"[INFO] deleting: {p}")
+             os.remove(p)
+             continue
+         total += 1
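
Note that API_KEY is committed here in plain text, which exposes the credential to anyone who can read the repository. A sketch of the usual fix, reading the key from the environment instead (the variable name is hypothetical):

import os

# hypothetical variable name; fail fast if it is not set
API_KEY = os.environ.get("BING_SEARCH_API_KEY", "")
if not API_KEY:
    raise SystemExit("Set BING_SEARCH_API_KEY before running search.py")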
src/train_mask_detector.py ADDED
@@ -0,0 +1,192 @@
+ # USAGE
+ # python train_mask_detector.py --dataset dataset
+
+ import argparse
+ import os
+
+ import matplotlib.pyplot as plt
+ import numpy as np
+ from imutils import paths
+ from sklearn.metrics import classification_report
+ from sklearn.model_selection import train_test_split
+ from sklearn.preprocessing import LabelBinarizer
+ from tensorflow.keras.applications import MobileNetV2
+ from tensorflow.keras.applications.mobilenet_v2 import preprocess_input
+ from tensorflow.keras.layers import (
+     AveragePooling2D,
+     Dense,
+     Dropout,
+     Flatten,
+     Input,
+ )
+ from tensorflow.keras.models import Model
+ from tensorflow.keras.optimizers import Adam
+
+ # import the necessary packages
+ from tensorflow.keras.preprocessing.image import (
+     ImageDataGenerator,
+     img_to_array,
+     load_img,
+ )
+ from tensorflow.keras.utils import to_categorical
+
+ # construct the argument parser and parse the arguments
+ ap = argparse.ArgumentParser()
+ ap.add_argument(
+     "-d",
+     "--dataset",
+     required=True,
+     help="path to input dataset",
+ )
+ ap.add_argument(
+     "-p",
+     "--plot",
+     type=str,
+     default="plot.png",
+     help="path to output loss/accuracy plot",
+ )
+ ap.add_argument(
+     "-m",
+     "--model",
+     type=str,
+     default="mask_detector.model",
+     help="path to output face mask detector model",
+ )
+ args = vars(ap.parse_args())
+
+ # initialize the initial learning rate, number of epochs to train for,
+ # and batch size
+ INIT_LR = 1e-4
+ EPOCHS = 20
+ BS = 32
+
+ # grab the list of images in our dataset directory, then initialize
+ # the list of data (i.e., images) and class labels
+ print("[INFO] loading images...")
+ imagePaths = list(paths.list_images(args["dataset"]))
+ data = []
+ labels = []
+
+ # loop over the image paths
+ for imagePath in imagePaths:
+     # extract the class label from the filename
+     label = imagePath.split(os.path.sep)[-2]
+
+     # load the input image (224x224) and preprocess it
+     image = load_img(imagePath, target_size=(224, 224))
+     image = img_to_array(image)
+     image = preprocess_input(image)
+
+     # update the data and labels lists, respectively
+     data.append(image)
+     labels.append(label)
+
+ # convert the data and labels to NumPy arrays
+ data = np.array(data, dtype="float32")
+ labels = np.array(labels)
+
+ # perform one-hot encoding on the labels
+ lb = LabelBinarizer()
+ labels = lb.fit_transform(labels)
+ labels = to_categorical(labels)
+
+ # partition the data into training and testing splits using 80% of
+ # the data for training and the remaining 20% for testing
+ (trainX, testX, trainY, testY) = train_test_split(
+     data,
+     labels,
+     test_size=0.20,
+     stratify=labels,
+     random_state=42,
+ )
+
+ # construct the training image generator for data augmentation
+ aug = ImageDataGenerator(
+     rotation_range=20,
+     zoom_range=0.15,
+     width_shift_range=0.2,
+     height_shift_range=0.2,
+     shear_range=0.15,
+     horizontal_flip=True,
+     fill_mode="nearest",
+ )
+
+ # load the MobileNetV2 network, ensuring the head FC layer sets are
+ # left off
+ baseModel = MobileNetV2(
+     weights="imagenet",
+     include_top=False,
+     input_tensor=Input(shape=(224, 224, 3)),
+ )
+
+ # construct the head of the model that will be placed on top of
+ # the base model
+ headModel = baseModel.output
+ headModel = AveragePooling2D(pool_size=(7, 7))(headModel)
+ headModel = Flatten(name="flatten")(headModel)
+ headModel = Dense(128, activation="relu")(headModel)
+ headModel = Dropout(0.5)(headModel)
+ headModel = Dense(2, activation="softmax")(headModel)
+
+ # place the head FC model on top of the base model (this will become
+ # the actual model we will train)
+ model = Model(inputs=baseModel.input, outputs=headModel)
+
+ # loop over all layers in the base model and freeze them so they will
+ # *not* be updated during the first training process
+ for layer in baseModel.layers:
+     layer.trainable = False
+
+ # compile our model
+ print("[INFO] compiling model...")
+ opt = Adam(lr=INIT_LR, decay=INIT_LR / EPOCHS)
+ model.compile(
+     loss="binary_crossentropy",
+     optimizer=opt,
+     metrics=["accuracy"],
+ )
+
+ # train the head of the network
+ print("[INFO] training head...")
+ H = model.fit(
+     aug.flow(trainX, trainY, batch_size=BS),
+     steps_per_epoch=len(trainX) // BS,
+     validation_data=(testX, testY),
+     validation_steps=len(testX) // BS,
+     epochs=EPOCHS,
+ )
+
+ # make predictions on the testing set
+ print("[INFO] evaluating network...")
+ predIdxs = model.predict(testX, batch_size=BS)
+
+ # for each image in the testing set we need to find the index of the
+ # label with corresponding largest predicted probability
+ predIdxs = np.argmax(predIdxs, axis=1)
+
+ # show a nicely formatted classification report
+ print(
+     classification_report(
+         testY.argmax(axis=1),
+         predIdxs,
+         target_names=lb.classes_,
+     ),
+ )
+
+ # serialize the model to disk
+ print("[INFO] saving mask detector model...")
+ model.save(args["model"], save_format="h5")
+
+ # plot the training loss and accuracy
+ N = EPOCHS
+ plt.style.use("ggplot")
+ plt.figure()
+ plt.plot(np.arange(0, N), H.history["loss"], label="train_loss")
+ plt.plot(np.arange(0, N), H.history["val_loss"], label="val_loss")
+ plt.plot(np.arange(0, N), H.history["accuracy"], label="train_acc")
+ plt.plot(np.arange(0, N), H.history["val_accuracy"], label="val_acc")
+ plt.title("Training Loss and Accuracy")
+ plt.xlabel("Epoch #")
+ plt.ylabel("Loss/Accuracy")
+ plt.legend(loc="lower left")
+ plt.savefig(args["plot"])
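
The script freezes the entire MobileNetV2 base and trains only the new head, which is phase one of standard transfer learning. A sketch of an optional second phase, not part of the commit: unfreeze the top of the base network and continue training at a lower learning rate.

# unfreeze the last few base layers (the cut point of 20 is a free choice)
for layer in baseModel.layers[-20:]:
    layer.trainable = True

# recompile so the trainability change takes effect, with a smaller LR
model.compile(
    loss="binary_crossentropy",
    optimizer=Adam(lr=INIT_LR / 10),
    metrics=["accuracy"],
)
model.fit(
    aug.flow(trainX, trainY, batch_size=BS),
    validation_data=(testX, testY),
    epochs=5,
)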