introvoyz041 committed
Commit 12d2e9e · verified · 1 Parent(s): bb620ed

Migrated from GitHub

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. .gitattributes +7 -0
  2. data/.coveragerc +14 -0
  3. data/.dockerignore +41 -0
  4. data/.flake8 +4 -0
  5. data/.pre-commit-config.yaml +26 -0
  6. data/.readthedocs.yaml +21 -0
  7. data/CITATION.cff +64 -0
  8. data/CONTRIBUTING.rst +197 -0
  9. data/Dockerfile +47 -0
  10. data/LICENSE +339 -0
  11. data/ai-digital-pathology-assistant-v3/src/DESC.txt +1 -0
  12. data/ai-digital-pathology-assistant-v3/src/DPTv3.png +3 -0
  13. data/ai-digital-pathology-assistant-v3/src/INSTRUCTIONS.txt +60 -0
  14. data/ai-digital-pathology-assistant-v3/src/KB-settings.png +3 -0
  15. data/ai-digital-pathology-assistant-v3/src/README.pdf +3 -0
  16. data/ai-digital-pathology-assistant-v3/src/STARTERS.txt +11 -0
  17. data/ai-digital-pathology-assistant-v3/src/Screen Recording 2024-04-06 at 10.47.21 AM.gif +3 -0
  18. data/ai-digital-pathology-assistant-v3/src/Screen Recording 2024-04-06 at 10.53.46 AM.gif +3 -0
  19. data/ai-digital-pathology-assistant-v3/src/api_reference_merged.pdf +0 -0
  20. data/ai-digital-pathology-assistant-v3/src/examples_merged.pdf +3 -0
  21. data/ai-digital-pathology-assistant-v3/src/main_merged.pdf +0 -0
  22. data/docker/entrypoint.sh +17 -0
  23. data/docs/Makefile +20 -0
  24. data/docs/make.bat +35 -0
  25. data/docs/readthedocs-requirements.txt +7 -0
  26. data/docs/source/_static/css/pathml.css +21 -0
  27. data/docs/source/_static/images/dfci_cornell_joint_logos.png +3 -0
  28. data/docs/source/_static/images/logo.png +3 -0
  29. data/docs/source/_static/images/overview.png +3 -0
  30. data/docs/source/_static/images/pipeline_composition_schematic.png +3 -0
  31. data/docs/source/_static/images/running_preprocessing_schematic.png +3 -0
  32. data/docs/source/_static/images/schematic_design.jpg +3 -0
  33. data/docs/source/_static/images/transform_schematic.png +3 -0
  34. data/docs/source/_static/images/transforms.png +3 -0
  35. data/docs/source/_templates/breadcrumbs.html +7 -0
  36. data/docs/source/api_core_reference.rst +79 -0
  37. data/docs/source/api_datasets_reference.rst +14 -0
  38. data/docs/source/api_graph_reference.rst +24 -0
  39. data/docs/source/api_inference_reference.rst +36 -0
  40. data/docs/source/api_ml_reference.rst +30 -0
  41. data/docs/source/api_preprocessing_reference.rst +42 -0
  42. data/docs/source/api_utils_reference.rst +69 -0
  43. data/docs/source/conf.py +99 -0
  44. data/docs/source/creating_pipelines.rst +103 -0
  45. data/docs/source/dataloaders.rst +24 -0
  46. data/docs/source/datasets.rst +50 -0
  47. data/docs/source/examples/link_codex.nblink +3 -0
  48. data/docs/source/examples/link_construct_graphs.nblink +3 -0
  49. data/docs/source/examples/link_gallery.nblink +3 -0
  50. data/docs/source/examples/link_jco.nblink +3 -0
.gitattributes CHANGED
@@ -57,3 +57,10 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
57
  # Video files - compressed
58
  *.mp4 filter=lfs diff=lfs merge=lfs -text
59
  *.webm filter=lfs diff=lfs merge=lfs -text
60
+ data/ai-digital-pathology-assistant-v3/src/README.pdf filter=lfs diff=lfs merge=lfs -text
61
+ data/ai-digital-pathology-assistant-v3/src/examples_merged.pdf filter=lfs diff=lfs merge=lfs -text
62
+ data/examples/codex.ipynb filter=lfs diff=lfs merge=lfs -text
63
+ data/tests/testdata/small_HE.svs filter=lfs diff=lfs merge=lfs -text
64
+ data/tests/testdata/small_dicom.dcm filter=lfs diff=lfs merge=lfs -text
65
+ data/tests/testdata/small_vectra.qptiff filter=lfs diff=lfs merge=lfs -text
66
+ data/tests/testdata/tilestitching_testdata/MISI3542i_W21-04143_bi016966_M394_OVX_LM_Scan1_\[14384,29683\]_component_data.tif filter=lfs diff=lfs merge=lfs -text
data/.coveragerc ADDED
@@ -0,0 +1,14 @@
1
+ # reference here: https://coverage.readthedocs.io/en/coverage-5.1/config.html
2
+
3
+ [run]
4
+ source = pathml
5
+ command_line = -m pytest
6
+
7
+ [html]
8
+ directory = coverage_report_html
9
+
10
+ [report]
11
+ exclude_lines =
12
+ pragma: no cover
13
+ if self.debug:
14
+ raise NotImplementedError
data/.dockerignore ADDED
@@ -0,0 +1,41 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+
4
+ # Unit test / coverage reports
5
+ .pytest_cache/
6
+ coverage_report_html/
7
+ .coverage
8
+
9
+ # Sphinx documentation
10
+ docs/build/
11
+
12
+ # data
13
+ data/
14
+
15
+ # files created by matplotlib testing suite
16
+ result_images/
17
+
18
+ # distribution / packaging
19
+ *.egg-info
20
+ *.egg
21
+ build/
22
+ dist/
23
+
24
+ # Jupyter Notebook
25
+ .ipynb_checkpoints
26
+
27
+ # scratch notebook
28
+ scratch.ipynb
29
+
30
+ # Mac files
31
+ .DS_Store
32
+
33
+ # Pycharm
34
+ .idea/
35
+
36
+ # dask
37
+ dask-worker-space/
38
+
39
+ # git
40
+ .git
41
+ .gitignore
data/.flake8 ADDED
@@ -0,0 +1,4 @@
1
+ [flake8]
2
+ max-line-length = 88
3
+ extend-ignore = E203 E501
4
+ per-file-ignores = __init__.py:F401
data/.pre-commit-config.yaml ADDED
@@ -0,0 +1,26 @@
1
+ # See https://pre-commit.com for more information
2
+ # See https://pre-commit.com/hooks.html for more hooks
3
+ files: ^pathml/|^tests/
4
+ repos:
5
+ - repo: https://github.com/pre-commit/pre-commit-hooks
6
+ rev: v4.0.1
7
+ hooks:
8
+ - id: trailing-whitespace
9
+ - id: end-of-file-fixer
10
+ - id: check-yaml
11
+ - id: check-added-large-files
12
+
13
+ - repo: https://github.com/psf/black
14
+ rev: 24.1.1
15
+ hooks:
16
+ - id: black
17
+
18
+ - repo: https://github.com/timothycrosley/isort
19
+ rev: 5.11.5
20
+ hooks:
21
+ - id: isort
22
+
23
+ - repo: https://github.com/pycqa/flake8
24
+ rev: 7.0.0
25
+ hooks:
26
+ - id: flake8
data/.readthedocs.yaml ADDED
@@ -0,0 +1,21 @@
1
+ # .readthedocs.yaml
2
+ # Read the Docs configuration file
3
+ # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4
+
5
+ # Required
6
+ version: 2
7
+
8
+ build:
9
+ os: ubuntu-22.04
10
+ tools:
11
+ python: "3.8"
12
+
13
+ # Build documentation with Sphinx
14
+ sphinx:
15
+ configuration: docs/source/conf.py
16
+ builder: html
17
+ fail_on_warning: false
18
+
19
+ python:
20
+ install:
21
+ - requirements: docs/readthedocs-requirements.txt
data/CITATION.cff ADDED
@@ -0,0 +1,64 @@
1
+ cff-version: 1.2.0
2
+ message: "Citation for this repository"
3
+ authors:
4
+ - family-names: Rosenthal
5
+ given-names: Jacob
6
+ - family-names: Carelli
7
+ given-names: Ryan
8
+ - family-names: Mohammed
9
+ given-names: Omar
10
+ - family-names: Brundage
11
+ given-names: David
12
+ - family-names: Halbert
13
+ given-names: Ella
14
+ - family-names: Nyman
15
+ given-names: Jackson
16
+ - family-names: Hari
17
+ given-names: Surya
18
+ - family-names: Van Allen
19
+ given-names: Eliezer
20
+ - family-names: Marchionni
21
+ given-names: Luigi
22
+ - family-names: Umeton
23
+ given-names: Renato
24
+ - family-names: Loda
25
+ given-names: Massimo
26
+ title: "Building Tools for Machine Learning and Artificial Intelligence in Cancer Research: Best Practices and a Case Study with the PathML Toolkit for Computational Pathology"
27
+ date-released: 2022-02-03
28
+ doi: 10.1158/1541-7786.MCR-21-0665
29
+ url: https://github.com/Dana-Farber-AIOS/pathml
30
+ preferred-citation:
31
+ type: article
32
+ authors:
33
+ - family-names: Rosenthal
34
+ given-names: Jacob
35
+ - family-names: Carelli
36
+ given-names: Ryan
37
+ - family-names: Mohammed
38
+ given-names: Omar
39
+ - family-names: Brundage
40
+ given-names: David
41
+ - family-names: Halbert
42
+ given-names: Ella
43
+ - family-names: Nyman
44
+ given-names: Jackson
45
+ - family-names: Hari
46
+ given-names: Surya
47
+ - family-names: Van Allen
48
+ given-names: Eliezer
49
+ - family-names: Marchionni
50
+ given-names: Luigi
51
+ - family-names: Umeton
52
+ given-names: Renato
53
+ - family-names: Loda
54
+ given-names: Massimo
55
+ doi: 10.1158/1541-7786.MCR-21-0665
56
+ journal: "Molecular Cancer Research"
57
+ publisher: American Association for Cancer Research
58
+ month: 2
59
+ year: 2022
60
+ issue: 2
61
+ volume: 20
62
+ start: 202
63
+ title: "Building Tools for Machine Learning and Artificial Intelligence in Cancer Research: Best Practices and a Case Study with the PathML Toolkit for Computational Pathology"
64
+ url: https://doi.org/10.1158/1541-7786.MCR-21-0665
data/CONTRIBUTING.rst ADDED
@@ -0,0 +1,197 @@
1
+ Contributing
2
+ ************
3
+
4
+ ``PathML`` is an open source project. Consider contributing to benefit the entire community!
5
+
6
+ There are many ways to contribute to PathML, including:
7
+
8
+ * Submitting bug reports
9
+ * Submitting feature requests
10
+ * Writing documentation
11
+ * Fixing bugs
12
+ * Writing code for new features
13
+ * Sharing trained model parameters [coming soon]
14
+ * Sharing ``PathML`` with colleagues, students, etc.
15
+
16
+
17
+ Submitting a bug report
18
+ =======================
19
+ Report bugs or errors by filing an issue on GitHub. Make sure to include the following information:
20
+
21
+ * Short description of the bug
22
+ * Minimum working example to reproduce the bug
23
+ * Expected result vs. actual result
24
+ * Any other useful information
25
+
26
+ If a bug cannot be reproduced by someone else on a different machine, it will usually be hard to identify
27
+ what is causing it.
28
+
29
+ Requesting a new feature
30
+ =========================
31
+ Request a new feature by filing an issue on GitHub. Make sure to include the following information:
32
+
33
+ * Description of the feature
34
+ * Pseudocode of how the feature might work (if applicable)
35
+ * Any other useful information
36
+
37
+ For developers
38
+ ==============
39
+
40
+ Coordinate system conventions
41
+ -----------------------------
42
+
43
+ With multiple tools for interacting with matrices/images, conflicting coordinate systems have been a common source of
44
+ bugs. This is typically caused by mixing up (X, Y) coordinate systems and (i, j) coordinate systems. **To avoid these
45
+ issues, we have adopted the (i, j) coordinate convention throughout PathML.** This follows the convention used by
46
+ NumPy and many others, where ``A[i, j]`` refers to the element of matrix A in the ith row, jth column.
47
+ Developers should be careful about coordinate systems and make the necessary adjustments when using third-party tools
48
+ so that users of PathML can rely on a consistent coordinate system when using our tools.
49
+
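As a minimal illustration of the (i, j) convention, assuming only NumPy:

    import numpy as np

    A = np.zeros((100, 200))   # 100 rows (i), 200 columns (j)
    value = A[10, 50]          # element in row 10, column 50
    # Tools that use (X, Y) order treat X as the column and Y as the row,
    # so a point (x, y) from such a tool maps to A[y, x] under the (i, j) convention.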
50
+ Setting up a local development environment
51
+ -------------------------------------------
52
+
53
+ 1. Create a new fork of the ``PathML`` repository
54
+ 2. Clone your fork to your local machine
55
+ 3. Set up the PathML environment: ``conda env create -f environment.yml; conda activate pathml``
56
+ 4. Install PathML: ``pip install -e .``
57
+ 5. Install pre-commit hooks: ``pre-commit install``
58
+
59
+ Running tests
60
+ -------------
61
+
62
+ To run the full testing suite (not recommended):
63
+
64
+ .. code-block::
65
+
66
+ python -m pytest
67
+
68
+ Some tests are known to be very slow. Tests for the tile stitching functionality must be run separately. To skip them, run:
69
+
70
+ .. code-block::
71
+
72
+ python -m pytest -m "not slow and not exclude"
73
+
74
+ Then, run the tilestitching test:
75
+
76
+ .. code-block::
77
+
78
+ python -m pytest tests/preprocessing_tests/test_tilestitcher.py
79
+
80
+ Building documentation locally
81
+ ------------------------------
82
+
83
+ .. code-block::
84
+
85
+ cd docs # enter docs directory
86
+ pip install -r readthedocs-requirements.txt # install packages to build docs
87
+ make html # build docs in html format
88
+
89
+ Then use your favorite web browser to open ``pathml/docs/build/html/index.html``
90
+
91
+ Checking code coverage
92
+ ----------------------
93
+
94
+ .. code-block::
95
+
96
+ conda install coverage # install coverage package for code coverage
97
+ COVERAGE_FILE=.coverage_others coverage run -m pytest -m "not slow and not exclude" # run coverage for all files except tile stitching
98
+ COVERAGE_FILE=.coverage_tilestitcher coverage run -m pytest tests/preprocessing_tests/test_tilestitcher.py # run coverage for tile stitching
99
+ coverage combine .coverage_tilestitcher .coverage_others # combine coverage results
100
+ coverage report # view coverage report
101
+ coverage html # optionally generate HTML coverage report
102
+
103
+ How to contribute code, documentation, etc.
104
+ -------------------------------------------
105
+
106
+ 1. Create a new GitHub issue for what you will be working on, if one does not already exist
107
+ 2. Create a local development environment (see above)
108
+ 3. Create a new branch from the dev branch and implement your changes
109
+ 4. Write new tests as needed to maintain code coverage
110
+ 5. Ensure that all tests pass
111
+ 6. Push your changes and open a pull request on GitHub referencing the corresponding issue
112
+ 7. Respond to discussion/feedback about the pull request, make changes as necessary
113
+
114
+ Versioning and Distributing
115
+ ---------------------------
116
+
117
+ We use `semantic versioning`_. The version is tracked in ``pathml/_version.py`` and should be updated there as required.
118
+ When new code is merged to the master branch on GitHub, the version should be incremented and a new release should be
119
+ pushed. Releases can be created using the GitHub website interface, and should be tagged in version format
120
+ (e.g., "v1.0.0" for version 1.0.0) and include release notes indicating what has changed.
121
+ Once a new release is created, GitHub Actions workflows will automatically build and publish the updated package on
122
+ PyPI and TestPyPI, as well as build and publish the Docker image to Docker Hub.
123
+
124
+ Code Quality
125
+ ------------
126
+
127
+ We want PathML to be built on high-quality code. However, the idea of "code quality" is somewhat subjective.
128
+ If the code works perfectly but cannot be read and understood by someone else, then it can't be maintained,
129
+ and this accumulated tech debt is something we want to avoid.
130
+ Writing code that "works", i.e. does what you want it to do, is therefore necessary but not sufficient.
131
+ Good code also demands efficiency, consistency, good design, clarity, and many other factors.
132
+
133
+ Here are some general tips and ideas:
134
+
135
+ - Strive to make code concise, but not at the expense of clarity.
136
+ - Seek efficient and general designs, but avoid premature optimization.
137
+ - Prefer informative variable names.
138
+ - Encapsulate code in functions or objects.
139
+ - Comment, comment, comment your code.
140
+
141
+ All code should be reviewed by someone else before merging.
142
+
143
+ We use `Black`_ to enforce consistency of code style.
144
+
145
+ Documentation Standards
146
+ -----------------------
147
+
148
+ All code should be documented, including docstrings for users AND inline comments for
149
+ other developers whenever possible! Both are crucial for ensuring long-term usability and maintainability.
150
+ Documentation is automatically generated using the Sphinx `autodoc`_ and `napoleon`_ extensions from
151
+ properly formatted Google-style docstrings.
152
+ All documentation (including docstrings) is written in `reStructuredText`_ format.
153
+ See this `docstring example`_ to get started.
154
+
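As a minimal sketch of the expected format, here is a Google-style docstring for a hypothetical helper (the function itself is illustrative, not part of PathML):

    def tile_area(height, width):
        """Compute the area of a rectangular tile.

        Args:
            height (int): Tile height in pixels.
            width (int): Tile width in pixels.

        Returns:
            int: Tile area in pixels.
        """
        return height * width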
155
+ Testing Standards
156
+ -----------------
157
+
158
+ All code should be accompanied by tests, whenever possible, to ensure that everything is working as intended.
159
+
160
+ The type of testing required may vary depending on the type of contribution:
161
+
162
+ - New features should use tests to ensure that the code is working as intended, e.g. comparing output of
163
+ a function with the expected output.
164
+ - Bug fixes should first add a failing test, then make it pass by fixing the bug
165
+
166
+ No pull request can be merged unless all tests pass.
167
+ We aim to maintain good code coverage for the testing suite (target >90%).
168
+ We use the `pytest`_ testing framework.
169
+ To run the test suite and check code coverage:
170
+
171
+ .. code-block::
172
+
173
+ conda install coverage # install coverage package for code coverage
174
+ COVERAGE_FILE=.coverage_others coverage run -m pytest -m "not slow and not exclude" # run coverage for all files except tile stitching
175
+ COVERAGE_FILE=.coverage_tilestitcher coverage run -m pytest tests/preprocessing_tests/test_tilestitcher.py # run coverage for tile stitching
176
+ coverage combine .coverage_tilestitcher .coverage_others # combine coverage results
177
+ coverage report # view coverage report
178
+ coverage html # optionally generate HTML coverage report
179
+
180
+ We suggest using test-driven development when applicable. I.e., if you're fixing a bug or adding new features,
181
+ write the tests first! (they should all fail). Then, write the actual code. When all tests pass, you know
182
+ that your implementation is working. This helps ensure that all code is tested and that the tests are testing
183
+ what we want them to.
184
+
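A minimal sketch of this test-first workflow with pytest, using a hypothetical helper:

    # hypothetical bug: labels keep stray whitespace; write the failing test first, then fix the code
    def normalize_label(label):
        return label.strip().lower()

    def test_normalize_label_strips_whitespace():
        assert normalize_label("  Tumor ") == "tumor"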
185
+ Thank You!
186
+ ==========
187
+
188
+ Thank you for helping make ``PathML`` better!
189
+
190
+
191
+ .. _pytest: https://docs.pytest.org/en/stable/
192
+ .. _autodoc: https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html
193
+ .. _reStructuredText: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
194
+ .. _docstring example: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html
195
+ .. _napoleon: https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html
196
+ .. _Black: https://black.readthedocs.io/en/stable
197
+ .. _semantic versioning: https://semver.org/
data/Dockerfile ADDED
@@ -0,0 +1,47 @@
1
+ FROM continuumio/miniconda3
2
+
3
+ # LABEL about the custom image
4
+ LABEL maintainer="[email protected]"
5
+ LABEL description="This is a custom Docker image for running PathML"
6
+
7
+ # install packages on root
8
+ USER root
9
+
10
+ # install external system dependencies (openslide, compilers, BLAS/LAPACK)
11
+ RUN apt-get update && apt-get install -y --no-install-recommends openslide-tools \
12
+ g++ \
13
+ gcc \
14
+ libpixman-1-0 \
15
+ libblas-dev \
16
+ liblapack-dev
17
+
18
+ # download and install opencv dependencies
19
+ RUN apt-get update && apt-get install ffmpeg libsm6 libxext6 -y
20
+
21
+ # copy pathml files into docker
22
+ COPY setup.py README.md /opt/pathml/
23
+ COPY requirements/ /opt/pathml/requirements/
24
+ COPY examples/ /opt/pathml/examples/
25
+ COPY tests/ /opt/pathml/tests
26
+ COPY pathml/ /opt/pathml/pathml
27
+ COPY docker/entrypoint.sh /opt/pathml/
28
+
29
+ # make a new conda environment
30
+ RUN conda env create -f /opt/pathml/requirements/environment_docker.yml
31
+
32
+ # set working directory
33
+ WORKDIR /opt/pathml
34
+
35
+ # make RUN commands use the new environment
36
+ RUN echo "conda activate pathml" >> ~/.bashrc
37
+ SHELL ["/bin/bash", "--login", "-c"]
38
+
39
+ # install pathml, pytest, and JupyterLab
40
+ RUN pip3 install /opt/pathml/ pytest
41
+ RUN pip3 install jupyter -U && pip3 install jupyterlab
42
+
43
+ # expose port 8888 on the container
44
+ EXPOSE 8888
45
+
46
+ # run entrypoint script
47
+ ENTRYPOINT ["./entrypoint.sh"]
data/LICENSE ADDED
@@ -0,0 +1,339 @@
1
+ GNU GENERAL PUBLIC LICENSE
2
+ Version 2, June 1991
3
+
4
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
5
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
6
+ Everyone is permitted to copy and distribute verbatim copies
7
+ of this license document, but changing it is not allowed.
8
+
9
+ Preamble
10
+
11
+ The licenses for most software are designed to take away your
12
+ freedom to share and change it. By contrast, the GNU General Public
13
+ License is intended to guarantee your freedom to share and change free
14
+ software--to make sure the software is free for all its users. This
15
+ General Public License applies to most of the Free Software
16
+ Foundation's software and to any other program whose authors commit to
17
+ using it. (Some other Free Software Foundation software is covered by
18
+ the GNU Lesser General Public License instead.) You can apply it to
19
+ your programs, too.
20
+
21
+ When we speak of free software, we are referring to freedom, not
22
+ price. Our General Public Licenses are designed to make sure that you
23
+ have the freedom to distribute copies of free software (and charge for
24
+ this service if you wish), that you receive source code or can get it
25
+ if you want it, that you can change the software or use pieces of it
26
+ in new free programs; and that you know you can do these things.
27
+
28
+ To protect your rights, we need to make restrictions that forbid
29
+ anyone to deny you these rights or to ask you to surrender the rights.
30
+ These restrictions translate to certain responsibilities for you if you
31
+ distribute copies of the software, or if you modify it.
32
+
33
+ For example, if you distribute copies of such a program, whether
34
+ gratis or for a fee, you must give the recipients all the rights that
35
+ you have. You must make sure that they, too, receive or can get the
36
+ source code. And you must show them these terms so they know their
37
+ rights.
38
+
39
+ We protect your rights with two steps: (1) copyright the software, and
40
+ (2) offer you this license which gives you legal permission to copy,
41
+ distribute and/or modify the software.
42
+
43
+ Also, for each author's protection and ours, we want to make certain
44
+ that everyone understands that there is no warranty for this free
45
+ software. If the software is modified by someone else and passed on, we
46
+ want its recipients to know that what they have is not the original, so
47
+ that any problems introduced by others will not reflect on the original
48
+ authors' reputations.
49
+
50
+ Finally, any free program is threatened constantly by software
51
+ patents. We wish to avoid the danger that redistributors of a free
52
+ program will individually obtain patent licenses, in effect making the
53
+ program proprietary. To prevent this, we have made it clear that any
54
+ patent must be licensed for everyone's free use or not licensed at all.
55
+
56
+ The precise terms and conditions for copying, distribution and
57
+ modification follow.
58
+
59
+ GNU GENERAL PUBLIC LICENSE
60
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
61
+
62
+ 0. This License applies to any program or other work which contains
63
+ a notice placed by the copyright holder saying it may be distributed
64
+ under the terms of this General Public License. The "Program", below,
65
+ refers to any such program or work, and a "work based on the Program"
66
+ means either the Program or any derivative work under copyright law:
67
+ that is to say, a work containing the Program or a portion of it,
68
+ either verbatim or with modifications and/or translated into another
69
+ language. (Hereinafter, translation is included without limitation in
70
+ the term "modification".) Each licensee is addressed as "you".
71
+
72
+ Activities other than copying, distribution and modification are not
73
+ covered by this License; they are outside its scope. The act of
74
+ running the Program is not restricted, and the output from the Program
75
+ is covered only if its contents constitute a work based on the
76
+ Program (independent of having been made by running the Program).
77
+ Whether that is true depends on what the Program does.
78
+
79
+ 1. You may copy and distribute verbatim copies of the Program's
80
+ source code as you receive it, in any medium, provided that you
81
+ conspicuously and appropriately publish on each copy an appropriate
82
+ copyright notice and disclaimer of warranty; keep intact all the
83
+ notices that refer to this License and to the absence of any warranty;
84
+ and give any other recipients of the Program a copy of this License
85
+ along with the Program.
86
+
87
+ You may charge a fee for the physical act of transferring a copy, and
88
+ you may at your option offer warranty protection in exchange for a fee.
89
+
90
+ 2. You may modify your copy or copies of the Program or any portion
91
+ of it, thus forming a work based on the Program, and copy and
92
+ distribute such modifications or work under the terms of Section 1
93
+ above, provided that you also meet all of these conditions:
94
+
95
+ a) You must cause the modified files to carry prominent notices
96
+ stating that you changed the files and the date of any change.
97
+
98
+ b) You must cause any work that you distribute or publish, that in
99
+ whole or in part contains or is derived from the Program or any
100
+ part thereof, to be licensed as a whole at no charge to all third
101
+ parties under the terms of this License.
102
+
103
+ c) If the modified program normally reads commands interactively
104
+ when run, you must cause it, when started running for such
105
+ interactive use in the most ordinary way, to print or display an
106
+ announcement including an appropriate copyright notice and a
107
+ notice that there is no warranty (or else, saying that you provide
108
+ a warranty) and that users may redistribute the program under
109
+ these conditions, and telling the user how to view a copy of this
110
+ License. (Exception: if the Program itself is interactive but
111
+ does not normally print such an announcement, your work based on
112
+ the Program is not required to print an announcement.)
113
+
114
+ These requirements apply to the modified work as a whole. If
115
+ identifiable sections of that work are not derived from the Program,
116
+ and can be reasonably considered independent and separate works in
117
+ themselves, then this License, and its terms, do not apply to those
118
+ sections when you distribute them as separate works. But when you
119
+ distribute the same sections as part of a whole which is a work based
120
+ on the Program, the distribution of the whole must be on the terms of
121
+ this License, whose permissions for other licensees extend to the
122
+ entire whole, and thus to each and every part regardless of who wrote it.
123
+
124
+ Thus, it is not the intent of this section to claim rights or contest
125
+ your rights to work written entirely by you; rather, the intent is to
126
+ exercise the right to control the distribution of derivative or
127
+ collective works based on the Program.
128
+
129
+ In addition, mere aggregation of another work not based on the Program
130
+ with the Program (or with a work based on the Program) on a volume of
131
+ a storage or distribution medium does not bring the other work under
132
+ the scope of this License.
133
+
134
+ 3. You may copy and distribute the Program (or a work based on it,
135
+ under Section 2) in object code or executable form under the terms of
136
+ Sections 1 and 2 above provided that you also do one of the following:
137
+
138
+ a) Accompany it with the complete corresponding machine-readable
139
+ source code, which must be distributed under the terms of Sections
140
+ 1 and 2 above on a medium customarily used for software interchange; or,
141
+
142
+ b) Accompany it with a written offer, valid for at least three
143
+ years, to give any third party, for a charge no more than your
144
+ cost of physically performing source distribution, a complete
145
+ machine-readable copy of the corresponding source code, to be
146
+ distributed under the terms of Sections 1 and 2 above on a medium
147
+ customarily used for software interchange; or,
148
+
149
+ c) Accompany it with the information you received as to the offer
150
+ to distribute corresponding source code. (This alternative is
151
+ allowed only for noncommercial distribution and only if you
152
+ received the program in object code or executable form with such
153
+ an offer, in accord with Subsection b above.)
154
+
155
+ The source code for a work means the preferred form of the work for
156
+ making modifications to it. For an executable work, complete source
157
+ code means all the source code for all modules it contains, plus any
158
+ associated interface definition files, plus the scripts used to
159
+ control compilation and installation of the executable. However, as a
160
+ special exception, the source code distributed need not include
161
+ anything that is normally distributed (in either source or binary
162
+ form) with the major components (compiler, kernel, and so on) of the
163
+ operating system on which the executable runs, unless that component
164
+ itself accompanies the executable.
165
+
166
+ If distribution of executable or object code is made by offering
167
+ access to copy from a designated place, then offering equivalent
168
+ access to copy the source code from the same place counts as
169
+ distribution of the source code, even though third parties are not
170
+ compelled to copy the source along with the object code.
171
+
172
+ 4. You may not copy, modify, sublicense, or distribute the Program
173
+ except as expressly provided under this License. Any attempt
174
+ otherwise to copy, modify, sublicense or distribute the Program is
175
+ void, and will automatically terminate your rights under this License.
176
+ However, parties who have received copies, or rights, from you under
177
+ this License will not have their licenses terminated so long as such
178
+ parties remain in full compliance.
179
+
180
+ 5. You are not required to accept this License, since you have not
181
+ signed it. However, nothing else grants you permission to modify or
182
+ distribute the Program or its derivative works. These actions are
183
+ prohibited by law if you do not accept this License. Therefore, by
184
+ modifying or distributing the Program (or any work based on the
185
+ Program), you indicate your acceptance of this License to do so, and
186
+ all its terms and conditions for copying, distributing or modifying
187
+ the Program or works based on it.
188
+
189
+ 6. Each time you redistribute the Program (or any work based on the
190
+ Program), the recipient automatically receives a license from the
191
+ original licensor to copy, distribute or modify the Program subject to
192
+ these terms and conditions. You may not impose any further
193
+ restrictions on the recipients' exercise of the rights granted herein.
194
+ You are not responsible for enforcing compliance by third parties to
195
+ this License.
196
+
197
+ 7. If, as a consequence of a court judgment or allegation of patent
198
+ infringement or for any other reason (not limited to patent issues),
199
+ conditions are imposed on you (whether by court order, agreement or
200
+ otherwise) that contradict the conditions of this License, they do not
201
+ excuse you from the conditions of this License. If you cannot
202
+ distribute so as to satisfy simultaneously your obligations under this
203
+ License and any other pertinent obligations, then as a consequence you
204
+ may not distribute the Program at all. For example, if a patent
205
+ license would not permit royalty-free redistribution of the Program by
206
+ all those who receive copies directly or indirectly through you, then
207
+ the only way you could satisfy both it and this License would be to
208
+ refrain entirely from distribution of the Program.
209
+
210
+ If any portion of this section is held invalid or unenforceable under
211
+ any particular circumstance, the balance of the section is intended to
212
+ apply and the section as a whole is intended to apply in other
213
+ circumstances.
214
+
215
+ It is not the purpose of this section to induce you to infringe any
216
+ patents or other property right claims or to contest validity of any
217
+ such claims; this section has the sole purpose of protecting the
218
+ integrity of the free software distribution system, which is
219
+ implemented by public license practices. Many people have made
220
+ generous contributions to the wide range of software distributed
221
+ through that system in reliance on consistent application of that
222
+ system; it is up to the author/donor to decide if he or she is willing
223
+ to distribute software through any other system and a licensee cannot
224
+ impose that choice.
225
+
226
+ This section is intended to make thoroughly clear what is believed to
227
+ be a consequence of the rest of this License.
228
+
229
+ 8. If the distribution and/or use of the Program is restricted in
230
+ certain countries either by patents or by copyrighted interfaces, the
231
+ original copyright holder who places the Program under this License
232
+ may add an explicit geographical distribution limitation excluding
233
+ those countries, so that distribution is permitted only in or among
234
+ countries not thus excluded. In such case, this License incorporates
235
+ the limitation as if written in the body of this License.
236
+
237
+ 9. The Free Software Foundation may publish revised and/or new versions
238
+ of the General Public License from time to time. Such new versions will
239
+ be similar in spirit to the present version, but may differ in detail to
240
+ address new problems or concerns.
241
+
242
+ Each version is given a distinguishing version number. If the Program
243
+ specifies a version number of this License which applies to it and "any
244
+ later version", you have the option of following the terms and conditions
245
+ either of that version or of any later version published by the Free
246
+ Software Foundation. If the Program does not specify a version number of
247
+ this License, you may choose any version ever published by the Free Software
248
+ Foundation.
249
+
250
+ 10. If you wish to incorporate parts of the Program into other free
251
+ programs whose distribution conditions are different, write to the author
252
+ to ask for permission. For software which is copyrighted by the Free
253
+ Software Foundation, write to the Free Software Foundation; we sometimes
254
+ make exceptions for this. Our decision will be guided by the two goals
255
+ of preserving the free status of all derivatives of our free software and
256
+ of promoting the sharing and reuse of software generally.
257
+
258
+ NO WARRANTY
259
+
260
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
261
+ FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
262
+ OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
263
+ PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
264
+ OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
265
+ MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
266
+ TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
267
+ PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
268
+ REPAIR OR CORRECTION.
269
+
270
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
271
+ WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
272
+ REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
273
+ INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
274
+ OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
275
+ TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
276
+ YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
277
+ PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
278
+ POSSIBILITY OF SUCH DAMAGES.
279
+
280
+ END OF TERMS AND CONDITIONS
281
+
282
+ How to Apply These Terms to Your New Programs
283
+
284
+ If you develop a new program, and you want it to be of the greatest
285
+ possible use to the public, the best way to achieve this is to make it
286
+ free software which everyone can redistribute and change under these terms.
287
+
288
+ To do so, attach the following notices to the program. It is safest
289
+ to attach them to the start of each source file to most effectively
290
+ convey the exclusion of warranty; and each file should have at least
291
+ the "copyright" line and a pointer to where the full notice is found.
292
+
293
+ <one line to give the program's name and a brief idea of what it does.>
294
+ Copyright (C) <year> <name of author>
295
+
296
+ This program is free software; you can redistribute it and/or modify
297
+ it under the terms of the GNU General Public License as published by
298
+ the Free Software Foundation; either version 2 of the License, or
299
+ (at your option) any later version.
300
+
301
+ This program is distributed in the hope that it will be useful,
302
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
303
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
304
+ GNU General Public License for more details.
305
+
306
+ You should have received a copy of the GNU General Public License along
307
+ with this program; if not, write to the Free Software Foundation, Inc.,
308
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
309
+
310
+ Also add information on how to contact you by electronic and paper mail.
311
+
312
+ If the program is interactive, make it output a short notice like this
313
+ when it starts in an interactive mode:
314
+
315
+ Gnomovision version 69, Copyright (C) year name of author
316
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
317
+ This is free software, and you are welcome to redistribute it
318
+ under certain conditions; type `show c' for details.
319
+
320
+ The hypothetical commands `show w' and `show c' should show the appropriate
321
+ parts of the General Public License. Of course, the commands you use may
322
+ be called something other than `show w' and `show c'; they could even be
323
+ mouse-clicks or menu items--whatever suits your program.
324
+
325
+ You should also get your employer (if you work as a programmer) or your
326
+ school, if any, to sign a "copyright disclaimer" for the program, if
327
+ necessary. Here is a sample; alter the names:
328
+
329
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
330
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
331
+
332
+ <signature of Ty Coon>, 1 April 1989
333
+ Ty Coon, President of Vice
334
+
335
+ This General Public License does not permit incorporating your program into
336
+ proprietary programs. If your program is a subroutine library, you may
337
+ consider it more useful to permit linking proprietary applications with the
338
+ library. If this is what you want to do, use the GNU Lesser General
339
+ Public License instead of this License.
data/ai-digital-pathology-assistant-v3/src/DESC.txt ADDED
@@ -0,0 +1 @@
1
+ Specify your requirements in plain English and I'll provide PathML and Python code for your use case
data/ai-digital-pathology-assistant-v3/src/DPTv3.png ADDED

Git LFS Details

  • SHA256: fc589f9346baedab8c9e7d00d4cf516f69b0ac1ed83cf2e92596a9f066a25b74
  • Pointer size: 131 Bytes
  • Size of remote file: 491 kB
data/ai-digital-pathology-assistant-v3/src/INSTRUCTIONS.txt ADDED
@@ -0,0 +1,60 @@
1
+ You are Digital Pathology Assistant created by the folks at www.pathml.org
2
+
3
+ Use the PathML documentation to generate Python code that uses the pathml library for the use-cases presented by the user.
4
+
5
+ There are plenty of acronyms, such as 'mIF', which stands for 'multiparametric imaging'; 'multiplex immunofluorescence', 'multiparametric immunofluorescence', and 'multiplex IF' are all synonyms of 'mIF'. This type of image should be read in PathML using the MultiparametricSlide or CODEXSlide classes. 'Vectra Polaris' or 'polaris' is a type of 'mIF'. Also, 'HE' is a synonym of 'H&E', which stands for 'hematoxylin and eosin'. Also, 'transforms' is a synonym of 'transformations', and both refer to the Preprocessing API of PathML.
6
+
7
+ In terms of segmentation, the HoVerNet model should be used only for H&E images, and SegmentMIF (which is based on the Mesmer model) should be used only for mIF images. If you are not sure whether an image is multiparametric or not, you can ask the user.
8
+
9
+ All mIF analyses require an extra step before you can segment: VectraPolaris requires you to run CollapseRunsVectra before segmentation, and CODEXSlide and MultiparametricSlide require you to run CollapseRunsCODEX before any segmentation.
10
+
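A minimal sketch of this ordering for a CODEX image, assuming the Pipeline, CollapseRunsCODEX, and SegmentMIF classes from the PathML preprocessing API; the file path and parameter values (z, nuclear_channel, cytoplasm_channel) are illustrative assumptions, not taken from this file:

    from pathml.core import CODEXSlide
    from pathml.preprocessing import Pipeline, CollapseRunsCODEX, SegmentMIF

    slide = CODEXSlide("path/to/codex_image.tif")            # hypothetical path
    pipeline = Pipeline([
        CollapseRunsCODEX(z=0),                               # collapse runs before any segmentation
        SegmentMIF(nuclear_channel=0, cytoplasm_channel=1),   # Mesmer-based segmentation for mIF
    ])
    slide.run(pipeline)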
11
+ When you need to consult the PathML online documentation, use your browser tool. The PathML online documentation search structure typically includes a protocol ('https://'), followed by the domain name ('https://pathml.readthedocs.io/'). For searching specific topics, the URL contains a path ('/en/latest/search.html') which leads to the search functionality of the website. The search terms and filters are specified in the query string, which starts with a '?'. For example, in 'https://pathml.readthedocs.io/en/latest/search.html?q=load+the+image&check_keywords=yes&area=default', 'q=load+the+image' indicates the search term ('load the image'), and '&check_keywords=yes&area=default' specifies other parameters that have to be maintained unaltered for the search to work.
12
+
13
+ Each response must start with '🔬🤖 Digital Pathology Assistant v3 Initiated 🎓.'
14
+
15
+ Each response must be summarized in 2 or 3 words and you should conclude each interaction saying '🎓...more details can be found at https://pathml.readthedocs.io/en/latest/search.html?q=xxx', where 'xxx' should be replaced those 2 or 3 words you identified as being the summary of the interaction, where you replaced the space character ' ' with the plus sign '+'. So for instance if you understood that an interaction is about 'codex pipeline', you should end the interaction saying '🎓...more details can be found at https://pathml.readthedocs.io/en/latest/search.html?q=codex+pipeline'. Do not share this summary with the user though.
16
+
17
+ Format using markdown, but do not use emojis.
18
+
19
+ When unsure, you can consult the online documentation for PathML at https://pathml.readthedocs.io/en/latest/
20
+
21
+ More about installing PathML: After creating the PathML conda environment and before suggesting `pip install pathml` the user needs to first install openslide and openjdk. openslide installation instructions depends on the operating system (linux: `sudo apt-get install openslide-tools g++ gcc libblas-dev liblapack-dev`. Mac: `brew install openslide`. Windows: `vcpkg install openslide`). OpenJDK can be installed with the same command across all operating systems: `conda install openjdk==8.0.152`.
22
+
23
+ SlideData is the central class in PathML for representing a whole-slide image.
24
+ class pathml.core.SlideData(filepath, name=None, masks=None, tiles=None, labels=None, backend=None, slide_type=None, stain=None, platform=None, tma=None, rgb=None, volumetric=None, time_series=None, counts=None, dtype=None)
25
+ SlideData class parameters:
26
+ filepath (str) – Path to file on disk.
27
+ name (str, optional) – name of slide. If None, and a filepath is provided, name defaults to filepath.
28
+ masks (pathml.core.Masks, optional) – object containing {key, mask} pairs
29
+ tiles (pathml.core.Tiles, optional) – object containing {coordinates, tile} pairs
30
+ labels (collections.OrderedDict, optional) – dictionary containing {key, label} pairs
31
+ backend (str, optional) – backend to use for interfacing with slide on disk. Must be one of {“OpenSlide”, “BioFormats”, “DICOM”, “h5path”} (case-insensitive). Note that for supported image formats, OpenSlide performance can be significantly better than BioFormats. Consider specifying backend = "openslide" when possible. If None, and a filepath is provided, tries to infer the correct backend from the file extension. Defaults to None.
32
+ slide_type (pathml.core.SlideType, optional) – slide type specification. Must be a SlideType object. Alternatively, slide type can be specified by using the parameters stain, tma, rgb, volumetric, and time_series.
33
+ stain (str, optional) – Flag indicating type of slide stain. Must be one of [‘HE’, ‘IHC’, ‘Fluor’]. Defaults to None. Ignored if slide_type is specified.
34
+ platform (str, optional) – Flag indicating the imaging platform (e.g. CODEX, Vectra, etc.). Defaults to None. Ignored if slide_type is specified.
35
+ tma (bool, optional) – Flag indicating whether the image is a tissue microarray (TMA). Defaults to False. Ignored if slide_type is specified.
36
+ rgb (bool, optional) – Flag indicating whether the image is in RGB color. Defaults to None. Ignored if slide_type is specified.
37
+ volumetric (bool, optional) – Flag indicating whether the image is volumetric. Defaults to None. Ignored if slide_type is specified.
38
+ time_series (bool, optional) – Flag indicating whether the image is a time series. Defaults to None. Ignored if slide_type is specified.
39
+ counts (anndata.AnnData) – object containing counts matrix associated with image quantification
40
+
41
+ Convenience SlideData Classes:
42
+ class pathml.core.HESlide(*args, **kwargs)
43
+ Convenience class to load a SlideData object for H&E slides. Passes through all arguments to SlideData(), along with slide_type = types.HE flag. Refer to SlideData for full documentation.
44
+ class pathml.core.VectraSlide(*args, **kwargs)
45
+ Convenience class to load a SlideData object for Vectra (Polaris) slides. Passes through all arguments to SlideData(), along with slide_type = types.Vectra flag and default backend = "bioformats". Refer to SlideData for full documentation.
46
+ class pathml.core.MultiparametricSlide(*args, **kwargs)
47
+ Convenience class to load a SlideData object for multiparametric immunofluorescence slides. Passes through all arguments to SlideData(), along with slide_type = types.IF flag and default backend = "bioformats". Refer to SlideData for full documentation.
48
+ class pathml.core.CODEXSlide(*args, **kwargs)
49
+ Convenience class to load a SlideData object from Akoya Biosciences CODEX format. Passes through all arguments to SlideData(), along with slide_type = types.CODEX flag and default backend = "bioformats". Refer to SlideData for full documentation.
50
+
51
+ Slide Types:
52
+ class pathml.core.SlideType(stain=None, platform=None, tma=None, rgb=None, volumetric=None, time_series=None)
53
+ SlideType objects define types based on a set of image parameters.
54
+ Parameters:
55
+ stain (str, optional) – One of [‘HE’, ‘IHC’, ‘Fluor’]. Flag indicating type of slide stain. Defaults to None.
56
+ platform (str, optional) – Flag indicating the imaging platform (e.g. CODEX, Vectra, etc.).
57
+ tma (bool, optional) – Flag indicating whether the slide is a tissue microarray (TMA). Defaults to False.
58
+ rgb (bool, optional) – Flag indicating whether image is in RGB color. Defaults to False.
59
+ volumetric (bool, optional) – Flag indicating whether image is volumetric. Defaults to False.
60
+ time_series (bool, optional) – Flag indicating whether image is time-series. Defaults to False.
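A minimal usage sketch based on the signatures above (the file paths are hypothetical):

    from pathml.core import HESlide, CODEXSlide

    he_slide = HESlide("path/to/slide.svs", name="example_HE")   # backend inferred from the file extension
    codex_slide = CODEXSlide("path/to/codex_image.tif")          # defaults to the BioFormats backend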
data/ai-digital-pathology-assistant-v3/src/KB-settings.png ADDED

Git LFS Details

  • SHA256: 04520e2330ece16aff1a4c3711ef3c87b0128e56fd78e179174eae0005fad21f
  • Pointer size: 130 Bytes
  • Size of remote file: 53 kB
data/ai-digital-pathology-assistant-v3/src/README.pdf ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:77c28e73613d114ce4b7479e920e1a6d2b928cca79629c393c2b99b6401fdabe
3
+ size 1762018
data/ai-digital-pathology-assistant-v3/src/STARTERS.txt ADDED
@@ -0,0 +1,11 @@
1
+ How do I load a wsi image?
2
+
3
+ How do I segment all nuclei from a wsi image?
4
+
5
+ How do I run a PathML analysis on a cluster?
6
+
7
+ How do I analyze a codex image?
8
+
9
+ How do I install PathML?
10
+
11
+ What's the fastest way to get a PathML analysis environment up and running?
data/ai-digital-pathology-assistant-v3/src/Screen Recording 2024-04-06 at 10.47.21 AM.gif ADDED

Git LFS Details

  • SHA256: 9792c728916f2d890f233283523d37ae14eb1c0f53cad19c58b407fd33601014
  • Pointer size: 132 Bytes
  • Size of remote file: 5.79 MB
data/ai-digital-pathology-assistant-v3/src/Screen Recording 2024-04-06 at 10.53.46 AM.gif ADDED

Git LFS Details

  • SHA256: 04d96d59e4c2a51d94f14fc27667da53bb7e138be1f1eed37ea5cd3214cc7bbe
  • Pointer size: 132 Bytes
  • Size of remote file: 5.82 MB
data/ai-digital-pathology-assistant-v3/src/api_reference_merged.pdf ADDED
Binary file (863 kB)
 
data/ai-digital-pathology-assistant-v3/src/examples_merged.pdf ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:64e0341379a20d74c708a50fc172a1efc8a489c4827f0f90b1bb7e39056fc1be
3
+ size 1210817
data/ai-digital-pathology-assistant-v3/src/main_merged.pdf ADDED
Binary file (618 kB)
 
data/docker/entrypoint.sh ADDED
@@ -0,0 +1,17 @@
1
+ #!/bin/bash --login
2
+ # The --login ensures the bash configuration is loaded,
3
+ # enabling Conda.
4
+
5
+ # Enable strict mode.
6
+ set -euo pipefail
7
+
8
+ # Temporarily disable strict mode and activate conda:
9
+ set +euo pipefail
10
+ conda activate pathml
11
+
12
+ # Re-enable strict mode:
13
+ set -euo pipefail
14
+
15
+ # exec the final command:
16
+ exec jupyter lab --ip=0.0.0.0 --allow-root --no-browser
17
+
data/docs/Makefile ADDED
@@ -0,0 +1,20 @@
1
+ # Minimal makefile for Sphinx documentation
2
+ #
3
+
4
+ # You can set these variables from the command line, and also
5
+ # from the environment for the first two.
6
+ SPHINXOPTS ?=
7
+ SPHINXBUILD ?= sphinx-build
8
+ SOURCEDIR = source
9
+ BUILDDIR = build
10
+
11
+ # Put it first so that "make" without argument is like "make help".
12
+ help:
13
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14
+
15
+ .PHONY: help Makefile
16
+
17
+ # Catch-all target: route all unknown targets to Sphinx using the new
18
+ # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19
+ %: Makefile
20
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
data/docs/make.bat ADDED
@@ -0,0 +1,35 @@
1
+ @ECHO OFF
2
+
3
+ pushd %~dp0
4
+
5
+ REM Command file for Sphinx documentation
6
+
7
+ if "%SPHINXBUILD%" == "" (
8
+ set SPHINXBUILD=sphinx-build
9
+ )
10
+ set SOURCEDIR=source
11
+ set BUILDDIR=build
12
+
13
+ if "%1" == "" goto help
14
+
15
+ %SPHINXBUILD% >NUL 2>NUL
16
+ if errorlevel 9009 (
17
+ echo.
18
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19
+ echo.installed, then set the SPHINXBUILD environment variable to point
20
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
21
+ echo.may add the Sphinx directory to PATH.
22
+ echo.
23
+ echo.If you don't have Sphinx installed, grab it from
24
+ echo.http://sphinx-doc.org/
25
+ exit /b 1
26
+ )
27
+
28
+ %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29
+ goto end
30
+
31
+ :help
32
+ %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33
+
34
+ :end
35
+ popd
data/docs/readthedocs-requirements.txt ADDED
@@ -0,0 +1,7 @@
1
+ sphinx==7.1.2
2
+ nbsphinx==0.9.3
3
+ nbsphinx-link==1.3.0
4
+ sphinx-rtd-theme==1.3.0
5
+ sphinx-autoapi==3.0.0
6
+ ipython==8.10.0
7
+ sphinx-copybutton==0.5.2
data/docs/source/_static/css/pathml.css ADDED
@@ -0,0 +1,21 @@
1
+ /* This file is for custom CSS adjustments for PathML docs */
2
+
3
+
4
+ /* override table width restrictions, disable horizontal scrollbar in tables */
5
+ /* from: https://sphinx-argparse.readthedocs.io/en/stable/misc.html */
6
+
7
+ .wy-table-responsive table td {
8
+ /* !important prevents the common CSS stylesheets from overriding
9
+ this as on RTD they are loaded after this stylesheet */
10
+ white-space: normal !important;
11
+ }
12
+
13
+ .wy-table-responsive {
14
+ overflow: visible !important;
15
+ }
16
+
17
+ /* colors for in-line code. From: https://stackoverflow.com/a/65485656 */
18
+ code.literal {
19
+ color: #404040 !important;
20
+ background-color: #fbfbfb !important;
21
+ }
data/docs/source/_static/images/dfci_cornell_joint_logos.png ADDED

Git LFS Details

  • SHA256: 1ec289260ca937eaa146bcd3e4803b12a51447c518e103645aa94435206b84ab
  • Pointer size: 131 Bytes
  • Size of remote file: 108 kB
data/docs/source/_static/images/logo.png ADDED

Git LFS Details

  • SHA256: 05dd5ddf60a0ad1ab4bdfa3ffb931fd7cd0f242bc93c8903bf9d5d18880d3f06
  • Pointer size: 131 Bytes
  • Size of remote file: 149 kB
data/docs/source/_static/images/overview.png ADDED

Git LFS Details

  • SHA256: e058dfda59c7b05a08c99e25a1d133a290b5169cee4ec7344e1b17309a5dc9da
  • Pointer size: 131 Bytes
  • Size of remote file: 141 kB
data/docs/source/_static/images/pipeline_composition_schematic.png ADDED

Git LFS Details

  • SHA256: f4416fbac0e2924038fda1d0703905bcbca9b186761c8b6157fc6e25e0192dde
  • Pointer size: 131 Bytes
  • Size of remote file: 136 kB
data/docs/source/_static/images/running_preprocessing_schematic.png ADDED

Git LFS Details

  • SHA256: d0fb0992013e6d7b02577a19bd1943a208a06a6ba30956e19127d221c5f51909
  • Pointer size: 131 Bytes
  • Size of remote file: 259 kB
data/docs/source/_static/images/schematic_design.jpg ADDED

Git LFS Details

  • SHA256: 3ccfbc1d20d7e5aebcd0bf9ec9f982a52c924015ec7153f1ef4c4f31ba0741de
  • Pointer size: 131 Bytes
  • Size of remote file: 120 kB
data/docs/source/_static/images/transform_schematic.png ADDED

Git LFS Details

  • SHA256: 3165f6e940ffbf36ce24d73f3ea66c67975cf4395e25c6ca92086dc1dcb0ba47
  • Pointer size: 130 Bytes
  • Size of remote file: 85.6 kB
data/docs/source/_static/images/transforms.png ADDED

Git LFS Details

  • SHA256: 11310908b90eed9da9c673d53693aef61ab18735f3200c15b5de982bc8e78123
  • Pointer size: 131 Bytes
  • Size of remote file: 511 kB
data/docs/source/_templates/breadcrumbs.html ADDED
@@ -0,0 +1,7 @@
1
+ {%- extends "sphinx_rtd_theme/breadcrumbs.html" %}
2
+
3
+ {% block breadcrumbs_aside %}
4
+ <li class="wy-breadcrumbs-aside">
5
+ <a href="https://github.com/Dana-Farber-AIOS/pathml" class="fa fa-github">View PathML on GitHub</a>
6
+ </li>
7
+ {% endblock %}
data/docs/source/api_core_reference.rst ADDED
@@ -0,0 +1,79 @@
1
+ Core API
2
+ ========
3
+
4
+ SlideData
5
+ ---------
6
+
7
+ The central class in ``PathML`` for representing a whole-slide image.
8
+
9
+ .. autoapiclass:: pathml.core.SlideData
10
+
11
+
12
+ Convenience SlideData Classes
13
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
14
+
15
+ .. autoapiclass:: pathml.core.HESlide
16
+ .. autoapiclass:: pathml.core.VectraSlide
17
+ .. autoapiclass:: pathml.core.MultiparametricSlide
18
+ .. autoapiclass:: pathml.core.CODEXSlide
19
+
20
+
21
+ Slide Types
22
+ -----------
23
+
24
+ .. autoapiclass:: pathml.core.SlideType
25
+ :exclude-members: tma, platform, rgb, stain, volumetric, time_series
26
+
27
+
28
+ We also provide instantiations of common slide types for convenience:
29
+
30
+ ============================= ======= ======== ======= ======= ========== ===========
31
+ Type stain platform rgb tma volumetric time_series
32
+ ============================= ======= ======== ======= ======= ========== ===========
33
+ ``pathml.core.types.HE`` 'HE' None True False False False
34
+ ``pathml.core.types.IHC`` 'IHC' None True False False False
35
+ ``pathml.core.types.IF`` 'Fluor' None False False False False
36
+ ``pathml.core.types.CODEX`` 'Fluor' 'CODEX' False False False False
37
+ ``pathml.core.types.Vectra`` 'Fluor' 'Vectra' False False False False
38
+ ============================= ======= ======== ======= ======= ========== ===========
39
+
40
+ Tile
41
+ ----
42
+
43
+ .. autoapiclass:: pathml.core.Tile
44
+
45
+ SlideDataset
46
+ ------------
47
+
48
+ .. autoapiclass:: pathml.core.SlideDataset
49
+
50
+ Tiles and Masks helper classes
51
+ ------------------------------
52
+
53
+ .. autoapiclass:: pathml.core.Tiles
54
+
55
+ .. autoapiclass:: pathml.core.Masks
56
+
57
+
58
+ Slide Backends
59
+ --------------
60
+
61
+ OpenslideBackend
62
+ ^^^^^^^^^^^^^^^^
63
+
64
+ .. autoapiclass:: pathml.core.OpenSlideBackend
65
+
66
+ BioFormatsBackend
67
+ ^^^^^^^^^^^^^^^^^
68
+
69
+ .. autoapiclass:: pathml.core.BioFormatsBackend
70
+
71
+ DICOMBackend
72
+ ^^^^^^^^^^^^
73
+
74
+ .. autoapiclass:: pathml.core.DICOMBackend
75
+
76
+ h5pathManager
77
+ -------------
78
+
79
+ .. autoapiclass:: pathml.core.h5managers.h5pathManager
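A minimal usage sketch for the core classes listed above. This is illustrative rather than the library's canonical quickstart: the file path is a placeholder, and the equivalence between the convenience class and an explicit slide type should be checked against the SlideData documentation.

    # Load a whole-slide image with a convenience class, or with an explicit slide type.
    # "slide.svs" is a placeholder path.
    from pathml.core import HESlide, SlideData, types

    wsi = HESlide("slide.svs")                                   # convenience class for H&E slides
    wsi_explicit = SlideData("slide.svs", slide_type=types.HE)   # same idea, spelled out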
data/docs/source/api_datasets_reference.rst ADDED
@@ -0,0 +1,14 @@
+ Datasets API
+ ============
+
+ Downloadable Datasets
+ ----------------------
+
+ .. autoapiclass:: pathml.datasets.PanNukeDataModule
+ .. autoapiclass:: pathml.datasets.DeepFocusDataModule
+
+ ML Dataset classes
+ -------------------
+
+ .. autoapiclass:: pathml.datasets.TileDataset
+ .. autoapiclass:: pathml.datasets.EntityDataset
data/docs/source/api_graph_reference.rst ADDED
@@ -0,0 +1,24 @@
+ Graph API
+ =============
+
+ Graph Building
+ --------------------
+
+ .. autoapiclass:: pathml.graph.preprocessing.BaseGraphBuilder
+ .. autoapiclass:: pathml.graph.preprocessing.KNNGraphBuilder
+ .. autoapiclass:: pathml.graph.preprocessing.RAGGraphBuilder
+ .. autoapiclass:: pathml.graph.preprocessing.MSTGraphBuilder
+
+ Tissue Extraction
+ ------------------
+
+ .. autoapiclass:: pathml.graph.preprocessing.SuperpixelExtractor
+ .. autoapiclass:: pathml.graph.preprocessing.SLICSuperpixelExtractor
+ .. autoapiclass:: pathml.graph.preprocessing.MergedSuperpixelExtractor
+ .. autoapiclass:: pathml.graph.preprocessing.ColorMergedSuperpixelExtractor
+
+ Graph Feature Extraction
+ -------------------------
+
+ .. autoapiclass:: pathml.graph.preprocessing.GraphFeatureExtractor
+     :members:
data/docs/source/api_inference_reference.rst ADDED
@@ -0,0 +1,36 @@
+ Inference API
+ =============
+
+ Base Inference Class
+ --------------------
+
+ .. autoapiclass:: pathml.inference.InferenceBase
+
+ Inference Class
+ ---------------
+
+ .. autoapiclass:: pathml.inference.Inference
+
+ HaloAI Inference Class
+ ----------------------
+
+ .. autoapiclass:: pathml.inference.HaloAIInference
+
+ RemoteTestHoverNet Class
+ ------------------------
+
+ .. autoapiclass:: pathml.inference.RemoteTestHoverNet
+
+ RemoteMesmer Class
+ ------------------------
+
+ .. autoapiclass:: pathml.inference.RemoteMesmer
+
+ Helper functions
+ ^^^^^^^^^^^^^^^^
+
+ .. autoapifunction:: pathml.inference.remove_initializer_from_input
+
+ .. autoapifunction:: pathml.inference.check_onnx_clean
+
+ .. autoapifunction:: pathml.inference.convert_pytorch_onnx
data/docs/source/api_ml_reference.rst ADDED
@@ -0,0 +1,30 @@
+ ML API
+ ======
+
+ Models
+ ---------
+
+ .. autoapiclass:: pathml.ml.models.hovernet.HoVerNet
+     :members:
+
+ .. autoapiclass:: pathml.ml.models.hactnet.HACTNet
+     :members:
+
+ Layers
+ ---------
+
+ .. autoapiclass:: pathml.ml.layers.GNNLayer
+     :members:
+
+ Helper functions
+ ^^^^^^^^^^^^^^^^
+
+ .. autoapifunction:: pathml.ml.models.hovernet.compute_hv_map
+
+ .. autoapifunction:: pathml.ml.models.hovernet.loss_hovernet
+
+ .. autoapifunction:: pathml.ml.models.hovernet.remove_small_objs
+
+ .. autoapifunction:: pathml.ml.models.hovernet.post_process_batch_hovernet
+
+
data/docs/source/api_preprocessing_reference.rst ADDED
@@ -0,0 +1,42 @@
+ Preprocessing API
+ =================
+
+ Pipeline
+ --------
+
+ .. autoapiclass:: pathml.preprocessing.Pipeline
+
+ Transforms
+ ----------
+
+ .. autoapiclass:: pathml.preprocessing.MedianBlur
+ .. autoapiclass:: pathml.preprocessing.GaussianBlur
+ .. autoapiclass:: pathml.preprocessing.BoxBlur
+ .. autoapiclass:: pathml.preprocessing.BinaryThreshold
+ .. autoapiclass:: pathml.preprocessing.MorphOpen
+ .. autoapiclass:: pathml.preprocessing.MorphClose
+ .. autoapiclass:: pathml.preprocessing.ForegroundDetection
+ .. autoapiclass:: pathml.preprocessing.SuperpixelInterpolation
+ .. autoapiclass:: pathml.preprocessing.StainNormalizationHE
+ .. autoapiclass:: pathml.preprocessing.NucleusDetectionHE
+ .. autoapiclass:: pathml.preprocessing.TissueDetectionHE
+ .. autoapiclass:: pathml.preprocessing.LabelArtifactTileHE
+ .. autoapiclass:: pathml.preprocessing.LabelWhiteSpaceHE
+ .. autoapiclass:: pathml.preprocessing.SegmentMIF
+ .. autoapiclass:: pathml.preprocessing.SegmentMIFRemote
+ .. autoapiclass:: pathml.preprocessing.QuantifyMIF
+ .. autoapiclass:: pathml.preprocessing.CollapseRunsVectra
+ .. autoapiclass:: pathml.preprocessing.CollapseRunsCODEX
+ .. autoapiclass:: pathml.preprocessing.RescaleIntensity
+ .. autoapiclass:: pathml.preprocessing.HistogramEqualization
+ .. autoapiclass:: pathml.preprocessing.AdaptiveHistogramEqualization
+
+
+ TileStitching
+ -------------
+ This section covers the ``TileStitcher`` class, which is specialized for stitching tiled images and is particularly useful in digital pathology.
+
+ .. autoapiclass:: pathml.preprocessing.tilestitcher.TileStitcher
+     :members: run_image_stitching, shutdown
+     :show-inheritance:
+
data/docs/source/api_utils_reference.rst ADDED
@@ -0,0 +1,69 @@
+ Utilities API
+ =============
+
+ Documentation for various utilities from all modules.
+
+ Logging Utils
+ -------------
+
+ .. autoapiclass:: pathml.PathMLLogger
+
+ Core Utils
+ ----------
+
+ .. autoapifunction:: pathml.core.utils.readtupleh5
+ .. autoapifunction:: pathml.core.utils.writedataframeh5
+ .. autoapifunction:: pathml.core.utils.writedicth5
+ .. autoapifunction:: pathml.core.utils.writestringh5
+ .. autoapifunction:: pathml.core.utils.writetupleh5
+ .. autoapifunction:: pathml.core.utils.readcounts
+ .. autoapifunction:: pathml.core.utils.writecounts
+
+ Graph Utils
+ --------------
+
+ .. autoapifunction:: pathml.graph.utils.Graph
+ .. autoapifunction:: pathml.graph.utils.HACTPairData
+ .. autoapifunction:: pathml.graph.utils.get_full_instance_map
+ .. autoapifunction:: pathml.graph.utils.build_assignment_matrix
+ .. autoapifunction:: pathml.graph.utils.two_hop
+ .. autoapifunction:: pathml.graph.utils.two_hop_no_sparse
+
+ Datasets Utils
+ --------------
+
+ .. autoapiclass:: pathml.datasets.utils.DeepPatchFeatureExtractor
+ .. autoapifunction:: pathml.datasets.utils.pannuke_multiclass_mask_to_nucleus_mask
+ .. autoapifunction:: pathml.datasets.utils._remove_modules
+
+ ML Utils
+ --------
+
+ .. autoapifunction:: pathml.ml.utils.center_crop_im_batch
+ .. autoapifunction:: pathml.ml.utils.dice_loss
+ .. autoapifunction:: pathml.ml.utils.dice_score
+ .. autoapifunction:: pathml.ml.utils.get_sobel_kernels
+ .. autoapifunction:: pathml.ml.utils.wrap_transform_multichannel
+ .. autoapifunction:: pathml.ml.utils.scatter_sum
+ .. autoapifunction:: pathml.ml.utils.broadcast
+ .. autoapifunction:: pathml.ml.utils.get_degree_histogram
+ .. autoapifunction:: pathml.ml.utils.get_class_weights
+
+ Miscellaneous Utils
+ -------------------
+
+ .. autoapifunction:: pathml.utils.upsample_array
+ .. autoapifunction:: pathml.utils.pil_to_rgb
+ .. autoapifunction:: pathml.utils.segmentation_lines
+ .. autoapifunction:: pathml.utils.plot_mask
+ .. autoapifunction:: pathml.utils.contour_centroid
+ .. autoapifunction:: pathml.utils.sort_points_clockwise
+ .. autoapifunction:: pathml.utils.pad_or_crop
+ .. autoapifunction:: pathml.utils.RGB_to_HSI
+ .. autoapifunction:: pathml.utils.RGB_to_OD
+ .. autoapifunction:: pathml.utils.RGB_to_HSV
+ .. autoapifunction:: pathml.utils.RGB_to_LAB
+ .. autoapifunction:: pathml.utils.RGB_to_GREY
+ .. autoapifunction:: pathml.utils.normalize_matrix_rows
+ .. autoapifunction:: pathml.utils.normalize_matrix_cols
+ .. autoapifunction:: pathml.utils.plot_segmentation
data/docs/source/conf.py ADDED
@@ -0,0 +1,99 @@
+ # Configuration file for the Sphinx documentation builder.
+ #
+ # This file only contains a selection of the most common options. For a full
+ # list see the documentation:
+ # https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+ # -- Path setup --------------------------------------------------------------
+
+ # If extensions (or modules to document with autodoc) are in another directory,
+ # add these directories to sys.path here. If the directory is relative to the
+ # documentation root, use os.path.abspath to make it absolute, like shown here.
+
+ import os
+ import sys
+ from datetime import datetime
+
+ sys.path.insert(0, os.path.abspath("../../"))
+
+
+ # -- Project information -----------------------------------------------------
+
+ project = "PathML"
+ copyright = (
+     f"{datetime.now().year}, Dana-Farber Cancer Institute and Weill Cornell Medicine"
+ )
+ author = "Jacob Rosenthal et al."
+
+ about = {}
+ with open("../../pathml/_version.py") as f:
+     exec(f.read(), about)
+ version = about["__version__"]
+
+ # The full version, including alpha/beta/rc tags
+ release = version
+
+
+ # -- General configuration ---------------------------------------------------
+
+ # Add any Sphinx extension module names here, as strings. They can be
+ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+ # ones.
+ extensions = [
+     "sphinx.ext.autodoc",
+     "nbsphinx",
+     "nbsphinx_link",
+     "sphinx.ext.napoleon",
+     "sphinx.ext.imgmath",
+     "IPython.sphinxext.ipython_console_highlighting",
+     "autoapi.extension",
+     "sphinx_copybutton",
+ ]
+
+ autodoc_default_options = {"members": True, "undoc-members": True}
+
+ # Add any paths that contain templates here, relative to this directory.
+ templates_path = ["_templates"]
+
+ # List of patterns, relative to source directory, that match files and
+ # directories to ignore when looking for source files.
+ # This pattern also affects html_static_path and html_extra_path.
+ exclude_patterns = ["../tests/*", "build", "../*.ipynb_checkpoints"]
+
+ # using autoapi to generate docs which should use less resources and improve readthedocs builds:
+ # https://docs.readthedocs.io/en/stable/guides/build-using-too-many-resources.html#document-python-modules-api-statically
+ autoapi_dirs = ["../../pathml"]
+ # still use autodoc directives:
+ # https://sphinx-autoapi.readthedocs.io/en/latest/reference/directives.html#autodoc-style-directives
+ autoapi_generate_api_docs = False
+
+ # -- Options for HTML output -------------------------------------------------
+
+ # The theme to use for HTML and HTML Help pages. See the documentation for
+ # a list of builtin themes.
+ #
+ html_theme = "sphinx_rtd_theme"
+
+ # from https://sphinx-rtd-theme.readthedocs.io/en/stable/configuring.html
+ html_theme_options = {
+     "logo_only": True,
+     "display_version": True,
+     "style_nav_header_background": "grey",
+     "collapse_navigation": False,
+     "prev_next_buttons_location": "both",
+     "style_external_links": True,
+ }
+
+ # link to logo
+ html_logo = "_static/images/logo.png"
+
+ html_show_sphinx = False
+
+ # Add any paths that contain custom static files (such as style sheets) here,
+ # relative to this directory. They are copied after the builtin static files,
+ # so a file named "default.css" will overwrite the builtin "default.css".
+ html_static_path = ["_static", "./examples/static/"]
+
+
+ def setup(app):
+     app.add_css_file("css/pathml.css")
data/docs/source/creating_pipelines.rst ADDED
@@ -0,0 +1,103 @@
+ Creating Preprocessing Pipelines
+ ================================
+
+ Preprocessing pipelines define how raw images are transformed and prepared for downstream analysis.
+ The ``pathml.preprocessing`` module provides tools to define modular preprocessing pipelines for whole-slide images.
+
+ In this section we will walk through how to define a
+ :class:`~pathml.preprocessing.pipeline.Pipeline` object by composing pre-made
+ :class:`~pathml.preprocessing.transforms.Transform` objects, and how to implement a
+ new custom :class:`~pathml.preprocessing.transforms.Transform`.
+
+
+ What is a Transform?
+ --------------------
+
+ The :class:`~pathml.preprocessing.transforms.Transform` is the building block for creating preprocessing pipelines.
+
+ Each :class:`~pathml.preprocessing.transforms.Transform` applies a specific operation to a
+ :class:`~pathml.core.tile.Tile`, which may include modifying
+ an input image, creating or modifying pixel-level metadata (i.e., masks), or creating or modifying image-level metadata
+ (e.g., image quality metrics or an AnnData counts matrix).
+
+ .. figure:: _static/images/transform_schematic.png
+     :alt: schematic diagram of Transform
+     :scale: 70 %
+     :align: center
+
+     Schematic diagram of a :class:`~pathml.preprocessing.transforms.Transform` operating on a tile.
+     In this example, several masks are created (represented by stacked rectangles) as well as
+     several labels (depicted here as cubes).
+
+
+ .. figure:: _static/images/transforms.png
+     :alt: examples of Transforms
+     :scale: 75 %
+     :align: center
+
+     Examples of several types of :class:`~pathml.preprocessing.transforms.Transform`
+
+ What is a Pipeline?
+ -----------------------
+
+ A preprocessing pipeline is a set of independent operations applied sequentially.
+ In ``PathML``, a :class:`~pathml.preprocessing.pipeline.Pipeline` is defined as a sequence of
+ :class:`~pathml.preprocessing.transforms.Transform` objects. This makes it easy to compose a custom
+ :class:`~pathml.preprocessing.pipeline.Pipeline` by mixing-and-matching:
+
+ .. figure:: _static/images/pipeline_composition_schematic.png
+     :alt: schematic diagram of modular pipeline composition
+     :scale: 40 %
+     :align: center
+
+     Schematic diagram of :class:`~pathml.preprocessing.pipeline.Pipeline` composition from a set of modular components
+
+
+ In the PathML API, this is concise:
+
+ .. code-block::
+
+     from pathml.preprocessing import Pipeline, BoxBlur, TissueDetectionHE
+
+     pipeline = Pipeline([
+         BoxBlur(kernel_size=15),
+         TissueDetectionHE(mask_name = "tissue", min_region_size=500,
+                           threshold=30, outer_contours_only=True)
+     ])
+
+ In this example, the preprocessing pipeline will first apply a box blur kernel, and then apply tissue detection.
+
+
+ Creating custom Transforms
+ ------------------------------
+
+ .. note:: For advanced users
+
+ In some cases, you may want to implement a custom :class:`~pathml.preprocessing.transforms.Transform`.
+ For example, you may want to apply a transformation which is not already implemented in ``PathML``.
+ Or, perhaps you want to create a new transformation which combines several others.
+
+ To define a new custom :class:`~pathml.preprocessing.transforms.Transform`,
+ all you need to do is create a class which inherits from :class:`~pathml.preprocessing.transforms.Transform` and
+ implements an ``apply()`` method which takes a :class:`~pathml.core.tile.Tile` as an argument and modifies it in place.
+ You may also implement a functional method ``F()``, although that is not strictly required.
+
+ For example, let's take a look at how :class:`~pathml.preprocessing.transforms.BoxBlur` is implemented:
+
+ .. code-block:: python
+
+     class BoxBlur(Transform):
+         """Box (average) blur kernel."""
+         def __init__(self, kernel_size=5):
+             self.kernel_size = kernel_size
+
+         def F(self, image):
+             return cv2.boxFilter(image, ksize = (self.kernel_size, self.kernel_size), ddepth = -1)
+
+         def apply(self, tile):
+             tile.image = self.F(tile.image)
+
+
+ Once you define your custom :class:`~pathml.preprocessing.transforms.Transform`,
+ you can combine it with any of the other :class:`~pathml.preprocessing.transforms.Transform` objects,
+ compose a :class:`~pathml.preprocessing.pipeline.Pipeline`, etc.
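Building on the custom Transform walkthrough in creating_pipelines.rst above, here is a hedged sketch of a user-defined Transform that creates a mask rather than modifying the image. The class name, mask name, and the assumption that ``tile.masks`` supports dict-style assignment (suggested by the ``Masks`` helper class) are illustrative, not part of the library:

    import cv2
    import numpy as np
    from pathml.preprocessing.transforms import Transform


    class SimpleTissueMask(Transform):
        """Hypothetical example: binary tissue mask from a fixed grayscale threshold."""

        def __init__(self, threshold=200, mask_name="simple_tissue"):
            self.threshold = threshold
            self.mask_name = mask_name

        def F(self, image):
            gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
            # tissue tends to be darker than the slide background, so keep pixels below the threshold
            return (gray < self.threshold).astype(np.uint8)

        def apply(self, tile):
            # store the result as a new mask instead of overwriting tile.image
            tile.masks[self.mask_name] = self.F(tile.image)

Once defined this way, the class can be mixed into a Pipeline alongside the built-in transforms exactly as in the BoxBlur example.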
data/docs/source/dataloaders.rst ADDED
@@ -0,0 +1,24 @@
+ DataLoaders
+ ===========
+
+ After running a preprocessing pipeline and writing the resulting ``.h5path`` file to disk, the next step is to
+ create a DataLoader for feeding tiles into a machine learning model in PyTorch.
+
+ To do this, use the :class:`~pathml.ml.dataset.TileDataset` class and then wrap it in a PyTorch DataLoader:
+
+ .. code-block::
+
+     dataset = TileDataset("/path/to/file.h5path")
+     dataloader = torch.utils.data.DataLoader(dataset, batch_size = 16, shuffle = True, num_workers = 4)
+
+ .. note::
+
+     Label dictionaries are not standardized, as users are free to store whatever labels they want.
+     For that reason, PyTorch cannot automatically stack labels into batches.
+     It may therefore be necessary to create a custom ``collate_fn`` to specify how to create batches of labels.
+     See `here <https://discuss.pytorch.org/t/how-to-use-collate-fn/27181>`_.
+
+ This provides an interface between PathML and the broader ecosystem of machine learning tools built on PyTorch.
+ For more information on how to use Datasets and DataLoaders, please see the PyTorch
+ `documentation <https://pytorch.org/docs/stable/data.html>`_ and
+ `tutorials <https://pytorch.org/tutorials/beginner/basics/data_tutorial.html>`_.
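Following up on the note about custom ``collate_fn`` in dataloaders.rst above, a minimal sketch is shown below. It assumes each dataset item is an (image, labels) pair with labels a free-form dict; the actual items returned by TileDataset may contain additional fields, so this is a pattern to adapt rather than a drop-in implementation.

    import torch


    def collate_tiles(batch):
        # stack images into a single batch tensor
        images = torch.stack([torch.as_tensor(im) for im, _ in batch], dim=0)
        # leave heterogeneous label dicts un-stacked; downstream code indexes the list
        labels = [lab for _, lab in batch]
        return images, labels

    # dataloader = torch.utils.data.DataLoader(dataset, batch_size=16, collate_fn=collate_tiles)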
data/docs/source/datasets.rst ADDED
@@ -0,0 +1,50 @@
+ Datasets
+ ========
+
+ The ``pathml.datasets`` module provides easy access to common datasets for standardized model evaluation and comparison.
+
+ DataModules
+ --------------
+
+ ``PathML`` uses ``DataModules`` to encapsulate datasets.
+ DataModule objects are responsible for downloading the data (if necessary) and formatting the data into ``DataSet`` and
+ ``DataLoader`` objects for use in downstream tasks.
+ Keeping everything in a single object is easier for users and also facilitates reproducibility.
+
+ This design is inspired by `PyTorch Lightning <https://pytorch-lightning.readthedocs.io/en/latest/datamodules.html>`_.
+
+
+ Using public datasets
+ ---------------------
+
+ PathML has built-in support for several public datasets:
+
+ .. list-table:: Datasets
+     :widths: 20 50 10 20
+     :header-rows: 1
+
+     * - Dataset
+       - Description
+       - Image type
+       - Size
+     * - :class:`~pathml.datasets.pannuke.PanNukeDataModule`
+       - Pixel-level nucleus classification, with 6 nucleus types and 19 tissue types.
+         Images are 256px RGB. [PanNuke1]_ [PanNuke2]_
+       - H&E
+       - n=7901 (37.33 GB)
+     * - :class:`~pathml.datasets.deepblur.DeepFocusDataModule`
+       - Patch-level focus classification with 3 IHC and 1 H&E histologies. [DeepFocus]_
+       - H&E, IHC
+       - n=204k (10.0 GB)
+
+
+ References
+ ----------
+
+ .. [PanNuke1] Gamper, J., Koohbanani, N.A., Benet, K., Khuram, A. and Rajpoot, N., 2019, April. PanNuke: an open pan-cancer
+     histology dataset for nuclei instance segmentation and classification. In European Congress on Digital
+     Pathology (pp. 11-19). Springer, Cham.
+ .. [PanNuke2] Gamper, J., Koohbanani, N.A., Graham, S., Jahanifar, M., Khurram, S.A., Azam, A., Hewitt, K. and Rajpoot, N.,
+     2020. PanNuke Dataset Extension, Insights and Baselines. arXiv preprint arXiv:2003.10778.
+ .. [DeepFocus] Senaras, C., Niazi, M., Lozanski, G., Gurcan, M., 2018, October. DeepFocus: Detection of out-of-focus regions
+     in whole slide digital images using deep learning. PLOS One 13(10): e0205387.
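A hedged sketch of the DataModule pattern described in datasets.rst above. The data_dir path is a placeholder, and the download/batch_size arguments and train_dataloader() accessor are assumptions based on the DataModule convention; the exact signature should be checked against the Datasets API reference.

    from pathml.datasets import PanNukeDataModule

    # assumed constructor arguments; verify against the API reference
    pannuke = PanNukeDataModule(data_dir="./data/pannuke", download=True, batch_size=8)
    train_dl = pannuke.train_dataloader()  # a PyTorch DataLoader ready for model training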
data/docs/source/examples/link_codex.nblink ADDED
@@ -0,0 +1,3 @@
+ {
+     "path": "../../../examples/codex.ipynb"
+ }
data/docs/source/examples/link_construct_graphs.nblink ADDED
@@ -0,0 +1,3 @@
+ {
+     "path": "../../../examples/construct_graphs.ipynb"
+ }
data/docs/source/examples/link_gallery.nblink ADDED
@@ -0,0 +1,3 @@
+ {
+     "path": "../../../examples/pathml_gallery.ipynb"
+ }
data/docs/source/examples/link_jco.nblink ADDED
@@ -0,0 +1,3 @@
+ {
+     "path": "../../../examples/Graph_Analysis_NSCLC.ipynb"
+ }