Dataset schema:

| Column | Type | Length / range |
|---|---|---|
| instance_id | string | 10 to 57 chars |
| base_commit | string | 40 chars |
| created_at | date string | 2014-04-30 14:58:36 to 2025-04-30 20:14:11 |
| environment_setup_commit | string | 40 chars |
| hints_text | string | 0 to 273k chars |
| patch | string | 251 to 7.06M chars |
| problem_statement | string | 11 to 52.5k chars |
| repo | string | 7 to 53 chars |
| test_patch | string | 231 to 997k chars |
| meta | dict | |
| version | string | 851 distinct values |
| install_config | dict | |
| requirements | string | 93 to 34.2k chars |
| environment | string | 760 to 20.5k chars |
| FAIL_TO_PASS | list | 1 to 9.39k items |
| FAIL_TO_FAIL | list | 0 to 2.69k items |
| PASS_TO_PASS | list | 0 to 7.87k items |
| PASS_TO_FAIL | list | 0 to 192 items |
| license_name | string | 55 distinct values |
| __index_level_0__ | int64 | 0 to 21.4k |
| before_filepaths | list | 1 to 105 items |
| after_filepaths | list | 1 to 105 items |
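A minimal sketch of loading and inspecting rows with this schema, assuming the data is published as a Hugging Face dataset; the identifier "org/dataset-name" is a placeholder, not taken from this page:

```python
# Minimal sketch, assuming a Hugging Face dataset with the schema above.
# "org/dataset-name" is a hypothetical identifier, not from this page.
from datasets import load_dataset

ds = load_dataset("org/dataset-name", split="train")

row = ds[0]
print(row["instance_id"], row["repo"], row["license_name"])
print(row["FAIL_TO_PASS"])  # tests expected to flip from failing to passing
```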

Example row 1:

instance_id: pre-commit__pre-commit-687
base_commit: 65f60e25930a4979a4571e41f320b81f622b2556
created_at: 2018-01-14 01:29:11
environment_setup_commit: 65f60e25930a4979a4571e41f320b81f622b2556
patch:
diff --git a/pre_commit/commands/run.py b/pre_commit/commands/run.py index c70eff0..a16d8fe 100644 --- a/pre_commit/commands/run.py +++ b/pre_commit/commands/run.py @@ -67,6 +67,15 @@ def _run_single_hook(filenames, hook, repo, args, skips, cols): filenames = _filter_by_include_exclude(filenames, include, exclude) types, exclude_types = hook['types'], hook['exclude_types'] filenames = _filter_by_types(filenames, types, exclude_types) + + if hook['language'] == 'pcre': + logger.warning( + '`{}` (from {}) uses the deprecated pcre language.\n' + 'The pcre language is scheduled for removal in pre-commit 2.x.\n' + 'The pygrep language is a more portable (and usually drop-in) ' + 'replacement.'.format(hook['id'], repo.repo_config['repo']), + ) + if hook['id'] in skips: output.write(get_hook_message( _hook_msg_start(hook, args.verbose),
problem_statement:
Deprecate `pcre` language

Now that pygrep (#630) is a much more portable alternative, pcre is unnecessary and should be deprecated. A deprecation warning should be issued when loading a configuration containing `language: pcre` and should point the consumer in the right direction to correcting it (either by suggesting a pull request, or by indicating they should modify their `local` configuration). The `pcre` language will likely be removed in `pre-commit==2.0.0`
repo: pre-commit/pre-commit
test_patch:
diff --git a/tests/commands/run_test.py b/tests/commands/run_test.py index 97c82c2..cbf4e98 100644 --- a/tests/commands/run_test.py +++ b/tests/commands/run_test.py @@ -529,7 +529,7 @@ def test_push_hook(cap_out, repo_with_passing_hook, mock_out_store_directory): ('id', 'do_not_commit'), ('name', 'hook 2'), ('entry', 'DO NOT COMMIT'), - ('language', 'pcre'), + ('language', 'pygrep'), ('types', ['text']), ('stages', ['push']), )), @@ -592,7 +592,7 @@ def test_local_hook_passes( ('id', 'do_not_commit'), ('name', 'Block if "DO NOT COMMIT" is found'), ('entry', 'DO NOT COMMIT'), - ('language', 'pcre'), + ('language', 'pygrep'), ('files', '^(.*)$'), )), ), @@ -645,6 +645,35 @@ def test_local_hook_fails( ) +def test_pcre_deprecation_warning( + cap_out, repo_with_passing_hook, mock_out_store_directory, +): + config = OrderedDict(( + ('repo', 'local'), + ( + 'hooks', [OrderedDict(( + ('id', 'pcre-hook'), + ('name', 'pcre-hook'), + ('language', 'pcre'), + ('entry', '.'), + ))], + ), + )) + add_config_to_repo(repo_with_passing_hook, config) + + _test_run( + cap_out, + repo_with_passing_hook, + opts={}, + expected_outputs=[ + b'[WARNING] `pcre-hook` (from local) uses the deprecated ' + b'pcre language.', + ], + expected_ret=0, + stage=False, + ) + + def test_meta_hook_passes( cap_out, repo_with_passing_hook, mock_out_store_directory, ):
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 1 }
1.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": null, "python": "3.6", "reqs_path": [ "requirements-dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
aspy.yaml==1.3.0 attrs==22.2.0 cached-property==1.5.2 certifi==2021.5.30 coverage==6.2 distlib==0.3.9 filelock==3.4.1 flake8==5.0.4 identify==2.4.4 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 mccabe==0.7.0 mock==5.2.0 nodeenv==1.6.0 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 -e git+https://github.com/pre-commit/pre-commit.git@65f60e25930a4979a4571e41f320b81f622b2556#egg=pre_commit py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 pyparsing==3.1.4 pytest==7.0.1 pytest-env==0.6.2 PyYAML==6.0.1 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 virtualenv==20.17.1 zipp==3.6.0
environment:
name: pre-commit channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - aspy-yaml==1.3.0 - attrs==22.2.0 - cached-property==1.5.2 - coverage==6.2 - distlib==0.3.9 - filelock==3.4.1 - flake8==5.0.4 - identify==2.4.4 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - mccabe==0.7.0 - mock==5.2.0 - nodeenv==1.6.0 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-env==0.6.2 - pyyaml==6.0.1 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - virtualenv==20.17.1 - zipp==3.6.0 prefix: /opt/conda/envs/pre-commit
[ "tests/commands/run_test.py::test_pcre_deprecation_warning" ]
[ "tests/commands/run_test.py::test_hook_install_failure" ]
[ "tests/commands/run_test.py::test_run_all_hooks_failing", "tests/commands/run_test.py::test_arbitrary_bytes_hook", "tests/commands/run_test.py::test_hook_that_modifies_but_returns_zero", "tests/commands/run_test.py::test_types_hook_repository", "tests/commands/run_test.py::test_exclude_types_hook_repository", "tests/commands/run_test.py::test_global_exclude", "tests/commands/run_test.py::test_show_diff_on_failure", "tests/commands/run_test.py::test_run[options0-outputs0-0-True]", "tests/commands/run_test.py::test_run[options1-outputs1-0-True]", "tests/commands/run_test.py::test_run[options2-outputs2-0-True]", "tests/commands/run_test.py::test_run[options3-outputs3-1-True]", "tests/commands/run_test.py::test_run[options4-outputs4-0-True]", "tests/commands/run_test.py::test_run[options5-outputs5-0-True]", "tests/commands/run_test.py::test_run[options6-outputs6-0-False]", "tests/commands/run_test.py::test_run_output_logfile", "tests/commands/run_test.py::test_always_run", "tests/commands/run_test.py::test_always_run_alt_config", "tests/commands/run_test.py::test_origin_source_error_msg[master-master-False]", "tests/commands/run_test.py::test_origin_source_error_msg[master--True]", "tests/commands/run_test.py::test_origin_source_error_msg[-master-True]", "tests/commands/run_test.py::test_has_unmerged_paths", "tests/commands/run_test.py::test_merge_conflict", "tests/commands/run_test.py::test_merge_conflict_modified", "tests/commands/run_test.py::test_merge_conflict_resolved", "tests/commands/run_test.py::test_compute_cols[hooks0-True-80]", "tests/commands/run_test.py::test_compute_cols[hooks1-False-81]", "tests/commands/run_test.py::test_compute_cols[hooks2-True-85]", "tests/commands/run_test.py::test_compute_cols[hooks3-False-82]", "tests/commands/run_test.py::test_get_skips[environ0-expected_output0]", "tests/commands/run_test.py::test_get_skips[environ1-expected_output1]", "tests/commands/run_test.py::test_get_skips[environ2-expected_output2]", "tests/commands/run_test.py::test_get_skips[environ3-expected_output3]", "tests/commands/run_test.py::test_get_skips[environ4-expected_output4]", "tests/commands/run_test.py::test_get_skips[environ5-expected_output5]", "tests/commands/run_test.py::test_get_skips[environ6-expected_output6]", "tests/commands/run_test.py::test_skip_hook", "tests/commands/run_test.py::test_hook_id_not_in_non_verbose_output", "tests/commands/run_test.py::test_hook_id_in_verbose_output", "tests/commands/run_test.py::test_multiple_hooks_same_id", "tests/commands/run_test.py::test_non_ascii_hook_id", "tests/commands/run_test.py::test_stdout_write_bug_py26", "tests/commands/run_test.py::test_lots_of_files", "tests/commands/run_test.py::test_push_hook", "tests/commands/run_test.py::test_commit_msg_hook", "tests/commands/run_test.py::test_local_hook_passes", "tests/commands/run_test.py::test_local_hook_fails", "tests/commands/run_test.py::test_meta_hook_passes", "tests/commands/run_test.py::test_error_with_unstaged_config", "tests/commands/run_test.py::test_no_unstaged_error_with_all_files_or_files[opts0]", "tests/commands/run_test.py::test_no_unstaged_error_with_all_files_or_files[opts1]", "tests/commands/run_test.py::test_files_running_subdir", "tests/commands/run_test.py::test_pass_filenames[True-hook_args0-foo.py]", "tests/commands/run_test.py::test_pass_filenames[False-hook_args1-]", "tests/commands/run_test.py::test_pass_filenames[True-hook_args2-some", "tests/commands/run_test.py::test_pass_filenames[False-hook_args3-some", "tests/commands/run_test.py::test_fail_fast", 
"tests/commands/run_test.py::test_include_exclude_base_case", "tests/commands/run_test.py::test_matches_broken_symlink", "tests/commands/run_test.py::test_include_exclude_total_match", "tests/commands/run_test.py::test_include_exclude_does_search_instead_of_match", "tests/commands/run_test.py::test_include_exclude_exclude_removes_files" ]
PASS_TO_FAIL: []
license_name: MIT License
__index_level_0__: 2045
before_filepaths: [ "pre_commit/commands/run.py" ]
after_filepaths: [ "pre_commit/commands/run.py" ]

Example row 2:

instance_id: dask__dask-3067
base_commit: 1223f0167e1f760a93aaf286e6dcfa72ba21f773
created_at: 2018-01-14 22:11:35
environment_setup_commit: de6c2a49d76066abb51085570816322f063fc5c5
hints_text: jcrist: LGTM. Thanks @metasyn!
patch:
diff --git a/dask/array/core.py b/dask/array/core.py index af23cc282..d7a944784 100644 --- a/dask/array/core.py +++ b/dask/array/core.py @@ -1111,6 +1111,8 @@ class Array(Base): chunks = property(_get_chunks, _set_chunks, "chunks property") def __len__(self): + if not self.chunks: + raise TypeError("len() of unsized object") return sum(self.chunks[0]) def __array_ufunc__(self, numpy_ufunc, method, *inputs, **kwargs): diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index e789593ba..823f70472 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -7,7 +7,7 @@ Changelog Array +++++ - +- Update error handling when len is called with empty chunks (:issue:`3058`) `Xander Johnson`_ DataFrame +++++++++ @@ -918,3 +918,4 @@ Other .. _`Markus Gonser`: https://github.com/magonser .. _`Martijn Arts`: https://github.com/mfaafm .. _`Jon Mease`: https://github.com/jmmease +.. _`Xander Johnson`: https://github.com/metasyn
problem_statement:
len(dask scalar) gives an error

```
Traceback (most recent call last):
  File "make-pkhalo.py", line 12, in <module>
    print(cath200['Mass'][0])
  File "/home/yfeng1/anaconda3/install/envs/cfastpm/lib/python3.6/site-packages/nbodykit/base/catalog.py", line 75, in __str__
    if len(self) > 0:
  File "/home/yfeng1/anaconda3/install/envs/cfastpm/lib/python3.6/site-packages/dask/array/core.py", line 1064, in __len__
    return sum(self.chunks[0])
IndexError: tuple index out of range
```

The error is probably valid as self is a scalar, but the message could be more informative than an out of range index error.
repo: dask/dask
test_patch:
diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py index 0eececc3c..a5c76c46d 100644 --- a/dask/array/tests/test_array_core.py +++ b/dask/array/tests/test_array_core.py @@ -3032,3 +3032,12 @@ def test_delayed_array_key_hygeine(): d = delayed(identity)(a) b = da.from_delayed(d, shape=a.shape, dtype=a.dtype) assert_eq(a, b) + + +def test_empty_chunks_in_array_len(): + x = da.ones((), chunks=()) + with pytest.raises(TypeError) as exc_info: + len(x) + + err_msg = 'len() of unsized object' + assert err_msg in str(exc_info.value)
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
0.16
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[complete]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 click==8.0.4 cloudpickle==2.2.1 -e git+https://github.com/dask/dask.git@1223f0167e1f760a93aaf286e6dcfa72ba21f773#egg=dask distributed==1.20.2 HeapDict==1.0.1 importlib-metadata==4.8.3 iniconfig==1.1.1 locket==1.0.0 msgpack-python==0.5.6 numpy==1.19.5 packaging==21.3 pandas==1.1.5 partd==1.2.0 pluggy==1.0.0 psutil==7.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 python-dateutil==2.9.0.post0 pytz==2025.2 six==1.17.0 sortedcontainers==2.4.0 tblib==1.7.0 tomli==1.2.3 toolz==0.12.0 tornado==6.1 typing_extensions==4.1.1 zict==2.1.0 zipp==3.6.0
environment:
name: dask channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - click==8.0.4 - cloudpickle==2.2.1 - distributed==1.20.2 - heapdict==1.0.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - locket==1.0.0 - msgpack-python==0.5.6 - numpy==1.19.5 - packaging==21.3 - pandas==1.1.5 - partd==1.2.0 - pluggy==1.0.0 - psutil==7.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - pytz==2025.2 - six==1.17.0 - sortedcontainers==2.4.0 - tblib==1.7.0 - tomli==1.2.3 - toolz==0.12.0 - tornado==6.1 - typing-extensions==4.1.1 - zict==2.1.0 - zipp==3.6.0 prefix: /opt/conda/envs/dask
[ "dask/array/tests/test_array_core.py::test_empty_chunks_in_array_len" ]
[ "dask/array/tests/test_array_core.py::test_matmul" ]
[ "dask/array/tests/test_array_core.py::test_getem", "dask/array/tests/test_array_core.py::test_top", "dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules", "dask/array/tests/test_array_core.py::test_top_literals", "dask/array/tests/test_array_core.py::test_atop_literals", "dask/array/tests/test_array_core.py::test_concatenate3_on_scalars", "dask/array/tests/test_array_core.py::test_chunked_dot_product", "dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one", "dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions", "dask/array/tests/test_array_core.py::test_broadcast_dimensions", "dask/array/tests/test_array_core.py::test_Array", "dask/array/tests/test_array_core.py::test_uneven_chunks", "dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims", "dask/array/tests/test_array_core.py::test_keys", "dask/array/tests/test_array_core.py::test_Array_computation", "dask/array/tests/test_array_core.py::test_stack", "dask/array/tests/test_array_core.py::test_short_stack", "dask/array/tests/test_array_core.py::test_stack_scalars", "dask/array/tests/test_array_core.py::test_stack_promote_type", "dask/array/tests/test_array_core.py::test_stack_rechunk", "dask/array/tests/test_array_core.py::test_concatenate", "dask/array/tests/test_array_core.py::test_concatenate_unknown_axes", "dask/array/tests/test_array_core.py::test_concatenate_rechunk", "dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings", "dask/array/tests/test_array_core.py::test_block_simple_row_wise", "dask/array/tests/test_array_core.py::test_block_simple_column_wise", "dask/array/tests/test_array_core.py::test_block_with_1d_arrays_row_wise", "dask/array/tests/test_array_core.py::test_block_with_1d_arrays_multiple_rows", "dask/array/tests/test_array_core.py::test_block_with_1d_arrays_column_wise", "dask/array/tests/test_array_core.py::test_block_mixed_1d_and_2d", "dask/array/tests/test_array_core.py::test_block_complicated", "dask/array/tests/test_array_core.py::test_block_nested", "dask/array/tests/test_array_core.py::test_block_3d", "dask/array/tests/test_array_core.py::test_block_with_mismatched_shape", "dask/array/tests/test_array_core.py::test_block_no_lists", "dask/array/tests/test_array_core.py::test_block_invalid_nesting", "dask/array/tests/test_array_core.py::test_block_empty_lists", "dask/array/tests/test_array_core.py::test_block_tuple", "dask/array/tests/test_array_core.py::test_binops", "dask/array/tests/test_array_core.py::test_broadcast_shapes", "dask/array/tests/test_array_core.py::test_elemwise_on_scalars", "dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays", "dask/array/tests/test_array_core.py::test_elemwise_differently_chunked", "dask/array/tests/test_array_core.py::test_elemwise_dtype", "dask/array/tests/test_array_core.py::test_operators", "dask/array/tests/test_array_core.py::test_operator_dtype_promotion", "dask/array/tests/test_array_core.py::test_field_access", "dask/array/tests/test_array_core.py::test_field_access_with_shape", "dask/array/tests/test_array_core.py::test_T", "dask/array/tests/test_array_core.py::test_norm", "dask/array/tests/test_array_core.py::test_broadcast_to", "dask/array/tests/test_array_core.py::test_broadcast_to_array", "dask/array/tests/test_array_core.py::test_broadcast_to_scalar", "dask/array/tests/test_array_core.py::test_broadcast_to_chunks", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape0-v_shape0]", 
"dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape1-v_shape1]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape2-v_shape2]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape3-v_shape3]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape4-v_shape4]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape5-v_shape5]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape6-v_shape6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape0-new_shape0-chunks0]", "dask/array/tests/test_array_core.py::test_reshape[original_shape1-new_shape1-5]", "dask/array/tests/test_array_core.py::test_reshape[original_shape2-new_shape2-5]", "dask/array/tests/test_array_core.py::test_reshape[original_shape3-new_shape3-12]", "dask/array/tests/test_array_core.py::test_reshape[original_shape4-new_shape4-12]", "dask/array/tests/test_array_core.py::test_reshape[original_shape5-new_shape5-chunks5]", "dask/array/tests/test_array_core.py::test_reshape[original_shape6-new_shape6-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape7-new_shape7-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape8-new_shape8-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape9-new_shape9-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape10-new_shape10-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape11-new_shape11-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape12-new_shape12-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape13-new_shape13-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape14-new_shape14-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape15-new_shape15-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape16-new_shape16-chunks16]", "dask/array/tests/test_array_core.py::test_reshape[original_shape17-new_shape17-3]", "dask/array/tests/test_array_core.py::test_reshape[original_shape18-new_shape18-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape19-new_shape19-chunks19]", "dask/array/tests/test_array_core.py::test_reshape[original_shape20-new_shape20-1]", "dask/array/tests/test_array_core.py::test_reshape[original_shape21-new_shape21-1]", "dask/array/tests/test_array_core.py::test_reshape[original_shape22-new_shape22-24]", "dask/array/tests/test_array_core.py::test_reshape[original_shape23-new_shape23-6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape24-new_shape24-6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape25-new_shape25-6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape26-new_shape26-chunks26]", "dask/array/tests/test_array_core.py::test_reshape[original_shape27-new_shape27-chunks27]", "dask/array/tests/test_array_core.py::test_reshape[original_shape28-new_shape28-chunks28]", "dask/array/tests/test_array_core.py::test_reshape[original_shape29-new_shape29-chunks29]", "dask/array/tests/test_array_core.py::test_reshape[original_shape30-new_shape30-chunks30]", "dask/array/tests/test_array_core.py::test_reshape[original_shape31-new_shape31-chunks31]", "dask/array/tests/test_array_core.py::test_reshape[original_shape32-new_shape32-chunks32]", "dask/array/tests/test_array_core.py::test_reshape[original_shape33-new_shape33-chunks33]", "dask/array/tests/test_array_core.py::test_reshape[original_shape34-new_shape34-chunks34]", 
"dask/array/tests/test_array_core.py::test_reshape_exceptions", "dask/array/tests/test_array_core.py::test_reshape_splat", "dask/array/tests/test_array_core.py::test_reshape_fails_for_dask_only", "dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions", "dask/array/tests/test_array_core.py::test_full", "dask/array/tests/test_array_core.py::test_map_blocks", "dask/array/tests/test_array_core.py::test_map_blocks2", "dask/array/tests/test_array_core.py::test_map_blocks_with_constants", "dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs", "dask/array/tests/test_array_core.py::test_map_blocks_with_chunks", "dask/array/tests/test_array_core.py::test_map_blocks_dtype_inference", "dask/array/tests/test_array_core.py::test_from_function_requires_block_args", "dask/array/tests/test_array_core.py::test_repr", "dask/array/tests/test_array_core.py::test_slicing_with_ellipsis", "dask/array/tests/test_array_core.py::test_slicing_with_ndarray", "dask/array/tests/test_array_core.py::test_dtype", "dask/array/tests/test_array_core.py::test_blockdims_from_blockshape", "dask/array/tests/test_array_core.py::test_coerce", "dask/array/tests/test_array_core.py::test_bool", "dask/array/tests/test_array_core.py::test_store_delayed_target", "dask/array/tests/test_array_core.py::test_store", "dask/array/tests/test_array_core.py::test_store_regions", "dask/array/tests/test_array_core.py::test_store_compute_false", "dask/array/tests/test_array_core.py::test_store_locks", "dask/array/tests/test_array_core.py::test_to_dask_dataframe", "dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions", "dask/array/tests/test_array_core.py::test_dtype_complex", "dask/array/tests/test_array_core.py::test_astype", "dask/array/tests/test_array_core.py::test_arithmetic", "dask/array/tests/test_array_core.py::test_elemwise_consistent_names", "dask/array/tests/test_array_core.py::test_optimize", "dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays", "dask/array/tests/test_array_core.py::test_getter", "dask/array/tests/test_array_core.py::test_size", "dask/array/tests/test_array_core.py::test_nbytes", "dask/array/tests/test_array_core.py::test_itemsize", "dask/array/tests/test_array_core.py::test_Array_normalizes_dtype", "dask/array/tests/test_array_core.py::test_from_array_with_lock", "dask/array/tests/test_array_core.py::test_from_array_tasks_always_call_getter", "dask/array/tests/test_array_core.py::test_from_array_no_asarray", "dask/array/tests/test_array_core.py::test_from_array_getitem", "dask/array/tests/test_array_core.py::test_from_array_minus_one", "dask/array/tests/test_array_core.py::test_asarray", "dask/array/tests/test_array_core.py::test_asanyarray", "dask/array/tests/test_array_core.py::test_from_func", "dask/array/tests/test_array_core.py::test_concatenate3_2", "dask/array/tests/test_array_core.py::test_map_blocks3", "dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks", "dask/array/tests/test_array_core.py::test_normalize_chunks", "dask/array/tests/test_array_core.py::test_raise_on_no_chunks", "dask/array/tests/test_array_core.py::test_chunks_is_immutable", "dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs", "dask/array/tests/test_array_core.py::test_long_slice", "dask/array/tests/test_array_core.py::test_ellipsis_slicing", "dask/array/tests/test_array_core.py::test_point_slicing", "dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice", "dask/array/tests/test_array_core.py::test_slice_with_floats", 
"dask/array/tests/test_array_core.py::test_slice_with_integer_types", "dask/array/tests/test_array_core.py::test_index_with_integer_types", "dask/array/tests/test_array_core.py::test_vindex_basic", "dask/array/tests/test_array_core.py::test_vindex_nd", "dask/array/tests/test_array_core.py::test_vindex_negative", "dask/array/tests/test_array_core.py::test_vindex_errors", "dask/array/tests/test_array_core.py::test_vindex_merge", "dask/array/tests/test_array_core.py::test_empty_array", "dask/array/tests/test_array_core.py::test_memmap", "dask/array/tests/test_array_core.py::test_to_npy_stack", "dask/array/tests/test_array_core.py::test_view", "dask/array/tests/test_array_core.py::test_view_fortran", "dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension", "dask/array/tests/test_array_core.py::test_broadcast_chunks", "dask/array/tests/test_array_core.py::test_chunks_error", "dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs", "dask/array/tests/test_array_core.py::test_dont_fuse_outputs", "dask/array/tests/test_array_core.py::test_dont_dealias_outputs", "dask/array/tests/test_array_core.py::test_timedelta_op", "dask/array/tests/test_array_core.py::test_to_delayed", "dask/array/tests/test_array_core.py::test_to_delayed_optimizes", "dask/array/tests/test_array_core.py::test_cumulative", "dask/array/tests/test_array_core.py::test_atop_names", "dask/array/tests/test_array_core.py::test_atop_new_axes", "dask/array/tests/test_array_core.py::test_atop_kwargs", "dask/array/tests/test_array_core.py::test_atop_chunks", "dask/array/tests/test_array_core.py::test_from_delayed", "dask/array/tests/test_array_core.py::test_A_property", "dask/array/tests/test_array_core.py::test_copy_mutate", "dask/array/tests/test_array_core.py::test_npartitions", "dask/array/tests/test_array_core.py::test_astype_gh1151", "dask/array/tests/test_array_core.py::test_elemwise_name", "dask/array/tests/test_array_core.py::test_map_blocks_name", "dask/array/tests/test_array_core.py::test_array_picklable", "dask/array/tests/test_array_core.py::test_from_array_raises_on_bad_chunks", "dask/array/tests/test_array_core.py::test_concatenate_axes", "dask/array/tests/test_array_core.py::test_atop_concatenate", "dask/array/tests/test_array_core.py::test_common_blockdim", "dask/array/tests/test_array_core.py::test_uneven_chunks_that_fit_neatly", "dask/array/tests/test_array_core.py::test_elemwise_uneven_chunks", "dask/array/tests/test_array_core.py::test_uneven_chunks_atop", "dask/array/tests/test_array_core.py::test_warn_bad_rechunking", "dask/array/tests/test_array_core.py::test_optimize_fuse_keys", "dask/array/tests/test_array_core.py::test_concatenate_stack_dont_warn", "dask/array/tests/test_array_core.py::test_map_blocks_delayed", "dask/array/tests/test_array_core.py::test_no_chunks", "dask/array/tests/test_array_core.py::test_no_chunks_2d", "dask/array/tests/test_array_core.py::test_no_chunks_yes_chunks", "dask/array/tests/test_array_core.py::test_raise_informative_errors_no_chunks", "dask/array/tests/test_array_core.py::test_no_chunks_slicing_2d", "dask/array/tests/test_array_core.py::test_index_array_with_array_1d", "dask/array/tests/test_array_core.py::test_index_array_with_array_2d", "dask/array/tests/test_array_core.py::test_setitem_1d", "dask/array/tests/test_array_core.py::test_setitem_2d", "dask/array/tests/test_array_core.py::test_setitem_errs", "dask/array/tests/test_array_core.py::test_zero_slice_dtypes", "dask/array/tests/test_array_core.py::test_zero_sized_array_rechunk", 
"dask/array/tests/test_array_core.py::test_atop_zero_shape", "dask/array/tests/test_array_core.py::test_atop_zero_shape_new_axes", "dask/array/tests/test_array_core.py::test_broadcast_against_zero_shape", "dask/array/tests/test_array_core.py::test_from_array_name", "dask/array/tests/test_array_core.py::test_concatenate_errs", "dask/array/tests/test_array_core.py::test_stack_errs", "dask/array/tests/test_array_core.py::test_atop_with_numpy_arrays", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-100]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-6]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-100]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-6]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-100]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-6]", "dask/array/tests/test_array_core.py::test_constructor_plugin", "dask/array/tests/test_array_core.py::test_no_warnings_on_metadata", "dask/array/tests/test_array_core.py::test_delayed_array_key_hygeine" ]
PASS_TO_FAIL: []
license_name: BSD 3-Clause "New" or "Revised" License
__index_level_0__: 2048
before_filepaths: [ "docs/source/changelog.rst", "dask/array/core.py" ]
after_filepaths: [ "docs/source/changelog.rst", "dask/array/core.py" ]

Example row 3:

instance_id: nickpegg__posty-21
base_commit: af8cd366f8ea6e5889e38235b2be2cba4fd8c914
created_at: 2018-01-15 05:11:35
environment_setup_commit: af8cd366f8ea6e5889e38235b2be2cba4fd8c914
patch:
diff --git a/posty/importers.py b/posty/importers.py index 9e00c70..bf8c198 100644 --- a/posty/importers.py +++ b/posty/importers.py @@ -59,7 +59,16 @@ class Posty1Importer(Importer): self._copy_files('_templates', 'templates') def import_pages(self): - self._copy_files('_pages', 'pages') + src_dir = os.path.join(self.src_path, '_pages') + dst_dir = os.path.join(self.site.site_path, 'pages') + + for page in os.listdir(src_dir): + src_file = os.path.join(src_dir, page) + dst_file = os.path.join(dst_dir, page) + + new_page = self._convert_page(open(src_file).read()) + with open(dst_file, 'w') as fh: + fh.write(new_page) def import_posts(self): src_dir = os.path.join(self.src_path, '_posts') @@ -99,6 +108,25 @@ class Posty1Importer(Importer): print((" Looks like {} isn't a file nor dir, " "not copying.").format(src_path)) + def _convert_page(self, old_page): + """ + Converts an old Posty 1.x page into a new-style one. Notably just + throws away any existing `url` + """ + old_page = old_page.replace("\r\n", "\n") + docs = old_page.split("---\n") + new_page = '' + + meta = yaml.load(docs[1]) + if 'url' in meta.keys(): + del meta['url'] + new_page += yaml.dump(meta, default_flow_style=False) + + new_page += "---\n" + new_page += docs[2] + + return new_page + def _convert_post(self, old_post): """ Converts an old Posty post (a string) into a new-style post with a diff --git a/posty/site.py b/posty/site.py index ff63cec..cc0a442 100644 --- a/posty/site.py +++ b/posty/site.py @@ -63,7 +63,7 @@ class Site(object): page_dir = os.path.join(self.site_path, 'pages') for filename in os.listdir(page_dir): contents = open(os.path.join(page_dir, filename)).read() - _, meta_yaml, body = contents.split("---\n") + meta_yaml, body = contents.split("---\n") page = yaml.load(meta_yaml) page['body'] = body.strip()
problem_statement:
Scrub URL on posty1 pages

We don't use this page URL anymore and instead rely solely on the slug
repo: nickpegg/posty
test_patch:
diff --git a/tests/fixtures/site/pages/test-child.yaml b/tests/fixtures/site/pages/test-child.yaml index 86692a9..fa7d1df 100644 --- a/tests/fixtures/site/pages/test-child.yaml +++ b/tests/fixtures/site/pages/test-child.yaml @@ -1,4 +1,3 @@ ---- title: Test's child url: child.html parent: Test diff --git a/tests/fixtures/site/pages/test.yaml b/tests/fixtures/site/pages/test.yaml index 61bdf2a..7a1578d 100644 --- a/tests/fixtures/site/pages/test.yaml +++ b/tests/fixtures/site/pages/test.yaml @@ -1,4 +1,3 @@ ---- title: Test url: test.html --- diff --git a/tests/fixtures/site/pages/yup.yaml b/tests/fixtures/site/pages/yup.yaml index b140315..159ded5 100644 --- a/tests/fixtures/site/pages/yup.yaml +++ b/tests/fixtures/site/pages/yup.yaml @@ -1,4 +1,3 @@ ---- title: Yup url: yup.html --- diff --git a/tests/test_import.py b/tests/test_import.py index e8b9ed5..079d06a 100644 --- a/tests/test_import.py +++ b/tests/test_import.py @@ -58,12 +58,11 @@ class TestPosty1Importer(object): importer = importer_with_directories importer.import_pages() - src_path = os.path.join(importer.src_path, '_pages') - dst_path = os.path.join(importer.site.site_path, 'pages') - for f in os.listdir(src_path): - src_file = open(os.path.join(src_path, f)).read() - dst_file = open(os.path.join(dst_path, f)).read() - assert src_file == dst_file + # Ensure `url` is not set on any pages + site = importer.site + site._load_pages() + for page in site.payload['pages']: + assert page.get('url') is None def test_import_posts(self, importer_with_directories): """
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 2 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 awesome-slugify==1.6.5 certifi==2021.5.30 click==6.7 importlib-metadata==4.8.3 iniconfig==1.1.1 packaging==21.3 pluggy==1.0.0 -e git+https://github.com/nickpegg/posty.git@af8cd366f8ea6e5889e38235b2be2cba4fd8c914#egg=Posty py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 PyYAML==3.13 regex==2023.8.8 tomli==1.2.3 typing_extensions==4.1.1 Unidecode==0.4.21 zipp==3.6.0
environment:
name: posty channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - awesome-slugify==1.6.5 - click==6.7 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pyyaml==3.13 - regex==2023.8.8 - tomli==1.2.3 - typing-extensions==4.1.1 - unidecode==0.04.21 - zipp==3.6.0 prefix: /opt/conda/envs/posty
[ "tests/test_import.py::TestPosty1Importer::test_import_pages" ]
[]
[ "tests/test_import.py::TestPosty1Importer::test_ensure_directories", "tests/test_import.py::TestPosty1Importer::test_import_media", "tests/test_import.py::TestPosty1Importer::test_import_templates", "tests/test_import.py::TestPosty1Importer::test_import_posts", "tests/test_import.py::TestPosty1Importer::test_it_at_least_runs" ]
[]
MIT License
2,049
[ "posty/site.py", "posty/importers.py" ]
[ "posty/site.py", "posty/importers.py" ]

Example row 4:

instance_id: iamlikeme__rainflow-7
base_commit: 8e961750baa61b12582b108677ff463b5c9cbf99
created_at: 2018-01-17 20:47:44
environment_setup_commit: 8e961750baa61b12582b108677ff463b5c9cbf99
patch:
diff --git a/README.md b/README.md index b972db0..376ee71 100644 --- a/README.md +++ b/README.md @@ -24,9 +24,23 @@ number of cycles: (0.48294318988133728, 0.5), (0.52799626197601901, 0.5), (0.78150280937784777, 0.5), (1.102640610792428, 0.5)] ``` + Not interested in all the decimals? Use *ndigits*: ```python >>> rainflow.count_cycles(y, ndigits=2) [(0.11, 1.5), (0.21, 1.5), (0.37, 0.5), (0.44, 0.5), (0.48, 0.5), (0.53, 0.5), (0.78, 0.5), (1.1, 0.5)] ``` + +If you need more detailed output, like cycle lows, highs or means, use `extract_cycles`: +```python +>>> for low, high, mult in rainflow.extract_cycles(y): +... mean = 0.5 * (high + low) +... rng = high - low +``` + +Running tests +------------- +``` +python -m unittest tests/*.py +``` diff --git a/rainflow.py b/rainflow.py index 15545e7..94ba48e 100644 --- a/rainflow.py +++ b/rainflow.py @@ -3,9 +3,20 @@ Implements rainflow cycle counting algorythm for fatigue analysis according to section 5.4.4 in ASTM E1049-85 (2011). """ -__version__ = "1.0.1" +__version__ = "2.0.0" from collections import deque, defaultdict +import functools + + +def get_round_function(ndigits=None): + if ndigits is None: + def func(x): + return x + else: + def func(x): + return round(x, ndigits) + return func def reversals(series): @@ -16,10 +27,10 @@ def reversals(series): the first and the last points in the series. """ series = iter(series) - + x_last, x = next(series), next(series) d_last = (x - x_last) - + for x_next in series: if x_next == x: continue @@ -30,15 +41,29 @@ def reversals(series): d_last = d_next +def _sort_lows_and_highs(func): + "Decorator for extract_cycles" + @functools.wraps(func) + def wrapper(*args, **kwargs): + for low, high, mult in func(*args, **kwargs): + if low < high: + yield low, high, mult + else: + yield high, low, mult + return wrapper + +@_sort_lows_and_highs def extract_cycles(series): """ - Returns two lists: the first one containig full cycles and the second - containing one-half cycles. The cycles are extracted from the iterable - *series* according to section 5.4.4 in ASTM E1049 (2011). + A generator function which extracts cycles from the iterable *series* + according to section 5.4.4 in ASTM E1049 (2011). + + The generator produces tuples (low, high, mult), where low and high + define cycle amplitude and mult equals to 1.0 for full cycles and 0.5 + for half cycles. """ points = deque() - full, half = [], [] for x in reversals(series): points.append(x) @@ -53,11 +78,11 @@ def extract_cycles(series): elif len(points) == 3: # Y contains the starting point # Count Y as one-half cycle and discard the first point - half.append(Y) + yield points[-3], points[-2], 0.5 points.popleft() else: # Count Y as one cycle and discard the peak and the valley of Y - full.append(Y) + yield points[-3], points[-2], 1.0 last = points.pop() points.pop() points.pop() @@ -65,10 +90,8 @@ def extract_cycles(series): else: # Count the remaining ranges as one-half cycles while len(points) > 1: - half.append(abs(points[-2] - points[-1])) + yield points[-2], points[-1], 0.5 points.pop() - return full, half - def count_cycles(series, ndigits=None): @@ -79,18 +102,10 @@ def count_cycles(series, ndigits=None): using the extract_cycles function. If *ndigits* is given the cycles will be rounded to the given number of digits before counting. 
""" - full, half = extract_cycles(series) - - # Round the cycles if requested - if ndigits is not None: - full = (round(x, ndigits) for x in full) - half = (round(x, ndigits) for x in half) - - # Count cycles counts = defaultdict(float) - for x in full: - counts[x] += 1.0 - for x in half: - counts[x] += 0.5 - + round_ = get_round_function(ndigits) + + for low, high, mult in extract_cycles(series): + delta = round_(abs(high - low)) + counts[delta] += mult return sorted(counts.items()) diff --git a/setup.py b/setup.py index 46c872f..4f4daba 100644 --- a/setup.py +++ b/setup.py @@ -1,10 +1,10 @@ from distutils.core import setup LICENSE = open("LICENSE").read() -LONG_DESCRIPTION = open("README.md").read() +LONG_DESCRIPTION = open("README.md").read() setup(name='rainflow', - version='1.0.1', + version='2.0.0', author='Piotr Janiszewski', author_email='[email protected]', url='https://github.com/iamlikeme/rainflow/',
problem_statement:
Feature: Rainflow Matrix

I would love to see the method described [here](https://community.plm.automation.siemens.com/t5/Testing-Knowledge-Base/Rainflow-Counting/ta-p/383093) in this library. Is there a chance this could happen? :)
repo: iamlikeme/rainflow
test_patch:
diff --git a/tests/test_rainflow.py b/tests/test_rainflow.py index ae71ec3..d2cd9eb 100644 --- a/tests/test_rainflow.py +++ b/tests/test_rainflow.py @@ -1,20 +1,25 @@ import unittest, rainflow, random, itertools + class TestRainflowCounting(unittest.TestCase): # Load series and corresponding cycle counts from ASTM E1049-85 series = [0, -2, 1, -3, 5, -1, 3, -4, 4, -2, 0] cycles = [(3, 0.5), (4, 1.5), (6, 0.5), (8, 1.0), (9, 0.5)] - + + def test_lows_and_highs_sorted(self): + self.assertTrue(all( + low <= high + for low, high, mult in rainflow.extract_cycles(self.series) + )) + def test_rainflow_counting(self): self.assertEqual(rainflow.count_cycles(self.series), self.cycles) - - + def test_rainflow_ndigits(self): series = [x + 0.01 * random.random() for x in self.series] self.assertNotEqual(rainflow.count_cycles(series), self.cycles) self.assertEqual(rainflow.count_cycles(series, ndigits=1), self.cycles) - + def test_series_with_zero_derivatives(self): series = itertools.chain(*([x, x] for x in self.series)) self.assertEqual(rainflow.count_cycles(series), self.cycles) -
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 3 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup==1.2.2 iniconfig==2.1.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 -e git+https://github.com/iamlikeme/rainflow.git@8e961750baa61b12582b108677ff463b5c9cbf99#egg=rainflow tomli==2.2.1
environment:
name: rainflow channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - tomli==2.2.1 prefix: /opt/conda/envs/rainflow
[ "tests/test_rainflow.py::TestRainflowCounting::test_lows_and_highs_sorted" ]
[]
[ "tests/test_rainflow.py::TestRainflowCounting::test_rainflow_counting", "tests/test_rainflow.py::TestRainflowCounting::test_rainflow_ndigits", "tests/test_rainflow.py::TestRainflowCounting::test_series_with_zero_derivatives" ]
[]
MIT License
2,052
[ "rainflow.py", "README.md", "setup.py" ]
[ "rainflow.py", "README.md", "setup.py" ]

Example row 5:

instance_id: PlasmaPy__PlasmaPy-238
base_commit: 7655c9a4e756aaf1e2430902d80112c9f82f5953
created_at: 2018-01-17 21:18:54
environment_setup_commit: 7655c9a4e756aaf1e2430902d80112c9f82f5953
patch:
diff --git a/plasmapy/atomic/__init__.py b/plasmapy/atomic/__init__.py index 6c7b5762..bf053c42 100644 --- a/plasmapy/atomic/__init__.py +++ b/plasmapy/atomic/__init__.py @@ -18,7 +18,7 @@ common_isotopes, stable_isotopes, isotopic_abundance, - charge_state, + integer_charge, electric_charge, ) diff --git a/plasmapy/atomic/atomic.py b/plasmapy/atomic/atomic.py index d97c7302..bc103071 100644 --- a/plasmapy/atomic/atomic.py +++ b/plasmapy/atomic/atomic.py @@ -24,7 +24,7 @@ from .names import ( atomic_symbol, isotope_symbol, - _extract_charge_state, + _extract_integer_charge, _is_proton, _is_positron, _is_antineutron, @@ -262,7 +262,7 @@ def standard_atomic_weight(argument: Union[str, int]) -> Quantity: "standard_atomic_weight.") try: - charge_state(argument) + integer_charge(argument) except ChargeError: pass else: @@ -349,9 +349,9 @@ def isotope_mass(argument: Union[str, int], """ - argument, charge_state = _extract_charge_state(argument) + argument, Z = _extract_integer_charge(argument) - if charge_state is not None and charge_state != 0: + if Z is not None and Z != 0: raise AtomicError("Use ion_mass instead of isotope_mass for masses of " "charged particles.") @@ -503,14 +503,14 @@ def ion_mass(argument: Union[str, int, Quantity], Z: int = None, raise InvalidIonError("Use isotope_mass or m_n to get mass of neutron") if isinstance(argument, str): - argument, Z_from_arg = _extract_charge_state(argument) + argument, Z_from_arg = _extract_integer_charge(argument) else: Z_from_arg = None if Z is None and Z_from_arg is None: Z = 1 elif Z is not None and Z_from_arg is not None and Z != Z_from_arg: - raise InvalidIonError("Inconsistent charge state information in" + raise InvalidIonError("Inconsistent charge information in" "ion_mass.") elif Z is None and Z_from_arg is not None: Z = Z_from_arg @@ -642,8 +642,8 @@ def isotopic_abundance(argument: Union[str, int], return iso_comp -def charge_state(particle: str) -> int: - r"""Returns the charge state of an ion or other particle. +def integer_charge(particle: str) -> int: + r"""Returns the integer charge of an ion or other particle. Parameters ---------- @@ -655,7 +655,7 @@ def charge_state(particle: str) -> int: ------- Z : integer - The charge state, or None if it is not available. + The integer charge, or None if it is not available. Raises ------ @@ -668,18 +668,17 @@ def charge_state(particle: str) -> int: If charge information for the particle is not available. AtomicWarning - If the input represents an ion with a charge state that is + If the input represents an ion with an integer charge that is below -3. Notes ----- - This function supports two formats for the charge state - information. + This function supports two formats for integer charge information. The first format is a string that has information for the element - or isotope at the beginning, a space in between, and the charge - state information in the form of an integer followed by a plus or + or isotope at the beginning, a space in between, and the integer + charge information in the form of an integer followed by a plus or minus sign, or a plus or minus sign followed by an integer. 
The second format is a string containing element information at @@ -691,13 +690,13 @@ def charge_state(particle: str) -> int: Examples -------- - >>> charge_state('Fe-56 2+') + >>> integer_charge('Fe-56 2+') 2 - >>> charge_state('He -2') + >>> integer_charge('He -2') -2 - >>> charge_state('H+') + >>> integer_charge('H+') 1 - >>> charge_state('N-14++') + >>> integer_charge('N-14++') 2 """ @@ -709,12 +708,12 @@ def charge_state(particle: str) -> int: elif _is_neutron(particle) or _is_antineutron(particle): return 0 - particle, Z = _extract_charge_state(particle) + particle, Z = _extract_integer_charge(particle) try: atomic_symbol(particle) except InvalidParticleError: - raise InvalidParticleError("Invalid particle in charge_state") + raise InvalidParticleError("Invalid particle in integer_charge") if Z is None: raise ChargeError(f"Unable to find charge of {particle}.") @@ -723,7 +722,7 @@ def charge_state(particle: str) -> int: if Z > atomic_numb: raise InvalidParticleError("The integer charge cannot be greater than " - "the atomic number in charge_state.") + "the atomic number in integer_charge.") if Z < -atomic_numb - 1 or Z < -3: warnings.warn(f"Element {atomic_symbol(particle)} has a charge of {Z}" @@ -740,8 +739,8 @@ def electric_charge(particle: str) -> Quantity: ---------- particle : string - String representing an element or isotope followed by charge - state information. + String representing an element or isotope followed by integer + charge information. Returns ------- @@ -760,18 +759,17 @@ def electric_charge(particle: str) -> Quantity: If charge information for the particle is not available. AtomicWarning - If the input represents an ion with a charge state that is + If the input represents an ion with an integer charge that is below -3. Notes ----- - This function supports two formats for the charge state - information. + This function supports two formats for integer charge information. The first format is a string that has information for the element - or isotope at the beginning, a space in between, and the charge - state information in the form of an integer followed by a plus or + or isotope at the beginning, a space in between, and the integer + charge information in the form of an integer followed by a plus or minus sign, or a plus or minus sign followed by an integer. 
The second format is a string containing element information at @@ -791,7 +789,7 @@ def electric_charge(particle: str) -> Quantity: """ try: - charge = charge_state(particle) * const.e.to('C') + charge = integer_charge(particle) * const.e.to('C') except InvalidParticleError: raise InvalidParticleError("Invalid particle in electric_charge.") except ChargeError: diff --git a/plasmapy/atomic/names.py b/plasmapy/atomic/names.py index a103c73e..1fc89da0 100644 --- a/plasmapy/atomic/names.py +++ b/plasmapy/atomic/names.py @@ -99,7 +99,7 @@ def atomic_symbol(argument: Union[str, int]) -> str: raise InvalidElementError(f"{argument} is not a valid element.") try: - argument, Z = _extract_charge_state(argument) + argument, Z = _extract_integer_charge(argument) except InvalidParticleError: raise InvalidParticleError("Invalid charge in atomic_symbol") @@ -257,7 +257,7 @@ def isotope_symbol(argument: Union[str, int], mass_numb: int = None) -> str: return argument if isinstance(argument, str): - argument, Z = _extract_charge_state(argument) + argument, Z = _extract_integer_charge(argument) if isinstance(argument, str) and argument.isdigit(): argument = int(argument) @@ -403,7 +403,7 @@ def element_name(argument: Union[str, int]) -> str: return name -def _extract_charge_state(argument: str) -> Tuple[str, int]: +def _extract_integer_charge(argument: str) -> Tuple[str, int]: r"""Splits strings containing element or isotope and charge state information into a string without the charge state information and the charge state as an integer (or None if no charge state @@ -444,12 +444,12 @@ def _extract_charge_state(argument: str) -> Tuple[str, int]: Examples -------- - >>> isotope, Z = _extract_charge_state('Fe-56+++') + >>> isotope, Z = _extract_integer_charge('Fe-56+++') >>> print(isotope) Fe-56 >>> print(Z) 3 - >>> _extract_charge_state('D +1') + >>> _extract_integer_charge('D +1') ('D', 1) """ @@ -490,7 +490,7 @@ def _extract_charge_state(argument: str) -> Tuple[str, int]: charge = ion_info.replace('+', '') try: - charge_state = sign * int(charge) + Z = sign * int(charge) check3 = True except Exception: check3 = False @@ -505,10 +505,10 @@ def _extract_charge_state(argument: str) -> Tuple[str, int]: char = argument[-1] match = re.match(r"["+char+"]*", argument[::-1]) - charge_state = match.span()[1] + Z = match.span()[1] if char == '-': - charge_state = -charge_state + Z = -Z argument = argument[0:len(argument)-match.span()[1]] @@ -516,14 +516,14 @@ def _extract_charge_state(argument: str) -> Tuple[str, int]: raise InvalidParticleError("Invalid charge state information") else: - charge_state = None + Z = None - if charge_state is not None and charge_state < -3: + if Z is not None and Z < -3: warnings.warn(f"Element {atomic_symbol(argument)} has a charge of " - f"{charge_state} which is unlikely to occur in nature.", + f"{Z} which is unlikely to occur in nature.", AtomicWarning) - return argument, charge_state + return argument, Z def _is_neutron(argument: Any, mass_numb: int = None) -> bool: @@ -557,7 +557,7 @@ def _is_hydrogen(argument: Any, if isinstance(argument, str): - argument, Z = _extract_charge_state(argument) + argument, Z = _extract_integer_charge(argument) if argument in case_sensitive_aliases: is_hydrogen = True @@ -620,7 +620,7 @@ def _is_proton(arg: Any, Z: int = None, mass_numb: int = None) -> bool: False otherwise. 
This function returns False for 'H-1' if no charge state is given.""" - argument, Z_from_arg = _extract_charge_state(arg) + argument, Z_from_arg = _extract_integer_charge(arg) if (Z is None) == (Z_from_arg is None): return False @@ -649,7 +649,7 @@ def _is_alpha(arg: Any) -> bool: raise InvalidParticleError( f"{arg} is an invalid representation of an alpha particle") else: - arg, Z = _extract_charge_state(arg) + arg, Z = _extract_integer_charge(arg) if Z != 2 or arg[-2:] != '-4': return False diff --git a/plasmapy/atomic/nuclear.py b/plasmapy/atomic/nuclear.py index 8d7c51e6..313c24c4 100644 --- a/plasmapy/atomic/nuclear.py +++ b/plasmapy/atomic/nuclear.py @@ -7,7 +7,7 @@ isotope_mass, ion_mass, atomic_number, - charge_state) + integer_charge) from .names import (isotope_symbol, _is_neutron, @@ -242,7 +242,7 @@ def _total_charge(particles): try: total_charge += atomic_number(particle) except InvalidElementError: - total_charge += charge_state(particle) + total_charge += integer_charge(particle) return total_charge diff --git a/plasmapy/classes/species.py b/plasmapy/classes/species.py index b1049460..aba2ef8c 100644 --- a/plasmapy/classes/species.py +++ b/plasmapy/classes/species.py @@ -63,7 +63,7 @@ def __init__(self, plasma, particle_type='p', n_particles=1, scaling=1, if np.isinf(dt) and np.isinf(nt): # coveralls: ignore raise ValueError("Both dt and nt are infinite.") - self.q = atomic.charge_state(particle_type) * constants.e.si + self.q = atomic.integer_charge(particle_type) * constants.e.si self.m = atomic.ion_mass(particle_type) self.N = int(n_particles) self.scaling = scaling diff --git a/plasmapy/diagnostics/langmuir.py b/plasmapy/diagnostics/langmuir.py index dcfcf30e..8a53af45 100644 --- a/plasmapy/diagnostics/langmuir.py +++ b/plasmapy/diagnostics/langmuir.py @@ -28,12 +28,12 @@ def __init__(self, area=u.m**2, config='single'): self.config = config -def swept_probe_analysis(potential_sweep, - measured_current, - probe, +def swept_probe_analysis(potential_sweep, + measured_current, + probe, ion='H', - method='maxwellian', - smoothing=None, + method='maxwellian', + smoothing=None, polyorder=3): r"""Performs a Langmuir analysis of a given probe V-I profile in order to obtain various plasma parameters. @@ -41,7 +41,8 @@ def swept_probe_analysis(potential_sweep, Parameters ---------- potential_sweep : Quantity - The swept electric potential applied to the probe in units convertible to V. + The swept electric potential applied to the probe in units + convertible to V. measured_current : Quantity The corresponding measured current in units convertible to A. @@ -59,7 +60,7 @@ def swept_probe_analysis(potential_sweep, smoothing : string, optional The smoothing method applied to the measured current in order to obtain clean derivatives. Options are None (default) and 'savgol', - which uses scipy.signal.savgol_filter. When `smoothing` is 'savgol', + which uses scipy.signal.savgol_filter. When `smoothing` is 'savgol', the polynomial order is given by polyorder. polyorder : int @@ -100,7 +101,8 @@ def obtain_EEDF(potential_sweep, measured_current, probe): Parameters ---------- potential_sweep : Quantity - The swept electric potential applied to the probe in units convertible to V. + The swept electric potential applied to the probe in units + convertible to V. measured_current : Quantity The corresponding measured current in units convertible to A. 
@@ -125,7 +127,8 @@ def obtain_EEPF(potential_sweep, measured_current, probe): Parameters ---------- potential_sweep : Quantity - The swept electric potential applied to the probe in units convertible to V. + The swept electric potential applied to the probe in units + convertible to V. measured_current : Quantity The corresponding measured current in units convertible to A. diff --git a/plasmapy/mathematics/mathematics.py b/plasmapy/mathematics/mathematics.py index 58c10e5f..f51027f6 100644 --- a/plasmapy/mathematics/mathematics.py +++ b/plasmapy/mathematics/mathematics.py @@ -38,7 +38,8 @@ def plasma_dispersion_func(zeta): The plasma dispersion function is defined as: .. math:: - Z(\zeta) = \pi^{-0.5} \int_{-\infty}^{+\infty} \frac{e^{-x^2}}{x-\zeta} dx + Z(\zeta) = \pi^{-0.5} \int_{-\infty}^{+\infty} + \frac{e^{-x^2}}{x-\zeta} dx where the argument is a complex number [fried.conte-1961]_. @@ -116,7 +117,8 @@ def plasma_dispersion_func_deriv(zeta): The derivative of the plasma dispersion function is defined as: .. math:: - Z'(\zeta) = \pi^{-0.5} \int_{-\infty}^{+\infty} \frac{e^{-x^2}}{(x-\zeta)^2} dx + Z'(\zeta) = \pi^{-0.5} \int_{-\infty}^{+\infty} + \frac{e^{-x^2}}{(x-\zeta)^2} dx where the argument is a complex number [fried.conte-1961]_. diff --git a/plasmapy/physics/__init__.py b/plasmapy/physics/__init__.py index de2f04da..781435dd 100644 --- a/plasmapy/physics/__init__.py +++ b/plasmapy/physics/__init__.py @@ -1,9 +1,23 @@ # 'physics' is a tentative name for this subpackage. Another # possibility is 'plasma'. The organization is to be decided by v0.1. -from .parameters import Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, inertial_length, magnetic_pressure, magnetic_energy_density, upper_hybrid_frequency, lower_hybrid_frequency -from .quantum import deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, Thomas_Fermi_length +from .dielectric import ( + cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP) + +from .distribution import ( + Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, + Maxwellian_speed_3D, kappa_velocity_3D, kappa_velocity_1D) + +from .parameters import ( + Alfven_speed, ion_sound_speed, thermal_speed, kappa_thermal_speed, + gyrofrequency, gyroradius, plasma_frequency, Debye_length, Debye_number, + inertial_length, magnetic_pressure, magnetic_energy_density, + upper_hybrid_frequency, lower_hybrid_frequency) + +from .quantum import ( + deBroglie_wavelength, thermal_deBroglie_wavelength, Fermi_energy, + Thomas_Fermi_length) + from .relativity import Lorentz_factor -from .transport import Coulomb_logarithm, classical_transport -from .distribution import Maxwellian_1D, Maxwellian_velocity_3D, Maxwellian_speed_1D, Maxwellian_speed_3D, kappa_velocity_3D, kappa_velocity_1D -from .dielectric import cold_plasma_permittivity_LRP, cold_plasma_permittivity_SDP + +from .transport import (Coulomb_logarithm, classical_transport) diff --git a/plasmapy/physics/distribution.py b/plasmapy/physics/distribution.py index f0c5fd45..632075e0 100644 --- a/plasmapy/physics/distribution.py +++ b/plasmapy/physics/distribution.py @@ -483,7 +483,8 @@ def Maxwellian_speed_3D(vx, .. math:: - f = 4 \pi \vec{v}^2 (\pi * v_Th^2)^{-3/2} \exp(-(\vec{v} - \vec{V_{drift}})^2 / v_Th^2) + f = 4 \pi \vec{v}^2 (\pi * v_Th^2)^{-3/2} \exp(-(\vec{v} - + \vec{V_{drift}})^2 / v_Th^2) where :math:`v_Th = \sqrt(2 k_B T / m)` is the thermal speed. 
@@ -643,10 +644,12 @@ def kappa_velocity_1D(v, .. math:: - f = A_\kappa \left(1 + \frac{(\vec{v} - \vec{V_{drift}})^2}{\kappa v_Th,\kappa^2}\right)^{-\kappa} + f = A_\kappa \left(1 + \frac{(\vec{v} - + \vec{V_{drift}})^2}{\kappa v_Th,\kappa^2}\right)^{-\kappa} where :math:`v_Th,\kappa` is the kappa thermal speed - and :math:`A_\kappa = \frac{1}{\sqrt{\pi} \kappa^{3/2} v_Th,\kappa^2 \frac{\Gamma(\kappa + 1)}{\Gamma(\kappa - 1/2)}}` + and :math:`A_\kappa = \frac{1}{\sqrt{\pi} \kappa^{3/2} v_Th,\kappa^2 + \frac{\Gamma(\kappa + 1)}{\Gamma(\kappa - 1/2)}}` is the normalization constant As kappa approaches infinity, the kappa distribution function converges @@ -743,8 +746,8 @@ def kappa_velocity_3D(vx, particle: string, optional Representation of the particle species(e.g., 'p' for protons, 'D+' - for deuterium, or 'He-4 +1' for :math:`He_4^{+1}` : singly ionized helium-4), - which defaults to electrons. + for deuterium, or 'He-4 +1' for :math:`He_4^{+1}` : singly ionized + helium-4), which defaults to electrons. Vx_drift: Quantity, optional The drift velocity in x-direction units convertible to m/s. @@ -793,10 +796,13 @@ def kappa_velocity_3D(vx, .. math:: - f = A_\kappa \left(1 + \frac{(\vec{v} - \vec{V_{drift}})^2}{\kappa v_Th,\kappa^2}\right)^{-(\kappa + 1)} + f = A_\kappa \left(1 + \frac{(\vec{v} - + \vec{V_{drift}})^2}{\kappa v_Th,\kappa^2}\right)^{-(\kappa + 1)} where :math:`v_Th,\kappa` is the kappa thermal speed - and :math:`A_\kappa = \frac{1}{2 \pi (\kappa v_Th,\kappa^2)^{3/2}} \frac{\Gamma(\kappa + 1)}{\Gamma(\kappa - 1/2) \Gamma(3/2)}` is the normalization constant + and :math:`A_\kappa = \frac{1}{2 \pi (\kappa v_Th,\kappa^2)^{3/2}} + \frac{\Gamma(\kappa + 1)}{\Gamma(\kappa - 1/2) \Gamma(3/2)}` is the + normalization constant. As kappa approaches infinity, the kappa distribution function converges to the Maxwellian distribution function. diff --git a/plasmapy/physics/parameters.py b/plasmapy/physics/parameters.py index 955295f7..886606ed 100644 --- a/plasmapy/physics/parameters.py +++ b/plasmapy/physics/parameters.py @@ -2,7 +2,7 @@ from astropy import units -# from plasmapy.atomic import ion_mass, charge_state +# from plasmapy.atomic import ion_mass, integer_charge import numpy as np # import warnings @@ -13,7 +13,7 @@ # TODO for future: change these into decorators. 
_check_quantity does a # bit more than @quantity_input as it can allow from plasmapy.utils.checks import _check_quantity -from plasmapy.utils.exceptions import PhysicsError, AtomicError # , PhysicsWarning +from plasmapy.utils.exceptions import (PhysicsError, AtomicError) r""" @@ -180,7 +180,7 @@ def Alfven_speed(B, density, ion="p", z_mean=None): # warnings.warn("No z_mean given, defaulting to atomic charge", # PhysicsWarning) try: - Z = atomic.charge_state(ion) + Z = atomic.integer_charge(ion) except AtomicError: Z = 1 else: @@ -328,7 +328,7 @@ def ion_sound_speed(*ignore, # warnings.warn("No z_mean given, defaulting to atomic charge", # PhysicsWarning) try: - Z = atomic.charge_state(ion) + Z = atomic.integer_charge(ion) except AtomicError: Z = 1 else: @@ -615,7 +615,7 @@ def collision_rate_electron_ion(T_e, else: particles = ['e', ion_particle] coulomb_log_val = Coulomb_logarithm(T_e, n_e, particles, V) - Z_i = atomic.charge_state(ion_particle) + Z_i = atomic.integer_charge(ion_particle) nu_e = 4 / 3 * np.sqrt(2 * np.pi / m_e) / (4 * np.pi * eps0) ** 2 * \ e ** 4 * n_e * Z_i * coulomb_log_val / (k_B * T_e) ** 1.5 return nu_e.to(1 / units.s) @@ -680,7 +680,7 @@ def collision_rate_ion_ion(T_i, n_i, ion_particle, else: particles = [ion_particle, ion_particle] coulomb_log_val = Coulomb_logarithm(T_i, n_i, particles, V) - Z_i = atomic.charge_state(ion_particle) + Z_i = atomic.integer_charge(ion_particle) m_i = atomic.ion_mass(ion_particle) nu_i = 4 / 3 * np.sqrt(np.pi / m_i) / (4 * np.pi * eps0)**2 * e**4 * \ n_i * Z_i**4 * coulomb_log_val / (k_B * T_i)**1.5 @@ -810,7 +810,7 @@ def gyrofrequency(B, particle='e', signed=False, z_mean=None): # warnings.warn("No z_mean given, defaulting to atomic charge", # PhysicsWarning) try: - Z = atomic.charge_state(particle) + Z = atomic.integer_charge(particle) except AtomicError: Z = 1 else: @@ -1028,7 +1028,7 @@ def plasma_frequency(n, particle='e', z_mean=None): # warnings.warn("No z_mean given, defaulting to atomic charge", # PhysicsWarning) try: - Z = atomic.charge_state(particle) + Z = atomic.integer_charge(particle) except Exception: Z = 1 else: @@ -1239,7 +1239,7 @@ def inertial_length(n, particle='e'): """ try: - Z = atomic.charge_state(particle) + Z = atomic.integer_charge(particle) except AtomicError: raise ValueError(f"Invalid particle {particle} in inertial_length.") if Z: @@ -1506,7 +1506,7 @@ def lower_hybrid_frequency(B, n_i, ion='p'): # We do not need a charge state here, so the sole intent is to # catch invalid ions. try: - atomic.charge_state(ion) + atomic.integer_charge(ion) except Exception: raise ValueError("Invalid ion in lower_hybrid_frequency.") diff --git a/plasmapy/physics/quantum.py b/plasmapy/physics/quantum.py index 2b69af02..f65b053a 100644 --- a/plasmapy/physics/quantum.py +++ b/plasmapy/physics/quantum.py @@ -199,7 +199,8 @@ def Fermi_energy(n_e): .. math:: - E_F = \frac{\pi^2 \hbar^2}{2 m_{e}} \left( \frac{3 n_{e}}{\pi} \right )^{2/3} + E_F = \frac{\pi^2 \hbar^2}{2 m_{e}} + \left( \frac{3 n_{e}}{\pi} \right )^{2/3} This quantity is often used in place of thermal energy for analysis of cold, dense plasmas (e.g. warm dense matter, condensed matter). 
diff --git a/plasmapy/physics/transport.py b/plasmapy/physics/transport.py index 0f4c2148..5e9ce5ff 100644 --- a/plasmapy/physics/transport.py +++ b/plasmapy/physics/transport.py @@ -8,7 +8,7 @@ from plasmapy.utils.checks import check_quantity, _check_relativistic from plasmapy.utils.exceptions import PhysicsError, PhysicsWarning from plasmapy.constants import (m_p, m_e, c, mu0, k_B, e, eps0, pi, h, hbar) -from ..atomic import (ion_mass, charge_state) +from ..atomic import (ion_mass, integer_charge) from plasmapy.atomic.atomic import _is_electron from .parameters import (Debye_length, Hall_parameter, collision_rate_electron_ion, collision_rate_ion_ion) @@ -137,7 +137,7 @@ def Coulomb_logarithm(T, n_e, particles, V=None): f" in Coulomb_logarithm.") try: - charges[i] = np.abs(e * atomic.charge_state(particles[i])) + charges[i] = np.abs(e * atomic.integer_charge(particles[i])) except Exception: raise ValueError(f"Unable to find charge of particle: " f"{particles[i]} in Coulomb_logarithm.") @@ -463,7 +463,7 @@ def __init__(self, T_e, n_e, T_i, n_i, ion_particle, m_i=None, Z=None, self.m_i = m_i.to(units.kg) if Z is None: try: - self.Z = atomic.charge_state(ion_particle) + self.Z = atomic.integer_charge(ion_particle) except Exception: raise ValueError(f"Unable to find charge of particle: " f"{ion_particle} in classical_transport.") @@ -713,11 +713,13 @@ def all_variables(self) -> dict: """ d = {'resistivity': self.resistivity(), 'thermoelectric_conductivity': self.thermoelectric_conductivity(), - 'electron_thermal_conductivity': self.electron_thermal_conductivity(), + 'electron_thermal_conductivity': + self.electron_thermal_conductivity(), 'electron_viscosity': self.electron_viscosity()} if self.model != "spitzer": - d = dict(d, **{'ion_thermal_conductivity': self.ion_thermal_conductivity(), - 'ion_viscosity': self.ion_viscosity()}) + d = dict(d, **{'ion_thermal_conductivity': + self.ion_thermal_conductivity(), + 'ion_viscosity': self.ion_viscosity()}) return d @@ -1663,7 +1665,8 @@ def Delta_perp_i2(r, zeta, Delta_par_i2): def f_eta_2(r, zeta, Delta_perp_i2): eta_2_i = ((3 / 5 * np.sqrt(2) + 2 * zeta) * r ** 4 + - (2.680 + 25.98 * zeta + 90.71 * zeta**2 + 104 * zeta**3) * r ** 2 + + (2.680 + 25.98 * zeta + 90.71 * zeta**2 + 104 * zeta**3) + * r ** 2 + 0.4483 * eta_0_i * Delta_par_i2 ** 2 ) / Delta_perp_i2 return eta_2_i
Should we rename the charge_state function?
The `charge_state` function in the atomic subpackage currently returns the integer electric charge (e.g., `-1` for `'e-'`, `1` for `'p'`, `16` for `'Fe 16+'`, and so on). The name `charge_state` is appropriate for ions, where it also denotes the ionization state. However, it makes less sense to talk about the charge state of electrons, muons, and neutrons, so I'm wondering if we should change it to something else. I have been thinking that `integer_charge` is intuitive and easy to understand, though it would not be as appropriate for describing quarks if we were to extend the atomic subpackage further. Are there any other good alternatives?
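For concreteness, a minimal sketch of the behavior under discussion, assuming the rename in the accompanying patch has been applied; the particle strings and expected values are the ones quoted in the issue text, and nothing else about the API is assumed:

```python
from plasmapy.atomic import integer_charge  # renamed from charge_state

# The renamed function still returns the electric charge as an integer
# multiple of the elementary charge, exactly as charge_state did.
assert integer_charge('e-') == -1       # electron
assert integer_charge('p') == 1         # proton
assert integer_charge('Fe 16+') == 16   # iron ionized sixteen times

# For an ion such as 'Fe 16+' this integer is also the ionization state,
# which is why the old name made sense there; for 'e-' it is only a charge.
```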
PlasmaPy/PlasmaPy
diff --git a/plasmapy/atomic/tests/test_atomic.py b/plasmapy/atomic/tests/test_atomic.py index e99b8054..7ee9a731 100644 --- a/plasmapy/atomic/tests/test_atomic.py +++ b/plasmapy/atomic/tests/test_atomic.py @@ -15,7 +15,7 @@ _is_alpha, _is_proton, _is_antineutron, - _extract_charge_state) + _extract_integer_charge) from ..isotopes import _Isotopes @@ -30,7 +30,7 @@ common_isotopes, stable_isotopes, isotopic_abundance, - charge_state, + integer_charge, electric_charge) from ..nuclear import (nuclear_binding_energy, nuclear_reaction_energy) @@ -824,7 +824,7 @@ def test_atomic_TypeErrors(func, argument): stable_isotopes, common_isotopes, isotopic_abundance, - charge_state, + integer_charge, electric_charge, ] @@ -952,7 +952,7 @@ def test_isotopic_abundances_sum(element, isotopes): # (argument, expected) -charge_state_table = [ +integer_charge_table = [ ('H+', 1), ('D +1', 1), ('tritium 1+', 1), @@ -976,16 +976,16 @@ def test_isotopic_abundances_sum(element, isotopes): ] [email protected]("argument, expected", charge_state_table) -def test_charge_state(argument, expected): - """Test that charge_state returns the expected results.""" - assert charge_state(argument) == expected, \ - (f"charge_state({argument}) is returning {charge_state(argument)} " - f"which differs from the expected result of {expected}.") [email protected]("argument, expected", integer_charge_table) +def test_integer_charge(argument, expected): + """Test that integer_charge returns the expected results.""" + assert integer_charge(argument) == expected, \ + (f"integer_charge({argument}) is returning {integer_charge(argument)}" + f" which differs from the expected result of {expected}.") # (argument, expected_error) -charge_state_error_table = [ +integer_charge_error_table = [ ('fads', InvalidParticleError), ('H++', InvalidParticleError), ('h+', InvalidParticleError), @@ -996,28 +996,29 @@ def test_charge_state(argument, expected): ] [email protected]("argument, expected_error", charge_state_error_table) -def test_charge_state_error(argument, expected_error): - """Test that charge_state raises the expected exceptions.""" [email protected]("argument, expected_error", + integer_charge_error_table) +def test_integer_charge_error(argument, expected_error): + """Test that integer_charge raises the expected exceptions.""" with pytest.raises(expected_error, message=( - f"charge_state({argument} is not raising a {expected_error}.")): - charge_state(argument) + f"integer_charge({argument} is not raising a {expected_error}.")): + integer_charge(argument) # (argument, expected_warning) -charge_state_warning_table = [ +integer_charge_warning_table = [ ('H---', AtomicWarning), ('Fe -26', AtomicWarning), ('Og 10-', AtomicWarning)] @pytest.mark.parametrize("argument, expected_warning", - charge_state_warning_table) -def test_charge_state_warnings(argument, expected_warning): - """Test that charge_state issues appropriate warnings.""" + integer_charge_warning_table) +def test_integer_charge_warnings(argument, expected_warning): + """Test that integer_charge issues appropriate warnings.""" with pytest.warns(expected_warning, message=( - f"charge_state({argument}) is not issuing a {expected_warning}")): - charge_state(argument) + f"integer_charge({argument}) is not issuing {expected_warning}")): + integer_charge(argument) def test_electric_charge(): @@ -1254,14 +1255,14 @@ def test_is_alpha(test_input, expected): ('N-7+++', 'N-7', 3), ('H-1-', 'H-1', -1), ('He-4-', 'He-4', -1)]) -def test_extract_charge_state(test_input, expected_newarg, 
expected_Z): - """Test that _extract_charge_state returns the expected values.""" - new_symbol, new_Z = _extract_charge_state(test_input) +def test_extract_integer_charge(test_input, expected_newarg, expected_Z): + """Test that _extract_integer_charge returns the expected values.""" + new_symbol, new_Z = _extract_integer_charge(test_input) assert new_symbol == expected_newarg, \ - (f"_extract_charge_state should return {expected_newarg} as " + (f"_extract_integer_charge should return {expected_newarg} as " f" its first argument, but is instead returning {new_symbol}") assert new_Z == expected_Z, \ - (f"_extract_charge_state should return {expected_Z} as its second" + (f"_extract_integer_charge should return {expected_Z} as its second" f"argument, but is instead returning {new_Z}.") @@ -1269,17 +1270,17 @@ def test_extract_charge_state(test_input, expected_newarg, expected_Z): [('H-1-+-+', InvalidParticleError), ('H ++', InvalidParticleError), ('Fe +21+', InvalidParticleError)]) -def test_extract_charge_state_errors(test_input, expected_error): - """Test that _extract_charge_state raises the expected exceptions.""" +def test_extract_integer_charge_errors(test_input, expected_error): + """Test that _extract_integer_charge raises the expected exceptions.""" with pytest.raises(expected_error): - _extract_charge_state(test_input) + _extract_integer_charge(test_input) @pytest.mark.parametrize("test_input,expected_warning", [('H-1----', AtomicWarning), ('Fe -4', AtomicWarning), ('lead 4-', AtomicWarning)]) -def test_extract_charge_state_warnings(test_input, expected_warning): - """Test that _extract_charge_state issues the expected warnings.""" +def test_extract_integer_charge_warnings(test_input, expected_warning): + """Test that _extract_integer_charge issues the expected warnings.""" with pytest.warns(expected_warning): - _extract_charge_state(test_input) + _extract_integer_charge(test_input) diff --git a/plasmapy/diagnostics/tests/test_langmuir.py b/plasmapy/diagnostics/tests/test_langmuir.py index 0ff8881e..ad960d48 100644 --- a/plasmapy/diagnostics/tests/test_langmuir.py +++ b/plasmapy/diagnostics/tests/test_langmuir.py @@ -3,8 +3,9 @@ import numpy as np from plasmapy.diagnostics.langmuir import (swept_probe_analysis, - obtain_EEDF, - obtain_EEPF) + obtain_EEDF, + obtain_EEPF) + class Test_swept_probe_analysis(): def test_no_signal(self): @@ -16,6 +17,7 @@ def test_no_signal(self): "when zero signal is passed, but doesn't.") assert True, errStr + class Test_obtain_EEDF(): def test_no_signal(self): """ @@ -26,6 +28,7 @@ def test_no_signal(self): "zero signal is passed, but doesn't.") assert True, errStr + class Test_obtain_EEPF(): def test_no_signal(self): """ diff --git a/plasmapy/physics/tests/test_dielectric.py b/plasmapy/physics/tests/test_dielectric.py index f1e0f512..623650df 100644 --- a/plasmapy/physics/tests/test_dielectric.py +++ b/plasmapy/physics/tests/test_dielectric.py @@ -21,7 +21,7 @@ class Test_ColdPlasmaPermittivity(object): def test_proton_electron_plasma(self): """ - Test proton-electron plasma against the (approximate) + Test proton-electron plasma against the (approximate) analytical formulas """ B = 1*u.T diff --git a/plasmapy/physics/tests/test_distribution.py b/plasmapy/physics/tests/test_distribution.py index b63d1358..6073ae43 100644 --- a/plasmapy/physics/tests/test_distribution.py +++ b/plasmapy/physics/tests/test_distribution.py @@ -96,9 +96,9 @@ def test_std(self): """ Tests standard deviation of function? 
""" - std = (Maxwellian_1D(self.v_vect, - T=self.T_e, - particle=self.particle) * self.v_vect**2 * self.dv).sum() + std = (Maxwellian_1D( + self.v_vect, T=self.T_e, particle=self.particle) * + self.v_vect**2 * self.dv).sum() std = np.sqrt(std) T_distri = (std**2 / k_B * m_e).to(u.K) assert np.isclose(T_distri.value, self.T_e.value) @@ -698,7 +698,7 @@ def test_value_drift_units(self): atol=0.0), errStr -#%% kappa +# kappa # test class for kappa_velocity_1D function: class Test_kappa_velocity_1D(object): @@ -722,6 +722,7 @@ def setup_method(self): kappa=self.kappa, particle=self.particle) self.distFuncTrue = 6.637935187755855e-07 + def test_invalid_kappa(self): """ Checks if function raises error when kappa <= 3/2 is passed as an @@ -729,10 +730,11 @@ def test_invalid_kappa(self): """ with pytest.raises(ValueError): kappa_velocity_1D(v=self.v, - T=self.T_e, - kappa=self.kappaInvalid, - particle=self.particle, - units="units") + T=self.T_e, + kappa=self.kappaInvalid, + particle=self.particle, + units="units") + def test_max_noDrift(self): """ Checks maximum value of distribution function is in expected place, @@ -799,10 +801,9 @@ def test_std(self): """ Tests standard deviation of function? """ - std = (kappa_velocity_1D(self.v_vect, - T=self.T_e, - kappa=self.kappa, - particle=self.particle) * self.v_vect**2 * self.dv).sum() + std = (kappa_velocity_1D( + self.v_vect, T=self.T_e, kappa=self.kappa, + particle=self.particle) * self.v_vect**2 * self.dv).sum() std = np.sqrt(std) T_distri = (std**2 / k_B * m_e).to(u.K) assert np.isclose(T_distri.value, self.T_e.value) @@ -949,6 +950,7 @@ def setup_method(self): self.Vy_drift2 = 1e5 * u.m / u.s self.Vz_drift2 = 1e5 * u.m / u.s self.distFuncTrue = 1.1847914288918793e-22 + def test_invalid_kappa(self): """ Checks if function raises error when kappa <= 3/2 is passed as an @@ -956,12 +958,13 @@ def test_invalid_kappa(self): """ with pytest.raises(ValueError): kappa_velocity_3D(vx=self.vx, - vy=self.vy, - vz=self.vz, - T=self.T, - kappa=self.kappaInvalid, - particle=self.particle, - units="units") + vy=self.vy, + vz=self.vz, + T=self.T, + kappa=self.kappaInvalid, + particle=self.particle, + units="units") + # def test_maxwellian_limit(self): # """ # Tests the limit of large kappa to see if kappa distribution function @@ -996,6 +999,7 @@ def test_invalid_kappa(self): # atol=0.0), errStr # # return + def test_norm(self): """ Tests whether distribution function is normalized, and integrates to 1. 
diff --git a/plasmapy/physics/tests/test_parameters.py b/plasmapy/physics/tests/test_parameters.py index 2b7c4900..291f7c64 100644 --- a/plasmapy/physics/tests/test_parameters.py +++ b/plasmapy/physics/tests/test_parameters.py @@ -158,9 +158,9 @@ def test_ion_sound_speed(): ion='p', gamma_e=1, gamma_i=3).value, 218816.06086407552) - assert np.isclose(ion_sound_speed(T_i=0.88 * u.MK, T_e=1.28 * u.MK, ion='p', - gamma_e=1.2, gamma_i=3.4).value, - 193328.52857788358) + assert np.isclose(ion_sound_speed( + T_i=0.88 * u.MK, T_e=1.28 * u.MK, ion='p', gamma_e=1.2, + gamma_i=3.4).value, 193328.52857788358) # case when Z=1 is assumed assert ion_sound_speed(T_i=T_i, T_e=T_e, ion='p') == \ diff --git a/plasmapy/physics/tests/test_transport.py b/plasmapy/physics/tests/test_transport.py index c1299c78..c4ee7461 100644 --- a/plasmapy/physics/tests/test_transport.py +++ b/plasmapy/physics/tests/test_transport.py @@ -5,7 +5,7 @@ import numpy as np import pytest from astropy import units as u -from plasmapy.atomic.atomic import ion_mass, charge_state +from plasmapy.atomic.atomic import ion_mass, integer_charge from plasmapy.utils.exceptions import (PhysicsError, PhysicsWarning, RelativityWarning, RelativityError) from plasmapy.physics.parameters import Hall_parameter @@ -104,7 +104,7 @@ def setup_method(self): self.n_e = 2e13 / u.cm ** 3 self.ion_particle = 'D +1' self.m_i = ion_mass(self.ion_particle) - self.Z = charge_state(self.ion_particle) + self.Z = integer_charge(self.ion_particle) self.T_i = self.T_e self.n_i = self.n_e / self.Z self.B = 0.01 * u.T @@ -427,7 +427,7 @@ def test_ion_thermal_conductivity_by_model(self, model, expected): assert np.allclose(ct2.ion_thermal_conductivity(), expected, atol=1e-6 * u.W / (u.K * u.m)) - @pytest.mark.parametrize("key, expected",{ + @pytest.mark.parametrize("key, expected", { 'resistivity': [2.84304305e-08, 5.54447070e-08, 1.67853407e-12], @@ -436,7 +436,7 @@ def test_ion_thermal_conductivity_by_model(self, model, expected): 2.66496639e-05], 'electron_thermal_conductivity': [4.91374931e+06, 2.28808496e-03, - 6.90324259e+01] , + 6.90324259e+01], 'electron_viscosity': [7.51661800e-02, 5.23617668e-21, 2.09447067e-20, @@ -444,12 +444,12 @@ def test_ion_thermal_conductivity_by_model(self, model, expected): 3.23682681e-11], 'ion_thermal_conductivity': [1.41709276e+05, 4.20329493e-02, - 6.90323924e+01] , + 6.90323924e+01], 'ion_viscosity': [8.43463595e+00, 8.84513731e-13, 3.53805159e-12, 2.54483240e-06, - 5.08966116e-06] }.items()) + 5.08966116e-06]}.items()) def test_dictionary(self, key, expected): calculated = self.all_variables[key] assert np.allclose(expected, calculated.si.value)
{
  "commit_name": "head_commit",
  "failed_lite_validators": [
    "has_many_modified_files",
    "has_many_hunks"
  ],
  "has_test_patch": true,
  "is_lite": false,
  "llm_score": {
    "difficulty_score": 2,
    "issue_text_score": 1,
    "test_score": 0
  },
  "num_modified_files": 12
}
unknown
{
  "env_vars": null,
  "env_yml_path": null,
  "install": "pip install -e .[dev]",
  "log_parser": "parse_log_pytest",
  "no_use_env": null,
  "packages": "requirements.txt",
  "pip_packages": [
    "pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio",
    "pytest"
  ],
  "pre_install": [
    "apt-get update",
    "apt-get install -y gcc"
  ],
  "python": "3.6",
  "reqs_path": [
    "requirements/base.txt"
  ],
  "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
}
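As an aside, a rough Python sketch of how a harness might replay this install_config; the replay loop itself is hypothetical, but every command string is copied verbatim from the JSON above (the `pip_packages` and `reqs_path` entries are omitted for brevity):

```python
import subprocess

# Command strings taken verbatim from the install_config record above;
# the loop is a hypothetical harness, not part of the dataset itself.
pre_install = ["apt-get update", "apt-get install -y gcc"]
install = "pip install -e .[dev]"
test_cmd = ("pytest --no-header -rA --tb=line --color=no "
            "-p no:cacheprovider -W ignore::DeprecationWarning")

for cmd in [*pre_install, install, test_cmd]:
    subprocess.run(cmd, shell=True, check=True)  # abort on the first failure
```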
astropy==4.1
attrs==22.2.0
certifi==2021.5.30
coverage==6.2
execnet==1.9.0
importlib-metadata==4.8.3
iniconfig==1.1.1
numpy==1.19.5
packaging==21.3
-e git+https://github.com/PlasmaPy/PlasmaPy.git@7655c9a4e756aaf1e2430902d80112c9f82f5953#egg=plasmapy
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
scipy==1.5.4
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
name: PlasmaPy
channels:
  - defaults
  - https://repo.anaconda.com/pkgs/main
  - https://repo.anaconda.com/pkgs/r
  - conda-forge
dependencies:
  - _libgcc_mutex=0.1=main
  - _openmp_mutex=5.1=1_gnu
  - ca-certificates=2025.2.25=h06a4308_0
  - certifi=2021.5.30=py36h06a4308_0
  - ld_impl_linux-64=2.40=h12ee557_0
  - libffi=3.3=he6710b0_2
  - libgcc-ng=11.2.0=h1234567_1
  - libgomp=11.2.0=h1234567_1
  - libstdcxx-ng=11.2.0=h1234567_1
  - ncurses=6.4=h6a678d5_0
  - openssl=1.1.1w=h7f8727e_0
  - pip=21.2.2=py36h06a4308_0
  - python=3.6.13=h12debd9_1
  - readline=8.2=h5eee18b_0
  - setuptools=58.0.4=py36h06a4308_0
  - sqlite=3.45.3=h5eee18b_0
  - tk=8.6.14=h39e8969_0
  - wheel=0.37.1=pyhd3eb1b0_0
  - xz=5.6.4=h5eee18b_1
  - zlib=1.2.13=h5eee18b_1
  - pip:
    - astropy==4.1
    - attrs==22.2.0
    - coverage==6.2
    - execnet==1.9.0
    - importlib-metadata==4.8.3
    - iniconfig==1.1.1
    - numpy==1.19.5
    - packaging==21.3
    - pluggy==1.0.0
    - py==1.11.0
    - pyparsing==3.1.4
    - pytest==7.0.1
    - pytest-asyncio==0.16.0
    - pytest-cov==4.0.0
    - pytest-mock==3.6.1
    - pytest-xdist==3.0.2
    - scipy==1.5.4
    - tomli==1.2.3
    - typing-extensions==4.1.1
    - zipp==3.6.0
prefix: /opt/conda/envs/PlasmaPy
[ "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[1-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[H-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[p-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[T-H0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[deuterium-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[deuteron-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Tritium-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[triton-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[H-2-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[D-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[T-H1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[H-3-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Hydrogen-3-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[helium-He]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[2-He]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[alpha-He]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[gold-Au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Gold-Au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[79-Au0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[79-Au1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[P-P]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[118-Og]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[N-14-N]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[N-N]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[H", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[hydrogen", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[deuterium", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Fe+-Fe]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Fe++-Fe]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Fe--Fe]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Fe++++++++++++++-Fe]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments0-He-4]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments1-He-4]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments2-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments3-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments4-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments5-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments6-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments7-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments8-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments9-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments10-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments11-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments12-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments13-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments14-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments15-He-4]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments16-He-4]", 
"plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments17-Au-197]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments18-H-1]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments19-Be-8]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments20-N-13]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments21-H-1]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments22-H-1]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments23-H-1]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments24-N-13]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments25-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments26-n]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments27-n]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments28-n]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments29-n]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments30-n]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments31-n]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[H-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[D-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[deuterium-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[Deuterium-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[tritium-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[p-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[P-15]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[Alpha-2]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[C-12-6]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[Argon-18]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[protium-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[H-3-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[p+-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[Be-8-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[N-7]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[N", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[N+++-7]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[helium-3-3]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[Au-197-197]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[deuterium-2]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[D-2]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[H-2-2]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[tritium-3]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[T-3]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[p-1]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[n-1]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[neutron-1]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[n-1-1]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[Be-8-8]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[N-13-13]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[N-13", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[N-13+++-13]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number_error[H-359-InvalidParticleError]", 
"plasmapy/atomic/tests/test_atomic.py::test_mass_number_error[C-12b-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number_error[-1.5-Exception]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number_error[N-13+-+--InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number_error[h-3-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[D-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[deuterium-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[Au-gold]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[alpha-helium0]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[helium-4-helium]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[H-2-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[Deuterium-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[Hydrogen-3-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[hydrogen-3-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[H-3-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[tritium-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[Alpha-helium]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[alpha-helium1]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[1-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[26-iron]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[79-gold]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[p-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[P-phosphorus]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[Be-8-beryllium]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[Li-7-lithium]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[N-nitrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[N+++-nitrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[D--hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[vegancupcakes-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[C-+--InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[1.24-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[n-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[neutron-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[0-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[H++-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[t-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[pb-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[d-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[h-3-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[Pb-9-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[H", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_value_between", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_unit", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight[H-1.008]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight[1-1.008]", 
"plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight[Hydrogen-1.008]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_berkelium_249", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_n", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_si_30_units", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg10-arg20]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg11-arg21]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg12-arg22]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg13-arg23]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg14-arg24]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg15-arg25]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg16-arg26]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg17-arg27]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_for_hydrogen_with_no_mass_number", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_unit", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[proton-kwargs0]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[H-1+-kwargs1]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[H-1", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[H-1-kwargs4]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[hydrogen-1-kwargs5]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[p+-kwargs6]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[antiproton-kwargs7]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[p--kwargs8]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_miscellaneous_cases", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[e+-kwargs10-positron-kwargs20-expected0]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[alpha-kwargs11-He-4++-kwargs21-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[alpha-kwargs12-helium-4", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[deuteron-kwargs13-H-kwargs23-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[D+-kwargs14-H-2+-kwargs24-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[D+-kwargs15-D", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[Deuterium+-kwargs16-D-kwargs26-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[triton-kwargs17-H-kwargs27-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[T+-kwargs18-H-3+-kwargs28-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[T+-kwargs19-T", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[Tritium+-kwargs110-T-kwargs210-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[H-kwargs113-1-kwargs213-None]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument0]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument1]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument2]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument3]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument4]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument5]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument6]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument7]", 
"plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument8]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument9]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument10]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument11]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument12]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument13]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument14]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument15]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument0]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument1]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument2]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument3]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument4]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument5]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument6]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument7]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument8]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument9]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument10]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument11]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument12]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument13]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument14]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument15]", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes", "plasmapy/atomic/tests/test_atomic.py::test_half_life", "plasmapy/atomic/tests/test_atomic.py::test_half_life_unstable_isotopes", "plasmapy/atomic/tests/test_atomic.py::test_half_life_u_220", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_symbol-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_symbol-argument1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_symbol-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_symbol-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_symbol-argument4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_symbol-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_number-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_number-argument7]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_number-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[is_isotope_stable-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[is_isotope_stable-argument10]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[is_isotope_stable-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[half_life-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[half_life-argument13]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[half_life-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[mass_number-1.1]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[mass_number-argument16]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[mass_number-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[element_name-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[element_name-argument19]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[element_name-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[standard_atomic_weight-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[standard_atomic_weight-argument22]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[standard_atomic_weight-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_mass-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_mass-argument25]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_mass-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[ion_mass-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[ion_mass-argument28]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[ion_mass-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_binding_energy-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_binding_energy-argument31]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_binding_energy-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_reaction_energy-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_reaction_energy-argument34]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_reaction_energy-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-grumblemuffins]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-C++++++++++++++++]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name--1]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-H-934361079326356530741942970523610389]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-he]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-119]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-H-934361079326356530741942970523610389]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes_cases", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes_len", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[1-isotopes0]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[3-isotopes1]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[5-isotopes2]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[7-isotopes3]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[9-isotopes4]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[11-isotopes5]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[13-isotopes6]", 
"plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[15-isotopes7]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[17-isotopes8]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[19-isotopes9]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[21-isotopes10]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[23-isotopes11]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[25-isotopes12]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[27-isotopes13]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[29-isotopes14]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[31-isotopes15]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[33-isotopes16]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[35-isotopes17]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[37-isotopes18]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[39-isotopes19]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[41-isotopes20]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[43-isotopes21]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[45-isotopes22]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[47-isotopes23]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[49-isotopes24]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[51-isotopes25]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[53-isotopes26]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[55-isotopes27]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[57-isotopes28]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[59-isotopes29]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[61-isotopes30]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[63-isotopes31]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[65-isotopes32]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[67-isotopes33]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[69-isotopes34]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[71-isotopes35]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[73-isotopes36]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[75-isotopes37]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[77-isotopes38]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[79-isotopes39]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[81-isotopes40]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[89-isotopes41]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[91-isotopes42]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[H+-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[D", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[tritium", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[H---1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[Fe", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[N-----3]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[N++-2]", 
"plasmapy/atomic/tests/test_atomic.py::test_integer_charge[alpha-2]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[proton-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[deuteron-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[triton-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[electron--1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[e---1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[e+-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[positron-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[n-0]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[neutron-0]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[p---1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[antiproton--1]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[n-kwargs0-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[n-1-kwargs1-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[N-kwargs2-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[N-1-kwargs3-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[N-7-kwargs4-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[neutron-kwargs5-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[James", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[0-kwargs7-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[0-kwargs8-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[n0-kwargs9-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[hydrogen-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[hydrogen+-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[hydrogen--False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[hydrogen---False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H+-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H--False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[proton-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[protium-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[deuterium-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[tritium-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[triton-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[deuteron-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[h-False-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[D-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[D+-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-2-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-2+-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-2", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-3", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[He-False-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-1-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-7-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[antiproton-False-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[1-True-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[1-False-False]", 
"plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[p--False-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen_errors[H", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen_errors[D++-kwargs1-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[e--True]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[e+-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[Electron-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[electron-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[positron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[p-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[E-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[E--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[beta-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[-1-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[e--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[e+-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[Electron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[electron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[positron-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[p-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[E-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[E--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[beta-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[1-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[p-kwargs0-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[p+-kwargs1-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[hydrogen-1+-kwargs2-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-1", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-1-kwargs4-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-kwargs5-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[p--kwargs6-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[antiproton-kwargs7-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[Antiproton-kwargs8-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[proton-kwargs9-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[Proton-kwargs10-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[P-kwargs11-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[P+-kwargs12-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[1-kwargs13-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[1-kwargs14-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-kwargs15-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-1-kwargs16-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-kwargs17-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-1-kwargs18-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[e--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[e+-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[Electron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[electron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[positron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[p-False]", 
"plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[E-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[E--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[beta-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[p--True]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[Antiproton-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[antiproton-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[p+-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[p---False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[P--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[57-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[e--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[e+-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[Electron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[electron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[positron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[p-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[E-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[E--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[beta-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[p--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[Antiproton-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[antiproton-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[p+-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[p---False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[P--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[57-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[alpha-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-4", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-4++-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-3", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-5", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[Helium-4", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-4-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[helium-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[Fe-56-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[Fe-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[he-4", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[H-H-None]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[H+-H-1]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[D", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[alpha-He-4-2]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[Fe-Fe-None]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[Titanium-Titanium-None]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[N-7+++-N-7-3]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[H-1--H-1--1]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[He-4--He-4--1]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_errors[H-1-+-+-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_errors[H", 
"plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_errors[Fe", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_warnings[H-1-----AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_warnings[Fe", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_warnings[lead", "plasmapy/diagnostics/tests/test_langmuir.py::Test_swept_probe_analysis::test_no_signal", "plasmapy/diagnostics/tests/test_langmuir.py::Test_obtain_EEDF::test_no_signal", "plasmapy/diagnostics/tests/test_langmuir.py::Test_obtain_EEPF::test_no_signal", "plasmapy/physics/tests/test_dielectric.py::Test_ColdPlasmaPermittivity::test_proton_electron_plasma", "plasmapy/physics/tests/test_dielectric.py::Test_ColdPlasmaPermittivity::test_three_species", "plasmapy/physics/tests/test_dielectric.py::Test_ColdPlasmaPermittivity::test_SD_to_LR_relationships", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_1D::test_max_noDrift", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_1D::test_max_drift", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_1D::test_norm", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_1D::test_std", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_1D::test_valErr", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_1D::test_value_drift_units", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_1D::test_norm", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_1D::test_value_drift_units", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_velocity_3D::test_norm", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_velocity_3D::test_value_drift_units", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_3D::test_norm", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_3D::test_value_drift_units", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_invalid_kappa", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_max_noDrift", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_max_drift", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_maxwellian_limit", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_norm", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_std", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_valErr", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_3D::test_invalid_kappa", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_3D::test_norm", "plasmapy/physics/tests/test_parameters.py::test_thermal_speed", "plasmapy/physics/tests/test_parameters.py::Test_kappa_thermal_speed::test_invalid_kappa", "plasmapy/physics/tests/test_parameters.py::Test_kappa_thermal_speed::test_invalid_method", "plasmapy/physics/tests/test_parameters.py::Test_kappa_thermal_speed::test_probable1", "plasmapy/physics/tests/test_parameters.py::Test_kappa_thermal_speed::test_rms1", "plasmapy/physics/tests/test_parameters.py::Test_kappa_thermal_speed::test_mean1", "plasmapy/physics/tests/test_parameters.py::test_gyroradius", "plasmapy/physics/tests/test_parameters.py::test_Debye_length", "plasmapy/physics/tests/test_parameters.py::test_Debye_number", "plasmapy/physics/tests/test_parameters.py::test_inertial_length", "plasmapy/physics/tests/test_parameters.py::test_magnetic_pressure", 
"plasmapy/physics/tests/test_parameters.py::test_magnetic_energy_density", "plasmapy/physics/tests/test_parameters.py::test_upper_hybrid_frequency", "plasmapy/physics/tests/test_parameters.py::test_lower_hybrid_frequency", "plasmapy/physics/tests/test_transport.py::test_Coulomb_logarithm", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_spitzer_vs_formulary", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_resistivity_units", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_thermoelectric_conductivity_units", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_ion_thermal_conductivity_units", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_electron_thermal_conductivity_units", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_ion_viscosity_units", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_electron_viscosity_units", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_particle_mass", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_particle_charge_state", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_Z_checks", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_coulomb_log_warnings", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_coulomb_log_errors", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_coulomb_log_calc", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_hall_calc", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_invalid_model", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_invalid_field", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_precalculated_parameters", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_number_of_returns[ji-held-resistivity-all-3]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_number_of_returns[ji-held-thermoelectric_conductivity-all-3]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_number_of_returns[ji-held-electron_thermal_conductivity-all-3]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_number_of_returns[ji-held-ion_thermal_conductivity-all-3]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_number_of_returns[spitzer-resistivity-all-2]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_resistivity_by_model[ji-held-expected0]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_resistivity_by_model[spitzer-expected1]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_resistivity_by_model[braginskii-expected2]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_thermoelectric_conductivity_by_model[ji-held-expected0]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_thermoelectric_conductivity_by_model[spitzer-expected1]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_thermoelectric_conductivity_by_model[braginskii-expected2]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_electron_viscosity_by_model[ji-held-expected0]", 
"plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_electron_viscosity_by_model[braginskii-expected1]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_ion_viscosity_by_model[ji-held-expected0]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_ion_viscosity_by_model[braginskii-expected1]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_electron_thermal_conductivity_by_model[ji-held-expected0]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_electron_thermal_conductivity_by_model[spitzer-expected1]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_electron_thermal_conductivity_by_model[braginskii-expected2]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_ion_thermal_conductivity_by_model[ji-held-expected0]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_ion_thermal_conductivity_by_model[braginskii-expected1]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_dictionary[resistivity-expected0]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_dictionary[thermoelectric_conductivity-expected1]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_dictionary[electron_thermal_conductivity-expected2]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_dictionary[electron_viscosity-expected3]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_dictionary[ion_thermal_conductivity-expected4]", "plasmapy/physics/tests/test_transport.py::Test_classical_transport::test_dictionary[ion_viscosity-expected5]", "plasmapy/physics/tests/test_transport.py::test_nondim_thermal_conductivity_unrecognized_model[e]", "plasmapy/physics/tests/test_transport.py::test_nondim_thermal_conductivity_unrecognized_model[p]", "plasmapy/physics/tests/test_transport.py::test_nondim_resistivity_unrecognized_model", "plasmapy/physics/tests/test_transport.py::test_nondim_te_conductivity_unrecognized_model", "plasmapy/physics/tests/test_transport.py::test_nondim_viscosity_unrecognized_model[e]", "plasmapy/physics/tests/test_transport.py::test_nondim_viscosity_unrecognized_model[p]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_known_values_par[1-par-3.16]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_known_values_par[2-par-4.9]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_known_values_par[3-par-6.1]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_known_values_par[4-par-6.9]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_known_values_par[inf-par-12.5]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_known_values_perp[1-perp-4.66]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_known_values_perp[2-perp-4.0]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_known_values_perp[3-perp-3.7]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_known_values_perp[4-perp-3.6]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_known_values_perp[inf-perp-3.2]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_unmagnetized[1]", 
"plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_unmagnetized[2]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_unmagnetized[3]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_unmagnetized[4]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_unmagnetized[inf]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_cross_vs_ji_held[1]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_e_braginskii::test_cross_vs_ji_held[4]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_i_braginskii::test_known_values_par", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_i_braginskii::test_known_values_perp", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_i_braginskii::test_unmagnetized", "plasmapy/physics/tests/test_transport.py::Test__nondim_tc_i_braginskii::test_cross_vs_ji_held_K2", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_known_values_par[1-par-0.71]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_known_values_par[2-par-0.9]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_known_values_par[3-par-1.0]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_known_values_par[4-par-1.1]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_known_values_par[inf-par-1.5]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_unmagnetized[1]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_unmagnetized[2]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_unmagnetized[3]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_unmagnetized[4]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_unmagnetized[inf]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_cross_vs_ji_held[1]", "plasmapy/physics/tests/test_transport.py::Test__nondim_tec_braginskii::test_cross_vs_ji_held[4]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_known_values_par[1-par-0.51]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_known_values_par[2-par-0.44]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_known_values_par[3-par-0.4]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_known_values_par[4-par-0.38]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_known_values_par[inf-par-0.29]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_unmagnetized[1]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_unmagnetized[2]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_unmagnetized[3]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_unmagnetized[4]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_unmagnetized[inf]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_cross_vs_ji_held[1]", "plasmapy/physics/tests/test_transport.py::Test__nondim_resist_braginskii::test_cross_vs_ji_held[4]", "plasmapy/physics/tests/test_transport.py::Test__nondim_visc_i_braginskii::test_known_values[expected0-power0]", 
"plasmapy/physics/tests/test_transport.py::Test__nondim_visc_i_braginskii::test_vs_ji_held_K2", "plasmapy/physics/tests/test_transport.py::Test__nondim_visc_e_braginskii::test_known_values[1-0.73-0]", "plasmapy/physics/tests/test_transport.py::Test__nondim_visc_e_braginskii::test_known_values[1-0.51-1]", "plasmapy/physics/tests/test_transport.py::Test__nondim_visc_e_braginskii::test_known_values[1-2.04-2]", "plasmapy/physics/tests/test_transport.py::Test__nondim_visc_e_braginskii::test_known_values[1-0.5-3]", "plasmapy/physics/tests/test_transport.py::Test__nondim_visc_e_braginskii::test_known_values[1-1.0-4]", "plasmapy/physics/tests/test_transport.py::test_fail__check_Z_nan", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_spitzer[1]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_spitzer[2]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_spitzer[4]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_spitzer[16]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_spitzer[inf]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_spitzer[1]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_spitzer[2]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_spitzer[4]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_spitzer[16]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_spitzer[inf]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_spitzer[1]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_spitzer[2]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_spitzer[4]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_spitzer[16]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_spitzer[inf]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[0.0501-1-perp-3.187]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[0.2522-1-perp-2.597]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[1.004-1-perp-0.9942]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[3.178-1-perp-0.2218]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[10.03-1-perp-0.03216]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[31.66-1-perp-0.003878]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[100.6-1-perp-0.0004241]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[315.7-1-perp-4.492e-05]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[1005-1-perp-4.543e-06]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[0.03175-1-cross-0.1899]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[0.1001-1-cross-0.5648]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[0.3166-1-cross-1.234]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[1.267-1-cross-1.157]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[4-1-cross-0.5359]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[12.64-1-cross-0.1906]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[40.04-1-cross-0.06191]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[126.4-1-cross-0.01981]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[401.6-1-cross-0.006282]", 
"plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[0.02494-100-perp-11.57]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[0.09969-100-perp-6.707]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[0.3987-100-perp-1.964]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[1.586-100-perp-0.3524]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[4.991-100-perp-0.06185]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[15.85-100-perp-0.008857]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[49.85-100-perp-0.001078]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[158-100-perp-0.0001184]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[499.9-100-perp-1.236e-05]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[0.0319-100-cross-3.68]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[0.1271-100-cross-5.023]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[0.502-100-cross-2.945]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[1.595-100-cross-1.283]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[5.017-100-cross-0.462]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[15.95-100-cross-0.1534]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[50.24-100-cross-0.04949]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[158.1-100-cross-0.01572]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_e_ji_held[500.8-100-cross-0.004972]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[0.03939-1-perp-0.6959]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[0.2498-1-perp-0.6216]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[1.258-1-perp-0.3007]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[6.321-1-perp-0.06303]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[25.14-1-perp-0.01126]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[100.3-1-perp-0.00161]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[317.2-1-perp-0.0002877]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[1006-1-perp-4.83e-05]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[3191-1-perp-7.741e-06]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[9958-1-perp-1.226e-06]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[0.02515-1-cross-0.02218]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[0.06343-1-cross-0.0551]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[0.1589-1-cross-0.1268]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[0.5041-1-cross-0.2523]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[2.006-1-cross-0.24]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[6.321-1-cross-0.1335]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[19.97-1-cross-0.05613]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[50.26-1-cross-0.0253]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[126.9-1-cross-0.01083]", 
"plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[317.5-1-cross-0.004495]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[795.3-1-cross-0.001839]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[0.03975-100-perp-1.335]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[0.2522-100-perp-0.7647]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[1.258-100-perp-0.2709]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[6.345-100-perp-0.05833]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[25.12-100-perp-0.01112]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[100.2-100-perp-0.001649]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[317.8-100-perp-0.0002915]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[992.3-100-perp-4.875e-05]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[3170-100-perp-7.839e-06]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[9994-100-perp-1.213e-06]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[0.02507-100-cross-0.2022]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[0.07935-100-cross-0.4037]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[0.3155-100-cross-0.4764]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[1.258-100-cross-0.3272]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[3.958-100-cross-0.1795]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[12.53-100-cross-0.08046]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[39.62-100-cross-0.03088]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[100.2-100-cross-0.01332]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[250.3-100-cross-0.00562]", "plasmapy/physics/tests/test_transport.py::test__nondim_tec_ji_held[629.8-100-cross-0.002308]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.06317-1-perp-0.5064]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.3966-1-perp-0.5316]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[1.586-1-perp-0.619]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[5.041-1-perp-0.7309]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[15.8-1-perp-0.8343]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[63.35-1-perp-0.92]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[315.6-1-perp-0.9701]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[1998-1-perp-0.9912]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.02495-1-cross-0.005026]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.06249-1-cross-0.01255]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.1574-1-cross-0.03007]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.4998-1-cross-0.07338]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[1.995-1-cross-0.1254]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[12.6-1-cross-0.1211]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[62.96-1-cross-0.07421]", 
"plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[252.4-1-cross-0.03992]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[998.4-1-cross-0.01908]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[3194-1-cross-0.009749]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[9963-1-cross-0.004812]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.06333-100-perp-0.3144]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.3137-100-perp-0.3894]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.9954-100-perp-0.4979]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[2.507-100-perp-0.6091]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[6.324-100-perp-0.7221]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[20.02-100-perp-0.8401]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[79.68-100-perp-0.9275]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[399.2-100-perp-0.9743]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[2509-100-perp-0.9922]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.02505-100-cross-0.02138]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.07929-100-cross-0.05403]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[0.3138-100-cross-0.1133]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[1.581-100-cross-0.1693]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[9.994-100-cross-0.1539]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[49.88-100-cross-0.09238]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[199.1-100-cross-0.04845]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[786.3-100-cross-0.02278]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[2504-100-cross-0.01152]", "plasmapy/physics/tests/test_transport.py::test__nondim_resist_ji_held[7879-100-cross-0.005652]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[0.01968-1-2-0.7368]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[0.1338-1-2-0.7171]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[0.4766-1-2-0.6003]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[1.339-1-2-0.3241]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[4.479-1-2-0.06964]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[15.28-1-2-0.008006]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[48.82-1-2-0.0008496]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[94.89-1-2-0.0002257]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[0.01267-1-4-0.009038]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[0.03978-1-4-0.02831]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[0.1151-1-4-0.08041]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[0.2904-1-4-0.1823]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[0.8049-1-4-0.3158]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[1.77-1-4-0.3083]", 
"plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[3.886-1-4-0.2062]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[16.68-1-4-0.05845]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[39.82-1-4-0.02501]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_e_ji_held[77.82-1-4-0.01288]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[0.01529-1-0-100-perp-3.99586042]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[0.0589-1-0-100-perp-3.96828326]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[0.22953-1-0-100-perp-3.34885772]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[0.50077-1-0-100-perp-2.22385083]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[1.29924-1-0-100-perp-0.76650375]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[5.48856-1-0-100-perp-0.06337091]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[25.99325-1-0-100-perp-0.00298328]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[68.00953-1-0-100-perp-0.00042822]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[120.53342-1-0-100-perp-0.00013739]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[0.01865-1-0-100-cross-0.13661303]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[0.04544-1-0-100-cross-0.32795613]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[0.14199-1-0-100-cross-0.95317994]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[0.38806-1-0-100-cross-1.73029029]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[1.12996-1-0-100-cross-1.53230039]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[2.96843-1-0-100-cross-0.77216061]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[12.42528-1-0-100-cross-0.19968696]", "plasmapy/physics/tests/test_transport.py::test__nondim_tc_i_ji_held[77.11707-1-0-100-cross-0.03235721]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[0.01981-1-0-100-2-0.96166522]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[0.17423-1-0-100-2-0.92206724]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[0.66369-1-0-100-2-0.6344162]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[1.72958-1-0-100-2-0.24890159]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[10.09041-1-0-100-2-0.01134199]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[52.50975-1-0-100-2-0.00042844]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[0.01829-1-0-100-4-0.01943837]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[0.07845-1-0-100-4-0.08251936]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[0.35765-1-0-100-4-0.31643028]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[0.99985-1-0-100-4-0.45346758]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[4.35295-1-0-100-4-0.21036427]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[22.74055-1-0-100-4-0.04358606]", "plasmapy/physics/tests/test_transport.py::test__nondim_visc_i_ji_held[80.42633-1-0-100-4-0.01238144]" ]
[ "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[Fe-56", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[H-0-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[3.14159-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[Og-294b-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[H-934361079326356530741942970523610389-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[Fe+24-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[C++++++++++++++++-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[C-++++-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[neutron-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[n-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[n-1-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[h-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[d-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[he-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[au-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[p--InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[0-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[119-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[antiproton-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[Md-260-kwargs0-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[protium-kwargs1-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[alpha-kwargs2-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[O-18-kwargs3-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[lead-209-kwargs4-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[He-1-kwargs5-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[24-kwargs6-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[H-kwargs7-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[H-1-kwargs8-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[P-kwargs9-InvalidIsotopeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[1-kwargs10-InvalidIsotopeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[4-kwargs11-InvalidIsotopeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[hydrogen-444444-kwargs12-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[Fe-kwargs13-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[He-kwargs14-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[He-3-kwargs15-InvalidParticleError]", 
"plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[D-kwargs16-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[T-kwargs17-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[Fe-kwargs18-InvalidIsotopeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[d-kwargs19-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[h-3-kwargs20-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[h-kwargs21-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[d+-kwargs22-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[H-1-kwargs0-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[H-2-kwargs1-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[T-kwargs2-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[Li-6-kwargs3-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[lithium-6-kwargs4-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[alpha-kwargs5-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[p-kwargs6-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[H-3934-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[C-12b-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[-1.5-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[n-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[n-1-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[neutron-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[Neutron-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[d-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[t-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[s-36-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[H-1-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[help", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[1.1-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[n-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[p-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[alpha-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[deuteron-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[tritium-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[Au+-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[Fe", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[Og", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[h-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[fe-InvalidParticleError]", 
"plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[H-InvalidIsotopeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[1.1-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[alpha-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[He-4", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[he-4-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[Fe", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[deuteron-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[triton-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[H-1", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[H-1+-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[Og", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[Fe-56-kwargs1-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[n-kwargs2-InvalidIonError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[H-1", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[26-kwargs4-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[26-kwargs5-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[Og-kwargs6-MissingAtomicDataError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[Og-kwargs7-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[n-kwargs8-InvalidIonError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[He", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[argument10-kwargs10-UnitConversionError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[Og-kwargs11-MissingAtomicDataError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[fe-56", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_warnings[argument0-kwargs0-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_warnings[argument1-kwargs1-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_error[argument0-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_error[argument1-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_error[argument2-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_error[argument3-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_error[argument4-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes_error[common_isotopes]", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes_error[stable_isotopes]", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes_error[known_isotopes]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundance", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[fads-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[H++-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[h+-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[fe", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[d+-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[Fe", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[H-1-ChargeError]", 
"plasmapy/atomic/tests/test_atomic.py::test_integer_charge_warnings[H----AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_warnings[Fe", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_warnings[Og", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge_error[badinput-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge_error[h+-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge_error[Au", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge_warning[Au", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge_warning[H----AtomicWarning]", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_1D::test_units_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_1D::test_units_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_1D::test_unitless_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_1D::test_unitless_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_1D::test_zero_drift_units", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_1D::test_units_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_1D::test_units_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_1D::test_unitless_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_1D::test_unitless_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_1D::test_zero_drift_units", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_velocity_3D::test_units_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_velocity_3D::test_units_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_velocity_3D::test_unitless_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_velocity_3D::test_unitless_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_velocity_3D::test_zero_drift_units", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_3D::test_units_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_3D::test_units_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_3D::test_unitless_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_3D::test_unitless_vTh", "plasmapy/physics/tests/test_distribution.py::Test_Maxwellian_speed_3D::test_zero_drift_units", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_units_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_units_vTh", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_unitless_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_unitless_vTh", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_zero_drift_units", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_1D::test_value_drift_units", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_3D::test_units_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_3D::test_units_vTh", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_3D::test_unitless_no_vTh", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_3D::test_unitless_vTh", 
"plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_3D::test_zero_drift_units", "plasmapy/physics/tests/test_distribution.py::Test_kappa_velocity_3D::test_value_drift_units", "plasmapy/physics/tests/test_parameters.py::test_Alfven_speed", "plasmapy/physics/tests/test_parameters.py::test_ion_sound_speed", "plasmapy/physics/tests/test_parameters.py::test_gyrofrequency", "plasmapy/physics/tests/test_parameters.py::test_plasma_frequency" ]
[]
[]
BSD 3-Clause "New" or "Revised" License
2,053
[ "plasmapy/atomic/names.py", "plasmapy/atomic/__init__.py", "plasmapy/physics/quantum.py", "plasmapy/mathematics/mathematics.py", "plasmapy/diagnostics/langmuir.py", "plasmapy/classes/species.py", "plasmapy/atomic/atomic.py", "plasmapy/physics/parameters.py", "plasmapy/atomic/nuclear.py", "plasmapy/physics/transport.py", "plasmapy/physics/distribution.py", "plasmapy/physics/__init__.py" ]
[ "plasmapy/atomic/names.py", "plasmapy/atomic/__init__.py", "plasmapy/physics/quantum.py", "plasmapy/mathematics/mathematics.py", "plasmapy/diagnostics/langmuir.py", "plasmapy/classes/species.py", "plasmapy/atomic/atomic.py", "plasmapy/physics/parameters.py", "plasmapy/atomic/nuclear.py", "plasmapy/physics/transport.py", "plasmapy/physics/distribution.py", "plasmapy/physics/__init__.py" ]
nginxinc__crossplane-23
8709d938119f967ce938dd5163b233ce5439d30d
2018-01-18 01:31:29
8709d938119f967ce938dd5163b233ce5439d30d
diff --git a/crossplane/analyzer.py b/crossplane/analyzer.py index f5a5ffb..ec89dab 100644 --- a/crossplane/analyzer.py +++ b/crossplane/analyzer.py @@ -1920,7 +1920,7 @@ def analyze(fname, stmt, term, ctx=()): reason = '"%s" directive is not allowed here' % directive raise NgxParserDirectiveContextError(reason, fname, line) - valid_flag = lambda x: x in ('on', 'off') + valid_flag = lambda x: x.lower() in ('on', 'off') # do this in reverse because we only throw errors at the end if no masks # are valid, and typically the first bit mask is what the parser expects @@ -1942,6 +1942,8 @@ def analyze(fname, stmt, term, ctx=()): (mask & NGX_CONF_1MORE and n_args >= 1) or (mask & NGX_CONF_2MORE and n_args >= 2)): return + elif mask & NGX_CONF_FLAG and n_args == 1 and not valid_flag(args[0]): + reason = 'invalid value "%s" in "%%s" directive, it must be "on" or "off"' % args[0] else: reason = 'invalid number of arguments in "%s" directive'
NGINX_CONF_FLAG directives should support uppercase ON or OFF as args

`crossplane parse` throws an error for this config but `nginx` does not:

```nginx
events {
    accept_mutex OFF;
}
```
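For illustration, a minimal Python sketch of the case-insensitive check the patch above introduces; `valid_flag` mirrors the fixed lambda in `crossplane/analyzer.py`, while the assertions are hypothetical usage, not part of the project:

```python
# Mirrors the fixed lambda in crossplane/analyzer.py: nginx accepts
# flag arguments case-insensitively, so lowercase before comparing.
def valid_flag(arg):
    return arg.lower() in ('on', 'off')

assert valid_flag('OFF')    # accepted by nginx; rejected before the fix
assert valid_flag('On')
assert not valid_flag('1')  # still invalid, matching nginx behavior
```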
nginxinc/crossplane
diff --git a/tests/test_analyze.py b/tests/test_analyze.py index ccb4091..8c36815 100644 --- a/tests/test_analyze.py +++ b/tests/test_analyze.py @@ -31,3 +31,30 @@ def test_state_directive(): raise Exception("bad context for 'state' passed: " + repr(ctx)) except crossplane.errors.NgxParserDirectiveContextError: continue + + +def test_flag_directive_args(): + fname = '/path/to/nginx.conf' + ctx = ('events',) + + # an NGINX_CONF_FLAG directive + stmt = { + 'directive': 'accept_mutex', + 'line': 2 # this is arbitrary + } + + good_args = [['on'], ['off'], ['On'], ['Off'], ['ON'], ['OFF']] + + for args in good_args: + stmt['args'] = args + crossplane.analyzer.analyze(fname, stmt, term=';', ctx=ctx) + + bad_args = [['1'], ['0'], ['true'], ['okay'], ['']] + + for args in bad_args: + stmt['args'] = args + try: + crossplane.analyzer.analyze(fname, stmt, term=';', ctx=ctx) + raise Exception('bad args for flag directive: ' + repr(args)) + except crossplane.errors.NgxParserDirectiveArgumentsError as e: + assert e.strerror.endswith('it must be "on" or "off"')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 -e git+https://github.com/nginxinc/crossplane.git@8709d938119f967ce938dd5163b233ce5439d30d#egg=crossplane importlib-metadata==4.8.3 iniconfig==1.1.1 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: crossplane channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/crossplane
[ "tests/test_analyze.py::test_flag_directive_args" ]
[]
[ "tests/test_analyze.py::test_state_directive" ]
[]
Apache License 2.0
2,054
[ "crossplane/analyzer.py" ]
[ "crossplane/analyzer.py" ]
python-pillow__Pillow-2965
b9ea73738ea74554bed5a1a7b90ffade0f01ce32
2018-01-18 13:36:23
0b578f25d54a86884c87b5e1cb0d3a14d3dfc747
diff --git a/docs/handbook/image-file-formats.rst b/docs/handbook/image-file-formats.rst index 1ee6540ea..d265561de 100644 --- a/docs/handbook/image-file-formats.rst +++ b/docs/handbook/image-file-formats.rst @@ -612,6 +612,14 @@ The :py:meth:`~PIL.Image.Image.save` method can take the following keyword argum .. versionadded:: 3.4.0 +**append_images** + A list of images to append as additional frames. Each of the + images in the list can be single or multiframe images. Note however, that for + correct results, all the appended images should have the same + ``encoderinfo`` and ``encoderconfig`` properties. + + .. versionadded:: 4.2.0 + **tiffinfo** A :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` object or dict object containing tiff tags and values. The TIFF field type is @@ -944,14 +952,68 @@ The format code is ``Palm``, the extension is ``.palm``. PDF ^^^ -PIL can write PDF (Acrobat) images. Such images are written as binary PDF 1.1 +PIL can write PDF (Acrobat) images. Such images are written as binary PDF 1.4 files, using either JPEG or HEX encoding depending on the image mode (and whether JPEG support is available or not). -When calling :py:meth:`~PIL.Image.Image.save`, if a multiframe image is used, -by default, only the first image will be saved. To save all frames, each frame -to a separate page of the PDF, the ``save_all`` parameter must be present and -set to ``True``. +The :py:meth:`~PIL.Image.Image.save` method can take the following keyword arguments: + +**save_all** + If a multiframe image is used, by default, only the first image will be saved. + To save all frames, each frame to a separate page of the PDF, the ``save_all`` + parameter must be present and set to ``True``. + + .. versionadded:: 3.0.0 + +**append_images** + A list of images to append as additional pages. Each of the + images in the list can be single or multiframe images. + + .. versionadded:: 4.2.0 + +**append** + Set to True to append pages to an existing PDF file. If the file doesn't + exist, an :py:exc:`IOError` will be raised. + + .. versionadded:: 5.1.0 + +**resolution** + Image resolution in DPI. This, together with the number of pixels in the + image, will determine the physical dimensions of the page that will be + saved in the PDF. + +**title** + The document’s title. + + .. versionadded:: 5.1.0 + +**author** + The name of the person who created the document. + + .. versionadded:: 5.1.0 + +**subject** + The subject of the document. + + .. versionadded:: 5.1.0 + +**keywords** + Keywords associated with the document. + + .. versionadded:: 5.1.0 + +**creator** + If the document was converted to PDF from another format, the name of the + conforming product that created the original document from which it was + converted. + + .. versionadded:: 5.1.0 + +**producer** + If the document was converted to PDF from another format, the name of the + conforming product that converted it to PDF. + + .. versionadded:: 5.1.0 XV Thumbnails ^^^^^^^^^^^^^ diff --git a/src/PIL/Image.py b/src/PIL/Image.py index 8d4d9c8a8..7c3dc1fd7 100644 --- a/src/PIL/Image.py +++ b/src/PIL/Image.py @@ -1922,9 +1922,12 @@ class Image(object): save_handler = SAVE[format.upper()] if open_fp: - # Open also for reading ("+"), because TIFF save_all - # writer needs to go back and edit the written data. - fp = builtins.open(filename, "w+b") + if params.get('append', False): + fp = builtins.open(filename, "r+b") + else: + # Open also for reading ("+"), because TIFF save_all + # writer needs to go back and edit the written data. 
+ fp = builtins.open(filename, "w+b") try: save_handler(self, fp, filename) diff --git a/src/PIL/PdfImagePlugin.py b/src/PIL/PdfImagePlugin.py index 86bc9c8e9..fa2388a74 100644 --- a/src/PIL/PdfImagePlugin.py +++ b/src/PIL/PdfImagePlugin.py @@ -20,11 +20,10 @@ # Image plugin for PDF images (output only). ## -from . import Image, ImageFile, ImageSequence -from ._binary import i8 +from . import Image, ImageFile, ImageSequence, PdfParser import io -__version__ = "0.4" +__version__ = "0.5" # @@ -37,19 +36,6 @@ __version__ = "0.4" # 4. page # 5. page contents -def _obj(fp, obj, **dictionary): - fp.write("%d 0 obj\n" % obj) - if dictionary: - fp.write("<<\n") - for k, v in dictionary.items(): - if v is not None: - fp.write("/%s %s\n" % (k, v)) - fp.write(">>\n") - - -def _endobj(fp): - fp.write("endobj\n") - def _save_all(im, fp, filename): _save(im, fp, filename, save_all=True) @@ -60,76 +46,39 @@ def _save_all(im, fp, filename): def _save(im, fp, filename, save_all=False): resolution = im.encoderinfo.get("resolution", 72.0) + is_appending = im.encoderinfo.get("append", False) + title = im.encoderinfo.get("title", None) + author = im.encoderinfo.get("author", None) + subject = im.encoderinfo.get("subject", None) + keywords = im.encoderinfo.get("keywords", None) + creator = im.encoderinfo.get("creator", None) + producer = im.encoderinfo.get("producer", None) + + if is_appending: + existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="r+b") + else: + existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="w+b") + + if title: + existing_pdf.info.Title = title + if author: + existing_pdf.info.Author = author + if subject: + existing_pdf.info.Subject = subject + if keywords: + existing_pdf.info.Keywords = keywords + if creator: + existing_pdf.info.Creator = creator + if producer: + existing_pdf.info.Producer = producer # # make sure image data is available im.load() - xref = [0] - - class TextWriter(object): - def __init__(self, fp): - self.fp = fp - - def __getattr__(self, name): - return getattr(self.fp, name) - - def write(self, value): - self.fp.write(value.encode('latin-1')) - - fp = TextWriter(fp) - - fp.write("%PDF-1.2\n") - fp.write("% created by PIL PDF driver " + __version__ + "\n") - - # FIXME: Should replace ASCIIHexDecode with RunLengthDecode (packbits) - # or LZWDecode (tiff/lzw compression). Note that PDF 1.2 also supports - # Flatedecode (zip compression). 
- - bits = 8 - params = None - - if im.mode == "1": - filter = "/ASCIIHexDecode" - colorspace = "/DeviceGray" - procset = "/ImageB" # grayscale - bits = 1 - elif im.mode == "L": - filter = "/DCTDecode" - # params = "<< /Predictor 15 /Columns %d >>" % (width-2) - colorspace = "/DeviceGray" - procset = "/ImageB" # grayscale - elif im.mode == "P": - filter = "/ASCIIHexDecode" - colorspace = "[ /Indexed /DeviceRGB 255 <" - palette = im.im.getpalette("RGB") - for i in range(256): - r = i8(palette[i*3]) - g = i8(palette[i*3+1]) - b = i8(palette[i*3+2]) - colorspace += "%02x%02x%02x " % (r, g, b) - colorspace += "> ]" - procset = "/ImageI" # indexed color - elif im.mode == "RGB": - filter = "/DCTDecode" - colorspace = "/DeviceRGB" - procset = "/ImageC" # color images - elif im.mode == "CMYK": - filter = "/DCTDecode" - colorspace = "/DeviceCMYK" - procset = "/ImageC" # color images - else: - raise ValueError("cannot save mode %s" % im.mode) - - # - # catalogue - - xref.append(fp.tell()) - _obj( - fp, 1, - Type="/Catalog", - Pages="2 0 R") - _endobj(fp) + existing_pdf.start_writing() + existing_pdf.write_header() + existing_pdf.write_comment("created by PIL PDF driver " + __version__) # # pages @@ -137,11 +86,12 @@ def _save(im, fp, filename, save_all=False): if save_all: append_images = im.encoderinfo.get("append_images", []) for append_im in append_images: - if append_im.mode != im.mode: - append_im = append_im.convert(im.mode) append_im.encoderinfo = im.encoderinfo.copy() ims.append(append_im) numberOfPages = 0 + image_refs = [] + page_refs = [] + contents_refs = [] for im in ims: im_numberOfPages = 1 if save_all: @@ -151,26 +101,58 @@ def _save(im, fp, filename, save_all=False): # Image format does not have n_frames. It is a single frame image pass numberOfPages += im_numberOfPages - pages = [str(pageNumber*3+4)+" 0 R" - for pageNumber in range(0, numberOfPages)] + for i in range(im_numberOfPages): + image_refs.append(existing_pdf.next_object_id(0)) + page_refs.append(existing_pdf.next_object_id(0)) + contents_refs.append(existing_pdf.next_object_id(0)) + existing_pdf.pages.append(page_refs[-1]) - xref.append(fp.tell()) - _obj( - fp, 2, - Type="/Pages", - Count=len(pages), - Kids="["+"\n".join(pages)+"]") - _endobj(fp) + # + # catalog and list of pages + existing_pdf.write_catalog() pageNumber = 0 for imSequence in ims: for im in ImageSequence.Iterator(imSequence): + # FIXME: Should replace ASCIIHexDecode with RunLengthDecode (packbits) + # or LZWDecode (tiff/lzw compression). Note that PDF 1.2 also supports + # Flatedecode (zip compression). 
+ + bits = 8 + params = None + + if im.mode == "1": + filter = "ASCIIHexDecode" + colorspace = PdfParser.PdfName("DeviceGray") + procset = "ImageB" # grayscale + bits = 1 + elif im.mode == "L": + filter = "DCTDecode" + # params = "<< /Predictor 15 /Columns %d >>" % (width-2) + colorspace = PdfParser.PdfName("DeviceGray") + procset = "ImageB" # grayscale + elif im.mode == "P": + filter = "ASCIIHexDecode" + palette = im.im.getpalette("RGB") + colorspace = [PdfParser.PdfName("Indexed"), PdfParser.PdfName("DeviceRGB"), 255, PdfParser.PdfBinary(palette)] + procset = "ImageI" # indexed color + elif im.mode == "RGB": + filter = "DCTDecode" + colorspace = PdfParser.PdfName("DeviceRGB") + procset = "ImageC" # color images + elif im.mode == "CMYK": + filter = "DCTDecode" + colorspace = PdfParser.PdfName("DeviceCMYK") + procset = "ImageC" # color images + else: + raise ValueError("cannot save mode %s" % im.mode) + # # image op = io.BytesIO() - if filter == "/ASCIIHexDecode": + if filter == "ASCIIHexDecode": if bits == 1: # FIXME: the hex encoder doesn't support packed 1-bit # images; do things the hard way... @@ -178,11 +160,11 @@ def _save(im, fp, filename, save_all=False): im = Image.new("L", (len(data), 1), None) im.putdata(data) ImageFile._save(im, op, [("hex", (0, 0)+im.size, 0, im.mode)]) - elif filter == "/DCTDecode": + elif filter == "DCTDecode": Image.SAVE["JPEG"](im, op, filename) - elif filter == "/FlateDecode": + elif filter == "FlateDecode": ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)]) - elif filter == "/RunLengthDecode": + elif filter == "RunLengthDecode": ImageFile._save(im, op, [("packbits", (0, 0)+im.size, 0, im.mode)]) else: raise ValueError("unsupported PDF filter (%s)" % filter) @@ -192,73 +174,45 @@ def _save(im, fp, filename, save_all=False): width, height = im.size - xref.append(fp.tell()) - _obj( - fp, pageNumber*3+3, - Type="/XObject", - Subtype="/Image", + existing_pdf.write_obj(image_refs[pageNumber], stream=op.getvalue(), + Type=PdfParser.PdfName("XObject"), + Subtype=PdfParser.PdfName("Image"), Width=width, # * 72.0 / resolution, Height=height, # * 72.0 / resolution, - Length=len(op.getvalue()), - Filter=filter, + Filter=PdfParser.PdfName(filter), BitsPerComponent=bits, DecodeParams=params, ColorSpace=colorspace) - fp.write("stream\n") - fp.fp.write(op.getvalue()) - fp.write("\nendstream\n") - - _endobj(fp) - # # page - xref.append(fp.tell()) - _obj(fp, pageNumber*3+4) - fp.write( - "<<\n/Type /Page\n/Parent 2 0 R\n" - "/Resources <<\n/ProcSet [ /PDF %s ]\n" - "/XObject << /image %d 0 R >>\n>>\n" - "/MediaBox [ 0 0 %d %d ]\n/Contents %d 0 R\n>>\n" % ( - procset, - pageNumber*3+3, - int(width * 72.0 / resolution), - int(height * 72.0 / resolution), - pageNumber*3+5)) - _endobj(fp) + existing_pdf.write_page(page_refs[pageNumber], + Resources=PdfParser.PdfDict( + ProcSet=[PdfParser.PdfName("PDF"), PdfParser.PdfName(procset)], + XObject=PdfParser.PdfDict(image=image_refs[pageNumber])), + MediaBox=[0, 0, int(width * 72.0 / resolution), int(height * 72.0 / resolution)], + Contents=contents_refs[pageNumber] + ) # # page contents - op = TextWriter(io.BytesIO()) - - op.write( + page_contents = PdfParser.make_bytes( "q %d 0 0 %d 0 0 cm /image Do Q\n" % ( int(width * 72.0 / resolution), int(height * 72.0 / resolution))) - xref.append(fp.tell()) - _obj(fp, pageNumber*3+5, Length=len(op.fp.getvalue())) - - fp.write("stream\n") - fp.fp.write(op.fp.getvalue()) - fp.write("\nendstream\n") - - _endobj(fp) + existing_pdf.write_obj(contents_refs[pageNumber], 
stream=page_contents) pageNumber += 1 # # trailer - startxref = fp.tell() - fp.write("xref\n0 %d\n0000000000 65535 f \n" % len(xref)) - for x in xref[1:]: - fp.write("%010d 00000 n \n" % x) - fp.write("trailer\n<<\n/Size %d\n/Root 1 0 R\n>>\n" % len(xref)) - fp.write("startxref\n%d\n%%%%EOF\n" % startxref) + existing_pdf.write_xref_and_trailer() if hasattr(fp, "flush"): fp.flush() + existing_pdf.close() # # -------------------------------------------------------------------- diff --git a/src/PIL/PdfParser.py b/src/PIL/PdfParser.py new file mode 100644 index 000000000..b6938fdb7 --- /dev/null +++ b/src/PIL/PdfParser.py @@ -0,0 +1,846 @@ +import codecs +import collections +import mmap +import os +import re +import zlib + +try: + from UserDict import UserDict # Python 2.x +except ImportError: + UserDict = collections.UserDict # Python 3.x + + +if str == bytes: # Python 2.x + def make_bytes(s): # pragma: no cover + return s # pragma: no cover +else: # Python 3.x + def make_bytes(s): + return s.encode("us-ascii") + + +# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set on page 656 +def encode_text(s): + return codecs.BOM_UTF16_BE + s.encode("utf_16_be") + + +PDFDocEncoding = { + 0x16: u"\u0017", + 0x18: u"\u02D8", + 0x19: u"\u02C7", + 0x1A: u"\u02C6", + 0x1B: u"\u02D9", + 0x1C: u"\u02DD", + 0x1D: u"\u02DB", + 0x1E: u"\u02DA", + 0x1F: u"\u02DC", + 0x80: u"\u2022", + 0x81: u"\u2020", + 0x82: u"\u2021", + 0x83: u"\u2026", + 0x84: u"\u2014", + 0x85: u"\u2013", + 0x86: u"\u0192", + 0x87: u"\u2044", + 0x88: u"\u2039", + 0x89: u"\u203A", + 0x8A: u"\u2212", + 0x8B: u"\u2030", + 0x8C: u"\u201E", + 0x8D: u"\u201C", + 0x8E: u"\u201D", + 0x8F: u"\u2018", + 0x90: u"\u2019", + 0x91: u"\u201A", + 0x92: u"\u2122", + 0x93: u"\uFB01", + 0x94: u"\uFB02", + 0x95: u"\u0141", + 0x96: u"\u0152", + 0x97: u"\u0160", + 0x98: u"\u0178", + 0x99: u"\u017D", + 0x9A: u"\u0131", + 0x9B: u"\u0142", + 0x9C: u"\u0153", + 0x9D: u"\u0161", + 0x9E: u"\u017E", + 0xA0: u"\u20AC", + } + + +def decode_text(b): + if b[:len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE: + return b[len(codecs.BOM_UTF16_BE):].decode("utf_16_be") + elif str == bytes: # Python 2.x + return u"".join(PDFDocEncoding.get(ord(byte), byte) for byte in b) + else: + return "".join(PDFDocEncoding.get(byte, chr(byte)) for byte in b) + + +class PdfFormatError(RuntimeError): + """An error that probably indicates a syntactic or semantic error in the PDF file structure""" + pass + + +def check_format_condition(condition, error_message): + if not condition: + raise PdfFormatError(error_message) + + +class IndirectReference(collections.namedtuple("IndirectReferenceTuple", ["object_id", "generation"])): + def __str__(self): + return "%s %s R" % self + + def __bytes__(self): + return self.__str__().encode("us-ascii") + + def __eq__(self, other): + return other.__class__ is self.__class__ and other.object_id == self.object_id and other.generation == self.generation + + def __ne__(self, other): + return not (self == other) + + def __hash__(self): + return hash((self.object_id, self.generation)) + + +class IndirectObjectDef(IndirectReference): + def __str__(self): + return "%s %s obj" % self + + +class XrefTable: + def __init__(self): + self.existing_entries = {} # object ID => (offset, generation) + self.new_entries = {} # object ID => (offset, generation) + self.deleted_entries = {0: 65536} # object ID => generation + self.reading_finished = False + + def __setitem__(self, key, value): + if self.reading_finished: + self.new_entries[key] = value + 
else: + self.existing_entries[key] = value + if key in self.deleted_entries: + del self.deleted_entries[key] + + def __getitem__(self, key): + try: + return self.new_entries[key] + except KeyError: + return self.existing_entries[key] + + def __delitem__(self, key): + if key in self.new_entries: + generation = self.new_entries[key][1] + 1 + del self.new_entries[key] + self.deleted_entries[key] = generation + elif key in self.existing_entries: + generation = self.existing_entries[key][1] + 1 + self.deleted_entries[key] = generation + elif key in self.deleted_entries: + generation = self.deleted_entries[key] + else: + raise IndexError("object ID " + str(key) + " cannot be deleted because it doesn't exist") + + def __contains__(self, key): + return key in self.existing_entries or key in self.new_entries + + def __len__(self): + return len(set(self.existing_entries.keys()) | set(self.new_entries.keys()) | set(self.deleted_entries.keys())) + + def keys(self): + return (set(self.existing_entries.keys()) - set(self.deleted_entries.keys())) | set(self.new_entries.keys()) + + def write(self, f): + keys = sorted(set(self.new_entries.keys()) | set(self.deleted_entries.keys())) + deleted_keys = sorted(set(self.deleted_entries.keys())) + startxref = f.tell() + f.write(b"xref\n") + while keys: + # find a contiguous sequence of object IDs + prev = None + for index, key in enumerate(keys): + if prev is None or prev+1 == key: + prev = key + else: + contiguous_keys = keys[:index] + keys = keys[index:] + break + else: + contiguous_keys = keys + keys = None + f.write(make_bytes("%d %d\n" % (contiguous_keys[0], len(contiguous_keys)))) + for object_id in contiguous_keys: + if object_id in self.new_entries: + f.write(make_bytes("%010d %05d n \n" % self.new_entries[object_id])) + else: + this_deleted_object_id = deleted_keys.pop(0) + check_format_condition(object_id == this_deleted_object_id, "expected the next deleted object ID to be %s, instead found %s" % (object_id, this_deleted_object_id)) + try: + next_in_linked_list = deleted_keys[0] + except IndexError: + next_in_linked_list = 0 + f.write(make_bytes("%010d %05d f \n" % (next_in_linked_list, self.deleted_entries[object_id]))) + return startxref + + +class PdfName: + def __init__(self, name): + if isinstance(name, PdfName): + self.name = name.name + elif isinstance(name, bytes): + self.name = name + else: + self.name = name.encode("us-ascii") + + def name_as_str(self): + return self.name.decode("us-ascii") + + def __eq__(self, other): + return (isinstance(other, PdfName) and other.name == self.name) or other == self.name + + def __hash__(self): + return hash(self.name) + + def __repr__(self): + return "PdfName(%s)" % repr(self.name) + + @classmethod + def from_pdf_stream(klass, data): + return klass(PdfParser.interpret_name(data)) + + allowed_chars = set(range(33,127)) - set(ord(c) for c in "#%/()<>[]{}") + + def __bytes__(self): + if str == bytes: # Python 2.x + result = bytearray(b"/") + for b in self.name: + if ord(b) in self.allowed_chars: + result.append(b) + else: + result.extend(b"#%02X" % ord(b)) + else: # Python 3.x + result = bytearray(b"/") + for b in self.name: + if b in self.allowed_chars: + result.append(b) + else: + result.extend(make_bytes("#%02X" % b)) + return bytes(result) + + __str__ = __bytes__ + + +class PdfArray(list): + def __bytes__(self): + return b"[ " + b" ".join(pdf_repr(x) for x in self) + b" ]" + + __str__ = __bytes__ + + +class PdfDict(UserDict): + def __init__(self, *args, **kwargs): + UserDict.__init__(self, *args, **kwargs) 
+ + def __setattr__(self, key, value): + if key == "data": + if hasattr(UserDict, "__setattr__"): + UserDict.__setattr__(self, key, value) + else: + self.__dict__[key] = value + else: + if isinstance(key, str): + key = key.encode("us-ascii") + self[key] = value + + def __getattr__(self, key): + try: + value = self[key] + except KeyError: + try: + value = self[key.encode("us-ascii")] + except KeyError: + raise AttributeError(key) + if isinstance(value, bytes): + return decode_text(value) + else: + return value + + def __bytes__(self): + out = bytearray(b"<<") + for key, value in self.items(): + if value is None: + continue + value = pdf_repr(value) + out.extend(b"\n") + out.extend(bytes(PdfName(key))) + out.extend(b" ") + out.extend(value) + out.extend(b"\n>>") + return bytes(out) + + if str == bytes: + __str__ = __bytes__ + + +class PdfBinary: + def __init__(self, data): + self.data = data + + if str == bytes: # Python 2.x + def __str__(self): + return "<%s>" % "".join("%02X" % ord(b) for b in self.data) + + else: # Python 3.x + def __bytes__(self): + return make_bytes("<%s>" % "".join("%02X" % b for b in self.data)) + + +class PdfStream: + def __init__(self, dictionary, buf): + self.dictionary = dictionary + self.buf = buf + + def decode(self): + try: + filter = self.dictionary.Filter + except AttributeError: + return self.buf + if filter == b"FlateDecode": + try: + expected_length = self.dictionary.DL + except AttributeError: + expected_length = self.dictionary.Length + return zlib.decompress(self.buf, bufsize=int(expected_length)) + else: + raise NotImplementedError("stream filter %s unknown/unsupported" % repr(self.dictionary.Filter)) + + +def pdf_repr(x): + if x is True: + return b"true" + elif x is False: + return b"false" + elif x is None: + return b"null" + elif isinstance(x, PdfName) or isinstance(x, PdfDict) or isinstance(x, PdfArray) or isinstance(x, PdfBinary): + return bytes(x) + elif isinstance(x, int): + return str(x).encode("us-ascii") + elif isinstance(x, dict): + return bytes(PdfDict(x)) + elif isinstance(x, list): + return bytes(PdfArray(x)) + elif (str == bytes and isinstance(x, unicode)) or (str != bytes and isinstance(x, str)): + return pdf_repr(encode_text(x)) + elif isinstance(x, bytes): + return b"(" + x.replace(b"\\", b"\\\\").replace(b"(", b"\\(").replace(b")", b"\\)") + b")" # XXX escape more chars? 
handle binary garbage + else: + return bytes(x) + + +class PdfParser: + """Based on http://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf + Supports PDF up to 1.4 + """ + + def __init__(self, filename=None, f=None, buf=None, start_offset=0, mode="rb"): + # type: (PdfParser, str, file, Union[bytes, bytearray], int, str) -> None + if buf and f: + raise RuntimeError("specify buf or f or filename, but not both buf and f") + self.filename = filename + self.buf = buf + self.f = f + self.start_offset = start_offset + self.should_close_buf = False + self.should_close_file = False + if filename is not None and f is None: + self.f = f = open(filename, mode) + self.should_close_file = True + if f is not None: + self.buf = buf = self.get_buf_from_file(f) + self.should_close_buf = True + if not filename and hasattr(f, "name"): + self.filename = f.name + self.cached_objects = {} + if buf: + self.read_pdf_info() + else: + self.file_size_total = self.file_size_this = 0 + self.root = PdfDict() + self.root_ref = None + self.info = PdfDict() + self.info_ref = None + self.page_tree_root = {} + self.pages = [] + self.orig_pages = [] + self.pages_ref = None + self.last_xref_section_offset = None + self.trailer_dict = {} + self.xref_table = XrefTable() + self.xref_table.reading_finished = True + if f: + self.seek_end() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + return False # do not suppress exceptions + + def start_writing(self): + self.close_buf() + self.seek_end() + + def close_buf(self): + try: + self.buf.close() + except AttributeError: + pass + self.buf = None + + def close(self): + if self.should_close_buf: + self.close_buf() + if self.f is not None and self.should_close_file: + self.f.close() + self.f = None + + def seek_end(self): + self.f.seek(0, os.SEEK_END) + + def write_header(self): + self.f.write(b"%PDF-1.4\n") + + def write_comment(self, s): + self.f.write(("%% %s\n" % (s,)).encode("utf-8")) + + def write_catalog(self): + self.del_root() + self.root_ref = self.next_object_id(self.f.tell()) + self.pages_ref = self.next_object_id(0) + self.rewrite_pages() + self.write_obj(self.root_ref, + Type=PdfName(b"Catalog"), + Pages=self.pages_ref) + self.write_obj(self.pages_ref, + Type=PdfName(b"Pages"), + Count=len(self.pages), + Kids=self.pages) + return self.root_ref + + def rewrite_pages(self): + pages_tree_nodes_to_delete = [] + for i, page_ref in enumerate(self.orig_pages): + page_info = self.cached_objects[page_ref] + del self.xref_table[page_ref.object_id] + pages_tree_nodes_to_delete.append(page_info[PdfName(b"Parent")]) + if page_ref not in self.pages: + # the page has been deleted + continue + # make dict keys into strings for passing to write_page + stringified_page_info = {} + for key, value in page_info.items(): + # key should be a PdfName + stringified_page_info[key.name_as_str()] = value + stringified_page_info["Parent"] = self.pages_ref + new_page_ref = self.write_page(None, **stringified_page_info) + for j, cur_page_ref in enumerate(self.pages): + if cur_page_ref == page_ref: + # replace the page reference with the new one + self.pages[j] = new_page_ref + # delete redundant Pages tree nodes from xref table + for pages_tree_node_ref in pages_tree_nodes_to_delete: + while pages_tree_node_ref: + pages_tree_node = self.cached_objects[pages_tree_node_ref] + if pages_tree_node_ref.object_id in self.xref_table: + del self.xref_table[pages_tree_node_ref.object_id] + pages_tree_node_ref = 
pages_tree_node.get(b"Parent", None) + self.orig_pages = [] + + def write_xref_and_trailer(self, new_root_ref=None): + if new_root_ref: + self.del_root() + self.root_ref = new_root_ref + if self.info: + self.info_ref = self.write_obj(None, self.info) + start_xref = self.xref_table.write(self.f) + num_entries = len(self.xref_table) + trailer_dict = {b"Root": self.root_ref, b"Size": num_entries} + if self.last_xref_section_offset is not None: + trailer_dict[b"Prev"] = self.last_xref_section_offset + if self.info: + trailer_dict[b"Info"] = self.info_ref + self.last_xref_section_offset = start_xref + self.f.write(b"trailer\n" + bytes(PdfDict(trailer_dict)) + make_bytes("\nstartxref\n%d\n%%%%EOF" % start_xref)) + + def write_page(self, ref, *objs, **dict_obj): + if isinstance(ref, int): + ref = self.pages[ref] + if "Type" not in dict_obj: + dict_obj["Type"] = PdfName(b"Page") + if "Parent" not in dict_obj: + dict_obj["Parent"] = self.pages_ref + return self.write_obj(ref, *objs, **dict_obj) + + def write_obj(self, ref, *objs, **dict_obj): + f = self.f + if ref is None: + ref = self.next_object_id(f.tell()) + else: + self.xref_table[ref.object_id] = (f.tell(), ref.generation) + f.write(bytes(IndirectObjectDef(*ref))) + stream = dict_obj.pop("stream", None) + if stream is not None: + dict_obj["Length"] = len(stream) + if dict_obj: + f.write(pdf_repr(dict_obj)) + for obj in objs: + f.write(pdf_repr(obj)) + if stream is not None: + f.write(b"stream\n") + f.write(stream) + f.write(b"\nendstream\n") + f.write(b"endobj\n") + return ref + + def del_root(self): + if self.root_ref is None: + return + del self.xref_table[self.root_ref.object_id] + del self.xref_table[self.root[b"Pages"].object_id] + + @staticmethod + def get_buf_from_file(f): + if hasattr(f, "getbuffer"): + return f.getbuffer() + elif hasattr(f, "getvalue"): + return f.getvalue() + else: + try: + return mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) + except ValueError: # cannot mmap an empty file + return b"" + + def read_pdf_info(self): + self.file_size_total = len(self.buf) + self.file_size_this = self.file_size_total - self.start_offset + self.read_trailer() + self.root_ref = self.trailer_dict[b"Root"] + self.info_ref = self.trailer_dict.get(b"Info", None) + self.root = PdfDict(self.read_indirect(self.root_ref)) + if self.info_ref is None: + self.info = PdfDict() + else: + self.info = PdfDict(self.read_indirect(self.info_ref)) + check_format_condition(b"Type" in self.root, "/Type missing in Root") + check_format_condition(self.root[b"Type"] == b"Catalog", "/Type in Root is not /Catalog") + check_format_condition(b"Pages" in self.root, "/Pages missing in Root") + check_format_condition(isinstance(self.root[b"Pages"], IndirectReference), "/Pages in Root is not an indirect reference") + self.pages_ref = self.root[b"Pages"] + self.page_tree_root = self.read_indirect(self.pages_ref) + self.pages = self.linearize_page_tree(self.page_tree_root) + # save the original list of page references in case the user modifies, adds or deletes some pages and we need to rewrite the pages and their list + self.orig_pages = self.pages[:] + + def next_object_id(self, offset=None): + try: + # TODO: support reuse of deleted objects + reference = IndirectReference(max(self.xref_table.keys()) + 1, 0) + except ValueError: + reference = IndirectReference(1, 0) + if offset is not None: + self.xref_table[reference.object_id] = (offset, 0) + return reference + + delimiter = br"[][()<>{}/%]" + delimiter_or_ws = br"[][()<>{}/%\000\011\012\014\015\040]" + whitespace 
= br"[\000\011\012\014\015\040]" + whitespace_or_hex = br"[\000\011\012\014\015\0400-9a-fA-F]" + whitespace_optional = whitespace + b"*" + whitespace_mandatory = whitespace + b"+" + newline_only = br"[\r\n]+" + newline = whitespace_optional + newline_only + whitespace_optional + re_trailer_end = re.compile(whitespace_mandatory + br"trailer" + whitespace_optional + br"\<\<(.*\>\>)" + newline + + br"startxref" + newline + br"([0-9]+)" + newline + br"%%EOF" + whitespace_optional + br"$", re.DOTALL) + re_trailer_prev = re.compile(whitespace_optional + br"trailer" + whitespace_optional + br"\<\<(.*?\>\>)" + newline + + br"startxref" + newline + br"([0-9]+)" + newline + br"%%EOF" + whitespace_optional, re.DOTALL) + + def read_trailer(self): + search_start_offset = len(self.buf) - 16384 + if search_start_offset < self.start_offset: + search_start_offset = self.start_offset + m = self.re_trailer_end.search(self.buf, search_start_offset) + check_format_condition(m, "trailer end not found") + # make sure we found the LAST trailer + last_match = m + while m: + last_match = m + m = self.re_trailer_end.search(self.buf, m.start()+16) + if not m: + m = last_match + trailer_data = m.group(1) + self.last_xref_section_offset = int(m.group(2)) + self.trailer_dict = self.interpret_trailer(trailer_data) + self.xref_table = XrefTable() + self.read_xref_table(xref_section_offset=self.last_xref_section_offset) + if b"Prev" in self.trailer_dict: + self.read_prev_trailer(self.trailer_dict[b"Prev"]) + + def read_prev_trailer(self, xref_section_offset): + trailer_offset = self.read_xref_table(xref_section_offset=xref_section_offset) + m = self.re_trailer_prev.search(self.buf[trailer_offset:trailer_offset+16384]) + check_format_condition(m, "previous trailer not found") + trailer_data = m.group(1) + check_format_condition(int(m.group(2)) == xref_section_offset, "xref section offset in previous trailer doesn't match what was expected") + trailer_dict = self.interpret_trailer(trailer_data) + if b"Prev" in trailer_dict: + self.read_prev_trailer(trailer_dict[b"Prev"]) + + re_whitespace_optional = re.compile(whitespace_optional) + re_name = re.compile(whitespace_optional + br"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?=" + delimiter_or_ws + br")") + re_dict_start = re.compile(whitespace_optional + br"\<\<") + re_dict_end = re.compile(whitespace_optional + br"\>\>" + whitespace_optional) + + @classmethod + def interpret_trailer(klass, trailer_data): + trailer = {} + offset = 0 + while True: + m = klass.re_name.match(trailer_data, offset) + if not m: + m = klass.re_dict_end.match(trailer_data, offset) + check_format_condition(m and m.end() == len(trailer_data), "name not found in trailer, remaining data: " + repr(trailer_data[offset:])) + break + key = klass.interpret_name(m.group(1)) + value, offset = klass.get_value(trailer_data, m.end()) + trailer[key] = value + check_format_condition(b"Size" in trailer and isinstance(trailer[b"Size"], int), "/Size not in trailer or not an integer") + check_format_condition(b"Root" in trailer and isinstance(trailer[b"Root"], IndirectReference), "/Root not in trailer or not an indirect reference") + return trailer + + re_hashes_in_name = re.compile(br"([^#]*)(#([0-9a-fA-F]{2}))?") + + @classmethod + def interpret_name(klass, raw, as_text=False): + name = b"" + for m in klass.re_hashes_in_name.finditer(raw): + if m.group(3): + name += m.group(1) + bytearray.fromhex(m.group(3).decode("us-ascii")) + else: + name += m.group(1) + if as_text: + return name.decode("utf-8") + else: + return bytes(name) + + 
re_null = re.compile(whitespace_optional + br"null(?=" + delimiter_or_ws + br")") + re_true = re.compile(whitespace_optional + br"true(?=" + delimiter_or_ws + br")") + re_false = re.compile(whitespace_optional + br"false(?=" + delimiter_or_ws + br")") + re_int = re.compile(whitespace_optional + br"([-+]?[0-9]+)(?=" + delimiter_or_ws + br")") + re_real = re.compile(whitespace_optional + br"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?=" + delimiter_or_ws + br")") + re_array_start = re.compile(whitespace_optional + br"\[") + re_array_end = re.compile(whitespace_optional + br"]") + re_string_hex = re.compile(whitespace_optional + br"\<(" + whitespace_or_hex + br"*)\>") + re_string_lit = re.compile(whitespace_optional + br"\(") + re_indirect_reference = re.compile(whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory + br"([-+]?[0-9]+)" + whitespace_mandatory + br"R(?=" + delimiter_or_ws + br")") + re_indirect_def_start = re.compile(whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory + br"([-+]?[0-9]+)" + whitespace_mandatory + br"obj(?=" + delimiter_or_ws + br")") + re_indirect_def_end = re.compile(whitespace_optional + br"endobj(?=" + delimiter_or_ws + br")") + re_comment = re.compile(br"(" + whitespace_optional + br"%[^\r\n]*" + newline + br")*") + re_stream_start = re.compile(whitespace_optional + br"stream\r?\n") + re_stream_end = re.compile(whitespace_optional + br"endstream(?=" + delimiter_or_ws + br")") + + @classmethod + def get_value(klass, data, offset, expect_indirect=None, max_nesting=-1): + if max_nesting == 0: + return None, None + m = klass.re_comment.match(data, offset) + if m: + offset = m.end() + m = klass.re_indirect_def_start.match(data, offset) + if m: + check_format_condition(int(m.group(1)) > 0, "indirect object definition: object ID must be greater than 0") + check_format_condition(int(m.group(2)) >= 0, "indirect object definition: generation must be non-negative") + check_format_condition(expect_indirect is None or expect_indirect == IndirectReference(int(m.group(1)), int(m.group(2))), + "indirect object definition different than expected") + object, offset = klass.get_value(data, m.end(), max_nesting=max_nesting-1) + if offset is None: + return object, None + m = klass.re_indirect_def_end.match(data, offset) + check_format_condition(m, "indirect object definition end not found") + return object, m.end() + check_format_condition(not expect_indirect, "indirect object definition not found") + m = klass.re_indirect_reference.match(data, offset) + if m: + check_format_condition(int(m.group(1)) > 0, "indirect object reference: object ID must be greater than 0") + check_format_condition(int(m.group(2)) >= 0, "indirect object reference: generation must be non-negative") + return IndirectReference(int(m.group(1)), int(m.group(2))), m.end() + m = klass.re_dict_start.match(data, offset) + if m: + offset = m.end() + result = {} + m = klass.re_dict_end.match(data, offset) + while not m: + key, offset = klass.get_value(data, offset, max_nesting=max_nesting-1) + if offset is None: + return result, None + value, offset = klass.get_value(data, offset, max_nesting=max_nesting-1) + result[key] = value + if offset is None: + return result, None + m = klass.re_dict_end.match(data, offset) + offset = m.end() + m = klass.re_stream_start.match(data, offset) + if m: + try: + stream_len = int(result[b"Length"]) + except (TypeError, KeyError, ValueError): + raise PdfFormatError("bad or missing Length in stream dict (%r)" % result.get(b"Length", None)) + stream_data = 
data[m.end():m.end() + stream_len] + m = klass.re_stream_end.match(data, m.end() + stream_len) + check_format_condition(m, "stream end not found") + offset = m.end() + result = PdfStream(PdfDict(result), stream_data) + else: + result = PdfDict(result) + return result, offset + m = klass.re_array_start.match(data, offset) + if m: + offset = m.end() + result = [] + m = klass.re_array_end.match(data, offset) + while not m: + value, offset = klass.get_value(data, offset, max_nesting=max_nesting-1) + result.append(value) + if offset is None: + return result, None + m = klass.re_array_end.match(data, offset) + return result, m.end() + m = klass.re_null.match(data, offset) + if m: + return None, m.end() + m = klass.re_true.match(data, offset) + if m: + return True, m.end() + m = klass.re_false.match(data, offset) + if m: + return False, m.end() + m = klass.re_name.match(data, offset) + if m: + return PdfName(klass.interpret_name(m.group(1))), m.end() + m = klass.re_int.match(data, offset) + if m: + return int(m.group(1)), m.end() + m = klass.re_real.match(data, offset) + if m: + return float(m.group(1)), m.end() # XXX Decimal instead of float??? + m = klass.re_string_hex.match(data, offset) + if m: + hex_string = bytearray([b for b in m.group(1) if b in b"0123456789abcdefABCDEF"]) # filter out whitespace + if len(hex_string) % 2 == 1: + hex_string.append(ord(b"0")) # append a 0 if the length is not even - yes, at the end + return bytearray.fromhex(hex_string.decode("us-ascii")), m.end() + m = klass.re_string_lit.match(data, offset) + if m: + return klass.get_literal_string(data, m.end()) + #return None, offset # fallback (only for debugging) + raise PdfFormatError("unrecognized object: " + repr(data[offset:offset+32])) + + re_lit_str_token = re.compile(br"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))") + escaped_chars = { + b"n": b"\n", + b"r": b"\r", + b"t": b"\t", + b"b": b"\b", + b"f": b"\f", + b"(": b"(", + b")": b")", + b"\\": b"\\", + ord(b"n"): b"\n", + ord(b"r"): b"\r", + ord(b"t"): b"\t", + ord(b"b"): b"\b", + ord(b"f"): b"\f", + ord(b"("): b"(", + ord(b")"): b")", + ord(b"\\"): b"\\", + } + + @classmethod + def get_literal_string(klass, data, offset): + nesting_depth = 0 + result = bytearray() + for m in klass.re_lit_str_token.finditer(data, offset): + result.extend(data[offset:m.start()]) + if m.group(1): + result.extend(klass.escaped_chars[m.group(1)[1]]) + elif m.group(2): + result.append(int(m.group(2)[1:], 8)) + elif m.group(3): + pass + elif m.group(5): + result.extend(b"\n") + elif m.group(6): + result.extend(b"(") + nesting_depth += 1 + elif m.group(7): + if nesting_depth == 0: + return bytes(result), m.end() + result.extend(b")") + nesting_depth -= 1 + offset = m.end() + raise PdfFormatError("unfinished literal string") + + re_xref_section_start = re.compile(whitespace_optional + br"xref" + newline) + re_xref_subsection_start = re.compile(whitespace_optional + br"([0-9]+)" + whitespace_mandatory + br"([0-9]+)" + whitespace_optional + newline_only) + re_xref_entry = re.compile(br"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)") + + def read_xref_table(self, xref_section_offset): + subsection_found = False + m = self.re_xref_section_start.match(self.buf, xref_section_offset + self.start_offset) + check_format_condition(m, "xref section start not found") + offset = m.end() + while True: + m = self.re_xref_subsection_start.match(self.buf, offset) + if not m: + check_format_condition(subsection_found, "xref subsection start not found") + break + 
subsection_found = True + offset = m.end() + first_object = int(m.group(1)) + num_objects = int(m.group(2)) + for i in range(first_object, first_object+num_objects): + m = self.re_xref_entry.match(self.buf, offset) + check_format_condition(m, "xref entry not found") + offset = m.end() + is_free = m.group(3) == b"f" + generation = int(m.group(2)) + if not is_free: + new_entry = (int(m.group(1)), generation) + check_format_condition(i not in self.xref_table or self.xref_table[i] == new_entry, "xref entry duplicated (and not identical)") + self.xref_table[i] = new_entry + return offset + + def read_indirect(self, ref, max_nesting=-1): + offset, generation = self.xref_table[ref[0]] + check_format_condition(generation == ref[1], "expected to find generation %s for object ID %s in xref table, instead found generation %s at offset %s" \ + % (ref[1], ref[0], generation, offset)) + value = self.get_value(self.buf, offset + self.start_offset, expect_indirect=IndirectReference(*ref), max_nesting=max_nesting)[0] + self.cached_objects[ref] = value + return value + + def linearize_page_tree(self, node=None): + if node is None: + node = self.page_tree_root + check_format_condition(node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages") + pages = [] + for kid in node[b"Kids"]: + kid_object = self.read_indirect(kid) + if kid_object[b"Type"] == b"Page": + pages.append(kid) + else: + pages.extend(self.linearize_page_tree(node=kid_object)) + return pages
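As a hedged usage sketch of the multi-page API documented in the patch above (`save_all` together with `append_images`); the file names are placeholders, and the `convert("RGB")` calls are only one way to ensure a PDF-savable mode:

```python
from PIL import Image

# Save three images as a single three-page PDF; per the documentation
# in the patch, each appended image may itself be single- or multi-frame.
first = Image.open("a.jpg").convert("RGB")
rest = [Image.open(p).convert("RGB") for p in ("b.jpg", "c.jpg")]
first.save("album.pdf", save_all=True, append_images=rest)
```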
Support for appending to existing PDF / TIFF files

I need the ability to append images as pages to existing PDF files. I have an implementation for PDFs up to version 1.4. It seems to be working fine as far as I can tell, but does anyone know of a tool to verify the correctness of a PDF, or in general a way to test this other than verifying that I can open the PDF file and it looks right? Would a pull request be welcome for this? The same could also easily be done for TIFFs, I believe.
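A minimal sketch of the appending behavior requested above, using the `append` keyword the patch adds (paths are placeholders; per the patch documentation, the target file must already exist when `append=True`, otherwise an IOError is raised):

```python
from PIL import Image

# First save creates a new one-page PDF; the second appends a page to it.
Image.open("page1.png").convert("RGB").save("out.pdf")
Image.open("page2.png").convert("RGB").save("out.pdf", append=True)
```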
python-pillow/Pillow
diff --git a/Tests/test_file_pdf.py b/Tests/test_file_pdf.py index ee02d0694..f17da8d74 100644 --- a/Tests/test_file_pdf.py +++ b/Tests/test_file_pdf.py @@ -1,24 +1,31 @@ from helper import unittest, PillowTestCase, hopper -from PIL import Image +from PIL import Image, PdfParser +import io +import os import os.path +import tempfile class TestFilePdf(PillowTestCase): - def helper_save_as_pdf(self, mode, save_all=False): + def helper_save_as_pdf(self, mode, **kwargs): # Arrange im = hopper(mode) outfile = self.tempfile("temp_" + mode + ".pdf") # Act - if save_all: - im.save(outfile, save_all=True) - else: - im.save(outfile) + im.save(outfile, **kwargs) # Assert self.assertTrue(os.path.isfile(outfile)) self.assertGreater(os.path.getsize(outfile), 0) + with PdfParser.PdfParser(outfile) as pdf: + if kwargs.get("append_images", False) or kwargs.get("append", False): + self.assertGreater(len(pdf.pages), 1) + else: + self.assertGreater(len(pdf.pages), 0) + + return outfile def test_monochrome(self): # Arrange @@ -97,6 +104,135 @@ class TestFilePdf(PillowTestCase): self.assertTrue(os.path.isfile(outfile)) self.assertGreater(os.path.getsize(outfile), 0) + def test_pdf_open(self): + # fail on a buffer full of null bytes + self.assertRaises(PdfParser.PdfFormatError, PdfParser.PdfParser, buf=bytearray(65536)) + + # make an empty PDF object + with PdfParser.PdfParser() as empty_pdf: + self.assertEqual(len(empty_pdf.pages), 0) + self.assertEqual(len(empty_pdf.info), 0) + self.assertFalse(empty_pdf.should_close_buf) + self.assertFalse(empty_pdf.should_close_file) + + # make a PDF file + pdf_filename = self.helper_save_as_pdf("RGB") + + # open the PDF file + with PdfParser.PdfParser(filename=pdf_filename) as hopper_pdf: + self.assertEqual(len(hopper_pdf.pages), 1) + self.assertTrue(hopper_pdf.should_close_buf) + self.assertTrue(hopper_pdf.should_close_file) + + # read a PDF file from a buffer with a non-zero offset + with open(pdf_filename, "rb") as f: + content = b"xyzzy" + f.read() + with PdfParser.PdfParser(buf=content, start_offset=5) as hopper_pdf: + self.assertEqual(len(hopper_pdf.pages), 1) + self.assertFalse(hopper_pdf.should_close_buf) + self.assertFalse(hopper_pdf.should_close_file) + + # read a PDF file from an already open file + with open(pdf_filename, "rb") as f: + with PdfParser.PdfParser(f=f) as hopper_pdf: + self.assertEqual(len(hopper_pdf.pages), 1) + self.assertTrue(hopper_pdf.should_close_buf) + self.assertFalse(hopper_pdf.should_close_file) + + def test_pdf_append_fails_on_nonexistent_file(self): + im = hopper("RGB") + temp_dir = tempfile.mkdtemp() + try: + self.assertRaises(IOError, im.save, os.path.join(temp_dir, "nonexistent.pdf"), append=True) + finally: + os.rmdir(temp_dir) + + def check_pdf_pages_consistency(self, pdf): + pages_info = pdf.read_indirect(pdf.pages_ref) + self.assertNotIn(b"Parent", pages_info) + self.assertIn(b"Kids", pages_info) + kids_not_used = pages_info[b"Kids"] + for page_ref in pdf.pages: + while True: + if page_ref in kids_not_used: + kids_not_used.remove(page_ref) + page_info = pdf.read_indirect(page_ref) + self.assertIn(b"Parent", page_info) + page_ref = page_info[b"Parent"] + if page_ref == pdf.pages_ref: + break + self.assertEqual(pdf.pages_ref, page_info[b"Parent"]) + self.assertEqual(kids_not_used, []) + + def test_pdf_append(self): + # make a PDF file + pdf_filename = self.helper_save_as_pdf("RGB", producer="PdfParser") + + # open it, check pages and info + with PdfParser.PdfParser(pdf_filename, mode="r+b") as pdf: + self.assertEqual(len(pdf.pages), 
1) + self.assertEqual(len(pdf.info), 1) + self.assertEqual(pdf.info.Producer, "PdfParser") + self.check_pdf_pages_consistency(pdf) + + # append some info + pdf.info.Title = "abc" + pdf.info.Author = "def" + pdf.info.Subject = u"ghi\uABCD" + pdf.info.Keywords = "qw)e\\r(ty" + pdf.info.Creator = "hopper()" + pdf.start_writing() + pdf.write_xref_and_trailer() + + # open it again, check pages and info again + with PdfParser.PdfParser(pdf_filename) as pdf: + self.assertEqual(len(pdf.pages), 1) + self.assertEqual(len(pdf.info), 6) + self.assertEqual(pdf.info.Title, "abc") + self.check_pdf_pages_consistency(pdf) + + # append two images + mode_CMYK = hopper("CMYK") + mode_P = hopper("P") + mode_CMYK.save(pdf_filename, append=True, save_all=True, append_images=[mode_P]) + + # open the PDF again, check pages and info again + with PdfParser.PdfParser(pdf_filename) as pdf: + self.assertEqual(len(pdf.pages), 3) + self.assertEqual(len(pdf.info), 6) + self.assertEqual(PdfParser.decode_text(pdf.info[b"Title"]), "abc") + self.assertEqual(pdf.info.Title, "abc") + self.assertEqual(pdf.info.Producer, "PdfParser") + self.assertEqual(pdf.info.Keywords, "qw)e\\r(ty") + self.assertEqual(pdf.info.Subject, u"ghi\uABCD") + self.check_pdf_pages_consistency(pdf) + + def test_pdf_info(self): + # make a PDF file + pdf_filename = self.helper_save_as_pdf("RGB", title="title", author="author", subject="subject", keywords="keywords", creator="creator", producer="producer") + + # open it, check pages and info + with PdfParser.PdfParser(pdf_filename) as pdf: + self.assertEqual(len(pdf.info), 6) + self.assertEqual(pdf.info.Title, "title") + self.assertEqual(pdf.info.Author, "author") + self.assertEqual(pdf.info.Subject, "subject") + self.assertEqual(pdf.info.Keywords, "keywords") + self.assertEqual(pdf.info.Creator, "creator") + self.assertEqual(pdf.info.Producer, "producer") + self.check_pdf_pages_consistency(pdf) + + def test_pdf_append_to_bytesio(self): + im = hopper("RGB") + f = io.BytesIO() + im.save(f, format="PDF") + initial_size = len(f.getvalue()) + self.assertGreater(initial_size, 0) + im = hopper("P") + f = io.BytesIO(f.getvalue()) + im.save(f, format="PDF", append=True) + self.assertGreater(len(f.getvalue()), initial_size) + if __name__ == '__main__': unittest.main() diff --git a/Tests/test_pdfparser.py b/Tests/test_pdfparser.py new file mode 100644 index 000000000..db97c97dd --- /dev/null +++ b/Tests/test_pdfparser.py @@ -0,0 +1,89 @@ +from helper import unittest, PillowTestCase + +from PIL.PdfParser import IndirectObjectDef, IndirectReference, PdfBinary, PdfDict, PdfFormatError, PdfName, PdfParser, PdfStream, decode_text, encode_text, pdf_repr + + +class TestPdfParser(PillowTestCase): + + def test_text_encode_decode(self): + self.assertEqual(encode_text("abc"), b"\xFE\xFF\x00a\x00b\x00c") + self.assertEqual(decode_text(b"\xFE\xFF\x00a\x00b\x00c"), "abc") + self.assertEqual(decode_text(b"abc"), "abc") + self.assertEqual(decode_text(b"\x1B a \x1C"), u"\u02D9 a \u02DD") + + def test_indirect_refs(self): + self.assertEqual(IndirectReference(1, 2), IndirectReference(1, 2)) + self.assertNotEqual(IndirectReference(1, 2), IndirectReference(1, 3)) + self.assertNotEqual(IndirectReference(1, 2), IndirectObjectDef(1, 2)) + self.assertNotEqual(IndirectReference(1, 2), (1, 2)) + self.assertEqual(IndirectObjectDef(1, 2), IndirectObjectDef(1, 2)) + self.assertNotEqual(IndirectObjectDef(1, 2), IndirectObjectDef(1, 3)) + self.assertNotEqual(IndirectObjectDef(1, 2), IndirectReference(1, 2)) + 
self.assertNotEqual(IndirectObjectDef(1, 2), (1, 2)) + + def test_parsing(self): + self.assertEqual(PdfParser.interpret_name(b"Name#23Hash"), b"Name#Hash") + self.assertEqual(PdfParser.interpret_name(b"Name#23Hash", as_text=True), "Name#Hash") + self.assertEqual(PdfParser.get_value(b"1 2 R ", 0), (IndirectReference(1, 2), 5)) + self.assertEqual(PdfParser.get_value(b"true[", 0), (True, 4)) + self.assertEqual(PdfParser.get_value(b"false%", 0), (False, 5)) + self.assertEqual(PdfParser.get_value(b"null<", 0), (None, 4)) + self.assertEqual(PdfParser.get_value(b"%cmt\n %cmt\n 123\n", 0), (123, 15)) + self.assertEqual(PdfParser.get_value(b"<901FA3>", 0), (b"\x90\x1F\xA3", 8)) + self.assertEqual(PdfParser.get_value(b"asd < 9 0 1 f A > qwe", 3), (b"\x90\x1F\xA0", 17)) + self.assertEqual(PdfParser.get_value(b"(asd)", 0), (b"asd", 5)) + self.assertEqual(PdfParser.get_value(b"(asd(qwe)zxc)zzz(aaa)", 0), (b"asd(qwe)zxc", 13)) + self.assertEqual(PdfParser.get_value(b"(Two \\\nwords.)", 0), (b"Two words.", 14)) + self.assertEqual(PdfParser.get_value(b"(Two\nlines.)", 0), (b"Two\nlines.", 12)) + self.assertEqual(PdfParser.get_value(b"(Two\r\nlines.)", 0), (b"Two\nlines.", 13)) + self.assertEqual(PdfParser.get_value(b"(Two\\nlines.)", 0), (b"Two\nlines.", 13)) + self.assertEqual(PdfParser.get_value(b"(One\\(paren).", 0), (b"One(paren", 12)) + self.assertEqual(PdfParser.get_value(b"(One\\)paren).", 0), (b"One)paren", 12)) + self.assertEqual(PdfParser.get_value(b"(\\0053)", 0), (b"\x053", 7)) + self.assertEqual(PdfParser.get_value(b"(\\053)", 0), (b"\x2B", 6)) + self.assertEqual(PdfParser.get_value(b"(\\53)", 0), (b"\x2B", 5)) + self.assertEqual(PdfParser.get_value(b"(\\53a)", 0), (b"\x2Ba", 6)) + self.assertEqual(PdfParser.get_value(b"(\\1111)", 0), (b"\x491", 7)) + self.assertEqual(PdfParser.get_value(b" 123 (", 0), (123, 4)) + self.assertAlmostEqual(PdfParser.get_value(b" 123.4 %", 0)[0], 123.4) + self.assertEqual(PdfParser.get_value(b" 123.4 %", 0)[1], 6) + self.assertRaises(PdfFormatError, PdfParser.get_value, b"]", 0) + d = PdfParser.get_value(b"<</Name (value) /N /V>>", 0)[0] + self.assertIsInstance(d, PdfDict) + self.assertEqual(len(d), 2) + self.assertEqual(d.Name, "value") + self.assertEqual(d[b"Name"], b"value") + self.assertEqual(d.N, PdfName("V")) + a = PdfParser.get_value(b"[/Name (value) /N /V]", 0)[0] + self.assertIsInstance(a, list) + self.assertEqual(len(a), 4) + self.assertEqual(a[0], PdfName("Name")) + s = PdfParser.get_value(b"<</Name (value) /Length 5>>\nstream\nabcde\nendstream<<...", 0)[0] + self.assertIsInstance(s, PdfStream) + self.assertEqual(s.dictionary.Name, "value") + self.assertEqual(s.decode(), b"abcde") + + def test_pdf_repr(self): + self.assertEqual(bytes(IndirectReference(1, 2)), b"1 2 R") + self.assertEqual(bytes(IndirectObjectDef(*IndirectReference(1, 2))), b"1 2 obj") + self.assertEqual(bytes(PdfName(b"Name#Hash")), b"/Name#23Hash") + self.assertEqual(bytes(PdfName("Name#Hash")), b"/Name#23Hash") + self.assertEqual(bytes(PdfDict({b"Name": IndirectReference(1, 2)})), b"<<\n/Name 1 2 R\n>>") + self.assertEqual(bytes(PdfDict({"Name": IndirectReference(1, 2)})), b"<<\n/Name 1 2 R\n>>") + self.assertEqual(pdf_repr(IndirectReference(1, 2)), b"1 2 R") + self.assertEqual(pdf_repr(IndirectObjectDef(*IndirectReference(1, 2))), b"1 2 obj") + self.assertEqual(pdf_repr(PdfName(b"Name#Hash")), b"/Name#23Hash") + self.assertEqual(pdf_repr(PdfName("Name#Hash")), b"/Name#23Hash") + self.assertEqual(pdf_repr(PdfDict({b"Name": IndirectReference(1, 2)})), b"<<\n/Name 1 2 R\n>>") + 
self.assertEqual(pdf_repr(PdfDict({"Name": IndirectReference(1, 2)})), b"<<\n/Name 1 2 R\n>>") + self.assertEqual(pdf_repr(123), b"123") + self.assertEqual(pdf_repr(True), b"true") + self.assertEqual(pdf_repr(False), b"false") + self.assertEqual(pdf_repr(None), b"null") + self.assertEqual(pdf_repr(b"a)/b\\(c"), br"(a\)/b\\\(c)") + self.assertEqual(pdf_repr([123, True, {"a": PdfName(b"b")}]), b"[ 123 true <<\n/a /b\n>> ]") + self.assertEqual(pdf_repr(PdfBinary(b"\x90\x1F\xA0")), b"<901FA0>") + + +if __name__ == '__main__': + unittest.main()
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_added_files", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 3 }
5.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": [ "apt-get update", "apt-get install -y gcc libjpeg-dev zlib1g-dev libtiff5-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.6-dev tk8.6-dev libharfbuzz-dev libfribidi-dev libxcb1-dev" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.16 babel==2.17.0 blessed==1.20.0 build==1.2.2.post1 certifi==2025.1.31 charset-normalizer==3.4.1 check-manifest==0.50 cov-core==1.15.0 coverage==7.8.0 coveralls==4.0.1 docopt==0.6.2 docutils==0.21.2 exceptiongroup==1.2.2 idna==3.10 imagesize==1.4.1 importlib_metadata==8.6.1 iniconfig==2.1.0 jarn.viewdoc==2.7 Jinja2==3.1.6 MarkupSafe==3.0.2 olefile==0.47 packaging==24.2 -e git+https://github.com/python-pillow/Pillow.git@b9ea73738ea74554bed5a1a7b90ffade0f01ce32#egg=Pillow pluggy==1.5.0 pycodestyle==2.13.0 pyflakes==3.3.1 Pygments==2.19.1 pyproject_hooks==1.2.0 pyroma==4.2 pytest==8.3.5 pytest-cov==6.0.0 pytz==2025.2 requests==2.32.3 six==1.17.0 snowballstemmer==2.2.0 Sphinx==7.4.7 sphinx-rtd-theme==3.0.2 sphinxcontrib-applehelp==2.0.0 sphinxcontrib-devhelp==2.0.0 sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 tomli==2.2.1 trove-classifiers==2025.3.19.19 urllib3==2.3.0 wcwidth==0.2.13 zipp==3.21.0
name: Pillow channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.16 - babel==2.17.0 - blessed==1.20.0 - build==1.2.2.post1 - certifi==2025.1.31 - charset-normalizer==3.4.1 - check-manifest==0.50 - cov-core==1.15.0 - coverage==7.8.0 - coveralls==4.0.1 - docopt==0.6.2 - docutils==0.21.2 - exceptiongroup==1.2.2 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - jarn-viewdoc==2.7 - jinja2==3.1.6 - markupsafe==3.0.2 - olefile==0.47 - packaging==24.2 - pluggy==1.5.0 - pycodestyle==2.13.0 - pyflakes==3.3.1 - pygments==2.19.1 - pyproject-hooks==1.2.0 - pyroma==4.2 - pytest==8.3.5 - pytest-cov==6.0.0 - pytz==2025.2 - requests==2.32.3 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==7.4.7 - sphinx-rtd-theme==3.0.2 - sphinxcontrib-applehelp==2.0.0 - sphinxcontrib-devhelp==2.0.0 - sphinxcontrib-htmlhelp==2.1.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==2.0.0 - sphinxcontrib-serializinghtml==2.0.0 - tomli==2.2.1 - trove-classifiers==2025.3.19.19 - urllib3==2.3.0 - wcwidth==0.2.13 - zipp==3.21.0 prefix: /opt/conda/envs/Pillow
[ "Tests/test_file_pdf.py::TestFilePdf::test_cmyk_mode", "Tests/test_file_pdf.py::TestFilePdf::test_greyscale", "Tests/test_file_pdf.py::TestFilePdf::test_monochrome", "Tests/test_file_pdf.py::TestFilePdf::test_p_mode", "Tests/test_file_pdf.py::TestFilePdf::test_pdf_append", "Tests/test_file_pdf.py::TestFilePdf::test_pdf_append_fails_on_nonexistent_file", "Tests/test_file_pdf.py::TestFilePdf::test_pdf_append_to_bytesio", "Tests/test_file_pdf.py::TestFilePdf::test_pdf_info", "Tests/test_file_pdf.py::TestFilePdf::test_pdf_open", "Tests/test_file_pdf.py::TestFilePdf::test_rgb", "Tests/test_file_pdf.py::TestFilePdf::test_save_all", "Tests/test_file_pdf.py::TestFilePdf::test_unsupported_mode", "Tests/test_pdfparser.py::TestPdfParser::test_indirect_refs", "Tests/test_pdfparser.py::TestPdfParser::test_parsing", "Tests/test_pdfparser.py::TestPdfParser::test_pdf_repr", "Tests/test_pdfparser.py::TestPdfParser::test_text_encode_decode" ]
[]
[]
[]
MIT-CMU License
2,056
[ "src/PIL/PdfParser.py", "src/PIL/PdfImagePlugin.py", "src/PIL/Image.py", "docs/handbook/image-file-formats.rst" ]
[ "src/PIL/PdfParser.py", "src/PIL/PdfImagePlugin.py", "src/PIL/Image.py", "docs/handbook/image-file-formats.rst" ]
ModellingWebLab__cellmlmanip-20
a71fa61adf7bf1cf3a3bf30a9392a2b8d949fa93
2018-01-18 15:05:00
a71fa61adf7bf1cf3a3bf30a9392a2b8d949fa93
diff --git a/cellmlmanip/mathml2sympy/transpiler.py b/cellmlmanip/mathml2sympy/transpiler.py index 4a20482..e0d7ad5 100644 --- a/cellmlmanip/mathml2sympy/transpiler.py +++ b/cellmlmanip/mathml2sympy/transpiler.py @@ -3,6 +3,7 @@ Parses Content MathML and returns equivalent SymPy expressions Content Markup specification: https://www.w3.org/TR/MathML2/chapter4.html """ +import logging from xml.dom import Node, minidom import sympy @@ -45,21 +46,16 @@ def transpile(xml_node): # (see cn_handler for an example), show a message text = child_node.data.strip() if text: - print('Hit text node with text "' + text + '"') + logging.warning('Unhandled text node in <%s>: "%s"', child_node.tagName, text) elif child_node.nodeType == child_node.ELEMENT_NODE: # Call the appropriate MathML handler function for this tag - name = child_node.tagName - if name in HANDLERS: - # If this tag element itself has children - if child_node.childNodes: - # We want to pass the node to the handler, and it will deal with children - sympy_expressions.append(HANDLERS[name](child_node)) - else: - # This tag has no children - sympy_expressions.append(HANDLERS[name]()) + tag_name = child_node.tagName + if tag_name in HANDLERS: + sympy_expressions.append(HANDLERS[tag_name](child_node)) + logging.debug('Transpiled node %s ⟶ %s', child_node.toxml(), sympy_expressions[-1]) else: # MathML handler function not found for this tag! - raise NotImplementedError('No handler for element <%s>' % child_node.tagName) + raise NotImplementedError('No handler for element <%s>' % tag_name) elif child_node.nodeType not in [Node.COMMENT_NODE, Node.PROCESSING_INSTRUCTION_NODE]: raise NotImplementedError('Unknown node type %d' % child_node.nodeType) return sympy_expressions @@ -81,7 +77,6 @@ def ci_handler(node): """ MathML: https://www.w3.org/TR/MathML2/chapter4.html#contm.ci SymPy: http://docs.sympy.org/latest/modules/core.html#id17 - TODO: 'type' attribute? """ identifier = node.childNodes[0].data.strip() return sympy.Symbol(identifier) @@ -91,8 +86,26 @@ def cn_handler(node): """ MathML: https://www.w3.org/TR/MathML2/chapter4.html#contm.cn SymPy: http://docs.sympy.org/latest/modules/core.html#number - TODO: 'type' attribute? """ + + # If this number is using scientific notation + if 'type' in node.attributes: + if node.attributes['type'].value == 'e-notation': + # A real number may also be presented in scientific notation. Such numbers have two + # parts (a mantissa and an exponent) separated by sep. The first part is a real number, + # while the second part is an integer exponent indicating a power of the base. + # For example, 12.3<sep/>5 represents 12.3 times 10^5. The default presentation of + # this example is 12.3e5. + if len(node.childNodes) == 3 and node.childNodes[1].tagName == 'sep': + mantissa = node.childNodes[0].data.strip() + exponent = int(node.childNodes[2].data.strip()) + return sympy.Float('%se%d' % (mantissa, exponent)) + else: + raise SyntaxError('Expecting <cn type="e-notation">significand<sep/>exponent</cn>.' 
+ 'Got: ' + node.toxml()) + raise NotImplementedError('Unimplemented type attribute for <cn>: ' + + node.attributes['type'].value) + number = float(node.childNodes[0].data.strip()) return sympy.Number(number) @@ -104,6 +117,9 @@ def apply_handler(node): https://www.w3.org/TR/MathML2/chapter4.html#contm.apply """ result = transpile(node) + + logging.debug('Result of <apply>:\n\t%s\t⟶\t%s', node.toxml(), result) + if len(result) > 1: expression = result[0](*(result[1:])) else: @@ -147,23 +163,7 @@ def otherwise_handler(node): # ARITHMETIC, ALGEBRA AND LOGIC ################################################################ -def plus_handler(): - """ - MathML: https://www.w3.org/TR/MathML2/chapter4.html#contm.plus - n-ary arithmetic operator - """ - return sympy.Add - - -def times_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.times - n-ary arithmetic operator - """ - return sympy.Mul - - -def minus_handler(): +def minus_handler(node): """ https://www.w3.org/TR/MathML2/chapter4.html#contm.minus unary arithmetic operator OR binary arithmetic operator @@ -175,16 +175,16 @@ def minus_handler(): * Negation (-a) is equivalent to sympy.Mul(sympy.S.NegativeOne, a) * Subtraction (a - b) is equivalent to sympy.Add(a, sympy.Mul(sympy.S.NegativeOne, b)) """ - def __wrapped_minus(left_operand, right_operand=None): + def _wrapped_minus(left_operand, right_operand=None): if right_operand is None: # unary arithmetic operator => negation return -left_operand # otherwise, binary arithmetic operator => subtraction return left_operand - right_operand - return __wrapped_minus + return _wrapped_minus -def divide_handler(): +def divide_handler(node): """ https://www.w3.org/TR/MathML2/chapter4.html#contm.divide binary arithmetic operator @@ -192,130 +192,75 @@ def divide_handler(): Equivalent to sympy.Mul(a, sympy.Pow(b, sympy.S.NegativeOne)) """ - def __wrapped_divide(dividend, divisor): + def _wrapped_divide(dividend, divisor): return dividend / divisor - return __wrapped_divide + return _wrapped_divide -def rem_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.rem - binary arithmetic operator - """ - return sympy.Mod - - -def floor_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.floor - unary operator - """ - return sympy.floor - - -def abs_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.abs - unary arithmetic operator - """ - return sympy.Abs - - -def power_handler(): +def power_handler(node): """ https://www.w3.org/TR/MathML2/chapter4.html#contm.power binary arithmetic operator equivalent to sympy.Pow(a, b) """ - def __wrapped_power(base, exponent): + def _wrapped_power(base, exponent): return base ** exponent - return __wrapped_power + return _wrapped_power -def root_handler(): +def root_handler(node): """ https://www.w3.org/TR/MathML2/chapter4.html#contm.root operator taking qualifiers - TODO: implement <degree> - """ - def __wrapped_root(radicand, degree=None): - if degree is None: - # by default, sqrt - return sympy.root(radicand, 2) - else: - raise NotImplementedError - # return sympy.root(b, a) - return __wrapped_root - -# RELATIONS #################################################################################### - -def eq_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.eq - n-ary operator + Nasty: + The root element is used to construct roots. The kind of root to be taken is specified by a + degree element, which should be given as the second child of the apply element enclosing the + root element. 
Thus, square roots correspond to the case where degree contains the value 2, cube + roots correspond to 3, and so on. If no degree is present, a default value of 2 is used. """ - return sympy.Eq + def _wrapped_root(first_argument, second_argument=None): + # if no <degree> given, it's sqrt + if second_argument is None: + return sympy.root(first_argument, 2) + return sympy.root(second_argument, first_argument) + return _wrapped_root -def leq_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.leq +def degree_handler(node): """ - return sympy.Le - - -def lt_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.lt - """ - return sympy.Lt - - -def geq_handler(): + https://www.w3.org/TR/MathML2/chapter4.html#contm.degree + Meaning of <degree> depends on context! We implement it for order of <bvar> in <diff> and + the kind of root in <root> """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.geq - n-ary relation - """ - return sympy.Ge - - -def gt_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.gt - n-ary relation - """ - return sympy.Gt - - -def and_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.and - n-ary operator - """ - return sympy.And - - -def or_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.or - n-ary operator - """ - return sympy.Or + result = transpile(node) + if len(result) != 1: + raise ValueError('Expected single value in <degree> tag.' + 'Got: ' + node.toxml()) + return result[0] # CALCULUS AND VECTOR CALCULUS ################################################################# -def diff_handler(): +def diff_handler(node): """ https://www.w3.org/TR/MathML2/chapter4.html#contm.diff operator taking qualifiers """ - def __wrapped_diff(x_symbol, y_symbol, evaluate=False): + def _wrapped_diff(x_symbol, y_symbol, evaluate=False): # dx / dy - y_function = sympy.Function(y_symbol.name) # given by child element <bvar> + y_function = sympy.Function(y_symbol.name) + + # if bound variable element <bvar> contains <degree>, argument x_symbol is a list, + # otherwise, it is a symbol + if isinstance(x_symbol, list) and len(x_symbol) == 2: + bound_variable = x_symbol[0] + order = int(x_symbol[1]) + return sympy.Derivative(y_function(bound_variable), bound_variable, order, + evaluate=evaluate) + return sympy.Derivative(y_function(x_symbol), x_symbol, evaluate=evaluate) - return __wrapped_diff + return _wrapped_diff def bvar_handler(node): @@ -324,112 +269,166 @@ def bvar_handler(node): NASTY: bvar element depends on the context it is being used In a derivative, it indicates the variable with respect to which a function is being differentiated. + + The bound variable <bvar> can also specify degree. 
In this case, we'll have two elements """ result = transpile(node) - if len(result) > 1: - raise NotImplementedError('multiple <bvar> not implemented') - return result[0] + if len(result) == 1: + # Bound variable without specifying degree + return result[0] + elif len(result) == 2: + return result + else: + raise SyntaxError("Don't know how to handle <bvar> " + node.toxml()) # ELEMENTARY CLASSICAL FUNCTIONS ############################################################### -def exp_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.exp - unary arithmetic operator - """ - return sympy.exp - - -def ln_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.ln - unary calculus operator - """ - return sympy.ln - - -def log_handler(): +def log_handler(node): """ https://www.w3.org/TR/MathML2/chapter4.html#contm.log operator taking qualifiers or a unary calculus operator - TODO: implement <logbase> """ - def __wrapped_log(term, base=None): - if base is None: + def _wrapped_log(first_element, second_element=None): + if second_element is None: # if no <logbase> element is present, the base is assumed to be 10 - return sympy.log(term, 10) - else: - # return sympy.log(b, a) - raise NotImplementedError - return __wrapped_log + return sympy.log(first_element, 10) + # Has <logbase> element, which is the first_element after <log/> + return sympy.log(second_element, first_element) + return _wrapped_log -def cos_handler(): + +def logbase_handler(node): """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.trig - unary trigonometric operator + Qualifier for <log> + + The log function accepts only the logbase schema. If present, the logbase schema denotes the + base with respect to which the logarithm is being taken. Otherwise, the log is assumed to be b + ase 10. When used with log, the logbase schema is expected to contain a single child schema; + otherwise an error is generated. + + Should be the first element following log, i.e. the second child of the containing apply + element. """ - return sympy.cos + return transpile(node)[0] -def tanh_handler(): +def get_nary_relation_callback(sympy_relation): """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.trig - unary trigonometric operator + Wraps the Sympy binary relation to handle n-ary MathML relations + + :param sympy_relation: handle for binary Sympy relation (Eq, Le, Lt, Ge, Gt) + :return: callback used by the apply_handler to handle n-ary relations """ - return sympy.tanh + def _wrapper_relational(*expressions): + # If the MathML relation is chaining more than 2 expressions + if len(expressions) > 2: + # Convert to multiple Sympy binary relations bundled in an 'And' boolean + relations = [] + for first, second in zip(expressions[:-1], expressions[1:]): + relations.append(sympy_relation(first, second)) + return sympy.And(*relations) + return sympy_relation(*expressions) + return _wrapper_relational -def arccos_handler(): +def simple_operator_handler(node): """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.trig - unary trigonometric operator + This function handles simple MathML <tagName> to sympy.Class operators, where no unique handling + of tag children etc. is required. 
""" - return sympy.acos + tag_name = node.tagName + handler = getattr(sympy, SIMPLE_MATHML_TO_SYMPY_NAMES[tag_name]) -# CONSTANT AND SYMBOL ELEMENTS ################################################################# + # Some MathML relations allow chaining but Sympy relations are binary operations + if tag_name in MATHML_NARY_RELATIONS: + return get_nary_relation_callback(handler) -def pi_handler(): - """ - https://www.w3.org/TR/MathML2/chapter4.html#contm.pi - """ - return sympy.pi + return handler # END OF MATHML HANDLERS ####################################################################### -# Mapping MathML tag element names (keys) to appropriate function for SymPy output (values) -HANDLERS = {'abs': abs_handler, - 'and': and_handler, - 'apply': apply_handler, - 'arccos': arccos_handler, - 'bvar': bvar_handler, - 'ci': ci_handler, - 'cn': cn_handler, - 'cos': cos_handler, - 'diff': diff_handler, - 'divide': divide_handler, - 'eq': eq_handler, - 'exp': exp_handler, - 'floor': floor_handler, - 'geq': geq_handler, - 'gt': gt_handler, - 'leq': leq_handler, - 'ln': ln_handler, - 'log': log_handler, - 'lt': lt_handler, - 'math': math_handler, - 'minus': minus_handler, - 'or': or_handler, - 'otherwise': otherwise_handler, - 'pi': pi_handler, - 'piece': piece_handler, - 'piecewise': piecewise_handler, - 'plus': plus_handler, - 'power': power_handler, - 'rem': rem_handler, - 'root': root_handler, - 'tanh': tanh_handler, - 'times': times_handler} +# These MathML tags map directly to Sympy classes and don't require any extra handling +SIMPLE_MATHML_TO_SYMPY_NAMES = { + 'abs': 'Abs', + 'and': 'And', + 'arccos': 'acos', + 'arccosh': 'acosh', + 'arccot': 'acot', + 'arccoth': 'acoth', + 'arccsc': 'acsc', + 'arccsch': 'acsch', + 'arcsec': 'asec', + 'arcsech': 'asech', + 'arcsin': 'asin', + 'arcsinh': 'asinh', + 'arctan': 'atan', + 'arctanh': 'atanh', + 'ceiling': 'ceiling', + 'cos': 'cos', + 'cosh': 'cosh', + 'cot': 'cot', + 'coth': 'coth', + 'csc': 'csc', + 'csch': 'csch', + 'eq': 'Eq', + 'exp': 'exp', + 'exponentiale': 'E', + 'false': 'false', + 'floor': 'floor', + 'geq': 'Ge', + 'gt': 'Gt', + 'infinity': 'oo', + 'leq': 'Le', + 'ln': 'ln', + 'lt': 'Lt', + 'max': 'Max', + 'min': 'Min', + 'neq': 'Ne', + 'not': 'Not', + 'notanumber': 'nan', + 'or': 'Or', + 'pi': 'pi', + 'plus': 'Add', + 'rem': 'Mod', + 'sec': 'sec', + 'sech': 'sech', + 'sin': 'sin', + 'sinh': 'sinh', + 'tan': 'tan', + 'tanh': 'tanh', + 'times': 'Mul', + 'true': 'true', + 'xor': 'Xor', +} + +# MathML relation elements that are n-ary operators +MATHML_NARY_RELATIONS = {'eq', 'leq', 'lt', 'geq', 'gt'} + +# Mapping MathML tag element names (keys) to appropriate handler for SymPy output (values) +# These tags require explicit handling because they have children or context etc. +HANDLERS = { + 'apply': apply_handler, + 'bvar': bvar_handler, + 'ci': ci_handler, + 'cn': cn_handler, + 'degree': degree_handler, + 'diff': diff_handler, + 'divide': divide_handler, + 'log': log_handler, + 'logbase': logbase_handler, + 'math': math_handler, + 'minus': minus_handler, + 'otherwise': otherwise_handler, + 'piece': piece_handler, + 'piecewise': piecewise_handler, + 'power': power_handler, + 'root': root_handler +} + +# Add tags that can be handled by simple_operator_handler +for tagName in SIMPLE_MATHML_TO_SYMPY_NAMES: + HANDLERS[tagName] = simple_operator_handler
Expand the range of supported MathML. We should probably support the [CellML subset](https://www.cellml.org/specifications/cellml_1.1/#sec_math_cellml_subset) eventually (see also [CellML 2](https://docs.google.com/document/d/1MleZJF6DYp4FO4vxslAHInrRFMqrSpARa1s_dPhwYU0/edit#heading=h.zegia3mrify8)). Not urgent though - just supporting the elements in the models we currently have is sufficient to get started :)
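To make the request concrete, here is how one of the newly supported elements goes through the transpiler, based on the `parse_string` entry point used in the test patch. The import path and the exact namespace wrapping are assumptions (the tests build the `<math>` wrapper via a `make_mathml` helper that is not shown here):

```python
import sympy
from cellmlmanip import mathml2sympy  # assumed import path

mathml = ('<math xmlns="http://www.w3.org/1998/Math/MathML">'
          '<apply><xor/><ci>a</ci><ci>b</ci></apply>'
          '</math>')

# parse_string returns a list of SymPy expressions,
# one per top-level child of the <math> element.
exprs = mathml2sympy.parse_string(mathml)
assert exprs == [sympy.Xor(sympy.Symbol('a'), sympy.Symbol('b'))]
```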
ModellingWebLab/cellmlmanip
diff --git a/tests/test_mathml2sympy.py b/tests/test_mathml2sympy.py index 47d7b08..ded0740 100644 --- a/tests/test_mathml2sympy.py +++ b/tests/test_mathml2sympy.py @@ -17,7 +17,6 @@ class TestParser(object): def assert_equal(self, content_xml, sympy_expression): mathml_string = self.make_mathml(content_xml) transpiled_sympy = mathml2sympy.parse_string(mathml_string) - # print(mathml_string, "⟶", transpiled_sympy, "==", sympy_expression) assert transpiled_sympy == sympy_expression def test_symbol(self): @@ -58,6 +57,10 @@ class TestParser(object): self.assert_equal('<apply><eq/><ci>a</ci><ci>b</ci></apply>', [sympy.Eq(sympy.Symbol('a'), sympy.Symbol('b'))]) + def test_neq(self): + self.assert_equal('<apply><neq/><ci>a</ci><ci>b</ci></apply>', + [sympy.Ne(sympy.Symbol('a'), sympy.Symbol('b'))]) + def test_leq(self): self.assert_equal('<apply><leq/><ci>a</ci><ci>b</ci></apply>', [sympy.Symbol('a') <= sympy.Symbol('b')]) @@ -106,6 +109,10 @@ class TestParser(object): self.assert_equal('<apply><root/><ci>a</ci></apply>', [sympy.sqrt(sympy.Symbol('a'))]) + def test_root_degree(self): + self.assert_equal('<apply><root/><degree><ci>n</ci></degree><ci>a</ci></apply>', + [sympy.root(sympy.Symbol('a'), sympy.Symbol('n'))]) + def test_pi(self): self.assert_equal('<pi/>', [sympy.pi]) @@ -118,12 +125,30 @@ class TestParser(object): self.assert_equal('<apply><log/><ci>x</ci></apply>', [sympy.log(sympy.Symbol('x'), 10)]) + def test_log_with_base(self): + # numeric base + self.assert_equal('<apply><log/><logbase><cn>3</cn></logbase><ci>x</ci></apply>', + [sympy.log(sympy.Symbol('x'), sympy.Float(3.0))]) + # symbolic base + self.assert_equal('<apply><log/><logbase><ci>y</ci></logbase><ci>x</ci></apply>', + [sympy.log(sympy.Symbol('x'), sympy.Symbol('y'))]) + def test_diff(self): time = sympy.Symbol('time') V = sympy.Function('V') self.assert_equal('<apply><diff/><bvar><ci>time</ci></bvar><ci>V</ci></apply>', [sympy.Derivative(V(time), time)]) + def test_diff_with_order(self): + time = sympy.Symbol('time') + V = sympy.Function('V') + self.assert_equal('<apply>' + '<diff/>' + '<bvar><ci>time</ci><degree><cn>2</cn></degree></bvar>' + '<ci>V</ci>' + '</apply>', + [sympy.Derivative(V(time), time, 2)]) + def test_piecewise(self): x = sympy.Symbol('x') self.assert_equal('<piecewise>' @@ -132,10 +157,28 @@ class TestParser(object): '</piecewise>', [sympy.Piecewise((0, x < 0.0), (x, True))]) - def test_multiple_equalities(self): - self.assert_equal('<apply><eq/><ci>x</ci><cn>1</cn></apply>' - '<apply><eq/><ci>y</ci><cn>2</cn></apply>', - [sympy.Eq(sympy.Symbol('x'), 1), sympy.Eq(sympy.Symbol('y'), 2)]) + self.assert_equal('<piecewise>' + '<piece><cn>10</cn><apply><gt/><ci>x</ci><cn>0</cn></apply></piece>' + '<piece><cn>20</cn><apply><gt/><ci>x</ci><cn>1</cn></apply></piece>' + '<piece><cn>30</cn><apply><gt/><ci>x</ci><cn>2</cn></apply></piece>' + '<otherwise><cn>0</cn></otherwise>' + '</piecewise>', + [sympy.Piecewise((10, x > 0), (20, x > 1), (30, x > 2), (0, True))]) + + def test_multiple_relations(self): + from sympy.abc import a, b, c, x, y, z + eq_xml = '<apply><eq/><ci>x</ci><ci>y</ci><ci>z</ci><cn>2.0</cn></apply>' + lt_xml = '<apply><lt/><ci>a</ci><ci>b</ci><ci>c</ci><cn>2.0</cn></apply>' + ge_xml = '<apply><geq/><ci>x</ci><ci>y</ci><ci>z</ci><cn>2.0</cn></apply>' + + self.assert_equal(eq_xml, [sympy.And(sympy.Eq(x, y), sympy.Eq(y, z), sympy.Eq(z, 2.0))]) + self.assert_equal(lt_xml, [sympy.And(sympy.Lt(a, b), sympy.Lt(b, c), sympy.Lt(c, 2.0))]) + self.assert_equal(ge_xml, [sympy.And(sympy.Ge(x, y), 
sympy.Ge(y, z), sympy.Ge(z, 2.0))]) + self.assert_equal('<apply><and/>%s%s</apply>' % (eq_xml, lt_xml), + [sympy.And( + sympy.And(sympy.Eq(x, y), sympy.Eq(y, z), sympy.Eq(z, 2.0)), + sympy.And(sympy.Lt(a, b), sympy.Lt(b, c), sympy.Lt(c, 2.0)) + )]) def test_cellml_namespace(self): mathml_xml = '<math xmlns="http://www.w3.org/1998/Math/MathML" ' \ @@ -154,6 +197,37 @@ class TestParser(object): '></apply>', [sympy.Eq(sympy.Derivative(V(t), t), -(i_Stim+i_Na+i_K+i_L) / Cm)]) + def test_scientific_notation(self): + self.assert_equal('<cn type="e-notation">1.234<sep/>5</cn>', [sympy.Number(1.234e5)]) + + def test_xor(self): + self.assert_equal('<apply><xor/><ci>a</ci><ci>b</ci></apply>', + [sympy.Xor(sympy.Symbol('a'), sympy.Symbol('b'))]) + + def test_not(self): + self.assert_equal('<apply><not/><ci>a</ci></apply>', + [sympy.Not(sympy.Symbol('a'))]) + + def test_ceiling(self): + self.assert_equal('<apply><ceiling/><ci>a</ci></apply>', + [sympy.ceiling(sympy.Symbol('a'))]) + + def test_min(self): + self.assert_equal('<apply><min/><ci>a</ci><ci>b</ci></apply>', + [sympy.Min(sympy.Symbol('a'), sympy.Symbol('b'))]) + + def test_max(self): + self.assert_equal('<apply><max/><ci>a</ci><ci>b</ci></apply>', + [sympy.Max(sympy.Symbol('a'), sympy.Symbol('b'))]) + + def test_trig(self): + self.assert_equal('<apply><cos/><ci>x</ci></apply>', + [sympy.cos(sympy.Symbol('x'))]) + self.assert_equal('<apply><sin/><ci>x</ci></apply>', + [sympy.sin(sympy.Symbol('x'))]) + self.assert_equal('<apply><arctanh/><ci>x</ci></apply>', + [sympy.atanh(sympy.Symbol('x'))]) + def test_noble_1962(self): cellml_path = os.path.join(os.path.dirname(__file__), "noble_model_1962.cellml")
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 1 }
unknown
{ "env_vars": null, "env_yml_path": [], "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "flake8", "isort", "codecov" ], "pre_install": [], "python": "3.6", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 -e git+https://github.com/ModellingWebLab/cellmlmanip.git@a71fa61adf7bf1cf3a3bf30a9392a2b8d949fa93#egg=cellmlmanip certifi==2021.5.30 charset-normalizer==2.0.12 codecov==2.1.13 coverage==6.2 flake8==5.0.4 idna==3.10 importlib-metadata==4.2.0 iniconfig==1.1.1 isort==5.10.1 mccabe==0.7.0 mpmath==1.0.0 packaging==21.3 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 requests==2.27.1 sympy==1.1.1 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: cellmlmanip channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - charset-normalizer==2.0.12 - codecov==2.1.13 - coverage==6.2 - flake8==5.0.4 - idna==3.10 - importlib-metadata==4.2.0 - iniconfig==1.1.1 - isort==5.10.1 - mccabe==0.7.0 - mpmath==1.0.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - requests==2.27.1 - sympy==1.1.1 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/cellmlmanip
[ "tests/test_mathml2sympy.py::TestParser::test_neq", "tests/test_mathml2sympy.py::TestParser::test_root_degree", "tests/test_mathml2sympy.py::TestParser::test_log_with_base", "tests/test_mathml2sympy.py::TestParser::test_diff_with_order", "tests/test_mathml2sympy.py::TestParser::test_multiple_relations", "tests/test_mathml2sympy.py::TestParser::test_scientific_notation", "tests/test_mathml2sympy.py::TestParser::test_xor", "tests/test_mathml2sympy.py::TestParser::test_not", "tests/test_mathml2sympy.py::TestParser::test_ceiling", "tests/test_mathml2sympy.py::TestParser::test_min", "tests/test_mathml2sympy.py::TestParser::test_max", "tests/test_mathml2sympy.py::TestParser::test_trig" ]
[]
[ "tests/test_mathml2sympy.py::TestParser::test_symbol", "tests/test_mathml2sympy.py::TestParser::test_number", "tests/test_mathml2sympy.py::TestParser::test_ignore_comment", "tests/test_mathml2sympy.py::TestParser::test_ignore_processing", "tests/test_mathml2sympy.py::TestParser::test_plus", "tests/test_mathml2sympy.py::TestParser::test_mul", "tests/test_mathml2sympy.py::TestParser::test_minus", "tests/test_mathml2sympy.py::TestParser::test_negative", "tests/test_mathml2sympy.py::TestParser::test_divide", "tests/test_mathml2sympy.py::TestParser::test_eq", "tests/test_mathml2sympy.py::TestParser::test_leq", "tests/test_mathml2sympy.py::TestParser::test_lt", "tests/test_mathml2sympy.py::TestParser::test_floor", "tests/test_mathml2sympy.py::TestParser::test_geq", "tests/test_mathml2sympy.py::TestParser::test_gt", "tests/test_mathml2sympy.py::TestParser::test_and", "tests/test_mathml2sympy.py::TestParser::test_or", "tests/test_mathml2sympy.py::TestParser::test_exp", "tests/test_mathml2sympy.py::TestParser::test_power", "tests/test_mathml2sympy.py::TestParser::test_ln", "tests/test_mathml2sympy.py::TestParser::test_abs", "tests/test_mathml2sympy.py::TestParser::test_root", "tests/test_mathml2sympy.py::TestParser::test_pi", "tests/test_mathml2sympy.py::TestParser::test_mod", "tests/test_mathml2sympy.py::TestParser::test_log", "tests/test_mathml2sympy.py::TestParser::test_diff", "tests/test_mathml2sympy.py::TestParser::test_piecewise", "tests/test_mathml2sympy.py::TestParser::test_cellml_namespace", "tests/test_mathml2sympy.py::TestParser::test_diff_eq", "tests/test_mathml2sympy.py::TestParser::test_noble_1962" ]
[]
BSD 3-Clause License
2,057
[ "cellmlmanip/mathml2sympy/transpiler.py" ]
[ "cellmlmanip/mathml2sympy/transpiler.py" ]
zopefoundation__ZEO-103
fdcc27393cf83f3e3b8f350a9b90aa7fb4b81d2f
2018-01-19 02:18:53
5efce5d6821ac2455f37a425de8b377493d71101
diff --git a/.gitignore b/.gitignore index 83f9b0d0..fc62ef39 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,4 @@ eggs parts testing.log .dir-locals.el +data.fs* diff --git a/src/ZEO/__init__.py b/src/ZEO/__init__.py index 4cf580e7..ba340e4c 100644 --- a/src/ZEO/__init__.py +++ b/src/ZEO/__init__.py @@ -40,7 +40,7 @@ def connection(*args, **kw): return db.open_then_close_db_when_connection_closes() except Exception: db.close() - ra + raise def server(path=None, blob_dir=None, storage_conf=None, zeo_conf=None, port=0, threaded=True, **kw): diff --git a/tox.ini b/tox.ini index cf839f76..33079fd5 100644 --- a/tox.ini +++ b/tox.ini @@ -21,6 +21,7 @@ deps = zope.testing zope.testrunner mock + msgpack # ZopeUndo is needed as soft-dependency for a regression test ZopeUndo [testenv:simple]
Typo in `ZEO/src/ZEO/__init__.py`, line 43: `ra` should be `raise`.
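For context on why this one-token typo matters: inside the `except` block, the bare name `ra` is looked up at runtime and raises `NameError`, which replaces the exception that was supposed to propagate (the original error survives only as `__context__`). A minimal standalone reproduction:

```python
def broken():
    try:
        raise ValueError("original error")
    except Exception:
        ra  # NameError -- was meant to be `raise`

try:
    broken()
except Exception as exc:
    print(type(exc).__name__)               # NameError, not ValueError
    print(type(exc.__context__).__name__)   # ValueError (chained as context)
```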
zopefoundation/ZEO
diff --git a/src/ZEO/tests/testZEO.py b/src/ZEO/tests/testZEO.py index 546b7091..9bba0b99 100644 --- a/src/ZEO/tests/testZEO.py +++ b/src/ZEO/tests/testZEO.py @@ -75,6 +75,94 @@ class CreativeGetState(persistent.Persistent): return super(CreativeGetState, self).__getstate__() + +class Test_convenience_functions(unittest.TestCase): + + def test_ZEO_client_convenience(self): + import mock + import ZEO + + client_thread = mock.Mock( + spec=['call', 'async', 'async_iter', 'wait']) + client = ZEO.client( + 8001, wait=False, _client_factory=client_thread) + self.assertIsInstance(client, ClientStorage) + + def test_ZEO_DB_convenience_ok(self): + import mock + import ZEO + + client_mock = mock.Mock(spec=['close']) + client_patch = mock.patch('ZEO.client', return_value=client_mock) + DB_patch = mock.patch('ZODB.DB') + + dummy = object() + + with client_patch as client: + with DB_patch as patched: + db = ZEO.DB(dummy) + + self.assertIs(db, patched()) + client.assert_called_once_with(dummy) + client_mock.close.assert_not_called() + + def test_ZEO_DB_convenience_error(self): + import mock + import ZEO + + client_mock = mock.Mock(spec=['close']) + client_patch = mock.patch('ZEO.client', return_value=client_mock) + DB_patch = mock.patch('ZODB.DB', side_effect=ValueError) + + dummy = object() + + with client_patch as client: + with DB_patch: + with self.assertRaises(ValueError): + ZEO.DB(dummy) + + client.assert_called_once_with(dummy) + client_mock.close.assert_called_once() + + def test_ZEO_connection_convenience_ok(self): + import mock + import ZEO + + ret = object() + DB_mock = mock.Mock(spec=[ + 'close', 'open_then_close_db_when_connection_closes']) + DB_mock.open_then_close_db_when_connection_closes.return_value = ret + DB_patch = mock.patch('ZEO.DB', return_value=DB_mock) + + dummy = object() + + with DB_patch as patched: + conn = ZEO.connection(dummy) + + self.assertIs(conn, ret) + patched.assert_called_once_with(dummy) + DB_mock.close.assert_not_called() + + def test_ZEO_connection_convenience_value(self): + import mock + import ZEO + + DB_mock = mock.Mock(spec=[ + 'close', 'open_then_close_db_when_connection_closes']) + otc = DB_mock.open_then_close_db_when_connection_closes + otc.side_effect = ValueError + DB_patch = mock.patch('ZEO.DB', return_value=DB_mock) + + dummy = object() + + with DB_patch as patched: + with self.assertRaises(ValueError): + ZEO.connection(dummy) + + patched.assert_called_once_with(dummy) + DB_mock.close.assert_called_once() + + class MiscZEOTests(object): """ZEO tests that don't fit in elsewhere.""" @@ -1636,7 +1724,9 @@ class ServerManagingClientStorageForIExternalGCTest( ZEO.ClientStorage._check_blob_cache_size(self.blob_dir, 0) def test_suite(): - suite = unittest.TestSuite() + suite = unittest.TestSuite(( + unittest.makeSuite(Test_convenience_functions), + )) zeo = unittest.TestSuite() zeo.addTest(unittest.makeSuite(ZODB.tests.util.AAAA_Test_Runner_Hack))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 3 }
5.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[test]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "zope-testrunner", "manuel", "random2", "mock", "msgpack-python", "pytest" ], "pre_install": null, "python": "3.4", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 BTrees==4.11.3 certifi==2021.5.30 cffi==1.15.1 importlib-metadata==4.8.3 iniconfig==1.1.1 manuel==1.13.0 mock==5.2.0 msgpack-python==0.5.6 packaging==21.3 persistent==4.9.3 pluggy==1.0.0 py==1.11.0 pycparser==2.21 pyparsing==3.1.4 pytest==7.0.1 random2==1.0.2 six==1.17.0 tomli==1.2.3 transaction==3.1.0 typing_extensions==4.1.1 zc.lockfile==2.0 ZConfig==3.6.1 zdaemon==4.4 -e git+https://github.com/zopefoundation/ZEO.git@fdcc27393cf83f3e3b8f350a9b90aa7fb4b81d2f#egg=ZEO zipp==3.6.0 ZODB==5.8.1 zodbpickle==2.6 zope.exceptions==4.6 zope.interface==5.5.2 zope.testing==5.0.1 zope.testrunner==5.6
name: ZEO channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - btrees==4.11.3 - cffi==1.15.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - manuel==1.13.0 - mock==5.2.0 - msgpack-python==0.5.6 - packaging==21.3 - persistent==4.9.3 - pluggy==1.0.0 - py==1.11.0 - pycparser==2.21 - pyparsing==3.1.4 - pytest==7.0.1 - random2==1.0.2 - six==1.17.0 - tomli==1.2.3 - transaction==3.1.0 - typing-extensions==4.1.1 - zc-lockfile==2.0 - zconfig==3.6.1 - zdaemon==4.4 - zipp==3.6.0 - zodb==5.8.1 - zodbpickle==2.6 - zope-exceptions==4.6 - zope-interface==5.5.2 - zope-testing==5.0.1 - zope-testrunner==5.6 prefix: /opt/conda/envs/ZEO
[ "src/ZEO/tests/testZEO.py::Test_convenience_functions::test_ZEO_connection_convenience_value" ]
[]
[ "src/ZEO/tests/testZEO.py::Test_convenience_functions::test_ZEO_DB_convenience_error", "src/ZEO/tests/testZEO.py::Test_convenience_functions::test_ZEO_DB_convenience_ok", "src/ZEO/tests/testZEO.py::Test_convenience_functions::test_ZEO_client_convenience", "src/ZEO/tests/testZEO.py::Test_convenience_functions::test_ZEO_connection_convenience_ok", "src/ZEO/tests/testZEO.py::test_server_status", "src/ZEO/tests/testZEO.py::test_ruok", "src/ZEO/tests/testZEO.py::test_runzeo_msgpack_support", "src/ZEO/tests/testZEO.py::MultiprocessingTests::test_work_with_multiprocessing", "src/ZEO/tests/testZEO.py::test_suite" ]
[]
Zope Public License 2.1
2,058
[ ".gitignore", "tox.ini", "src/ZEO/__init__.py" ]
[ ".gitignore", "tox.ini", "src/ZEO/__init__.py" ]
PlasmaPy__PlasmaPy-239
f747b371c5245b620d0463d6e315629fa61a7bf0
2018-01-19 18:29:17
f747b371c5245b620d0463d6e315629fa61a7bf0
diff --git a/.circleci/config.yml b/.circleci/config.yml index ba6080c6..f204611b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -10,11 +10,14 @@ latex-install: &latex-installer name: Install Latex dependencies command: | sudo apt update - sudo apt install texlive texlive-xetex texlive-fonts-extra texlive-latex-extra texlive-plain-extra latexmk + sudo apt install texlive texlive-xetex texlive-fonts-extra texlive-latex-extra texlive-plain-extra latexmk graphviz html-run: &doc-html name: Build HTML documentation - command: make html SPHINXOPTS='-W' + command: | + sudo apt update + sudo apt install graphviz + make html SPHINXOPTS='-W' working_directory: docs latex-run: &doc-latex diff --git a/plasmapy/atomic/__init__.py b/plasmapy/atomic/__init__.py index bf053c42..cfa343a1 100644 --- a/plasmapy/atomic/__init__.py +++ b/plasmapy/atomic/__init__.py @@ -26,3 +26,5 @@ nuclear_binding_energy, nuclear_reaction_energy, ) + +from .classes import Particle diff --git a/plasmapy/atomic/atomic.py b/plasmapy/atomic/atomic.py index bc103071..b3385068 100644 --- a/plasmapy/atomic/atomic.py +++ b/plasmapy/atomic/atomic.py @@ -277,9 +277,9 @@ def standard_atomic_weight(argument: Union[str, int]) -> Quantity: try: atomic_weight = _Elements[element]['atomic_mass'] - except KeyError: + except KeyError as e: raise MissingAtomicDataError( - f"No standard atomic weight is available for {element}.") + f"No standard atomic weight is available for {element}.") from None return atomic_weight @@ -480,7 +480,7 @@ def ion_mass(argument: Union[str, int, Quantity], Z: int = None, try: m_i = argument.to(u.kg) - except Exception: + except u.UnitConversionError: raise u.UnitConversionError("If the ion in given as a Quantity, " "then it must have units of mass.") @@ -534,7 +534,9 @@ def ion_mass(argument: Union[str, int, Quantity], Z: int = None, try: isotope = isotope_symbol(argument, mass_numb) - except Exception: + except InvalidParticleError as e: + raise InvalidParticleError("Invalid particle in ion_mass.") + except InvalidIsotopeError: is_isotope = False else: is_isotope = True @@ -554,7 +556,7 @@ def ion_mass(argument: Union[str, int, Quantity], Z: int = None, try: atomic_mass = standard_atomic_weight(argument) - except Exception: # coveralls: ignore + except MissingAtomicDataError: # coveralls: ignore errormessage = ("No isotope mass or standard atomic weight is " f"available to get ion mass for {argument}") @@ -1244,7 +1246,7 @@ def stable_isotopes_for_element(argument: Union[str, int], stable_isotopes_for_element(element, not unstable) except InvalidParticleError: raise InvalidParticleError("Invalid particle in stable_isotopes") - except Exception: + except InvalidElementError: raise InvalidElementError( "stable_isotopes is unable to get isotopes " f"from an input of: {argument}") diff --git a/plasmapy/atomic/classes.py b/plasmapy/atomic/classes.py new file mode 100644 index 00000000..4fc49f1e --- /dev/null +++ b/plasmapy/atomic/classes.py @@ -0,0 +1,850 @@ +from typing import (Union, Set, Tuple, List, Optional) +import warnings + +from astropy import units as u, constants as const +import numpy as np + +from ..utils import ( + AtomicError, + AtomicWarning, + InvalidParticleError, + InvalidElementError, + InvalidIsotopeError, + InvalidIonError, + ChargeError, + MissingAtomicDataError, +) + +from .parsing import ( + _dealias_particle_aliases, + _parse_and_check_atomic_input, + _invalid_particle_errmsg, +) + +from .elements import _Elements +from .isotopes import _Isotopes +from .particles import 
_Particles, ParticleZoo + +# TODO: Write a decorator to turn atomic inputs into a Particle. + + +class Particle: + r"""A class for individual particles or antiparticles. + + Parameters + ---------- + argument : str or int + A string representing a particle, element, isotope, or ion; or + an integer representing the atomic number of an element. + + mass_numb : int, optional + The mass number of an isotope or nuclide. + + Z : int, optional + The integer charge of the particle. + + Attributes + ---------- + particle : str + The particle symbol. + + element : str + The atomic symbol, or None when the particle is not an element. + + isotope : str + The isotope symbol, or None when the particle is not an isotope. + + ion : str + The ion symbol, or None when the particle is not an ion. + + element_name : str + The name of the element. + + integer_charge : int + The charge in units of the elementary charge. + + charge : Quantity + The charge in units of coulombs. + + mass : Quantity + The mass of the particle, element, isotope, or ion. + + standard_atomic_weight : Quantity + The standard atomic weight of an element, if available. + + nuclide_mass : Quantity + The mass of a nucleon or of the nucleus of an isotope. + + atomic_number : int + The atomic number of an element. + + mass_number : int + The mass number of an isotope. + + baryon_number : int + The number of baryons (protons and neutrons) minus the number of + antibaryons (antiprotons and antineutrons) in the particle. + + lepton_number : int + The number of leptons minus the number of antileptons for special + particles and nuclides. + + binding_energy : Quantity + The nuclear binding energy. + + half_life : Quantity + The half-life of the particle or isotope in seconds. + + spin : int or float + The spin of the particle, if available. + + reduced_mass + Returns the reduced mass of the particle and another particle. + + is_category + Tests whether or not the particle is in or not in certain categories. + Possible categories include: 'lepton', 'antilepton', 'fermion', + 'boson', 'baryon', 'neutrino', 'antineutrino', 'element', 'isotope', + 'ion', 'matter', 'antimatter', 'stable', and 'unstable'. + + reduced_mass + Returns the reduced mass of the particle and another particle. + + Raises + ------ + InvalidParticleError + Raised when the particle input does not correspond to a valid particle + or is contradictory. + + InvalidElementError + For when an attribute is being accessed that requires information + about an element, but the particle is not an element, isotope, or ion. + + InvalidIsotopeError + For when an attribute is being accessed that requires information + about an isotope or nuclide, but the particle is not an isotope (or + an ion of an isotope). + + ChargeError + For when either the charge or integer_charge attributes is being + accessed but the charge information for the particle is not + available. + + TypeError + For when any of the arguments or keywords is not of the required + type. 
+ + """ + + def __init__(self, + argument: Union[str, int], + mass_numb: int = None, + Z: int = None): + r"""Initializes a Particle object by setting all necessary private + attributes.""" + + if not isinstance(argument, (int, str)): + raise TypeError( + "The first positional argument when creating a Particle " + "object must be either an integer or string.") + + if mass_numb is not None and not isinstance(mass_numb, int): + raise TypeError("mass_numb is not an integer") + + if Z is not None and not isinstance(Z, int): + raise TypeError("Z is not an integer.") + + # If the data is missing, then the private attribute should still + # exist but just be set to None. This initialization had previously + # been done in a loop using exec on a string, but this does not play + # well with static type checkers such as PyCharm. + + self._particle_symbol = None + self._atomic_symbol = None + self._isotope_symbol = None + self._ion_symbol = None + self._atomic_symbol = None + self._isotope_symbol = None + self._ion_symbol = None + self._unicode_symbol = None + self._element_name = None + self._atomic_number = None + self._mass_number = None + self._lepton_number = None + self._baryon_number = None + self._integer_charge = None + self._electric_charge = None + self._standard_atomic_weight = None + self._mass = None + self._nuclide_mass = None + self._half_life = None + self._spin = None + self._generation = None + self._periodic_table_group = None + self._periodic_table_period = None + self._charge = None + self._electric_charge = None + + # Use this set to keep track of particle categories such as 'lepton' + # for use with the is_category method later on. + + self._categories = set() + + # If the argument corresponds to one of the numerous case-sensitive or + # case-insensitive aliases for particles, return the standard symbol. + # Otherwise, just return the original argument. + + particle = _dealias_particle_aliases(argument) + + if particle in _Particles.keys(): # special particles + self._particle_symbol = particle + self._name = _Particles[particle]['name'] + self._spin = _Particles[particle]['spin'] + self._class = _Particles[particle]['class'] + self._lepton_number = _Particles[particle]['lepton number'] + self._baryon_number = _Particles[particle]['baryon number'] + self._integer_charge = _Particles[particle]['charge'] + self._half_life = _Particles[particle]['half-life'] + self._mass = _Particles[particle].get('mass', None) + + particle_taxonomy_dict = ParticleZoo._taxonomy_dict + categories = particle_taxonomy_dict.keys() + + for category in categories: + if particle in particle_taxonomy_dict[category]: + self._categories.add(category) + + if particle == 'p+': + + # Protons are a special case amongst special cases, since they + # are both a special particle and correspond to an element and + # an isotope. Protons are not as weird as electrons, though. + # Electrons are weird. + + self._atomic_symbol = 'H' + self._atomic_number = 1 + self._mass_number = 1 + self._element_name = 'hydrogen' + self._isotope_symbol = 'H-1' + self._ion_symbol = 'p+' + self._categories.update({'element', 'isotope', 'ion'}) + + if mass_numb is not None or Z is not None: + warnings.warn( + "Redundant mass number or charge information.", + AtomicWarning) + + elif mass_numb is not None or Z is not None: + raise InvalidParticleError( + "The keywords 'mass_numb' and 'Z' cannot be used when " + "creating Particle objects for special particles. 
To " + f"create a Particle object for {self._name}s, " + f"use: Particle({repr(self._particle_symbol)})") + + else: # elements, isotopes, and ions (besides protons) + try: + + atomic_nomenclature_dict = _parse_and_check_atomic_input( + argument, mass_numb=mass_numb, Z=Z) + + self._particle_symbol = atomic_nomenclature_dict['symbol'] + self._atomic_symbol = atomic_nomenclature_dict['element'] + self._isotope_symbol = atomic_nomenclature_dict['isotope'] + self._ion_symbol = atomic_nomenclature_dict['ion'] + self._mass_number = atomic_nomenclature_dict['mass_numb'] + self._integer_charge = atomic_nomenclature_dict['Z'] + + except Exception as exc: + errmsg = _invalid_particle_errmsg( + argument, mass_numb=mass_numb, Z=Z) + raise InvalidParticleError(errmsg) from exc + + element = self._atomic_symbol + isotope = self._isotope_symbol + ion = self._ion_symbol + + if element: + self._categories.add('element') + if isotope: + self._categories.add('isotope') + if ion: + self._categories.add('ion') + + # Element properties + + self._atomic_number = _Elements[element]['atomic_number'] + self._element_name = _Elements[element]['name'] + + self._baryon_number = self._mass_number + + # For the moment, set the lepton number to zero for elements, + # isotopes, and ions. The lepton number will probably come up + # primarily during + + self._lepton_number = 0 + + if isotope: + if _Isotopes[isotope]['is_stable']: + self._half_life = np.inf * u.s + else: + self._half_life = _Isotopes[isotope].get('half_life', None) + elif element and not isotope: + self._half_life = None + + if ion == 'He-4 2+': + self._spin = 0 + self._categories.add('boson') + + # Set the masses + + if element and not isotope and not ion: + try: + self._standard_atomic_weight = \ + _Elements[element]['atomic_mass'].to(u.kg) + self._mass = self._standard_atomic_weight + except KeyError: + self._standard_atomic_weight = None + elif element and isotope and not ion: + self._isotope_mass = _Isotopes[isotope]['atomic_mass'] + if isotope: + self._isotope_mass = \ + _Isotopes[isotope].get('atomic_mass', None).to(u.kg) + self._standard_atomic_weight = None + self._mass = self._isotope_mass + else: + self._standard_atomic_weight = \ + _Elements[element].get('atomic_mass', None) + self._isotope_mass = None + + # Set the charge + + if self._integer_charge is not None: + self._electric_charge = self._integer_charge * const.e.si + + self._is_element = self._atomic_symbol is not None + self._is_isotope = self._isotope_symbol is not None + self._is_ion = self._ion_symbol is not None + + self._element_errmsg = ( + f"The particle '{self.particle}' is not an element, so " + f"this attribute is not available.") + + self._isotope_errmsg = ( + f"The particle '{self.particle}' does not have an " + f"isotope specified, so this attribute is not available.") + + self._ion_errmsg = ( + f"The particle '{self.particle}' is not an ion, so this" + f"attribute is not available.") + + def __repr__(self) -> str: + r"""Returns a string of the call that would recreate this object.""" + return f'Particle("{self.particle}")' + + def __str__(self) -> str: + r"""Returns a string of the particle symbol.""" + return f"{self.particle}" + + def __eq__(self, other) -> bool: + r"""Returns True when comparing two Particle objects that correspond + to the same particle, and False when the two objects differ.""" + try: + if self.__dict__ == other.__dict__: + return True + else: + return False + except Exception: + return False + + def __ne__(self, other) -> bool: + r"""Returns True when 
the two objects differ, and False when + comparing two Particle objects that correspond to the same particle.""" + return not self.__eq__(other) + + @property + def particle(self) -> str: + r"""Returns the particle symbol. + + Examples + -------- + >>> electron = Particle('electron') + >>> electron.particle + 'e-' + >>> alpha = Particle('alpha') + >>> alpha.particle + 'He-4 2+' + """ + return self._particle_symbol + + @property + def element(self) -> Optional[str]: + r"""Returns the atomic symbol if the particle corresponds to an + element, and None otherwise. + + Examples + -------- + >>> alpha = Particle('He-4 2+') + >>> alpha.element + 'He' + >>> proton = Particle('proton') + >>> electron = Particle('electron') + >>> print(electron.element) + None + >>> if proton.element: print(proton.element) # can use as conditional + H + """ + return self._atomic_symbol + + @property + def isotope(self) -> Optional[str]: + r"""Returns the isotope symbol if the particle corresponds to an + isotope, and None otherwise. + + Example + ------- + >>> alpha = Particle('alpha') + >>> alpha.isotope + 'He-4' + """ + return self._isotope_symbol + + @property + def ion(self) -> Optional[str]: + r"""Returns the ion symbol if the particle corresponds to an ion, + and None otherwise. + + Example + ------- + >>> alpha = Particle('alpha') + >>> alpha.ion + 'He-4 2+' + """ + return self._ion_symbol + + @property + def element_name(self) -> str: + r"""Returns the name of the element corresponding to this particle, + or raises an InvalidElementError if the particle does not correspond + to an element. + + Example + ------- + >>> deuterium = Particle('D') + >>> deuterium.element_name + 'hydrogen' + """ + if not self.element: + raise InvalidElementError(self._element_errmsg) + return self._element_name + + @property + def integer_charge(self) -> int: + r"""Returns the integer charge of the partile, or raises a ChargeError + if the charge has not been specified. + + Example + ------- + >>> alpha = Particle('alpha') + >>> alpha.integer_charge + 2 + """ + if self._integer_charge is None: + raise ChargeError( + f"The charge of particle {self.particle} has not been " + f"specified.") + return self._integer_charge + + @property + def charge(self) -> u.C: + r"""Returns the electric charge as a Quantity in units of coulombs, + or raises a ChargeError if the charge has not been specified. + + Example + ------- + >>> alpha = Particle('alpha') + >>> alpha.charge + <Quantity 3.20435324e-19 C> + """ + if self._electric_charge is None: + raise ChargeError( + f"The charge of particle {self.particle} has not been " + f"specified.") + return self._electric_charge + + @property + def mass(self) -> u.kg: + r"""Returns the mass of the element, isotope, ion, particle, or + antiparticle; or raises a MissingAtomicDataError if the mass + is unavailable. + + Notes + ----- + For special particles, this attribute will return the standard value + of the mass of the particle. The masses of neutrinos are not + available. + + If the particle is an element and not an isotope or ion, then this + attribute will return the standard atomic weight if available. + + If the particle is an isotope but not an ion, then this attribute + will return the isotopic mass. + + If this particle is an ion, then this attribute will return the mass + of the element or isotope (as described above) minus the integer charge + times the electron mass. 
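+        In symbols, the ion mass described above is m = m_atom - Z * m_e,
+        where m_atom is the elemental or isotopic mass from the previous
+        two paragraphs, Z is the integer charge, and m_e is the electron
+        mass.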
+
+        Example
+        -------
+        >>> alpha = Particle('alpha')
+        >>> alpha.mass
+        <Quantity 6.64647897e-27 kg>
+        """
+        if self._mass is None:
+            raise MissingAtomicDataError(
+                f"The mass of particle '{self.particle}' is unavailable.")
+        return self._mass.to(u.kg)
+
+    @property
+    def standard_atomic_weight(self) -> u.kg:
+        r"""Returns the standard atomic weight of an element if available.
+        Raises a MissingAtomicDataError if the particle is an element for
+        which the standard_atomic_weight is unavailable.  Raises an
+        InvalidElementError if the particle is not an element.
+
+        Example
+        -------
+        >>> H = Particle('H')
+        >>> H.standard_atomic_weight
+        <Quantity 1.67382335e-27 kg>
+        """
+        if self.element and not self.isotope and not self.ion:
+            if self._standard_atomic_weight is None:
+                raise MissingAtomicDataError(
+                    f"The standard atomic weight of {self.element} is "
+                    f"unavailable.")
+            else:
+                return self._standard_atomic_weight.to(u.kg)
+        else:
+            raise InvalidElementError(self._element_errmsg)
+
+    @property
+    def nuclide_mass(self) -> u.kg:
+        r"""Returns the mass of the nucleus of an isotope, or raises an
+        InvalidIsotopeError if the particle is not an isotope or neutron.
+
+        Example
+        -------
+        >>> isotope = Particle('O-18')
+        >>> isotope.nuclide_mass
+        <Quantity 2.98810197e-26 kg>
+        """
+        if self.particle in ['H-1', 'p+']:
+            _nuclide_mass = const.m_p
+        elif self.particle == 'n':
+            _nuclide_mass = const.m_n
+        elif self._is_isotope and not self._is_ion:
+            try:
+                _atomic_number = self._atomic_number
+                _isotope_mass = \
+                    _Isotopes[self.isotope]['atomic_mass'].to(u.kg)
+                _nuclide_mass = _isotope_mass - _atomic_number * const.m_e
+            except KeyError:  # coveralls: ignore
+                raise MissingAtomicDataError(
+                    f"The mass of a {self.isotope} nuclide is not available.")
+        else:
+            raise InvalidIsotopeError(self._isotope_errmsg)
+
+        return _nuclide_mass
+
+    @property
+    def atomic_number(self) -> int:
+        r"""Returns the atomic number of the element corresponding to this
+        particle, or raises an InvalidElementError if the particle does not
+        correspond to an element.
+
+        Example
+        -------
+        >>> iron = Particle('Fe')
+        >>> iron.atomic_number
+        26
+        """
+        if not self._is_element:
+            raise InvalidElementError(self._element_errmsg)
+        return self._atomic_number
+
+    @property
+    def mass_number(self) -> int:
+        r"""Returns the mass number of the isotope corresponding to this
+        particle, or raises an InvalidIsotopeError if the particle does not
+        correspond to an isotope.
+
+        Example
+        -------
+        >>> tritium = Particle('H-3')
+        >>> tritium.mass_number
+        3
+        """
+        if not self._is_isotope:
+            raise InvalidIsotopeError(self._isotope_errmsg)
+        return self._mass_number
+
+    @property
+    def baryon_number(self) -> int:
+        r"""Returns the number of protons plus neutrons minus the number of
+        antiprotons and antineutrons in the particle, or raises an
+        AtomicError if the baryon number is unavailable.  The baryon number
+        is equivalent to the mass number for isotopes.
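+        In this implementation the baryon number of an element, isotope,
+        or ion is taken to be its mass number, since each proton and each
+        neutron contributes a baryon number of 1 and electrons contribute
+        nothing.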
+
+        Example
+        -------
+        >>> antineutron = Particle('antineutron')
+        >>> electron = Particle('electron')
+        >>> alpha = Particle('He-4 2+')
+        >>> antineutron.baryon_number
+        -1
+        >>> electron.baryon_number
+        0
+        >>> alpha.baryon_number
+        4
+        >>> alpha.baryon_number == alpha.mass_number
+        True
+        """
+        if self._baryon_number is None:  # coveralls: ignore
+            raise AtomicError(
+                f"The baryon number for '{self.particle}' is not "
+                f"available.")
+        return self._baryon_number
+
+    @property
+    def lepton_number(self) -> int:
+        r"""Returns 1 for leptons, -1 for antileptons, and 0 for elements,
+        isotopes, and ions; or raises an AtomicError if the lepton number
+        is not available.  This attribute does not include the electrons
+        in an atom or ion.
+
+        Example
+        -------
+        >>> positron = Particle('e+')
+        >>> electron = Particle('e-')
+        >>> proton = Particle('p')
+        >>> positron.lepton_number
+        -1
+        >>> electron.lepton_number
+        1
+        >>> proton.lepton_number
+        0
+        """
+        if self._lepton_number is None:  # coveralls: ignore
+            raise AtomicError(
+                f"The lepton number for {self.particle} is not available.")
+        return self._lepton_number
+
+    @property
+    def binding_energy(self) -> u.J:
+        r"""Returns the nuclear binding energy, or raises an
+        InvalidIsotopeError if the particle is not a nucleon or isotope.
+
+        Example
+        -------
+        >>> iron59 = Particle('Fe-59')
+        >>> iron59.binding_energy
+        <Quantity 8.27574334e-11 J>
+        """
+
+        if self._baryon_number == 1:
+            return 0 * u.J
+
+        if not self.element:
+            raise InvalidIsotopeError(
+                f"The nuclear binding energy may only be calculated for "
+                f"nucleons and isotopes.")
+
+        number_of_protons = self.atomic_number
+        number_of_neutrons = self.mass_number - self.atomic_number
+
+        mass_of_protons = number_of_protons * const.m_p
+        mass_of_neutrons = number_of_neutrons * const.m_n
+
+        mass_of_nucleons = mass_of_protons + mass_of_neutrons
+        mass_of_nuclide = self.mass - const.m_e * self.atomic_number
+
+        mass_defect = mass_of_nucleons - mass_of_nuclide
+        nuclear_binding_energy = mass_defect * const.c ** 2
+
+        return nuclear_binding_energy.to(u.J)
+
+    @property
+    def half_life(self) -> u.s:
+        r"""Returns the half-life of the particle, or raises a
+        MissingAtomicDataError if the half-life is unavailable.
+
+        Examples
+        --------
+        >>> from astropy import units
+        >>> neutron = Particle('n')
+        >>> neutron.half_life
+        <Quantity 881.5 s>
+        >>> carbon14 = Particle('C-14')
+        >>> carbon14.half_life.to(units.yr)
+        <Quantity 5730. yr>
+
+        """
+        if self._atomic_symbol and not self._isotope_symbol:
+            raise InvalidIsotopeError(self._isotope_errmsg)
+        if self._half_life is None:
+            raise MissingAtomicDataError(
+                f"The half-life of '{self.particle}' is not available.")
+        return self._half_life
+
+    @property
+    def spin(self) -> Union[int, float]:
+        r"""Returns the spin of the particle, or raises a
+        MissingAtomicDataError if the spin is not available.
+
+        Example
+        -------
+        >>> electron = Particle('e-')
+        >>> electron.spin
+        0.5
+        """
+        if self._spin is None:
+            raise MissingAtomicDataError(
+                f"The spin of particle '{self.particle}' is unavailable.")
+        return self._spin
+
+    def reduced_mass(self, other, Z=None, mass_numb=None) -> u.kg:
+        r"""Finds the reduced mass between two particles, or raises a
+        MissingAtomicDataError if either particle's mass is unavailable,
+        or an AtomicError for any other errors.
+        The other particle may be represented by another Particle object,
+        a Quantity with units of mass, or a string of the other particle's
+        symbol (in conjunction with the keywords Z and mass_numb).
+
+        Example
+        -------
+        >>> from plasmapy.atomic import Particle
+        >>> electron = Particle('e-')
+        >>> proton = Particle('p+')
+        >>> proton.reduced_mass(electron)
+        <Quantity 9.10442514e-31 kg>
+
+        """
+
+        try:
+            mass_this = self.mass.to(u.kg)
+        except MissingAtomicDataError:
+            raise MissingAtomicDataError(
+                f"Unable to find the reduced mass because the mass of "
+                f"{self.particle} is not available.") from None
+
+        if isinstance(other, (str, int)):
+            other = Particle(other, Z=Z, mass_numb=mass_numb)
+
+        if isinstance(other, Particle):
+            try:
+                mass_that = other.mass.to(u.kg)
+            except MissingAtomicDataError:
+                raise MissingAtomicDataError(
+                    f"Unable to find the reduced mass because the mass of "
+                    f"{other.particle} is not available.") from None
+        else:
+            try:
+                mass_that = other.to(u.kg)
+            except Exception as exc:  # coveralls: ignore
+                raise AtomicError(
+                    f"{other} must be either a Particle or a Quantity or "
+                    f"Constant with units of mass in order to calculate "
+                    f"reduced mass.") from exc
+
+        return (mass_this * mass_that) / (mass_this + mass_that)
+
+    def is_category(self, *categories, any=False,
+                    exclude: Union[Set, Tuple, List] = set()) -> bool:
+        r"""Returns True if the particle is in all of the inputted
+        categories.
+
+        If any is True, then this method will return True if the particle
+        is in any of the listed categories.
+
+        If the exclude keyword is set, then is_category will return False
+        if the particle is in any of the excluded categories, whether or
+        not the particle matches the other criteria.
+
+        The valid categories are: 'lepton', 'antilepton', 'baryon',
+        'antibaryon', 'fermion', 'boson', 'neutrino', 'antineutrino',
+        'matter', 'antimatter', 'element', 'isotope', 'ion', 'stable',
+        and 'unstable'.
+
+        Examples
+        --------
+        >>> neutron = Particle('n')
+        >>> neutron.is_category({'fermion'})
+        True
+        >>> neutron.is_category('fermion', 'lepton', 'boson', any=True)
+        True
+        >>> neutron.is_category(['baryon', 'matter'], exclude=['fermion'])
+        False
+        >>> neutron.is_category([], exclude={'boson'})
+        True
+
+        """
+
+        def _make_into_set(arg: Union[str, Set, Tuple, List]) -> Set[str]:
+            r"""Turns the input (a string, set, tuple, or list) into
+            a set containing the items in the input."""
+            if len(arg) == 0:
+                return set()
+
+            if isinstance(arg, set):
+                return arg
+
+            if isinstance(arg, str):
+                return {arg}
+
+            if isinstance(arg[0], (tuple, list, set)):
+                return set(arg[0])
+            else:
+                return set(arg)
+
+        if not isinstance(any, bool):
+            raise TypeError(
+                f"The keyword any in {self.__repr__()}.is_category must be "
+                f"set to either True or False.")
+        elif any and len(categories) == 0:
+            raise AtomicError(
+                f"The keyword 'any' to {self.__repr__()}.is_category "
+                f"cannot be set to True if no categories to be matched "
+                f"are inputted.")
+
+        categories = _make_into_set(categories)
+        exclude = _make_into_set(exclude)
+
+        # If valid_categories is changed, remember to change the docstring
+        # for the Particle class.
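+        # A rough sketch of the set algebra used below, assuming for the
+        # sake of illustration that
+        # self._categories == {'baryon', 'fermion', 'matter'}:
+        #
+        #     categories - valid_categories  ->  unrecognized category names
+        #     exclude & self._categories     ->  nonempty means return False
+        #     categories & self._categories  ->  nonempty means return True
+        #                                        (when any is True)
+        #     categories <= self._categories ->  True only if every
+        #                                        requested category matches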
+
+        valid_categories = {
+            'lepton', 'antilepton', 'fermion', 'boson', 'baryon',
+            'antibaryon', 'neutrino', 'antineutrino', 'element', 'isotope',
+            'ion', 'matter', 'antimatter', 'stable', 'unstable',
+        }
+
+        if categories - valid_categories:
+            raise AtomicError(
+                f"The following categories in {self.__repr__()}.is_category "
+                f"are not valid categories: {categories - valid_categories}")
+
+        if exclude - valid_categories:
+            raise AtomicError(
+                f"The following categories to be excluded in "
+                f"{self.__repr__()}.is_category are not valid categories: "
+                f"{exclude - valid_categories}")
+
+        if exclude & categories:
+            raise AtomicError(
+                f"The following are duplicate categories in "
+                f"{self.__repr__()}.is_category: {categories & exclude}")
+
+        if exclude & self._categories:
+            return False
+
+        if any and categories & self._categories:
+            return True
+
+        return categories <= self._categories
diff --git a/plasmapy/atomic/isotopes.py b/plasmapy/atomic/isotopes.py
index d0504238..c98d280b 100644
--- a/plasmapy/atomic/isotopes.py
+++ b/plasmapy/atomic/isotopes.py
@@ -4,8 +4,8 @@
 from astropy import units as u
 
-def Iso(symbol, name, atomic_number, mass_number, relative_atomic_mass,
-        is_stable, isotopic_abundance=None, half_life=None):
+def _iso(symbol, name, atomic_number, mass_number, relative_atomic_mass,
+         is_stable, isotopic_abundance=None, half_life=None):
     """Create a dictionary containing isotope information."""
     Isotope = {'name': name,
                'atomic_number': atomic_number,
@@ -27,3706 +27,3707 @@
 # The half-life data is presently incomplete.
 _Isotopes = {
-    'n': Iso('n', 'neutron', 0, 1, 1.00866491588, False, half_life=881.5),
-    'H-1': Iso('H-1', 'hydrogen-1', 1, 1, 1.00782503223, True,
-               isotopic_abundance=0.999885),
-    'D': Iso('D', 'deuterium', 1, 2, 2.01410177812, True,
-             isotopic_abundance=0.000115),
-    'T': Iso('T', 'tritium', 1, 3, 3.0160492779, False,
-             half_life=388800000.0),
-    'H-4': Iso('H-4', 'hydrogen-4', 1, 4, 4.02643, False),
-    'H-5': Iso('H-5', 'hydrogen-5', 1, 5, 5.035311, False),
-    'H-6': Iso('H-6', 'hydrogen-6', 1, 6, 6.04496, False),
-    'H-7': Iso('H-7', 'hydrogen-7', 1, 7, 7.0527, False),
-    'He-3': Iso('He-3', 'helium-3', 2, 3, 3.0160293201, True,
-                isotopic_abundance=0.00000134),
-    'He-4': Iso('He-4', 'helium-4', 2, 4, 4.00260325413, True,
-                isotopic_abundance=0.99999866),
-    'He-5': Iso('He-5', 'helium-5', 2, 5, 5.012057, False),
-    'He-6': Iso('He-6', 'helium-6', 2, 6, 6.018885891, False),
-    'He-7': Iso('He-7', 'helium-7', 2, 7, 7.0279907, False),
-    'He-8': Iso('He-8', 'helium-8', 2, 8, 8.033934390, False),
-    'He-9': Iso('He-9', 'helium-9', 2, 9, 9.043946, False),
-    'He-10': Iso('He-10', 'helium-10', 2, 10, 10.05279, False),
-    'Li-3': Iso('Li-3', 'lithium-3', 3, 3, 3.0308, False),
-    'Li-4': Iso('Li-4', 'lithium-4', 3, 4, 4.02719, False),
-    'Li-5': Iso('Li-5', 'lithium-5', 3, 5, 5.012538, False),
-    'Li-6': Iso('Li-6', 'lithium-6', 3, 6, 6.0151228874, True,
-                isotopic_abundance=0.0759),
-    'Li-7': Iso('Li-7', 'lithium-7', 3, 7, 7.0160034366, True,
-                isotopic_abundance=0.9241),
-    'Li-8': Iso('Li-8', 'lithium-8', 3, 8, 8.022486246, False),
-    'Li-9': Iso('Li-9', 'lithium-9', 3, 9, 9.02679019, False),
-    'Li-10': Iso('Li-10', 'lithium-10', 3, 10, 10.035483, False),
-    'Li-11': Iso('Li-11', 'lithium-11', 3, 11, 11.04372358, False),
-    'Li-12': Iso('Li-12', 'lithium-12', 3, 12, 12.052517, False),
-    'Li-13': Iso('Li-13', 'lithium-13', 3, 13, 13.06263, False),
-    'Be-5': Iso('Be-5', 'beryllium-5', 4, 5, 5.0399, False),
-    'Be-6': 
Iso('Be-6', 'beryllium-6', 4, 6, 6.0197264, False), - 'Be-7': Iso('Be-7', 'beryllium-7', 4, 7, 7.016928717, False), - 'Be-8': Iso('Be-8', 'beryllium-8', 4, 8, 8.005305102, False, - half_life=6.7e-17), - 'Be-9': Iso('Be-9', 'beryllium-9', 4, 9, 9.012183065, True, - isotopic_abundance=1), - 'Be-10': Iso('Be-10', 'beryllium-10', 4, 10, 10.013534695, False), - 'Be-11': Iso('Be-11', 'beryllium-11', 4, 11, 11.02166108, False), - 'Be-12': Iso('Be-12', 'beryllium-12', 4, 12, 12.0269221, False), - 'Be-13': Iso('Be-13', 'beryllium-13', 4, 13, 13.036135, False), - 'Be-14': Iso('Be-14', 'beryllium-14', 4, 14, 14.04289, False), - 'Be-15': Iso('Be-15', 'beryllium-15', 4, 15, 15.05342, False), - 'Be-16': Iso('Be-16', 'beryllium-16', 4, 16, 16.06167, False), - 'B-6': Iso('B-6', 'boron-6', 5, 6, 6.0508, False), - 'B-7': Iso('B-7', 'boron-7', 5, 7, 7.029712, False), - 'B-8': Iso('B-8', 'boron-8', 5, 8, 8.0246073, False), - 'B-9': Iso('B-9', 'boron-9', 5, 9, 9.01332965, False), - 'B-10': Iso('B-10', 'boron-10', 5, 10, 10.01293695, True, - isotopic_abundance=0.199), - 'B-11': Iso('B-11', 'boron-11', 5, 11, 11.00930536, True, - isotopic_abundance=0.801), - 'B-12': Iso('B-12', 'boron-12', 5, 12, 12.0143527, False), - 'B-13': Iso('B-13', 'boron-13', 5, 13, 13.0177802, False), - 'B-14': Iso('B-14', 'boron-14', 5, 14, 14.025404, False), - 'B-15': Iso('B-15', 'boron-15', 5, 15, 15.031088, False), - 'B-16': Iso('B-16', 'boron-16', 5, 16, 16.039842, False), - 'B-17': Iso('B-17', 'boron-17', 5, 17, 17.04699, False), - 'B-18': Iso('B-18', 'boron-18', 5, 18, 18.05566, False), - 'B-19': Iso('B-19', 'boron-19', 5, 19, 19.06310, False), - 'B-20': Iso('B-20', 'boron-20', 5, 20, 20.07207, False), - 'B-21': Iso('B-21', 'boron-21', 5, 21, 21.08129, False), - 'C-8': Iso('C-8', 'carbon-8', 6, 8, 8.037643, False), - 'C-9': Iso('C-9', 'carbon-9', 6, 9, 9.0310372, False), - 'C-10': Iso('C-10', 'carbon-10', 6, 10, 10.01685331, False), - 'C-11': Iso('C-11', 'carbon-11', 6, 11, 11.0114336, False), - 'C-12': Iso('C-12', 'carbon-12', 6, 12, 12.0000000, True, - isotopic_abundance=0.9893), - 'C-13': Iso('C-13', 'carbon-13', 6, 13, 13.00335483507, True, - isotopic_abundance=0.0107), - 'C-14': Iso('C-14', 'carbon-14', 6, 14, 14.0032419884, False), - 'C-15': Iso('C-15', 'carbon-15', 6, 15, 15.01059926, False), - 'C-16': Iso('C-16', 'carbon-16', 6, 16, 16.0147013, False), - 'C-17': Iso('C-17', 'carbon-17', 6, 17, 17.022577, False), - 'C-18': Iso('C-18', 'carbon-18', 6, 18, 18.026751, False), - 'C-19': Iso('C-19', 'carbon-19', 6, 19, 19.03480, False), - 'C-20': Iso('C-20', 'carbon-20', 6, 20, 20.04032, False), - 'C-21': Iso('C-21', 'carbon-21', 6, 21, 21.04900, False), - 'C-22': Iso('C-22', 'carbon-22', 6, 22, 22.05753, False), - 'C-23': Iso('C-23', 'carbon-23', 6, 23, 23.0689, False), - 'N-10': Iso('N-10', 'nitrogen-10', 7, 10, 10.04165, False), - 'N-11': Iso('N-11', 'nitrogen-11', 7, 11, 11.026091, False), - 'N-12': Iso('N-12', 'nitrogen-12', 7, 12, 12.0186132, False), - 'N-13': Iso('N-13', 'nitrogen-13', 7, 13, 13.00573861, False), - 'N-14': Iso('N-14', 'nitrogen-14', 7, 14, 14.00307400443, True, - isotopic_abundance=0.99636), - 'N-15': Iso('N-15', 'nitrogen-15', 7, 15, 15.00010889888, True, - isotopic_abundance=0.00364), - 'N-16': Iso('N-16', 'nitrogen-16', 7, 16, 16.0061019, False), - 'N-17': Iso('N-17', 'nitrogen-17', 7, 17, 17.008449, False), - 'N-18': Iso('N-18', 'nitrogen-18', 7, 18, 18.014078, False), - 'N-19': Iso('N-19', 'nitrogen-19', 7, 19, 19.017022, False), - 'N-20': Iso('N-20', 'nitrogen-20', 7, 20, 20.023366, False), - 
'N-21': Iso('N-21', 'nitrogen-21', 7, 21, 21.02711, False), - 'N-22': Iso('N-22', 'nitrogen-22', 7, 22, 22.03439, False), - 'N-23': Iso('N-23', 'nitrogen-23', 7, 23, 23.04114, False), - 'N-24': Iso('N-24', 'nitrogen-24', 7, 24, 24.05039, False), - 'N-25': Iso('N-25', 'nitrogen-25', 7, 25, 25.06010, False), - 'O-12': Iso('O-12', 'oxygen-12', 8, 12, 12.034262, False), - 'O-13': Iso('O-13', 'oxygen-13', 8, 13, 13.024815, False), - 'O-14': Iso('O-14', 'oxygen-14', 8, 14, 14.00859636, False), - 'O-15': Iso('O-15', 'oxygen-15', 8, 15, 15.00306562, False), - 'O-16': Iso('O-16', 'oxygen-16', 8, 16, 15.99491461957, True, - isotopic_abundance=0.99757), - 'O-17': Iso('O-17', 'oxygen-17', 8, 17, 16.99913175650, True, - isotopic_abundance=0.00038), - 'O-18': Iso('O-18', 'oxygen-18', 8, 18, 17.99915961286, True, - isotopic_abundance=0.00205), - 'O-19': Iso('O-19', 'oxygen-19', 8, 19, 19.0035780, False), - 'O-20': Iso('O-20', 'oxygen-20', 8, 20, 20.00407535, False), - 'O-21': Iso('O-21', 'oxygen-21', 8, 21, 21.008655, False), - 'O-22': Iso('O-22', 'oxygen-22', 8, 22, 22.009966, False), - 'O-23': Iso('O-23', 'oxygen-23', 8, 23, 23.015696, False), - 'O-24': Iso('O-24', 'oxygen-24', 8, 24, 24.01986, False), - 'O-25': Iso('O-25', 'oxygen-25', 8, 25, 25.02936, False), - 'O-26': Iso('O-26', 'oxygen-26', 8, 26, 26.03729, False), - 'O-27': Iso('O-27', 'oxygen-27', 8, 27, 27.04772, False), - 'O-28': Iso('O-28', 'oxygen-28', 8, 28, 28.05591, False), - 'F-14': Iso('F-14', 'fluorine-14', 9, 14, 14.034315, False), - 'F-15': Iso('F-15', 'fluorine-15', 9, 15, 15.018043, False), - 'F-16': Iso('F-16', 'fluorine-16', 9, 16, 16.0114657, False), - 'F-17': Iso('F-17', 'fluorine-17', 9, 17, 17.00209524, False), - 'F-18': Iso('F-18', 'fluorine-18', 9, 18, 18.00093733, False, - half_life=6586.236), - 'F-19': Iso('F-19', 'fluorine-19', 9, 19, 18.99840316273, True, - isotopic_abundance=1), - 'F-20': Iso('F-20', 'fluorine-20', 9, 20, 19.999981252, False), - 'F-21': Iso('F-21', 'fluorine-21', 9, 21, 20.9999489, False), - 'F-22': Iso('F-22', 'fluorine-22', 9, 22, 22.002999, False), - 'F-23': Iso('F-23', 'fluorine-23', 9, 23, 23.003557, False), - 'F-24': Iso('F-24', 'fluorine-24', 9, 24, 24.008115, False), - 'F-25': Iso('F-25', 'fluorine-25', 9, 25, 25.012199, False), - 'F-26': Iso('F-26', 'fluorine-26', 9, 26, 26.020038, False), - 'F-27': Iso('F-27', 'fluorine-27', 9, 27, 27.02644, False), - 'F-28': Iso('F-28', 'fluorine-28', 9, 28, 28.03534, False), - 'F-29': Iso('F-29', 'fluorine-29', 9, 29, 29.04254, False), - 'F-30': Iso('F-30', 'fluorine-30', 9, 30, 30.05165, False), - 'F-31': Iso('F-31', 'fluorine-31', 9, 31, 31.05971, False), - 'Ne-16': Iso('Ne-16', 'neon-16', 10, 16, 16.025750, False), - 'Ne-17': Iso('Ne-17', 'neon-17', 10, 17, 17.01771396, False), - 'Ne-18': Iso('Ne-18', 'neon-18', 10, 18, 18.00570870, False), - 'Ne-19': Iso('Ne-19', 'neon-19', 10, 19, 19.00188091, False), - 'Ne-20': Iso('Ne-20', 'neon-20', 10, 20, 19.9924401762, True, - isotopic_abundance=0.9048), - 'Ne-21': Iso('Ne-21', 'neon-21', 10, 21, 20.993846685, True, - isotopic_abundance=0.0027), - 'Ne-22': Iso('Ne-22', 'neon-22', 10, 22, 21.991385114, True, - isotopic_abundance=0.0925), - 'Ne-23': Iso('Ne-23', 'neon-23', 10, 23, 22.99446691, False), - 'Ne-24': Iso('Ne-24', 'neon-24', 10, 24, 23.99361065, False), - 'Ne-25': Iso('Ne-25', 'neon-25', 10, 25, 24.997789, False), - 'Ne-26': Iso('Ne-26', 'neon-26', 10, 26, 26.000515, False), - 'Ne-27': Iso('Ne-27', 'neon-27', 10, 27, 27.007553, False), - 'Ne-28': Iso('Ne-28', 'neon-28', 10, 28, 28.01212, False), - 
'Ne-29': Iso('Ne-29', 'neon-29', 10, 29, 29.01975, False), - 'Ne-30': Iso('Ne-30', 'neon-30', 10, 30, 30.02473, False), - 'Ne-31': Iso('Ne-31', 'neon-31', 10, 31, 31.0331, False), - 'Ne-32': Iso('Ne-32', 'neon-32', 10, 32, 32.03972, False), - 'Ne-33': Iso('Ne-33', 'neon-33', 10, 33, 33.04938, False), - 'Ne-34': Iso('Ne-34', 'neon-34', 10, 34, 34.05673, False), - 'Na-18': Iso('Na-18', 'sodium-18', 11, 18, 18.02688, False), - 'Na-19': Iso('Na-19', 'sodium-19', 11, 19, 19.013880, False), - 'Na-20': Iso('Na-20', 'sodium-20', 11, 20, 20.0073544, False), - 'Na-21': Iso('Na-21', 'sodium-21', 11, 21, 20.99765469, False), - 'Na-22': Iso('Na-22', 'sodium-22', 11, 22, 21.99443741, False, - half_life=82163808.0), - 'Na-23': Iso('Na-23', 'sodium-23', 11, 23, 22.9897692820, True, + 'n': _iso('n', 'neutron', 0, 1, 1.00866491588, False, half_life=881.5), + 'H-1': _iso('H-1', 'hydrogen-1', 1, 1, 1.00782503223, True, + isotopic_abundance=0.999885), + 'D': _iso('D', 'deuterium', 1, 2, 2.01410177812, True, + isotopic_abundance=0.000115), + 'T': _iso('T', 'tritium', 1, 3, 3.0160492779, False, + half_life=388800000.0), + 'H-4': _iso('H-4', 'hydrogen-4', 1, 4, 4.02643, False), + 'H-5': _iso('H-5', 'hydrogen-5', 1, 5, 5.035311, False), + 'H-6': _iso('H-6', 'hydrogen-6', 1, 6, 6.04496, False), + 'H-7': _iso('H-7', 'hydrogen-7', 1, 7, 7.0527, False), + 'He-3': _iso('He-3', 'helium-3', 2, 3, 3.0160293201, True, + isotopic_abundance=0.00000134), + 'He-4': _iso('He-4', 'helium-4', 2, 4, 4.00260325413, True, + isotopic_abundance=0.99999866), + 'He-5': _iso('He-5', 'helium-5', 2, 5, 5.012057, False), + 'He-6': _iso('He-6', 'helium-6', 2, 6, 6.018885891, False), + 'He-7': _iso('He-7', 'helium-7', 2, 7, 7.0279907, False), + 'He-8': _iso('He-8', 'helium-8', 2, 8, 8.033934390, False), + 'He-9': _iso('He-9', 'helium-9', 2, 9, 9.043946, False), + 'He-10': _iso('He-10', 'helium-10', 2, 10, 10.05279, False), + 'Li-3': _iso('Li-3', 'lithium-3', 3, 3, 3.0308, False), + 'Li-4': _iso('Li-4', 'lithium-4', 3, 4, 4.02719, False), + 'Li-5': _iso('Li-5', 'lithium-5', 3, 5, 5.012538, False), + 'Li-6': _iso('Li-6', 'lithium-6', 3, 6, 6.0151228874, True, + isotopic_abundance=0.0759), + 'Li-7': _iso('Li-7', 'lithium-7', 3, 7, 7.0160034366, True, + isotopic_abundance=0.9241), + 'Li-8': _iso('Li-8', 'lithium-8', 3, 8, 8.022486246, False), + 'Li-9': _iso('Li-9', 'lithium-9', 3, 9, 9.02679019, False), + 'Li-10': _iso('Li-10', 'lithium-10', 3, 10, 10.035483, False), + 'Li-11': _iso('Li-11', 'lithium-11', 3, 11, 11.04372358, False), + 'Li-12': _iso('Li-12', 'lithium-12', 3, 12, 12.052517, False), + 'Li-13': _iso('Li-13', 'lithium-13', 3, 13, 13.06263, False), + 'Be-5': _iso('Be-5', 'beryllium-5', 4, 5, 5.0399, False), + 'Be-6': _iso('Be-6', 'beryllium-6', 4, 6, 6.0197264, False), + 'Be-7': _iso('Be-7', 'beryllium-7', 4, 7, 7.016928717, False), + 'Be-8': _iso('Be-8', 'beryllium-8', 4, 8, 8.005305102, False, + half_life=6.7e-17), + 'Be-9': _iso('Be-9', 'beryllium-9', 4, 9, 9.012183065, True, isotopic_abundance=1), - 'Na-24': Iso('Na-24', 'sodium-24', 11, 24, 23.990962950, False, - half_life=53824.32), - 'Na-25': Iso('Na-25', 'sodium-25', 11, 25, 24.9899540, False), - 'Na-26': Iso('Na-26', 'sodium-26', 11, 26, 25.9926346, False), - 'Na-27': Iso('Na-27', 'sodium-27', 11, 27, 26.9940765, False), - 'Na-28': Iso('Na-28', 'sodium-28', 11, 28, 27.998939, False), - 'Na-29': Iso('Na-29', 'sodium-29', 11, 29, 29.0028771, False), - 'Na-30': Iso('Na-30', 'sodium-30', 11, 30, 30.0090979, False), - 'Na-31': Iso('Na-31', 'sodium-31', 11, 31, 31.013163, False), - 
'Na-32': Iso('Na-32', 'sodium-32', 11, 32, 32.02019, False), - 'Na-33': Iso('Na-33', 'sodium-33', 11, 33, 33.02573, False), - 'Na-34': Iso('Na-34', 'sodium-34', 11, 34, 34.03359, False), - 'Na-35': Iso('Na-35', 'sodium-35', 11, 35, 35.04062, False), - 'Na-36': Iso('Na-36', 'sodium-36', 11, 36, 36.04929, False), - 'Na-37': Iso('Na-37', 'sodium-37', 11, 37, 37.05705, False), - 'Mg-19': Iso('Mg-19', 'magnesium-19', 12, 19, 19.034169, False), - 'Mg-20': Iso('Mg-20', 'magnesium-20', 12, 20, 20.018850, False), - 'Mg-21': Iso('Mg-21', 'magnesium-21', 12, 21, 21.011716, False), - 'Mg-22': Iso('Mg-22', 'magnesium-22', 12, 22, 21.99957065, False), - 'Mg-23': Iso('Mg-23', 'magnesium-23', 12, 23, 22.99412421, False), - 'Mg-24': Iso('Mg-24', 'magnesium-24', 12, 24, 23.985041697, True, - isotopic_abundance=0.7899), - 'Mg-25': Iso('Mg-25', 'magnesium-25', 12, 25, 24.985836976, True, - isotopic_abundance=0.1000), - 'Mg-26': Iso('Mg-26', 'magnesium-26', 12, 26, 25.982592968, True, - isotopic_abundance=0.1101), - 'Mg-27': Iso('Mg-27', 'magnesium-27', 12, 27, 26.984340624, False), - 'Mg-28': Iso('Mg-28', 'magnesium-28', 12, 28, 27.9838767, False), - 'Mg-29': Iso('Mg-29', 'magnesium-29', 12, 29, 28.988617, False), - 'Mg-30': Iso('Mg-30', 'magnesium-30', 12, 30, 29.9904629, False), - 'Mg-31': Iso('Mg-31', 'magnesium-31', 12, 31, 30.9966480, False), - 'Mg-32': Iso('Mg-32', 'magnesium-32', 12, 32, 31.9991102, False), - 'Mg-33': Iso('Mg-33', 'magnesium-33', 12, 33, 33.0053271, False), - 'Mg-34': Iso('Mg-34', 'magnesium-34', 12, 34, 34.008935, False), - 'Mg-35': Iso('Mg-35', 'magnesium-35', 12, 35, 35.01679, False), - 'Mg-36': Iso('Mg-36', 'magnesium-36', 12, 36, 36.02188, False), - 'Mg-37': Iso('Mg-37', 'magnesium-37', 12, 37, 37.03037, False), - 'Mg-38': Iso('Mg-38', 'magnesium-38', 12, 38, 38.03658, False), - 'Mg-39': Iso('Mg-39', 'magnesium-39', 12, 39, 39.04538, False), - 'Mg-40': Iso('Mg-40', 'magnesium-40', 12, 40, 40.05218, False), - 'Al-21': Iso('Al-21', 'aluminium-21', 13, 21, 21.02897, False), - 'Al-22': Iso('Al-22', 'aluminium-22', 13, 22, 22.01954, False), - 'Al-23': Iso('Al-23', 'aluminium-23', 13, 23, 23.00724435, False), - 'Al-24': Iso('Al-24', 'aluminium-24', 13, 24, 23.9999489, False), - 'Al-25': Iso('Al-25', 'aluminium-25', 13, 25, 24.99042810, False), - 'Al-26': Iso('Al-26', 'aluminium-26', 13, 26, 25.986891904, False), - 'Al-27': Iso('Al-27', 'aluminium-27', 13, 27, 26.98153853, True, + 'Be-10': _iso('Be-10', 'beryllium-10', 4, 10, 10.013534695, False), + 'Be-11': _iso('Be-11', 'beryllium-11', 4, 11, 11.02166108, False), + 'Be-12': _iso('Be-12', 'beryllium-12', 4, 12, 12.0269221, False), + 'Be-13': _iso('Be-13', 'beryllium-13', 4, 13, 13.036135, False), + 'Be-14': _iso('Be-14', 'beryllium-14', 4, 14, 14.04289, False), + 'Be-15': _iso('Be-15', 'beryllium-15', 4, 15, 15.05342, False), + 'Be-16': _iso('Be-16', 'beryllium-16', 4, 16, 16.06167, False), + 'B-6': _iso('B-6', 'boron-6', 5, 6, 6.0508, False), + 'B-7': _iso('B-7', 'boron-7', 5, 7, 7.029712, False), + 'B-8': _iso('B-8', 'boron-8', 5, 8, 8.0246073, False), + 'B-9': _iso('B-9', 'boron-9', 5, 9, 9.01332965, False), + 'B-10': _iso('B-10', 'boron-10', 5, 10, 10.01293695, True, + isotopic_abundance=0.199), + 'B-11': _iso('B-11', 'boron-11', 5, 11, 11.00930536, True, + isotopic_abundance=0.801), + 'B-12': _iso('B-12', 'boron-12', 5, 12, 12.0143527, False), + 'B-13': _iso('B-13', 'boron-13', 5, 13, 13.0177802, False), + 'B-14': _iso('B-14', 'boron-14', 5, 14, 14.025404, False), + 'B-15': _iso('B-15', 'boron-15', 5, 15, 15.031088, False), + 
'B-16': _iso('B-16', 'boron-16', 5, 16, 16.039842, False), + 'B-17': _iso('B-17', 'boron-17', 5, 17, 17.04699, False), + 'B-18': _iso('B-18', 'boron-18', 5, 18, 18.05566, False), + 'B-19': _iso('B-19', 'boron-19', 5, 19, 19.06310, False), + 'B-20': _iso('B-20', 'boron-20', 5, 20, 20.07207, False), + 'B-21': _iso('B-21', 'boron-21', 5, 21, 21.08129, False), + 'C-8': _iso('C-8', 'carbon-8', 6, 8, 8.037643, False), + 'C-9': _iso('C-9', 'carbon-9', 6, 9, 9.0310372, False), + 'C-10': _iso('C-10', 'carbon-10', 6, 10, 10.01685331, False), + 'C-11': _iso('C-11', 'carbon-11', 6, 11, 11.0114336, False), + 'C-12': _iso('C-12', 'carbon-12', 6, 12, 12.0000000, True, + isotopic_abundance=0.9893), + 'C-13': _iso('C-13', 'carbon-13', 6, 13, 13.00335483507, True, + isotopic_abundance=0.0107), + 'C-14': _iso('C-14', 'carbon-14', 6, 14, 14.0032419884, False, + half_life=1.80825048e11), + 'C-15': _iso('C-15', 'carbon-15', 6, 15, 15.01059926, False), + 'C-16': _iso('C-16', 'carbon-16', 6, 16, 16.0147013, False), + 'C-17': _iso('C-17', 'carbon-17', 6, 17, 17.022577, False), + 'C-18': _iso('C-18', 'carbon-18', 6, 18, 18.026751, False), + 'C-19': _iso('C-19', 'carbon-19', 6, 19, 19.03480, False), + 'C-20': _iso('C-20', 'carbon-20', 6, 20, 20.04032, False), + 'C-21': _iso('C-21', 'carbon-21', 6, 21, 21.04900, False), + 'C-22': _iso('C-22', 'carbon-22', 6, 22, 22.05753, False), + 'C-23': _iso('C-23', 'carbon-23', 6, 23, 23.0689, False), + 'N-10': _iso('N-10', 'nitrogen-10', 7, 10, 10.04165, False), + 'N-11': _iso('N-11', 'nitrogen-11', 7, 11, 11.026091, False), + 'N-12': _iso('N-12', 'nitrogen-12', 7, 12, 12.0186132, False), + 'N-13': _iso('N-13', 'nitrogen-13', 7, 13, 13.00573861, False), + 'N-14': _iso('N-14', 'nitrogen-14', 7, 14, 14.00307400443, True, + isotopic_abundance=0.99636), + 'N-15': _iso('N-15', 'nitrogen-15', 7, 15, 15.00010889888, True, + isotopic_abundance=0.00364), + 'N-16': _iso('N-16', 'nitrogen-16', 7, 16, 16.0061019, False), + 'N-17': _iso('N-17', 'nitrogen-17', 7, 17, 17.008449, False), + 'N-18': _iso('N-18', 'nitrogen-18', 7, 18, 18.014078, False), + 'N-19': _iso('N-19', 'nitrogen-19', 7, 19, 19.017022, False), + 'N-20': _iso('N-20', 'nitrogen-20', 7, 20, 20.023366, False), + 'N-21': _iso('N-21', 'nitrogen-21', 7, 21, 21.02711, False), + 'N-22': _iso('N-22', 'nitrogen-22', 7, 22, 22.03439, False), + 'N-23': _iso('N-23', 'nitrogen-23', 7, 23, 23.04114, False), + 'N-24': _iso('N-24', 'nitrogen-24', 7, 24, 24.05039, False), + 'N-25': _iso('N-25', 'nitrogen-25', 7, 25, 25.06010, False), + 'O-12': _iso('O-12', 'oxygen-12', 8, 12, 12.034262, False), + 'O-13': _iso('O-13', 'oxygen-13', 8, 13, 13.024815, False), + 'O-14': _iso('O-14', 'oxygen-14', 8, 14, 14.00859636, False), + 'O-15': _iso('O-15', 'oxygen-15', 8, 15, 15.00306562, False), + 'O-16': _iso('O-16', 'oxygen-16', 8, 16, 15.99491461957, True, + isotopic_abundance=0.99757), + 'O-17': _iso('O-17', 'oxygen-17', 8, 17, 16.99913175650, True, + isotopic_abundance=0.00038), + 'O-18': _iso('O-18', 'oxygen-18', 8, 18, 17.99915961286, True, + isotopic_abundance=0.00205), + 'O-19': _iso('O-19', 'oxygen-19', 8, 19, 19.0035780, False), + 'O-20': _iso('O-20', 'oxygen-20', 8, 20, 20.00407535, False), + 'O-21': _iso('O-21', 'oxygen-21', 8, 21, 21.008655, False), + 'O-22': _iso('O-22', 'oxygen-22', 8, 22, 22.009966, False), + 'O-23': _iso('O-23', 'oxygen-23', 8, 23, 23.015696, False), + 'O-24': _iso('O-24', 'oxygen-24', 8, 24, 24.01986, False), + 'O-25': _iso('O-25', 'oxygen-25', 8, 25, 25.02936, False), + 'O-26': _iso('O-26', 'oxygen-26', 8, 26, 26.03729, 
False), + 'O-27': _iso('O-27', 'oxygen-27', 8, 27, 27.04772, False), + 'O-28': _iso('O-28', 'oxygen-28', 8, 28, 28.05591, False), + 'F-14': _iso('F-14', 'fluorine-14', 9, 14, 14.034315, False), + 'F-15': _iso('F-15', 'fluorine-15', 9, 15, 15.018043, False), + 'F-16': _iso('F-16', 'fluorine-16', 9, 16, 16.0114657, False), + 'F-17': _iso('F-17', 'fluorine-17', 9, 17, 17.00209524, False), + 'F-18': _iso('F-18', 'fluorine-18', 9, 18, 18.00093733, False, + half_life=6586.236), + 'F-19': _iso('F-19', 'fluorine-19', 9, 19, 18.99840316273, True, isotopic_abundance=1), - 'Al-28': Iso('Al-28', 'aluminium-28', 13, 28, 27.98191021, False), - 'Al-29': Iso('Al-29', 'aluminium-29', 13, 29, 28.9804565, False), - 'Al-30': Iso('Al-30', 'aluminium-30', 13, 30, 29.982960, False), - 'Al-31': Iso('Al-31', 'aluminium-31', 13, 31, 30.983945, False), - 'Al-32': Iso('Al-32', 'aluminium-32', 13, 32, 31.988085, False), - 'Al-33': Iso('Al-33', 'aluminium-33', 13, 33, 32.990909, False), - 'Al-34': Iso('Al-34', 'aluminium-34', 13, 34, 33.996705, False), - 'Al-35': Iso('Al-35', 'aluminium-35', 13, 35, 34.999764, False), - 'Al-36': Iso('Al-36', 'aluminium-36', 13, 36, 36.00639, False), - 'Al-37': Iso('Al-37', 'aluminium-37', 13, 37, 37.01053, False), - 'Al-38': Iso('Al-38', 'aluminium-38', 13, 38, 38.01740, False), - 'Al-39': Iso('Al-39', 'aluminium-39', 13, 39, 39.02254, False), - 'Al-40': Iso('Al-40', 'aluminium-40', 13, 40, 40.03003, False), - 'Al-41': Iso('Al-41', 'aluminium-41', 13, 41, 41.03638, False), - 'Al-42': Iso('Al-42', 'aluminium-42', 13, 42, 42.04384, False), - 'Al-43': Iso('Al-43', 'aluminium-43', 13, 43, 43.05147, False), - 'Si-22': Iso('Si-22', 'silicon-22', 14, 22, 22.03579, False), - 'Si-23': Iso('Si-23', 'silicon-23', 14, 23, 23.02544, False), - 'Si-24': Iso('Si-24', 'silicon-24', 14, 24, 24.011535, False), - 'Si-25': Iso('Si-25', 'silicon-25', 14, 25, 25.004109, False), - 'Si-26': Iso('Si-26', 'silicon-26', 14, 26, 25.99233384, False), - 'Si-27': Iso('Si-27', 'silicon-27', 14, 27, 26.98670481, False), - 'Si-28': Iso('Si-28', 'silicon-28', 14, 28, 27.97692653465, True, - isotopic_abundance=0.92223), - 'Si-29': Iso('Si-29', 'silicon-29', 14, 29, 28.97649466490, True, - isotopic_abundance=0.04685), - 'Si-30': Iso('Si-30', 'silicon-30', 14, 30, 29.973770136, True, - isotopic_abundance=0.03092), - 'Si-31': Iso('Si-31', 'silicon-31', 14, 31, 30.975363194, False), - 'Si-32': Iso('Si-32', 'silicon-32', 14, 32, 31.97415154, False), - 'Si-33': Iso('Si-33', 'silicon-33', 14, 33, 32.97797696, False), - 'Si-34': Iso('Si-34', 'silicon-34', 14, 34, 33.978576, False), - 'Si-35': Iso('Si-35', 'silicon-35', 14, 35, 34.984583, False), - 'Si-36': Iso('Si-36', 'silicon-36', 14, 36, 35.986695, False), - 'Si-37': Iso('Si-37', 'silicon-37', 14, 37, 36.992921, False), - 'Si-38': Iso('Si-38', 'silicon-38', 14, 38, 37.995523, False), - 'Si-39': Iso('Si-39', 'silicon-39', 14, 39, 39.002491, False), - 'Si-40': Iso('Si-40', 'silicon-40', 14, 40, 40.00583, False), - 'Si-41': Iso('Si-41', 'silicon-41', 14, 41, 41.01301, False), - 'Si-42': Iso('Si-42', 'silicon-42', 14, 42, 42.01778, False), - 'Si-43': Iso('Si-43', 'silicon-43', 14, 43, 43.02480, False), - 'Si-44': Iso('Si-44', 'silicon-44', 14, 44, 44.03061, False), - 'Si-45': Iso('Si-45', 'silicon-45', 14, 45, 45.03995, False), - 'P-24': Iso('P-24', 'phosphorus-24', 15, 24, 24.03577, False), - 'P-25': Iso('P-25', 'phosphorus-25', 15, 25, 25.02119, False), - 'P-26': Iso('P-26', 'phosphorus-26', 15, 26, 26.01178, False), - 'P-27': Iso('P-27', 'phosphorus-27', 15, 27, 26.999224, 
False), - 'P-28': Iso('P-28', 'phosphorus-28', 15, 28, 27.9923266, False), - 'P-29': Iso('P-29', 'phosphorus-29', 15, 29, 28.98180079, False), - 'P-30': Iso('P-30', 'phosphorus-30', 15, 30, 29.97831375, False), - 'P-31': Iso('P-31', 'phosphorus-31', 15, 31, 30.97376199842, True, - isotopic_abundance=1), - 'P-32': Iso('P-32', 'phosphorus-32', 15, 32, 31.973907643, False, - half_life=1232323.2), - 'P-33': Iso('P-33', 'phosphorus-33', 15, 33, 32.9717257, False), - 'P-34': Iso('P-34', 'phosphorus-34', 15, 34, 33.97364589, False), - 'P-35': Iso('P-35', 'phosphorus-35', 15, 35, 34.9733141, False), - 'P-36': Iso('P-36', 'phosphorus-36', 15, 36, 35.978260, False), - 'P-37': Iso('P-37', 'phosphorus-37', 15, 37, 36.979607, False), - 'P-38': Iso('P-38', 'phosphorus-38', 15, 38, 37.984252, False), - 'P-39': Iso('P-39', 'phosphorus-39', 15, 39, 38.986227, False), - 'P-40': Iso('P-40', 'phosphorus-40', 15, 40, 39.99133, False), - 'P-41': Iso('P-41', 'phosphorus-41', 15, 41, 40.994654, False), - 'P-42': Iso('P-42', 'phosphorus-42', 15, 42, 42.00108, False), - 'P-43': Iso('P-43', 'phosphorus-43', 15, 43, 43.00502, False), - 'P-44': Iso('P-44', 'phosphorus-44', 15, 44, 44.01121, False), - 'P-45': Iso('P-45', 'phosphorus-45', 15, 45, 45.01645, False), - 'P-46': Iso('P-46', 'phosphorus-46', 15, 46, 46.02446, False), - 'P-47': Iso('P-47', 'phosphorus-47', 15, 47, 47.03139, False), - 'S-26': Iso('S-26', 'sulfur-26', 16, 26, 26.02907, False), - 'S-27': Iso('S-27', 'sulfur-27', 16, 27, 27.01828, False), - 'S-28': Iso('S-28', 'sulfur-28', 16, 28, 28.00437, False), - 'S-29': Iso('S-29', 'sulfur-29', 16, 29, 28.996611, False), - 'S-30': Iso('S-30', 'sulfur-30', 16, 30, 29.98490703, False), - 'S-31': Iso('S-31', 'sulfur-31', 16, 31, 30.97955701, False), - 'S-32': Iso('S-32', 'sulfur-32', 16, 32, 31.9720711744, True, - isotopic_abundance=0.9499), - 'S-33': Iso('S-33', 'sulfur-33', 16, 33, 32.9714589098, True, - isotopic_abundance=0.0075), - 'S-34': Iso('S-34', 'sulfur-34', 16, 34, 33.967867004, True, - isotopic_abundance=0.0425), - 'S-35': Iso('S-35', 'sulfur-35', 16, 35, 34.969032310, False), - 'S-36': Iso('S-36', 'sulfur-36', 16, 36, 35.96708071, True, - isotopic_abundance=0.0001), - 'S-37': Iso('S-37', 'sulfur-37', 16, 37, 36.97112551, False), - 'S-38': Iso('S-38', 'sulfur-38', 16, 38, 37.9711633, False), - 'S-39': Iso('S-39', 'sulfur-39', 16, 39, 38.975134, False), - 'S-40': Iso('S-40', 'sulfur-40', 16, 40, 39.9754826, False), - 'S-41': Iso('S-41', 'sulfur-41', 16, 41, 40.9795935, False), - 'S-42': Iso('S-42', 'sulfur-42', 16, 42, 41.9810651, False), - 'S-43': Iso('S-43', 'sulfur-43', 16, 43, 42.9869076, False), - 'S-44': Iso('S-44', 'sulfur-44', 16, 44, 43.9901188, False), - 'S-45': Iso('S-45', 'sulfur-45', 16, 45, 44.99572, False), - 'S-46': Iso('S-46', 'sulfur-46', 16, 46, 46.00004, False), - 'S-47': Iso('S-47', 'sulfur-47', 16, 47, 47.00795, False), - 'S-48': Iso('S-48', 'sulfur-48', 16, 48, 48.01370, False), - 'S-49': Iso('S-49', 'sulfur-49', 16, 49, 49.02276, False), - 'Cl-28': Iso('Cl-28', 'chlorine-28', 17, 28, 28.02954, False), - 'Cl-29': Iso('Cl-29', 'chlorine-29', 17, 29, 29.01478, False), - 'Cl-30': Iso('Cl-30', 'chlorine-30', 17, 30, 30.00477, False), - 'Cl-31': Iso('Cl-31', 'chlorine-31', 17, 31, 30.992414, False), - 'Cl-32': Iso('Cl-32', 'chlorine-32', 17, 32, 31.98568464, False), - 'Cl-33': Iso('Cl-33', 'chlorine-33', 17, 33, 32.97745199, False), - 'Cl-34': Iso('Cl-34', 'chlorine-34', 17, 34, 33.973762485, False), - 'Cl-35': Iso('Cl-35', 'chlorine-35', 17, 35, 34.968852682, True, - 
isotopic_abundance=0.7576), - 'Cl-36': Iso('Cl-36', 'chlorine-36', 17, 36, 35.968306809, False), - 'Cl-37': Iso('Cl-37', 'chlorine-37', 17, 37, 36.965902602, True, - isotopic_abundance=0.2424), - 'Cl-38': Iso('Cl-38', 'chlorine-38', 17, 38, 37.96801044, False), - 'Cl-39': Iso('Cl-39', 'chlorine-39', 17, 39, 38.9680082, False), - 'Cl-40': Iso('Cl-40', 'chlorine-40', 17, 40, 39.970415, False), - 'Cl-41': Iso('Cl-41', 'chlorine-41', 17, 41, 40.970685, False), - 'Cl-42': Iso('Cl-42', 'chlorine-42', 17, 42, 41.97325, False), - 'Cl-43': Iso('Cl-43', 'chlorine-43', 17, 43, 42.97389, False), - 'Cl-44': Iso('Cl-44', 'chlorine-44', 17, 44, 43.97787, False), - 'Cl-45': Iso('Cl-45', 'chlorine-45', 17, 45, 44.98029, False), - 'Cl-46': Iso('Cl-46', 'chlorine-46', 17, 46, 45.98517, False), - 'Cl-47': Iso('Cl-47', 'chlorine-47', 17, 47, 46.98916, False), - 'Cl-48': Iso('Cl-48', 'chlorine-48', 17, 48, 47.99564, False), - 'Cl-49': Iso('Cl-49', 'chlorine-49', 17, 49, 49.00123, False), - 'Cl-50': Iso('Cl-50', 'chlorine-50', 17, 50, 50.00905, False), - 'Cl-51': Iso('Cl-51', 'chlorine-51', 17, 51, 51.01554, False), - 'Ar-30': Iso('Ar-30', 'argon-30', 18, 30, 30.02307, False), - 'Ar-31': Iso('Ar-31', 'argon-31', 18, 31, 31.01212, False), - 'Ar-32': Iso('Ar-32', 'argon-32', 18, 32, 31.9976378, False), - 'Ar-33': Iso('Ar-33', 'argon-33', 18, 33, 32.98992555, False), - 'Ar-34': Iso('Ar-34', 'argon-34', 18, 34, 33.980270090, False), - 'Ar-35': Iso('Ar-35', 'argon-35', 18, 35, 34.97525759, False), - 'Ar-36': Iso('Ar-36', 'argon-36', 18, 36, 35.967545105, True, - isotopic_abundance=0.003336), - 'Ar-37': Iso('Ar-37', 'argon-37', 18, 37, 36.96677633, False), - 'Ar-38': Iso('Ar-38', 'argon-38', 18, 38, 37.96273211, True, - isotopic_abundance=0.000629), - 'Ar-39': Iso('Ar-39', 'argon-39', 18, 39, 38.9643130, False), - 'Ar-40': Iso('Ar-40', 'argon-40', 18, 40, 39.9623831237, True, - isotopic_abundance=0.996035), - 'Ar-41': Iso('Ar-41', 'argon-41', 18, 41, 40.96450057, False), - 'Ar-42': Iso('Ar-42', 'argon-42', 18, 42, 41.9630457, False), - 'Ar-43': Iso('Ar-43', 'argon-43', 18, 43, 42.9656361, False), - 'Ar-44': Iso('Ar-44', 'argon-44', 18, 44, 43.9649238, False), - 'Ar-45': Iso('Ar-45', 'argon-45', 18, 45, 44.96803973, False), - 'Ar-46': Iso('Ar-46', 'argon-46', 18, 46, 45.968083, False), - 'Ar-47': Iso('Ar-47', 'argon-47', 18, 47, 46.972935, False), - 'Ar-48': Iso('Ar-48', 'argon-48', 18, 48, 47.97591, False), - 'Ar-49': Iso('Ar-49', 'argon-49', 18, 49, 48.98190, False), - 'Ar-50': Iso('Ar-50', 'argon-50', 18, 50, 49.98613, False), - 'Ar-51': Iso('Ar-51', 'argon-51', 18, 51, 50.99370, False), - 'Ar-52': Iso('Ar-52', 'argon-52', 18, 52, 51.99896, False), - 'Ar-53': Iso('Ar-53', 'argon-53', 18, 53, 53.00729, False), - 'K-32': Iso('K-32', 'potassium-32', 19, 32, 32.02265, False), - 'K-33': Iso('K-33', 'potassium-33', 19, 33, 33.00756, False), - 'K-34': Iso('K-34', 'potassium-34', 19, 34, 33.99869, False), - 'K-35': Iso('K-35', 'potassium-35', 19, 35, 34.98800541, False), - 'K-36': Iso('K-36', 'potassium-36', 19, 36, 35.98130201, False), - 'K-37': Iso('K-37', 'potassium-37', 19, 37, 36.97337589, False), - 'K-38': Iso('K-38', 'potassium-38', 19, 38, 37.96908112, False), - 'K-39': Iso('K-39', 'potassium-39', 19, 39, 38.9637064864, True, - isotopic_abundance=0.932581), - 'K-40': Iso('K-40', 'potassium-40', 19, 40, 39.963998166, False, - isotopic_abundance=0.000117), - 'K-41': Iso('K-41', 'potassium-41', 19, 41, 40.9618252579, True, - isotopic_abundance=0.067302), - 'K-42': Iso('K-42', 'potassium-42', 19, 42, 41.96240231, 
False), - 'K-43': Iso('K-43', 'potassium-43', 19, 43, 42.96073470, False), - 'K-44': Iso('K-44', 'potassium-44', 19, 44, 43.96158699, False), - 'K-45': Iso('K-45', 'potassium-45', 19, 45, 44.96069149, False), - 'K-46': Iso('K-46', 'potassium-46', 19, 46, 45.96198159, False), - 'K-47': Iso('K-47', 'potassium-47', 19, 47, 46.9616616, False), - 'K-48': Iso('K-48', 'potassium-48', 19, 48, 47.96534119, False), - 'K-49': Iso('K-49', 'potassium-49', 19, 49, 48.96821075, False), - 'K-50': Iso('K-50', 'potassium-50', 19, 50, 49.9723800, False), - 'K-51': Iso('K-51', 'potassium-51', 19, 51, 50.975828, False), - 'K-52': Iso('K-52', 'potassium-52', 19, 52, 51.98224, False), - 'K-53': Iso('K-53', 'potassium-53', 19, 53, 52.98746, False), - 'K-54': Iso('K-54', 'potassium-54', 19, 54, 53.99463, False), - 'K-55': Iso('K-55', 'potassium-55', 19, 55, 55.00076, False), - 'K-56': Iso('K-56', 'potassium-56', 19, 56, 56.00851, False), - 'Ca-34': Iso('Ca-34', 'calcium-34', 20, 34, 34.01487, False), - 'Ca-35': Iso('Ca-35', 'calcium-35', 20, 35, 35.00514, False), - 'Ca-36': Iso('Ca-36', 'calcium-36', 20, 36, 35.993074, False), - 'Ca-37': Iso('Ca-37', 'calcium-37', 20, 37, 36.98589785, False), - 'Ca-38': Iso('Ca-38', 'calcium-38', 20, 38, 37.97631922, False), - 'Ca-39': Iso('Ca-39', 'calcium-39', 20, 39, 38.97071081, False), - 'Ca-40': Iso('Ca-40', 'calcium-40', 20, 40, 39.962590863, True, - isotopic_abundance=0.96941), - 'Ca-41': Iso('Ca-41', 'calcium-41', 20, 41, 40.96227792, False), - 'Ca-42': Iso('Ca-42', 'calcium-42', 20, 42, 41.95861783, True, - isotopic_abundance=0.00647), - 'Ca-43': Iso('Ca-43', 'calcium-43', 20, 43, 42.95876644, True, - isotopic_abundance=0.00135), - 'Ca-44': Iso('Ca-44', 'calcium-44', 20, 44, 43.95548156, True, - isotopic_abundance=0.02086), - 'Ca-45': Iso('Ca-45', 'calcium-45', 20, 45, 44.95618635, False), - 'Ca-46': Iso('Ca-46', 'calcium-46', 20, 46, 45.9536890, True, - isotopic_abundance=0.00004), - 'Ca-47': Iso('Ca-47', 'calcium-47', 20, 47, 46.9545424, False), - 'Ca-48': Iso('Ca-48', 'calcium-48', 20, 48, 47.95252276, False, - isotopic_abundance=0.00187), - 'Ca-49': Iso('Ca-49', 'calcium-49', 20, 49, 48.95566274, False), - 'Ca-50': Iso('Ca-50', 'calcium-50', 20, 50, 49.9574992, False), - 'Ca-51': Iso('Ca-51', 'calcium-51', 20, 51, 50.960989, False), - 'Ca-52': Iso('Ca-52', 'calcium-52', 20, 52, 51.963217, False), - 'Ca-53': Iso('Ca-53', 'calcium-53', 20, 53, 52.96945, False), - 'Ca-54': Iso('Ca-54', 'calcium-54', 20, 54, 53.97340, False), - 'Ca-55': Iso('Ca-55', 'calcium-55', 20, 55, 54.98030, False), - 'Ca-56': Iso('Ca-56', 'calcium-56', 20, 56, 55.98508, False), - 'Ca-57': Iso('Ca-57', 'calcium-57', 20, 57, 56.99262, False), - 'Ca-58': Iso('Ca-58', 'calcium-58', 20, 58, 57.99794, False), - 'Sc-36': Iso('Sc-36', 'scandium-36', 21, 36, 36.01648, False), - 'Sc-37': Iso('Sc-37', 'scandium-37', 21, 37, 37.00374, False), - 'Sc-38': Iso('Sc-38', 'scandium-38', 21, 38, 37.99512, False), - 'Sc-39': Iso('Sc-39', 'scandium-39', 21, 39, 38.984785, False), - 'Sc-40': Iso('Sc-40', 'scandium-40', 21, 40, 39.9779673, False), - 'Sc-41': Iso('Sc-41', 'scandium-41', 21, 41, 40.969251105, False), - 'Sc-42': Iso('Sc-42', 'scandium-42', 21, 42, 41.96551653, False), - 'Sc-43': Iso('Sc-43', 'scandium-43', 21, 43, 42.9611505, False), - 'Sc-44': Iso('Sc-44', 'scandium-44', 21, 44, 43.9594029, False), - 'Sc-45': Iso('Sc-45', 'scandium-45', 21, 45, 44.95590828, True, - isotopic_abundance=1), - 'Sc-46': Iso('Sc-46', 'scandium-46', 21, 46, 45.95516826, False, - half_life=7242998.4), - 'Sc-47': Iso('Sc-47', 
'scandium-47', 21, 47, 46.9524037, False), - 'Sc-48': Iso('Sc-48', 'scandium-48', 21, 48, 47.9522236, False), - 'Sc-49': Iso('Sc-49', 'scandium-49', 21, 49, 48.9500146, False), - 'Sc-50': Iso('Sc-50', 'scandium-50', 21, 50, 49.952176, False), - 'Sc-51': Iso('Sc-51', 'scandium-51', 21, 51, 50.953592, False), - 'Sc-52': Iso('Sc-52', 'scandium-52', 21, 52, 51.95688, False), - 'Sc-53': Iso('Sc-53', 'scandium-53', 21, 53, 52.95909, False), - 'Sc-54': Iso('Sc-54', 'scandium-54', 21, 54, 53.96393, False), - 'Sc-55': Iso('Sc-55', 'scandium-55', 21, 55, 54.96782, False), - 'Sc-56': Iso('Sc-56', 'scandium-56', 21, 56, 55.97345, False), - 'Sc-57': Iso('Sc-57', 'scandium-57', 21, 57, 56.97777, False), - 'Sc-58': Iso('Sc-58', 'scandium-58', 21, 58, 57.98403, False), - 'Sc-59': Iso('Sc-59', 'scandium-59', 21, 59, 58.98894, False), - 'Sc-60': Iso('Sc-60', 'scandium-60', 21, 60, 59.99565, False), - 'Sc-61': Iso('Sc-61', 'scandium-61', 21, 61, 61.00100, False), - 'Ti-38': Iso('Ti-38', 'titanium-38', 22, 38, 38.01145, False), - 'Ti-39': Iso('Ti-39', 'titanium-39', 22, 39, 39.00236, False), - 'Ti-40': Iso('Ti-40', 'titanium-40', 22, 40, 39.99050, False), - 'Ti-41': Iso('Ti-41', 'titanium-41', 22, 41, 40.983148, False), - 'Ti-42': Iso('Ti-42', 'titanium-42', 22, 42, 41.97304903, False), - 'Ti-43': Iso('Ti-43', 'titanium-43', 22, 43, 42.9685225, False), - 'Ti-44': Iso('Ti-44', 'titanium-44', 22, 44, 43.95968995, False, - half_life=1914105600.0), - 'Ti-45': Iso('Ti-45', 'titanium-45', 22, 45, 44.95812198, False), - 'Ti-46': Iso('Ti-46', 'titanium-46', 22, 46, 45.95262772, True, - isotopic_abundance=0.0825), - 'Ti-47': Iso('Ti-47', 'titanium-47', 22, 47, 46.95175879, True, - isotopic_abundance=0.0744), - 'Ti-48': Iso('Ti-48', 'titanium-48', 22, 48, 47.94794198, True, - isotopic_abundance=0.7372), - 'Ti-49': Iso('Ti-49', 'titanium-49', 22, 49, 48.94786568, True, - isotopic_abundance=0.0541), - 'Ti-50': Iso('Ti-50', 'titanium-50', 22, 50, 49.94478689, True, - isotopic_abundance=0.0518), - 'Ti-51': Iso('Ti-51', 'titanium-51', 22, 51, 50.94661065, False), - 'Ti-52': Iso('Ti-52', 'titanium-52', 22, 52, 51.9468930, False), - 'Ti-53': Iso('Ti-53', 'titanium-53', 22, 53, 52.94973, False), - 'Ti-54': Iso('Ti-54', 'titanium-54', 22, 54, 53.95105, False), - 'Ti-55': Iso('Ti-55', 'titanium-55', 22, 55, 54.95527, False), - 'Ti-56': Iso('Ti-56', 'titanium-56', 22, 56, 55.95791, False), - 'Ti-57': Iso('Ti-57', 'titanium-57', 22, 57, 56.96364, False), - 'Ti-58': Iso('Ti-58', 'titanium-58', 22, 58, 57.96660, False), - 'Ti-59': Iso('Ti-59', 'titanium-59', 22, 59, 58.97247, False), - 'Ti-60': Iso('Ti-60', 'titanium-60', 22, 60, 59.97603, False), - 'Ti-61': Iso('Ti-61', 'titanium-61', 22, 61, 60.98245, False), - 'Ti-62': Iso('Ti-62', 'titanium-62', 22, 62, 61.98651, False), - 'Ti-63': Iso('Ti-63', 'titanium-63', 22, 63, 62.99375, False), - 'V-40': Iso('V-40', 'vanadium-40', 23, 40, 40.01276, False), - 'V-41': Iso('V-41', 'vanadium-41', 23, 41, 41.00021, False), - 'V-42': Iso('V-42', 'vanadium-42', 23, 42, 41.99182, False), - 'V-43': Iso('V-43', 'vanadium-43', 23, 43, 42.980766, False), - 'V-44': Iso('V-44', 'vanadium-44', 23, 44, 43.97411, False), - 'V-45': Iso('V-45', 'vanadium-45', 23, 45, 44.9657748, False), - 'V-46': Iso('V-46', 'vanadium-46', 23, 46, 45.96019878, False), - 'V-47': Iso('V-47', 'vanadium-47', 23, 47, 46.95490491, False), - 'V-48': Iso('V-48', 'vanadium-48', 23, 48, 47.9522522, False), - 'V-49': Iso('V-49', 'vanadium-49', 23, 49, 48.94851180, False), - 'V-50': Iso('V-50', 'vanadium-50', 23, 50, 49.94715601, 
False, - isotopic_abundance=0.00250), - 'V-51': Iso('V-51', 'vanadium-51', 23, 51, 50.94395704, True, - isotopic_abundance=0.99750), - 'V-52': Iso('V-52', 'vanadium-52', 23, 52, 51.94477301, False), - 'V-53': Iso('V-53', 'vanadium-53', 23, 53, 52.9443367, False), - 'V-54': Iso('V-54', 'vanadium-54', 23, 54, 53.946439, False), - 'V-55': Iso('V-55', 'vanadium-55', 23, 55, 54.94724, False), - 'V-56': Iso('V-56', 'vanadium-56', 23, 56, 55.95048, False), - 'V-57': Iso('V-57', 'vanadium-57', 23, 57, 56.95252, False), - 'V-58': Iso('V-58', 'vanadium-58', 23, 58, 57.95672, False), - 'V-59': Iso('V-59', 'vanadium-59', 23, 59, 58.95939, False), - 'V-60': Iso('V-60', 'vanadium-60', 23, 60, 59.96431, False), - 'V-61': Iso('V-61', 'vanadium-61', 23, 61, 60.96725, False), - 'V-62': Iso('V-62', 'vanadium-62', 23, 62, 61.97265, False), - 'V-63': Iso('V-63', 'vanadium-63', 23, 63, 62.97639, False), - 'V-64': Iso('V-64', 'vanadium-64', 23, 64, 63.98264, False), - 'V-65': Iso('V-65', 'vanadium-65', 23, 65, 64.98750, False), - 'V-66': Iso('V-66', 'vanadium-66', 23, 66, 65.99398, False), - 'Cr-42': Iso('Cr-42', 'chromium-42', 24, 42, 42.00670, False), - 'Cr-43': Iso('Cr-43', 'chromium-43', 24, 43, 42.99753, False), - 'Cr-44': Iso('Cr-44', 'chromium-44', 24, 44, 43.98536, False), - 'Cr-45': Iso('Cr-45', 'chromium-45', 24, 45, 44.979050, False), - 'Cr-46': Iso('Cr-46', 'chromium-46', 24, 46, 45.968359, False), - 'Cr-47': Iso('Cr-47', 'chromium-47', 24, 47, 46.9628974, False), - 'Cr-48': Iso('Cr-48', 'chromium-48', 24, 48, 47.9540291, False), - 'Cr-49': Iso('Cr-49', 'chromium-49', 24, 49, 48.9513333, False), - 'Cr-50': Iso('Cr-50', 'chromium-50', 24, 50, 49.94604183, True, - isotopic_abundance=0.04345), - 'Cr-51': Iso('Cr-51', 'chromium-51', 24, 51, 50.94476502, False, - half_life=2393366.4), - 'Cr-52': Iso('Cr-52', 'chromium-52', 24, 52, 51.94050623, True, - isotopic_abundance=0.83789), - 'Cr-53': Iso('Cr-53', 'chromium-53', 24, 53, 52.94064815, True, - isotopic_abundance=0.09501), - 'Cr-54': Iso('Cr-54', 'chromium-54', 24, 54, 53.93887916, True, - isotopic_abundance=0.02365), - 'Cr-55': Iso('Cr-55', 'chromium-55', 24, 55, 54.94083843, False), - 'Cr-56': Iso('Cr-56', 'chromium-56', 24, 56, 55.9406531, False), - 'Cr-57': Iso('Cr-57', 'chromium-57', 24, 57, 56.9436130, False), - 'Cr-58': Iso('Cr-58', 'chromium-58', 24, 58, 57.94435, False), - 'Cr-59': Iso('Cr-59', 'chromium-59', 24, 59, 58.94859, False), - 'Cr-60': Iso('Cr-60', 'chromium-60', 24, 60, 59.95008, False), - 'Cr-61': Iso('Cr-61', 'chromium-61', 24, 61, 60.95442, False), - 'Cr-62': Iso('Cr-62', 'chromium-62', 24, 62, 61.95610, False), - 'Cr-63': Iso('Cr-63', 'chromium-63', 24, 63, 62.96165, False), - 'Cr-64': Iso('Cr-64', 'chromium-64', 24, 64, 63.96408, False), - 'Cr-65': Iso('Cr-65', 'chromium-65', 24, 65, 64.96996, False), - 'Cr-66': Iso('Cr-66', 'chromium-66', 24, 66, 65.97366, False), - 'Cr-67': Iso('Cr-67', 'chromium-67', 24, 67, 66.98016, False), - 'Cr-68': Iso('Cr-68', 'chromium-68', 24, 68, 67.98403, False), - 'Mn-44': Iso('Mn-44', 'manganese-44', 25, 44, 44.00715, False), - 'Mn-45': Iso('Mn-45', 'manganese-45', 25, 45, 44.99449, False), - 'Mn-46': Iso('Mn-46', 'manganese-46', 25, 46, 45.98609, False), - 'Mn-47': Iso('Mn-47', 'manganese-47', 25, 47, 46.975775, False), - 'Mn-48': Iso('Mn-48', 'manganese-48', 25, 48, 47.96852, False), - 'Mn-49': Iso('Mn-49', 'manganese-49', 25, 49, 48.959595, False), - 'Mn-50': Iso('Mn-50', 'manganese-50', 25, 50, 49.95423778, False), - 'Mn-51': Iso('Mn-51', 'manganese-51', 25, 51, 50.94820847, False), - 'Mn-52': 
Iso('Mn-52', 'manganese-52', 25, 52, 51.9455639, False),
-    'Mn-53': Iso('Mn-53', 'manganese-53', 25, 53, 52.94128889, False),
-    'Mn-54': Iso('Mn-54', 'manganese-54', 25, 54, 53.9403576, False,
-                 half_life=26959219.200000003),
-    'Mn-55': Iso('Mn-55', 'manganese-55', 25, 55, 54.93804391, True,
-                 isotopic_abundance=1),
-    'Mn-56': Iso('Mn-56', 'manganese-56', 25, 56, 55.93890369, False),
-    'Mn-57': Iso('Mn-57', 'manganese-57', 25, 57, 56.9382861, False),
-    'Mn-58': Iso('Mn-58', 'manganese-58', 25, 58, 57.9400666, False),
-    'Mn-59': Iso('Mn-59', 'manganese-59', 25, 59, 58.9403911, False),
-    'Mn-60': Iso('Mn-60', 'manganese-60', 25, 60, 59.9431366, False),
-    'Mn-61': Iso('Mn-61', 'manganese-61', 25, 61, 60.9444525, False),
-    'Mn-62': Iso('Mn-62', 'manganese-62', 25, 62, 61.94795, False),
-    'Mn-63': Iso('Mn-63', 'manganese-63', 25, 63, 62.9496647, False),
-    'Mn-64': Iso('Mn-64', 'manganese-64', 25, 64, 63.9538494, False),
-    'Mn-65': Iso('Mn-65', 'manganese-65', 25, 65, 64.9560198, False),
-    'Mn-66': Iso('Mn-66', 'manganese-66', 25, 66, 65.960547, False),
-    'Mn-67': Iso('Mn-67', 'manganese-67', 25, 67, 66.96424, False),
-    'Mn-68': Iso('Mn-68', 'manganese-68', 25, 68, 67.96962, False),
-    'Mn-69': Iso('Mn-69', 'manganese-69', 25, 69, 68.97366, False),
-    'Mn-70': Iso('Mn-70', 'manganese-70', 25, 70, 69.97937, False),
-    'Mn-71': Iso('Mn-71', 'manganese-71', 25, 71, 70.98368, False),
-    'Fe-45': Iso('Fe-45', 'iron-45', 26, 45, 45.01442, False),
-    'Fe-46': Iso('Fe-46', 'iron-46', 26, 46, 46.00063, False),
-    'Fe-47': Iso('Fe-47', 'iron-47', 26, 47, 46.99185, False),
-    'Fe-48': Iso('Fe-48', 'iron-48', 26, 48, 47.98023, False),
-    'Fe-49': Iso('Fe-49', 'iron-49', 26, 49, 48.973429, False),
-    'Fe-50': Iso('Fe-50', 'iron-50', 26, 50, 49.962975, False),
-    'Fe-51': Iso('Fe-51', 'iron-51', 26, 51, 50.9568410, False),
-    'Fe-52': Iso('Fe-52', 'iron-52', 26, 52, 51.9481131, False),
-    'Fe-53': Iso('Fe-53', 'iron-53', 26, 53, 52.9453064, False),
-    'Fe-54': Iso('Fe-54', 'iron-54', 26, 54, 53.93960899, True,
-                 isotopic_abundance=0.05845),
-    'Fe-55': Iso('Fe-55', 'iron-55', 26, 55, 54.93829199, False),
-    'Fe-56': Iso('Fe-56', 'iron-56', 26, 56, 55.93493633, True,
-                 isotopic_abundance=0.91754),
-    'Fe-57': Iso('Fe-57', 'iron-57', 26, 57, 56.93539284, True,
-                 isotopic_abundance=0.02119),
-    'Fe-58': Iso('Fe-58', 'iron-58', 26, 58, 57.93327443, True,
-                 isotopic_abundance=0.00282),
-    'Fe-59': Iso('Fe-59', 'iron-59', 26, 59, 58.93487434, False,
-                 half_life=3845439.36),
-    'Fe-60': Iso('Fe-60', 'iron-60', 26, 60, 59.9340711, False),
-    'Fe-61': Iso('Fe-61', 'iron-61', 26, 61, 60.9367462, False),
-    'Fe-62': Iso('Fe-62', 'iron-62', 26, 62, 61.9367918, False),
-    'Fe-63': Iso('Fe-63', 'iron-63', 26, 63, 62.9402727, False),
-    'Fe-64': Iso('Fe-64', 'iron-64', 26, 64, 63.9409878, False),
-    'Fe-65': Iso('Fe-65', 'iron-65', 26, 65, 64.9450115, False),
-    'Fe-66': Iso('Fe-66', 'iron-66', 26, 66, 65.9462500, False),
-    'Fe-67': Iso('Fe-67', 'iron-67', 26, 67, 66.95054, False),
-    'Fe-68': Iso('Fe-68', 'iron-68', 26, 68, 67.95295, False),
-    'Fe-69': Iso('Fe-69', 'iron-69', 26, 69, 68.95807, False),
-    'Fe-70': Iso('Fe-70', 'iron-70', 26, 70, 69.96102, False),
-    'Fe-71': Iso('Fe-71', 'iron-71', 26, 71, 70.96672, False),
-    'Fe-72': Iso('Fe-72', 'iron-72', 26, 72, 71.96983, False),
-    'Fe-73': Iso('Fe-73', 'iron-73', 26, 73, 72.97572, False),
-    'Fe-74': Iso('Fe-74', 'iron-74', 26, 74, 73.97935, False),
-    'Co-47': Iso('Co-47', 'cobalt-47', 27, 47, 47.01057, False),
-    'Co-48': Iso('Co-48', 'cobalt-48', 27, 48, 48.00093, False),
-    'Co-49': Iso('Co-49', 'cobalt-49', 27, 49, 48.98891, False),
-    'Co-50': Iso('Co-50', 'cobalt-50', 27, 50, 49.98091, False),
-    'Co-51': Iso('Co-51', 'cobalt-51', 27, 51, 50.970647, False),
-    'Co-52': Iso('Co-52', 'cobalt-52', 27, 52, 51.96351, False),
-    'Co-53': Iso('Co-53', 'cobalt-53', 27, 53, 52.9542041, False),
-    'Co-54': Iso('Co-54', 'cobalt-54', 27, 54, 53.94845987, False),
-    'Co-55': Iso('Co-55', 'cobalt-55', 27, 55, 54.94199720, False),
-    'Co-56': Iso('Co-56', 'cobalt-56', 27, 56, 55.93983880, False),
-    'Co-57': Iso('Co-57', 'cobalt-57', 27, 57, 56.93629057, False,
-                 half_life=23510304.0),
-    'Co-58': Iso('Co-58', 'cobalt-58', 27, 58, 57.9357521, False,
-                 half_life=6114528.0),
-    'Co-59': Iso('Co-59', 'cobalt-59', 27, 59, 58.93319429, True,
-                 isotopic_abundance=1),
-    'Co-60': Iso('Co-60', 'cobalt-60', 27, 60, 59.93381630, False,
-                 half_life=166337280.0),
-    'Co-61': Iso('Co-61', 'cobalt-61', 27, 61, 60.93247662, False),
-    'Co-62': Iso('Co-62', 'cobalt-62', 27, 62, 61.934059, False),
-    'Co-63': Iso('Co-63', 'cobalt-63', 27, 63, 62.933600, False),
-    'Co-64': Iso('Co-64', 'cobalt-64', 27, 64, 63.935811, False),
-    'Co-65': Iso('Co-65', 'cobalt-65', 27, 65, 64.9364621, False),
-    'Co-66': Iso('Co-66', 'cobalt-66', 27, 66, 65.939443, False),
-    'Co-67': Iso('Co-67', 'cobalt-67', 27, 67, 66.9406096, False),
-    'Co-68': Iso('Co-68', 'cobalt-68', 27, 68, 67.94426, False),
-    'Co-69': Iso('Co-69', 'cobalt-69', 27, 69, 68.94614, False),
-    'Co-70': Iso('Co-70', 'cobalt-70', 27, 70, 69.94963, False),
-    'Co-71': Iso('Co-71', 'cobalt-71', 27, 71, 70.95237, False),
-    'Co-72': Iso('Co-72', 'cobalt-72', 27, 72, 71.95729, False),
-    'Co-73': Iso('Co-73', 'cobalt-73', 27, 73, 72.96039, False),
-    'Co-74': Iso('Co-74', 'cobalt-74', 27, 74, 73.96515, False),
-    'Co-75': Iso('Co-75', 'cobalt-75', 27, 75, 74.96876, False),
-    'Co-76': Iso('Co-76', 'cobalt-76', 27, 76, 75.97413, False),
-    'Ni-48': Iso('Ni-48', 'nickel-48', 28, 48, 48.01769, False),
-    'Ni-49': Iso('Ni-49', 'nickel-49', 28, 49, 49.00770, False),
-    'Ni-50': Iso('Ni-50', 'nickel-50', 28, 50, 49.99474, False),
-    'Ni-51': Iso('Ni-51', 'nickel-51', 28, 51, 50.98611, False),
-    'Ni-52': Iso('Ni-52', 'nickel-52', 28, 52, 51.97480, False),
-    'Ni-53': Iso('Ni-53', 'nickel-53', 28, 53, 52.968190, False),
-    'Ni-54': Iso('Ni-54', 'nickel-54', 28, 54, 53.957892, False),
-    'Ni-55': Iso('Ni-55', 'nickel-55', 28, 55, 54.95133063, False),
-    'Ni-56': Iso('Ni-56', 'nickel-56', 28, 56, 55.94212855, False),
-    'Ni-57': Iso('Ni-57', 'nickel-57', 28, 57, 56.93979218, False),
-    'Ni-58': Iso('Ni-58', 'nickel-58', 28, 58, 57.93534241, True,
-                 isotopic_abundance=0.68077),
-    'Ni-59': Iso('Ni-59', 'nickel-59', 28, 59, 58.93434620, False),
-    'Ni-60': Iso('Ni-60', 'nickel-60', 28, 60, 59.93078588, True,
-                 isotopic_abundance=0.26223),
-    'Ni-61': Iso('Ni-61', 'nickel-61', 28, 61, 60.93105557, True,
-                 isotopic_abundance=0.011399),
-    'Ni-62': Iso('Ni-62', 'nickel-62', 28, 62, 61.92834537, True,
-                 isotopic_abundance=0.036346),
-    'Ni-63': Iso('Ni-63', 'nickel-63', 28, 63, 62.92966963, False),
-    'Ni-64': Iso('Ni-64', 'nickel-64', 28, 64, 63.92796682, True,
-                 isotopic_abundance=0.009255),
-    'Ni-65': Iso('Ni-65', 'nickel-65', 28, 65, 64.93008517, False),
-    'Ni-66': Iso('Ni-66', 'nickel-66', 28, 66, 65.9291393, False),
-    'Ni-67': Iso('Ni-67', 'nickel-67', 28, 67, 66.9315694, False),
-    'Ni-68': Iso('Ni-68', 'nickel-68', 28, 68, 67.9318688, False),
-    'Ni-69': Iso('Ni-69', 'nickel-69', 28, 69, 68.9356103, False),
-    'Ni-70': Iso('Ni-70', 'nickel-70', 28, 70, 69.9364313, False),
-    'Ni-71': Iso('Ni-71', 'nickel-71', 28, 71, 70.9405190, False),
-    'Ni-72': Iso('Ni-72', 'nickel-72', 28, 72, 71.9417859, False),
-    'Ni-73': Iso('Ni-73', 'nickel-73', 28, 73, 72.9462067, False),
-    'Ni-74': Iso('Ni-74', 'nickel-74', 28, 74, 73.94798, False),
-    'Ni-75': Iso('Ni-75', 'nickel-75', 28, 75, 74.95250, False),
-    'Ni-76': Iso('Ni-76', 'nickel-76', 28, 76, 75.95533, False),
-    'Ni-77': Iso('Ni-77', 'nickel-77', 28, 77, 76.96055, False),
-    'Ni-78': Iso('Ni-78', 'nickel-78', 28, 78, 77.96336, False),
-    'Ni-79': Iso('Ni-79', 'nickel-79', 28, 79, 78.97025, False),
-    'Cu-52': Iso('Cu-52', 'copper-52', 29, 52, 51.99671, False),
-    'Cu-53': Iso('Cu-53', 'copper-53', 29, 53, 52.98459, False),
-    'Cu-54': Iso('Cu-54', 'copper-54', 29, 54, 53.97666, False),
-    'Cu-55': Iso('Cu-55', 'copper-55', 29, 55, 54.96604, False),
-    'Cu-56': Iso('Cu-56', 'copper-56', 29, 56, 55.95895, False),
-    'Cu-57': Iso('Cu-57', 'copper-57', 29, 57, 56.94921250, False),
-    'Cu-58': Iso('Cu-58', 'copper-58', 29, 58, 57.94453305, False),
-    'Cu-59': Iso('Cu-59', 'copper-59', 29, 59, 58.93949748, False),
-    'Cu-60': Iso('Cu-60', 'copper-60', 29, 60, 59.9373645, False),
-    'Cu-61': Iso('Cu-61', 'copper-61', 29, 61, 60.9334576, False),
-    'Cu-62': Iso('Cu-62', 'copper-62', 29, 62, 61.93259541, False),
-    'Cu-63': Iso('Cu-63', 'copper-63', 29, 63, 62.92959772, True,
-                 isotopic_abundance=0.6915),
-    'Cu-64': Iso('Cu-64', 'copper-64', 29, 64, 63.92976434, False),
-    'Cu-65': Iso('Cu-65', 'copper-65', 29, 65, 64.92778970, True,
-                 isotopic_abundance=0.3085),
-    'Cu-66': Iso('Cu-66', 'copper-66', 29, 66, 65.92886903, False),
-    'Cu-67': Iso('Cu-67', 'copper-67', 29, 67, 66.9277303, False),
-    'Cu-68': Iso('Cu-68', 'copper-68', 29, 68, 67.9296109, False),
-    'Cu-69': Iso('Cu-69', 'copper-69', 29, 69, 68.9294293, False),
-    'Cu-70': Iso('Cu-70', 'copper-70', 29, 70, 69.9323921, False),
-    'Cu-71': Iso('Cu-71', 'copper-71', 29, 71, 70.9326768, False),
-    'Cu-72': Iso('Cu-72', 'copper-72', 29, 72, 71.9358203, False),
-    'Cu-73': Iso('Cu-73', 'copper-73', 29, 73, 72.9366744, False),
-    'Cu-74': Iso('Cu-74', 'copper-74', 29, 74, 73.9398749, False),
-    'Cu-75': Iso('Cu-75', 'copper-75', 29, 75, 74.9415226, False),
-    'Cu-76': Iso('Cu-76', 'copper-76', 29, 76, 75.9452750, False),
-    'Cu-77': Iso('Cu-77', 'copper-77', 29, 77, 76.94792, False),
-    'Cu-78': Iso('Cu-78', 'copper-78', 29, 78, 77.95223, False),
-    'Cu-79': Iso('Cu-79', 'copper-79', 29, 79, 78.95502, False),
-    'Cu-80': Iso('Cu-80', 'copper-80', 29, 80, 79.96089, False),
-    'Cu-81': Iso('Cu-81', 'copper-81', 29, 81, 80.96587, False),
-    'Cu-82': Iso('Cu-82', 'copper-82', 29, 82, 81.97244, False),
-    'Zn-54': Iso('Zn-54', 'zinc-54', 30, 54, 53.99204, False),
-    'Zn-55': Iso('Zn-55', 'zinc-55', 30, 55, 54.98398, False),
-    'Zn-56': Iso('Zn-56', 'zinc-56', 30, 56, 55.97254, False),
-    'Zn-57': Iso('Zn-57', 'zinc-57', 30, 57, 56.96506, False),
-    'Zn-58': Iso('Zn-58', 'zinc-58', 30, 58, 57.954591, False),
-    'Zn-59': Iso('Zn-59', 'zinc-59', 30, 59, 58.94931266, False),
-    'Zn-60': Iso('Zn-60', 'zinc-60', 30, 60, 59.94184210, False),
-    'Zn-61': Iso('Zn-61', 'zinc-61', 30, 61, 60.939507, False),
-    'Zn-62': Iso('Zn-62', 'zinc-62', 30, 62, 61.93433397, False),
-    'Zn-63': Iso('Zn-63', 'zinc-63', 30, 63, 62.9332115, False),
-    'Zn-64': Iso('Zn-64', 'zinc-64', 30, 64, 63.92914201, True,
-                 isotopic_abundance=0.4917),
-    'Zn-65': Iso('Zn-65', 'zinc-65', 30, 65, 64.92924077, False,
-                 half_life=21095769.599999998),
-    'Zn-66': Iso('Zn-66', 'zinc-66', 30, 66, 65.92603381, True,
-                 isotopic_abundance=0.2773),
-    'Zn-67': Iso('Zn-67', 'zinc-67', 30, 67, 66.92712775, True,
-                 isotopic_abundance=0.0404),
-    'Zn-68': Iso('Zn-68', 'zinc-68', 30, 68, 67.92484455, True,
-                 isotopic_abundance=0.1845),
-    'Zn-69': Iso('Zn-69', 'zinc-69', 30, 69, 68.9265507, False),
-    'Zn-70': Iso('Zn-70', 'zinc-70', 30, 70, 69.9253192, True,
-                 isotopic_abundance=0.0061),
-    'Zn-71': Iso('Zn-71', 'zinc-71', 30, 71, 70.9277196, False),
-    'Zn-72': Iso('Zn-72', 'zinc-72', 30, 72, 71.9268428, False),
-    'Zn-73': Iso('Zn-73', 'zinc-73', 30, 73, 72.9295826, False),
-    'Zn-74': Iso('Zn-74', 'zinc-74', 30, 74, 73.9294073, False),
-    'Zn-75': Iso('Zn-75', 'zinc-75', 30, 75, 74.9328402, False),
-    'Zn-76': Iso('Zn-76', 'zinc-76', 30, 76, 75.9331150, False),
-    'Zn-77': Iso('Zn-77', 'zinc-77', 30, 77, 76.9368872, False),
-    'Zn-78': Iso('Zn-78', 'zinc-78', 30, 78, 77.9382892, False),
-    'Zn-79': Iso('Zn-79', 'zinc-79', 30, 79, 78.9426381, False),
-    'Zn-80': Iso('Zn-80', 'zinc-80', 30, 80, 79.9445529, False),
-    'Zn-81': Iso('Zn-81', 'zinc-81', 30, 81, 80.9504026, False),
-    'Zn-82': Iso('Zn-82', 'zinc-82', 30, 82, 81.95426, False),
-    'Zn-83': Iso('Zn-83', 'zinc-83', 30, 83, 82.96056, False),
-    'Zn-84': Iso('Zn-84', 'zinc-84', 30, 84, 83.96521, False),
-    'Zn-85': Iso('Zn-85', 'zinc-85', 30, 85, 84.97226, False),
-    'Ga-56': Iso('Ga-56', 'gallium-56', 31, 56, 55.99536, False),
-    'Ga-57': Iso('Ga-57', 'gallium-57', 31, 57, 56.98320, False),
-    'Ga-58': Iso('Ga-58', 'gallium-58', 31, 58, 57.97478, False),
-    'Ga-59': Iso('Ga-59', 'gallium-59', 31, 59, 58.96353, False),
-    'Ga-60': Iso('Ga-60', 'gallium-60', 31, 60, 59.95729, False),
-    'Ga-61': Iso('Ga-61', 'gallium-61', 31, 61, 60.949399, False),
-    'Ga-62': Iso('Ga-62', 'gallium-62', 31, 62, 61.94419025, False),
-    'Ga-63': Iso('Ga-63', 'gallium-63', 31, 63, 62.9392942, False),
-    'Ga-64': Iso('Ga-64', 'gallium-64', 31, 64, 63.9368404, False),
-    'Ga-65': Iso('Ga-65', 'gallium-65', 31, 65, 64.93273459, False),
-    'Ga-66': Iso('Ga-66', 'gallium-66', 31, 66, 65.9315894, False),
-    'Ga-67': Iso('Ga-67', 'gallium-67', 31, 67, 66.9282025, False,
-                 half_life=281797.056),
-    'Ga-68': Iso('Ga-68', 'gallium-68', 31, 68, 67.9279805, False),
-    'Ga-69': Iso('Ga-69', 'gallium-69', 31, 69, 68.9255735, True,
-                 isotopic_abundance=0.60108),
-    'Ga-70': Iso('Ga-70', 'gallium-70', 31, 70, 69.9260219, False),
-    'Ga-71': Iso('Ga-71', 'gallium-71', 31, 71, 70.92470258, True,
-                 isotopic_abundance=0.39892),
-    'Ga-72': Iso('Ga-72', 'gallium-72', 31, 72, 71.92636747, False),
-    'Ga-73': Iso('Ga-73', 'gallium-73', 31, 73, 72.9251747, False),
-    'Ga-74': Iso('Ga-74', 'gallium-74', 31, 74, 73.9269457, False),
-    'Ga-75': Iso('Ga-75', 'gallium-75', 31, 75, 74.9265002, False),
-    'Ga-76': Iso('Ga-76', 'gallium-76', 31, 76, 75.9288276, False),
-    'Ga-77': Iso('Ga-77', 'gallium-77', 31, 77, 76.9291543, False),
-    'Ga-78': Iso('Ga-78', 'gallium-78', 31, 78, 77.9316088, False),
-    'Ga-79': Iso('Ga-79', 'gallium-79', 31, 79, 78.9328523, False),
-    'Ga-80': Iso('Ga-80', 'gallium-80', 31, 80, 79.9364208, False),
-    'Ga-81': Iso('Ga-81', 'gallium-81', 31, 81, 80.9381338, False),
-    'Ga-82': Iso('Ga-82', 'gallium-82', 31, 82, 81.9431765, False),
-    'Ga-83': Iso('Ga-83', 'gallium-83', 31, 83, 82.9471203, False),
-    'Ga-84': Iso('Ga-84', 'gallium-84', 31, 84, 83.95246, False),
-    'Ga-85': Iso('Ga-85', 'gallium-85', 31, 85, 84.95699, False),
-    'Ga-86': Iso('Ga-86', 'gallium-86', 31, 86, 85.96301, False),
-    'Ga-87': Iso('Ga-87', 'gallium-87', 31, 87, 86.96824, False),
-    'Ge-58': Iso('Ge-58', 'germanium-58', 32, 58, 57.99172, False),
-    'Ge-59': Iso('Ge-59', 'germanium-59', 32, 59, 58.98249, False),
-    'Ge-60': Iso('Ge-60', 'germanium-60', 32, 60, 59.97036, False),
-    'Ge-61': Iso('Ge-61', 'germanium-61', 32, 61, 60.96379, False),
-    'Ge-62': Iso('Ge-62', 'germanium-62', 32, 62, 61.95502, False),
-    'Ge-63': Iso('Ge-63', 'germanium-63', 32, 63, 62.949628, False),
-    'Ge-64': Iso('Ge-64', 'germanium-64', 32, 64, 63.9416899, False),
-    'Ge-65': Iso('Ge-65', 'germanium-65', 32, 65, 64.9393681, False),
-    'Ge-66': Iso('Ge-66', 'germanium-66', 32, 66, 65.9338621, False),
-    'Ge-67': Iso('Ge-67', 'germanium-67', 32, 67, 66.9327339, False),
-    'Ge-68': Iso('Ge-68', 'germanium-68', 32, 68, 67.9280953, False),
-    'Ge-69': Iso('Ge-69', 'germanium-69', 32, 69, 68.9279645, False),
-    'Ge-70': Iso('Ge-70', 'germanium-70', 32, 70, 69.92424875, True,
-                 isotopic_abundance=0.2057),
-    'Ge-71': Iso('Ge-71', 'germanium-71', 32, 71, 70.92495233, False),
-    'Ge-72': Iso('Ge-72', 'germanium-72', 32, 72, 71.922075826, True,
-                 isotopic_abundance=0.2745),
-    'Ge-73': Iso('Ge-73', 'germanium-73', 32, 73, 72.923458956, True,
-                 isotopic_abundance=0.0775),
-    'Ge-74': Iso('Ge-74', 'germanium-74', 32, 74, 73.921177761, True,
-                 isotopic_abundance=0.3650),
-    'Ge-75': Iso('Ge-75', 'germanium-75', 32, 75, 74.922858370, False),
-    'Ge-76': Iso('Ge-76', 'germanium-76', 32, 76, 75.921402726, False,
-                 isotopic_abundance=0.0773),
-    'Ge-77': Iso('Ge-77', 'germanium-77', 32, 77, 76.923549843, False),
-    'Ge-78': Iso('Ge-78', 'germanium-78', 32, 78, 77.9228529, False),
-    'Ge-79': Iso('Ge-79', 'germanium-79', 32, 79, 78.925360, False),
-    'Ge-80': Iso('Ge-80', 'germanium-80', 32, 80, 79.9253508, False),
-    'Ge-81': Iso('Ge-81', 'germanium-81', 32, 81, 80.9288329, False),
-    'Ge-82': Iso('Ge-82', 'germanium-82', 32, 82, 81.9297740, False),
-    'Ge-83': Iso('Ge-83', 'germanium-83', 32, 83, 82.9345391, False),
-    'Ge-84': Iso('Ge-84', 'germanium-84', 32, 84, 83.9375751, False),
-    'Ge-85': Iso('Ge-85', 'germanium-85', 32, 85, 84.9429697, False),
-    'Ge-86': Iso('Ge-86', 'germanium-86', 32, 86, 85.94658, False),
-    'Ge-87': Iso('Ge-87', 'germanium-87', 32, 87, 86.95268, False),
-    'Ge-88': Iso('Ge-88', 'germanium-88', 32, 88, 87.95691, False),
-    'Ge-89': Iso('Ge-89', 'germanium-89', 32, 89, 88.96379, False),
-    'Ge-90': Iso('Ge-90', 'germanium-90', 32, 90, 89.96863, False),
-    'As-60': Iso('As-60', 'arsenic-60', 33, 60, 59.99388, False),
-    'As-61': Iso('As-61', 'arsenic-61', 33, 61, 60.98112, False),
-    'As-62': Iso('As-62', 'arsenic-62', 33, 62, 61.97361, False),
-    'As-63': Iso('As-63', 'arsenic-63', 33, 63, 62.96390, False),
-    'As-64': Iso('As-64', 'arsenic-64', 33, 64, 63.95743, False),
-    'As-65': Iso('As-65', 'arsenic-65', 33, 65, 64.949611, False),
-    'As-66': Iso('As-66', 'arsenic-66', 33, 66, 65.9441488, False),
-    'As-67': Iso('As-67', 'arsenic-67', 33, 67, 66.93925111, False),
-    'As-68': Iso('As-68', 'arsenic-68', 33, 68, 67.9367741, False),
-    'As-69': Iso('As-69', 'arsenic-69', 33, 69, 68.932246, False),
-    'As-70': Iso('As-70', 'arsenic-70', 33, 70, 69.930926, False),
-    'As-71': Iso('As-71', 'arsenic-71', 33, 71, 70.9271138, False),
-    'As-72': Iso('As-72', 'arsenic-72', 33, 72, 71.9267523, False),
-    'As-73': Iso('As-73', 'arsenic-73', 33, 73, 72.9238291, False),
-    'As-74': Iso('As-74', 'arsenic-74', 33, 74, 73.9239286, False),
-    'As-75': Iso('As-75', 'arsenic-75', 33, 75, 74.92159457, True,
-                 isotopic_abundance=1),
-    'As-76': Iso('As-76', 'arsenic-76', 33, 76, 75.92239202, False),
-    'As-77': Iso('As-77', 'arsenic-77', 33, 77, 76.9206476, False),
-    'As-78': Iso('As-78', 'arsenic-78', 33, 78, 77.921828, False),
-    'As-79': Iso('As-79', 'arsenic-79', 33, 79, 78.9209484, False),
-    'As-80': Iso('As-80', 'arsenic-80', 33, 80, 79.9224746, False),
-    'As-81': Iso('As-81', 'arsenic-81', 33, 81, 80.9221323, False),
-    'As-82': Iso('As-82', 'arsenic-82', 33, 82, 81.9247412, False),
-    'As-83': Iso('As-83', 'arsenic-83', 33, 83, 82.9252069, False),
-    'As-84': Iso('As-84', 'arsenic-84', 33, 84, 83.9293033, False),
-    'As-85': Iso('As-85', 'arsenic-85', 33, 85, 84.9321637, False),
-    'As-86': Iso('As-86', 'arsenic-86', 33, 86, 85.9367015, False),
-    'As-87': Iso('As-87', 'arsenic-87', 33, 87, 86.9402917, False),
-    'As-88': Iso('As-88', 'arsenic-88', 33, 88, 87.94555, False),
-    'As-89': Iso('As-89', 'arsenic-89', 33, 89, 88.94976, False),
-    'As-90': Iso('As-90', 'arsenic-90', 33, 90, 89.95563, False),
-    'As-91': Iso('As-91', 'arsenic-91', 33, 91, 90.96039, False),
-    'As-92': Iso('As-92', 'arsenic-92', 33, 92, 91.96674, False),
-    'Se-64': Iso('Se-64', 'selenium-64', 34, 64, 63.97109, False),
-    'Se-65': Iso('Se-65', 'selenium-65', 34, 65, 64.96440, False),
-    'Se-66': Iso('Se-66', 'selenium-66', 34, 66, 65.95559, False),
-    'Se-67': Iso('Se-67', 'selenium-67', 34, 67, 66.949994, False),
-    'Se-68': Iso('Se-68', 'selenium-68', 34, 68, 67.94182524, False),
-    'Se-69': Iso('Se-69', 'selenium-69', 34, 69, 68.9394148, False),
-    'Se-70': Iso('Se-70', 'selenium-70', 34, 70, 69.9335155, False),
-    'Se-71': Iso('Se-71', 'selenium-71', 34, 71, 70.9322094, False),
-    'Se-72': Iso('Se-72', 'selenium-72', 34, 72, 71.9271405, False),
-    'Se-73': Iso('Se-73', 'selenium-73', 34, 73, 72.9267549, False),
-    'Se-74': Iso('Se-74', 'selenium-74', 34, 74, 73.922475934, True,
-                 isotopic_abundance=0.0089),
-    'Se-75': Iso('Se-75', 'selenium-75', 34, 75, 74.922522870, False,
-                 half_life=10351497.6),
-    'Se-76': Iso('Se-76', 'selenium-76', 34, 76, 75.919213704, True,
-                 isotopic_abundance=0.0937),
-    'Se-77': Iso('Se-77', 'selenium-77', 34, 77, 76.919914154, True,
-                 isotopic_abundance=0.0763),
-    'Se-78': Iso('Se-78', 'selenium-78', 34, 78, 77.91730928, True,
-                 isotopic_abundance=0.2377),
-    'Se-79': Iso('Se-79', 'selenium-79', 34, 79, 78.91849929, False),
-    'Se-80': Iso('Se-80', 'selenium-80', 34, 80, 79.9165218, True,
-                 isotopic_abundance=0.4961),
-    'Se-81': Iso('Se-81', 'selenium-81', 34, 81, 80.9179930, False),
-    'Se-82': Iso('Se-82', 'selenium-82', 34, 82, 81.9166995, False,
-                 isotopic_abundance=0.0873),
-    'Se-83': Iso('Se-83', 'selenium-83', 34, 83, 82.9191186, False),
-    'Se-84': Iso('Se-84', 'selenium-84', 34, 84, 83.9184668, False),
-    'Se-85': Iso('Se-85', 'selenium-85', 34, 85, 84.9222608, False),
-    'Se-86': Iso('Se-86', 'selenium-86', 34, 86, 85.9243117, False),
-    'Se-87': Iso('Se-87', 'selenium-87', 34, 87, 86.9286886, False),
-    'Se-88': Iso('Se-88', 'selenium-88', 34, 88, 87.9314175, False),
-    'Se-89': Iso('Se-89', 'selenium-89', 34, 89, 88.9366691, False),
-    'Se-90': Iso('Se-90', 'selenium-90', 34, 90, 89.94010, False),
-    'Se-91': Iso('Se-91', 'selenium-91', 34, 91, 90.94596, False),
-    'Se-92': Iso('Se-92', 'selenium-92', 34, 92, 91.94984, False),
-    'Se-93': Iso('Se-93', 'selenium-93', 34, 93, 92.95629, False),
-    'Se-94': Iso('Se-94', 'selenium-94', 34, 94, 93.96049, False),
-    'Se-95': Iso('Se-95', 'selenium-95', 34, 95, 94.96730, False),
-    'Br-67': Iso('Br-67', 'bromine-67', 35, 67, 66.96465, False),
-    'Br-68': Iso('Br-68', 'bromine-68', 35, 68, 67.95873, False),
-    'Br-69': Iso('Br-69', 'bromine-69', 35, 69, 68.950497, False),
-    'Br-70': Iso('Br-70', 'bromine-70', 35, 70, 69.944792, False),
-    'Br-71': Iso('Br-71', 'bromine-71', 35, 71, 70.9393422, False),
-    'Br-72': Iso('Br-72', 'bromine-72', 35, 72, 71.9365886, False),
-    'Br-73': Iso('Br-73', 'bromine-73', 35, 73, 72.9316715, False),
-    'Br-74': Iso('Br-74', 'bromine-74', 35, 74, 73.9299102, False),
-    'Br-75': Iso('Br-75', 'bromine-75', 35, 75, 74.9258105, False),
-    'Br-76': Iso('Br-76', 'bromine-76', 35, 76, 75.924542, False),
-    'Br-77': Iso('Br-77', 'bromine-77', 35, 77, 76.9213792, False),
-    'Br-78': Iso('Br-78', 'bromine-78', 35, 78, 77.9211459, False),
-    'Br-79': Iso('Br-79', 'bromine-79', 35, 79, 78.9183376, True,
-                 isotopic_abundance=0.5069),
-    'Br-80': Iso('Br-80', 'bromine-80', 35, 80, 79.9185298, False),
-    'Br-81': Iso('Br-81', 'bromine-81', 35, 81, 80.9162897, True,
-                 isotopic_abundance=0.4931),
-    'Br-82': Iso('Br-82', 'bromine-82', 35, 82, 81.9168032, False),
-    'Br-83': Iso('Br-83', 'bromine-83', 35, 83, 82.9151756, False),
-    'Br-84': Iso('Br-84', 'bromine-84', 35, 84, 83.916496, False),
-    'Br-85': Iso('Br-85', 'bromine-85', 35, 85, 84.9156458, False),
-    'Br-86': Iso('Br-86', 'bromine-86', 35, 86, 85.9188054, False),
-    'Br-87': Iso('Br-87', 'bromine-87', 35, 87, 86.9206740, False),
-    'Br-88': Iso('Br-88', 'bromine-88', 35, 88, 87.9240833, False),
-    'Br-89': Iso('Br-89', 'bromine-89', 35, 89, 88.9267046, False),
-    'Br-90': Iso('Br-90', 'bromine-90', 35, 90, 89.9312928, False),
-    'Br-91': Iso('Br-91', 'bromine-91', 35, 91, 90.9343986, False),
-    'Br-92': Iso('Br-92', 'bromine-92', 35, 92, 91.9396316, False),
-    'Br-93': Iso('Br-93', 'bromine-93', 35, 93, 92.94313, False),
-    'Br-94': Iso('Br-94', 'bromine-94', 35, 94, 93.94890, False),
-    'Br-95': Iso('Br-95', 'bromine-95', 35, 95, 94.95301, False),
-    'Br-96': Iso('Br-96', 'bromine-96', 35, 96, 95.95903, False),
-    'Br-97': Iso('Br-97', 'bromine-97', 35, 97, 96.96344, False),
-    'Br-98': Iso('Br-98', 'bromine-98', 35, 98, 97.96946, False),
-    'Kr-69': Iso('Kr-69', 'krypton-69', 36, 69, 68.96518, False),
-    'Kr-70': Iso('Kr-70', 'krypton-70', 36, 70, 69.95604, False),
-    'Kr-71': Iso('Kr-71', 'krypton-71', 36, 71, 70.95027, False),
-    'Kr-72': Iso('Kr-72', 'krypton-72', 36, 72, 71.9420924, False),
-    'Kr-73': Iso('Kr-73', 'krypton-73', 36, 73, 72.9392892, False),
-    'Kr-74': Iso('Kr-74', 'krypton-74', 36, 74, 73.9330840, False),
-    'Kr-75': Iso('Kr-75', 'krypton-75', 36, 75, 74.9309457, False),
-    'Kr-76': Iso('Kr-76', 'krypton-76', 36, 76, 75.9259103, False),
-    'Kr-77': Iso('Kr-77', 'krypton-77', 36, 77, 76.9246700, False),
-    'Kr-78': Iso('Kr-78', 'krypton-78', 36, 78, 77.92036494, True,
-                 isotopic_abundance=0.00355),
-    'Kr-79': Iso('Kr-79', 'krypton-79', 36, 79, 78.9200829, False),
-    'Kr-80': Iso('Kr-80', 'krypton-80', 36, 80, 79.91637808, True,
-                 isotopic_abundance=0.02286),
-    'Kr-81': Iso('Kr-81', 'krypton-81', 36, 81, 80.9165912, False),
-    'Kr-82': Iso('Kr-82', 'krypton-82', 36, 82, 81.91348273, True,
-                 isotopic_abundance=0.11593),
-    'Kr-83': Iso('Kr-83', 'krypton-83', 36, 83, 82.91412716, True,
-                 isotopic_abundance=0.11500),
-    'Kr-84': Iso('Kr-84', 'krypton-84', 36, 84, 83.9114977282, True,
-                 isotopic_abundance=0.56987),
-    'Kr-85': Iso('Kr-85', 'krypton-85', 36, 85, 84.9125273, False,
-                 half_life=340044480.0),
-    'Kr-86': Iso('Kr-86', 'krypton-86', 36, 86, 85.9106106269, True,
-                 isotopic_abundance=0.17279),
-    'Kr-87': Iso('Kr-87', 'krypton-87', 36, 87, 86.91335476, False),
-    'Kr-88': Iso('Kr-88', 'krypton-88', 36, 88, 87.9144479, False),
-    'Kr-89': Iso('Kr-89', 'krypton-89', 36, 89, 88.9178355, False),
-    'Kr-90': Iso('Kr-90', 'krypton-90', 36, 90, 89.9195279, False),
-    'Kr-91': Iso('Kr-91', 'krypton-91', 36, 91, 90.9238063, False),
-    'Kr-92': Iso('Kr-92', 'krypton-92', 36, 92, 91.9261731, False),
-    'Kr-93': Iso('Kr-93', 'krypton-93', 36, 93, 92.9311472, False),
-    'Kr-94': Iso('Kr-94', 'krypton-94', 36, 94, 93.934140, False),
-    'Kr-95': Iso('Kr-95', 'krypton-95', 36, 95, 94.939711, False),
-    'Kr-96': Iso('Kr-96', 'krypton-96', 36, 96, 95.943017, False),
-    'Kr-97': Iso('Kr-97', 'krypton-97', 36, 97, 96.94909, False),
-    'Kr-98': Iso('Kr-98', 'krypton-98', 36, 98, 97.95243, False),
-    'Kr-99': Iso('Kr-99', 'krypton-99', 36, 99, 98.95839, False),
-    'Kr-100': Iso('Kr-100', 'krypton-100', 36, 100, 99.96237, False),
-    'Kr-101': Iso('Kr-101', 'krypton-101', 36, 101, 100.96873, False),
-    'Rb-71': Iso('Rb-71', 'rubidium-71', 37, 71, 70.96532, False),
-    'Rb-72': Iso('Rb-72', 'rubidium-72', 37, 72, 71.95908, False),
-    'Rb-73': Iso('Rb-73', 'rubidium-73', 37, 73, 72.95053, False),
-    'Rb-74': Iso('Rb-74', 'rubidium-74', 37, 74, 73.9442659, False),
-    'Rb-75': Iso('Rb-75', 'rubidium-75', 37, 75, 74.9385732, False),
-    'Rb-76': Iso('Rb-76', 'rubidium-76', 37, 76, 75.9350730, False),
-    'Rb-77': Iso('Rb-77', 'rubidium-77', 37, 77, 76.9304016, False),
-    'Rb-78': Iso('Rb-78', 'rubidium-78', 37, 78, 77.9281419, False),
-    'Rb-79': Iso('Rb-79', 'rubidium-79', 37, 79, 78.9239899, False),
-    'Rb-80': Iso('Rb-80', 'rubidium-80', 37, 80, 79.9225164, False),
-    'Rb-81': Iso('Rb-81', 'rubidium-81', 37, 81, 80.9189939, False),
-    'Rb-82': Iso('Rb-82', 'rubidium-82', 37, 82, 81.9182090, False),
-    'Rb-83': Iso('Rb-83', 'rubidium-83', 37, 83, 82.9151142, False),
-    'Rb-84': Iso('Rb-84', 'rubidium-84', 37, 84, 83.9143752, False),
-    'Rb-85': Iso('Rb-85', 'rubidium-85', 37, 85, 84.9117897379, True,
-                 isotopic_abundance=0.7217),
-    'Rb-86': Iso('Rb-86', 'rubidium-86', 37, 86, 85.91116743, False),
-    'Rb-87': Iso('Rb-87', 'rubidium-87', 37, 87, 86.9091805310, False,
-                 isotopic_abundance=0.2783),
-    'Rb-88': Iso('Rb-88', 'rubidium-88', 37, 88, 87.91131559, False),
-    'Rb-89': Iso('Rb-89', 'rubidium-89', 37, 89, 88.9122783, False),
-    'Rb-90': Iso('Rb-90', 'rubidium-90', 37, 90, 89.9147985, False),
-    'Rb-91': Iso('Rb-91', 'rubidium-91', 37, 91, 90.9165372, False),
-    'Rb-92': Iso('Rb-92', 'rubidium-92', 37, 92, 91.9197284, False),
-    'Rb-93': Iso('Rb-93', 'rubidium-93', 37, 93, 92.9220393, False),
-    'Rb-94': Iso('Rb-94', 'rubidium-94', 37, 94, 93.9263948, False),
-    'Rb-95': Iso('Rb-95', 'rubidium-95', 37, 95, 94.929260, False),
-    'Rb-96': Iso('Rb-96', 'rubidium-96', 37, 96, 95.9341334, False),
-    'Rb-97': Iso('Rb-97', 'rubidium-97', 37, 97, 96.9371771, False),
-    'Rb-98': Iso('Rb-98', 'rubidium-98', 37, 98, 97.9416869, False),
-    'Rb-99': Iso('Rb-99', 'rubidium-99', 37, 99, 98.94503, False),
-    'Rb-100': Iso('Rb-100', 'rubidium-100', 37, 100, 99.95003, False),
-    'Rb-101': Iso('Rb-101', 'rubidium-101', 37, 101, 100.95404, False),
-    'Rb-102': Iso('Rb-102', 'rubidium-102', 37, 102, 101.95952, False),
-    'Rb-103': Iso('Rb-103', 'rubidium-103', 37, 103, 102.96392, False),
-    'Sr-73': Iso('Sr-73', 'strontium-73', 38, 73, 72.96570, False),
-    'Sr-74': Iso('Sr-74', 'strontium-74', 38, 74, 73.95617, False),
-    'Sr-75': Iso('Sr-75', 'strontium-75', 38, 75, 74.94995, False),
-    'Sr-76': Iso('Sr-76', 'strontium-76', 38, 76, 75.941763, False),
-    'Sr-77': Iso('Sr-77', 'strontium-77', 38, 77, 76.9379455, False),
-    'Sr-78': Iso('Sr-78', 'strontium-78', 38, 78, 77.9321800, False),
-    'Sr-79': Iso('Sr-79', 'strontium-79', 38, 79, 78.9297077, False),
-    'Sr-80': Iso('Sr-80', 'strontium-80', 38, 80, 79.9245175, False),
-    'Sr-81': Iso('Sr-81', 'strontium-81', 38, 81, 80.9232114, False),
-    'Sr-82': Iso('Sr-82', 'strontium-82', 38, 82, 81.9183999, False),
-    'Sr-83': Iso('Sr-83', 'strontium-83', 38, 83, 82.9175544, False),
-    'Sr-84': Iso('Sr-84', 'strontium-84', 38, 84, 83.9134191, True,
-                 isotopic_abundance=0.0056),
-    'Sr-85': Iso('Sr-85', 'strontium-85', 38, 85, 84.9129320, False,
-                 half_life=5603299.199999999),
-    'Sr-86': Iso('Sr-86', 'strontium-86', 38, 86, 85.9092606, True,
-                 isotopic_abundance=0.0986),
-    'Sr-87': Iso('Sr-87', 'strontium-87', 38, 87, 86.9088775, True,
-                 isotopic_abundance=0.0700),
-    'Sr-88': Iso('Sr-88', 'strontium-88', 38, 88, 87.9056125, True,
-                 isotopic_abundance=0.8258),
-    'Sr-89': Iso('Sr-89', 'strontium-89', 38, 89, 88.9074511, False),
-    'Sr-90': Iso('Sr-90', 'strontium-90', 38, 90, 89.9077300, False),
-    'Sr-91': Iso('Sr-91', 'strontium-91', 38, 91, 90.9101954, False),
-    'Sr-92': Iso('Sr-92', 'strontium-92', 38, 92, 91.9110382, False),
-    'Sr-93': Iso('Sr-93', 'strontium-93', 38, 93, 92.9140242, False),
-    'Sr-94': Iso('Sr-94', 'strontium-94', 38, 94, 93.9153556, False),
-    'Sr-95': Iso('Sr-95', 'strontium-95', 38, 95, 94.9193529, False),
-    'Sr-96': Iso('Sr-96', 'strontium-96', 38, 96, 95.9217066, False),
-    'Sr-97': Iso('Sr-97', 'strontium-97', 38, 97, 96.9263740, False),
-    'Sr-98': Iso('Sr-98', 'strontium-98', 38, 98, 97.9286888, False),
-    'Sr-99': Iso('Sr-99', 'strontium-99', 38, 99, 98.9328907, False),
-    'Sr-100': Iso('Sr-100', 'strontium-100', 38, 100, 99.935770, False),
-    'Sr-101': Iso('Sr-101', 'strontium-101', 38, 101, 100.940352, False),
-    'Sr-102': Iso('Sr-102', 'strontium-102', 38, 102, 101.943791, False),
-    'Sr-103': Iso('Sr-103', 'strontium-103', 38, 103, 102.94909, False),
-    'Sr-104': Iso('Sr-104', 'strontium-104', 38, 104, 103.95265, False),
-    'Sr-105': Iso('Sr-105', 'strontium-105', 38, 105, 104.95855, False),
-    'Sr-106': Iso('Sr-106', 'strontium-106', 38, 106, 105.96265, False),
-    'Sr-107': Iso('Sr-107', 'strontium-107', 38, 107, 106.96897, False),
-    'Y-76': Iso('Y-76', 'yttrium-76', 39, 76, 75.95856, False),
-    'Y-77': Iso('Y-77', 'yttrium-77', 39, 77, 76.949781, False),
-    'Y-78': Iso('Y-78', 'yttrium-78', 39, 78, 77.94361, False),
-    'Y-79': Iso('Y-79', 'yttrium-79', 39, 79, 78.93735, False),
-    'Y-80': Iso('Y-80', 'yttrium-80', 39, 80, 79.9343561, False),
-    'Y-81': Iso('Y-81', 'yttrium-81', 39, 81, 80.9294556, False),
-    'Y-82': Iso('Y-82', 'yttrium-82', 39, 82, 81.9269314, False),
-    'Y-83': Iso('Y-83', 'yttrium-83', 39, 83, 82.922485, False),
-    'Y-84': Iso('Y-84', 'yttrium-84', 39, 84, 83.9206721, False),
-    'Y-85': Iso('Y-85', 'yttrium-85', 39, 85, 84.916433, False),
-    'Y-86': Iso('Y-86', 'yttrium-86', 39, 86, 85.914886, False),
-    'Y-87': Iso('Y-87', 'yttrium-87', 39, 87, 86.9108761, False),
-    'Y-88': Iso('Y-88', 'yttrium-88', 39, 88, 87.9095016, False,
-                half_life=9212486.4),
-    'Y-89': Iso('Y-89', 'yttrium-89', 39, 89, 88.9058403, True,
-                isotopic_abundance=1),
-    'Y-90': Iso('Y-90', 'yttrium-90', 39, 90, 89.9071439, False),
-    'Y-91': Iso('Y-91', 'yttrium-91', 39, 91, 90.9072974, False),
-    'Y-92': Iso('Y-92', 'yttrium-92', 39, 92, 91.9089451, False),
-    'Y-93': Iso('Y-93', 'yttrium-93', 39, 93, 92.909578, False),
-    'Y-94': Iso('Y-94', 'yttrium-94', 39, 94, 93.9115906, False),
-    'Y-95': Iso('Y-95', 'yttrium-95', 39, 95, 94.9128161, False),
-    'Y-96': Iso('Y-96', 'yttrium-96', 39, 96, 95.9158968, False),
-    'Y-97': Iso('Y-97', 'yttrium-97', 39, 97, 96.9182741, False),
-    'Y-98': Iso('Y-98', 'yttrium-98', 39, 98, 97.9223821, False),
-    'Y-99': Iso('Y-99', 'yttrium-99', 39, 99, 98.9241480, False),
-    'Y-100': Iso('Y-100', 'yttrium-100', 39, 100, 99.927715, False),
-    'Y-101': Iso('Y-101', 'yttrium-101', 39, 101, 100.9301477, False),
-    'Y-102': Iso('Y-102', 'yttrium-102', 39, 102, 101.9343277, False),
-    'Y-103': Iso('Y-103', 'yttrium-103', 39, 103, 102.937243, False),
-    'Y-104': Iso('Y-104', 'yttrium-104', 39, 104, 103.94196, False),
-    'Y-105': Iso('Y-105', 'yttrium-105', 39, 105, 104.94544, False),
-    'Y-106': Iso('Y-106', 'yttrium-106', 39, 106, 105.95056, False),
-    'Y-107': Iso('Y-107', 'yttrium-107', 39, 107, 106.95452, False),
-    'Y-108': Iso('Y-108', 'yttrium-108', 39, 108, 107.95996, False),
-    'Y-109': Iso('Y-109', 'yttrium-109', 39, 109, 108.96436, False),
-    'Zr-78': Iso('Zr-78', 'zirconium-78', 40, 78, 77.95566, False),
-    'Zr-79': Iso('Zr-79', 'zirconium-79', 40, 79, 78.94948, False),
-    'Zr-80': Iso('Zr-80', 'zirconium-80', 40, 80, 79.9404, False),
-    'Zr-81': Iso('Zr-81', 'zirconium-81', 40, 81, 80.93731, False),
-    'Zr-82': Iso('Zr-82', 'zirconium-82', 40, 82, 81.93135, False),
-    'Zr-83': Iso('Zr-83', 'zirconium-83', 40, 83, 82.9292421, False),
-    'Zr-84': Iso('Zr-84', 'zirconium-84', 40, 84, 83.9233269, False),
-    'Zr-85': Iso('Zr-85', 'zirconium-85', 40, 85, 84.9214444, False),
-    'Zr-86': Iso('Zr-86', 'zirconium-86', 40, 86, 85.9162972, False),
-    'Zr-87': Iso('Zr-87', 'zirconium-87', 40, 87, 86.9148180, False),
-    'Zr-88': Iso('Zr-88', 'zirconium-88', 40, 88, 87.9102213, False),
-    'Zr-89': Iso('Zr-89', 'zirconium-89', 40, 89, 88.9088814, False),
-    'Zr-90': Iso('Zr-90', 'zirconium-90', 40, 90, 89.9046977, True,
-                 isotopic_abundance=0.5145),
-    'Zr-91': Iso('Zr-91', 'zirconium-91', 40, 91, 90.9056396, True,
-                 isotopic_abundance=0.1122),
-    'Zr-92': Iso('Zr-92', 'zirconium-92', 40, 92, 91.9050347, True,
-                 isotopic_abundance=0.1715),
-    'Zr-93': Iso('Zr-93', 'zirconium-93', 40, 93, 92.9064699, False),
-    'Zr-94': Iso('Zr-94', 'zirconium-94', 40, 94, 93.9063108, True,
-                 isotopic_abundance=0.1738),
-    'Zr-95': Iso('Zr-95', 'zirconium-95', 40, 95, 94.9080385, False),
-    'Zr-96': Iso('Zr-96', 'zirconium-96', 40, 96, 95.9082714, False,
-                 isotopic_abundance=0.0280),
-    'Zr-97': Iso('Zr-97', 'zirconium-97', 40, 97, 96.9109512, False),
-    'Zr-98': Iso('Zr-98', 'zirconium-98', 40, 98, 97.9127289, False),
-    'Zr-99': Iso('Zr-99', 'zirconium-99', 40, 99, 98.916667, False),
-    'Zr-100': Iso('Zr-100', 'zirconium-100', 40, 100, 99.9180006, False),
-    'Zr-101': Iso('Zr-101', 'zirconium-101', 40, 101, 100.9214480, False),
-    'Zr-102': Iso('Zr-102', 'zirconium-102', 40, 102, 101.9231409, False),
-    'Zr-103': Iso('Zr-103', 'zirconium-103', 40, 103, 102.927191, False),
-    'Zr-104': Iso('Zr-104', 'zirconium-104', 40, 104, 103.929436, False),
-    'Zr-105': Iso('Zr-105', 'zirconium-105', 40, 105, 104.934008, False),
-    'Zr-106': Iso('Zr-106', 'zirconium-106', 40, 106, 105.93676, False),
-    'Zr-107': Iso('Zr-107', 'zirconium-107', 40, 107, 106.94174, False),
-    'Zr-108': Iso('Zr-108', 'zirconium-108', 40, 108, 107.94487, False),
-    'Zr-109': Iso('Zr-109', 'zirconium-109', 40, 109, 108.95041, False),
-    'Zr-110': Iso('Zr-110', 'zirconium-110', 40, 110, 109.95396, False),
-    'Zr-111': Iso('Zr-111', 'zirconium-111', 40, 111, 110.95968, False),
-    'Zr-112': Iso('Zr-112', 'zirconium-112', 40, 112, 111.96370, False),
-    'Nb-81': Iso('Nb-81', 'niobium-81', 41, 81, 80.94960, False),
-    'Nb-82': Iso('Nb-82', 'niobium-82', 41, 82, 81.94396, False),
-    'Nb-83': Iso('Nb-83', 'niobium-83', 41, 83, 82.93729, False),
-    'Nb-84': Iso('Nb-84', 'niobium-84', 41, 84, 83.93449, False),
-    'Nb-85': Iso('Nb-85', 'niobium-85', 41, 85, 84.9288458, False),
-    'Nb-86': Iso('Nb-86', 'niobium-86', 41, 86, 85.9257828, False),
-    'Nb-87': Iso('Nb-87', 'niobium-87', 41, 87, 86.9206937, False),
-    'Nb-88': Iso('Nb-88', 'niobium-88', 41, 88, 87.918222, False),
-    'Nb-89': Iso('Nb-89', 'niobium-89', 41, 89, 88.913445, False),
-    'Nb-90': Iso('Nb-90', 'niobium-90', 41, 90, 89.9112584, False),
-    'Nb-91': Iso('Nb-91', 'niobium-91', 41, 91, 90.9069897, False),
-    'Nb-92': Iso('Nb-92', 'niobium-92', 41, 92, 91.9071881, False),
-    'Nb-93': Iso('Nb-93', 'niobium-93', 41, 93, 92.9063730, True,
-                 isotopic_abundance=1),
-    'Nb-94': Iso('Nb-94', 'niobium-94', 41, 94, 93.9072788, False),
-    'Nb-95': Iso('Nb-95', 'niobium-95', 41, 95, 94.90683240, False),
-    'Nb-96': Iso('Nb-96', 'niobium-96', 41, 96, 95.9080973, False),
-    'Nb-97': Iso('Nb-97', 'niobium-97', 41, 97, 96.9080959, False),
-    'Nb-98': Iso('Nb-98', 'niobium-98', 41, 98, 97.9103265, False),
-    'Nb-99': Iso('Nb-99', 'niobium-99', 41, 99, 98.911613, False),
-    'Nb-100': Iso('Nb-100', 'niobium-100', 41, 100, 99.9143276, False),
-    'Nb-101': Iso('Nb-101', 'niobium-101', 41, 101, 100.9153103, False),
-    'Nb-102': Iso('Nb-102', 'niobium-102', 41, 102, 101.9180772, False),
-    'Nb-103': Iso('Nb-103', 'niobium-103', 41, 103, 102.9194572, False),
-    'Nb-104': Iso('Nb-104', 'niobium-104', 41, 104, 103.9228925, False),
-    'Nb-105': Iso('Nb-105', 'niobium-105', 41, 105, 104.9249465, False),
-    'Nb-106': Iso('Nb-106', 'niobium-106', 41, 106, 105.9289317, False),
-    'Nb-107': Iso('Nb-107', 'niobium-107', 41, 107, 106.9315937, False),
-    'Nb-108': Iso('Nb-108', 'niobium-108', 41, 108, 107.9360748, False),
-    'Nb-109': Iso('Nb-109', 'niobium-109', 41, 109, 108.93922, False),
-    'Nb-110': Iso('Nb-110', 'niobium-110', 41, 110, 109.94403, False),
-    'Nb-111': Iso('Nb-111', 'niobium-111', 41, 111, 110.94753, False),
-    'Nb-112': Iso('Nb-112', 'niobium-112', 41, 112, 111.95247, False),
-    'Nb-113': Iso('Nb-113', 'niobium-113', 41, 113, 112.95651, False),
-    'Nb-114': Iso('Nb-114', 'niobium-114', 41, 114, 113.96201, False),
-    'Nb-115': Iso('Nb-115', 'niobium-115', 41, 115, 114.96634, False),
-    'Mo-83': Iso('Mo-83', 'molybdenum-83', 42, 83, 82.94988, False),
-    'Mo-84': Iso('Mo-84', 'molybdenum-84', 42, 84, 83.94149, False),
-    'Mo-85': Iso('Mo-85', 'molybdenum-85', 42, 85, 84.938261, False),
-    'Mo-86': Iso('Mo-86', 'molybdenum-86', 42, 86, 85.9311748, False),
-    'Mo-87': Iso('Mo-87', 'molybdenum-87', 42, 87, 86.9281962, False),
-    'Mo-88': Iso('Mo-88', 'molybdenum-88', 42, 88, 87.9219678, False),
-    'Mo-89': Iso('Mo-89', 'molybdenum-89', 42, 89, 88.9194682, False),
-    'Mo-90': Iso('Mo-90', 'molybdenum-90', 42, 90, 89.9139309, False),
-    'Mo-91': Iso('Mo-91', 'molybdenum-91', 42, 91, 90.9117453, False),
-    'Mo-92': Iso('Mo-92', 'molybdenum-92', 42, 92, 91.90680796, True,
-                 isotopic_abundance=0.1453),
-    'Mo-93': Iso('Mo-93', 'molybdenum-93', 42, 93, 92.90680958, False),
-    'Mo-94': Iso('Mo-94', 'molybdenum-94', 42, 94, 93.90508490, True,
-                 isotopic_abundance=0.0915),
-    'Mo-95': Iso('Mo-95', 'molybdenum-95', 42, 95, 94.90583877, True,
-                 isotopic_abundance=0.1584),
-    'Mo-96': Iso('Mo-96', 'molybdenum-96', 42, 96, 95.90467612, True,
-                 isotopic_abundance=0.1667),
-    'Mo-97': Iso('Mo-97', 'molybdenum-97', 42, 97, 96.90601812, True,
-                 isotopic_abundance=0.0960),
-    'Mo-98': Iso('Mo-98', 'molybdenum-98', 42, 98, 97.90540482, True,
-                 isotopic_abundance=0.2439),
-    'Mo-99': Iso('Mo-99', 'molybdenum-99', 42, 99, 98.90770851, False,
-                 half_life=237326.04),
-    'Mo-100': Iso('Mo-100', 'molybdenum-100', 42, 100, 99.9074718, False,
-                  isotopic_abundance=0.0982),
-    'Mo-101': Iso('Mo-101', 'molybdenum-101', 42, 101, 100.9103414, False),
-    'Mo-102': Iso('Mo-102', 'molybdenum-102', 42, 102, 101.9102834, False),
-    'Mo-103': Iso('Mo-103', 'molybdenum-103', 42, 103, 102.913079, False),
-    'Mo-104': Iso('Mo-104', 'molybdenum-104', 42, 104, 103.9137344, False),
-    'Mo-105': Iso('Mo-105', 'molybdenum-105', 42, 105, 104.916969, False),
-    'Mo-106': Iso('Mo-106', 'molybdenum-106', 42, 106, 105.918259, False),
-    'Mo-107': Iso('Mo-107', 'molybdenum-107', 42, 107, 106.922106, False),
-    'Mo-108': Iso('Mo-108', 'molybdenum-108', 42, 108, 107.924033, False),
-    'Mo-109': Iso('Mo-109', 'molybdenum-109', 42, 109, 108.928424, False),
-    'Mo-110': Iso('Mo-110', 'molybdenum-110', 42, 110, 109.930704, False),
-    'Mo-111': Iso('Mo-111', 'molybdenum-111', 42, 111, 110.935654, False),
-    'Mo-112': Iso('Mo-112', 'molybdenum-112', 42, 112, 111.93831, False),
-    'Mo-113': Iso('Mo-113', 'molybdenum-113', 42, 113, 112.94335, False),
-    'Mo-114': Iso('Mo-114', 'molybdenum-114', 42, 114, 113.94653, False),
-    'Mo-115': Iso('Mo-115', 'molybdenum-115', 42, 115, 114.95196, False),
-    'Mo-116': Iso('Mo-116', 'molybdenum-116', 42, 116, 115.95545, False),
-    'Mo-117': Iso('Mo-117', 'molybdenum-117', 42, 117, 116.96117, False),
-    'Tc-85': Iso('Tc-85', 'technetium-85', 43, 85, 84.95058, False),
-    'Tc-86': Iso('Tc-86', 'technetium-86', 43, 86, 85.94493, False),
-    'Tc-87': Iso('Tc-87', 'technetium-87', 43, 87, 86.9380672, False),
-    'Tc-88': Iso('Tc-88', 'technetium-88', 43, 88, 87.93378, False),
-    'Tc-89': Iso('Tc-89', 'technetium-89', 43, 89, 88.9276487, False),
-    'Tc-90': Iso('Tc-90', 'technetium-90', 43, 90, 89.9240739, False),
-    'Tc-91': Iso('Tc-91', 'technetium-91', 43, 91, 90.9184254, False),
-    'Tc-92': Iso('Tc-92', 'technetium-92', 43, 92, 91.9152698, False),
-    'Tc-93': Iso('Tc-93', 'technetium-93', 43, 93, 92.9102460, False),
-    'Tc-94': Iso('Tc-94', 'technetium-94', 43, 94, 93.9096536, False),
-    'Tc-95': Iso('Tc-95', 'technetium-95', 43, 95, 94.9076536, False),
-    'Tc-96': Iso('Tc-96', 'technetium-96', 43, 96, 95.9078680, False),
-    'Tc-97': Iso('Tc-97', 'technetium-97', 43, 97, 96.9063667, False),
-    'Tc-98': Iso('Tc-98', 'technetium-98', 43, 98, 97.9072124, False),
-    'Tc-99': Iso('Tc-99', 'technetium-99', 43, 99, 98.9062508, False,
-                 half_life=21636.0),
-    'Tc-100': Iso('Tc-100', 'technetium-100', 43, 100, 99.9076539, False),
-    'Tc-101': Iso('Tc-101', 'technetium-101', 43, 101, 100.907309, False),
-    'Tc-102': Iso('Tc-102', 'technetium-102', 43, 102, 101.9092097, False),
-    'Tc-103': Iso('Tc-103', 'technetium-103', 43, 103, 102.909176, False),
-    'Tc-104': Iso('Tc-104', 'technetium-104', 43, 104, 103.911425, False),
-    'Tc-105': Iso('Tc-105', 'technetium-105', 43, 105, 104.911655, False),
-    'Tc-106': Iso('Tc-106', 'technetium-106', 43, 106, 105.914358, False),
-    'Tc-107': Iso('Tc-107', 'technetium-107', 43, 107, 106.9154606, False),
-    'Tc-108': Iso('Tc-108', 'technetium-108', 43, 108, 107.9184957, False),
-    'Tc-109': Iso('Tc-109', 'technetium-109', 43, 109, 108.920256, False),
-    'Tc-110': Iso('Tc-110', 'technetium-110', 43, 110, 109.923744, False),
-    'Tc-111': Iso('Tc-111', 'technetium-111', 43, 111, 110.925901, False),
-    'Tc-112': Iso('Tc-112', 'technetium-112', 43, 112, 111.9299458, False),
-    'Tc-113': Iso('Tc-113', 'technetium-113', 43, 113, 112.9325690, False),
-    'Tc-114': Iso('Tc-114', 'technetium-114', 43, 114, 113.93691, False),
-    'Tc-115': Iso('Tc-115', 'technetium-115', 43, 115, 114.93998, False),
-    'Tc-116': Iso('Tc-116', 'technetium-116', 43, 116, 115.94476, False),
-    'Tc-117': Iso('Tc-117', 'technetium-117', 43, 117, 116.94806, False),
-    'Tc-118': Iso('Tc-118', 'technetium-118', 43, 118, 117.95299, False),
-    'Tc-119': Iso('Tc-119', 'technetium-119', 43, 119, 118.95666, False),
-    'Tc-120': Iso('Tc-120', 'technetium-120', 43, 120, 119.96187, False),
-    'Ru-87': Iso('Ru-87', 'ruthenium-87', 44, 87, 86.95069, False),
-    'Ru-88': Iso('Ru-88', 'ruthenium-88', 44, 88, 87.94160, False),
-    'Ru-89': Iso('Ru-89', 'ruthenium-89', 44, 89, 88.93762, False),
-    'Ru-90': Iso('Ru-90', 'ruthenium-90', 44, 90, 89.9303444, False),
-    'Ru-91': Iso('Ru-91', 'ruthenium-91', 44, 91, 90.9267419, False),
-    'Ru-92': Iso('Ru-92', 'ruthenium-92', 44, 92, 91.9202344, False),
-    'Ru-93': Iso('Ru-93', 'ruthenium-93', 44, 93, 92.9171044, False),
-    'Ru-94': Iso('Ru-94', 'ruthenium-94', 44, 94, 93.9113429, False),
-    'Ru-95': Iso('Ru-95', 'ruthenium-95', 44, 95, 94.910406, False),
-    'Ru-96': Iso('Ru-96', 'ruthenium-96', 44, 96, 95.90759025, True,
-                 isotopic_abundance=0.0554),
-    'Ru-97': Iso('Ru-97', 'ruthenium-97', 44, 97, 96.9075471, False),
-    'Ru-98': Iso('Ru-98', 'ruthenium-98', 44, 98, 97.9052868, True,
-                 isotopic_abundance=0.0187),
-    'Ru-99': Iso('Ru-99', 'ruthenium-99', 44, 99, 98.9059341, True,
-                 isotopic_abundance=0.1276),
-    'Ru-100': Iso('Ru-100', 'ruthenium-100', 44, 100, 99.9042143, True,
-                  isotopic_abundance=0.1260),
-    'Ru-101': Iso('Ru-101', 'ruthenium-101', 44, 101, 100.9055769, True,
-                  isotopic_abundance=0.1706),
-    'Ru-102': Iso('Ru-102', 'ruthenium-102', 44, 102, 101.9043441, True,
-                  isotopic_abundance=0.3155),
-    'Ru-103': Iso('Ru-103', 'ruthenium-103', 44, 103, 102.9063186, False,
-                  half_life=3396384.0),
-    'Ru-104': Iso('Ru-104', 'ruthenium-104', 44, 104, 103.9054275, True,
-                  isotopic_abundance=0.1862),
-    'Ru-105': Iso('Ru-105', 'ruthenium-105', 44, 105, 104.9077476, False),
-    'Ru-106': Iso('Ru-106', 'ruthenium-106', 44, 106, 105.9073291, False),
-    'Ru-107': Iso('Ru-107', 'ruthenium-107', 44, 107, 106.9099720, False),
-    'Ru-108': Iso('Ru-108', 'ruthenium-108', 44, 108, 107.9101880, False),
-    'Ru-109': Iso('Ru-109', 'ruthenium-109', 44, 109, 108.9133260, False),
-    'Ru-110': Iso('Ru-110', 'ruthenium-110', 44, 110, 109.9140407, False),
-    'Ru-111': Iso('Ru-111', 'ruthenium-111', 44, 111, 110.917570, False),
-    'Ru-112': Iso('Ru-112', 'ruthenium-112', 44, 112, 111.918809, False),
-    'Ru-113': Iso('Ru-113', 'ruthenium-113', 44, 113, 112.922844, False),
-    'Ru-114': Iso('Ru-114', 'ruthenium-114', 44, 114, 113.9246136, False),
-    'Ru-115': Iso('Ru-115', 'ruthenium-115', 44, 115, 114.928820, False),
-    'Ru-116': Iso('Ru-116', 'ruthenium-116', 44, 116, 115.9312192, False),
-    'Ru-117': Iso('Ru-117', 'ruthenium-117', 44, 117, 116.93610, False),
-    'Ru-118': Iso('Ru-118', 'ruthenium-118', 44, 118, 117.93853, False),
-    'Ru-119': Iso('Ru-119', 'ruthenium-119', 44, 119, 118.94357, False),
-    'Ru-120': Iso('Ru-120', 'ruthenium-120', 44, 120, 119.94631, False),
-    'Ru-121': Iso('Ru-121', 'ruthenium-121', 44, 121, 120.95164, False),
-    'Ru-122': Iso('Ru-122', 'ruthenium-122', 44, 122, 121.95447, False),
-    'Ru-123': Iso('Ru-123', 'ruthenium-123', 44, 123, 122.95989, False),
-    'Ru-124': Iso('Ru-124', 'ruthenium-124', 44, 124, 123.96305, False),
-    'Rh-89': Iso('Rh-89', 'rhodium-89', 45, 89, 88.95058, False),
-    'Rh-90': Iso('Rh-90', 'rhodium-90', 45, 90, 89.94422, False),
-    'Rh-91': Iso('Rh-91', 'rhodium-91', 45, 91, 90.93688, False),
-    'Rh-92': Iso('Rh-92', 'rhodium-92', 45, 92, 91.9323677, False),
-    'Rh-93': Iso('Rh-93', 'rhodium-93', 45, 93, 92.9259128, False),
-    'Rh-94': Iso('Rh-94', 'rhodium-94', 45, 94, 93.9217305, False),
-    'Rh-95': Iso('Rh-95', 'rhodium-95', 45, 95, 94.9158979, False),
-    'Rh-96': Iso('Rh-96', 'rhodium-96', 45, 96, 95.914453, False),
-    'Rh-97': Iso('Rh-97', 'rhodium-97', 45, 97, 96.911329, False),
-    'Rh-98': Iso('Rh-98', 'rhodium-98', 45, 98, 97.910708, False),
-    'Rh-99': Iso('Rh-99', 'rhodium-99', 45, 99, 98.9081282, False),
-    'Rh-100': Iso('Rh-100', 'rhodium-100', 45, 100, 99.908117, False),
-    'Rh-101': Iso('Rh-101', 'rhodium-101', 45, 101, 100.9061606, False),
-    'Rh-102': Iso('Rh-102', 'rhodium-102', 45, 102, 101.9068374, False),
-    'Rh-103': Iso('Rh-103', 'rhodium-103', 45, 103, 102.9054980, True,
+    'F-20': _iso('F-20', 'fluorine-20', 9, 20, 19.999981252, False),
+    'F-21': _iso('F-21', 'fluorine-21', 9, 21, 20.9999489, False),
+    'F-22': _iso('F-22', 'fluorine-22', 9, 22, 22.002999, False),
+    'F-23': _iso('F-23', 'fluorine-23', 9, 23, 23.003557, False),
+    'F-24': _iso('F-24', 'fluorine-24', 9, 24, 24.008115, False),
+    'F-25': _iso('F-25', 'fluorine-25', 9, 25, 25.012199, False),
+    'F-26': _iso('F-26', 'fluorine-26', 9, 26, 26.020038, False),
+    'F-27': _iso('F-27', 'fluorine-27', 9, 27, 27.02644, False),
+    'F-28': _iso('F-28', 'fluorine-28', 9, 28, 28.03534, False),
+    'F-29': _iso('F-29', 'fluorine-29', 9, 29, 29.04254, False),
+    'F-30': _iso('F-30', 'fluorine-30', 9, 30, 30.05165, False),
+    'F-31': _iso('F-31', 'fluorine-31', 9, 31, 31.05971, False),
+    'Ne-16': _iso('Ne-16', 'neon-16', 10, 16, 16.025750, False),
+    'Ne-17': _iso('Ne-17', 'neon-17', 10, 17, 17.01771396, False),
+    'Ne-18': _iso('Ne-18', 'neon-18', 10, 18, 18.00570870, False),
+    'Ne-19': _iso('Ne-19', 'neon-19', 10, 19, 19.00188091, False),
+    'Ne-20': _iso('Ne-20', 'neon-20', 10, 20, 19.9924401762, True,
+                  isotopic_abundance=0.9048),
+    'Ne-21': _iso('Ne-21', 'neon-21', 10, 21, 20.993846685, True,
+                  isotopic_abundance=0.0027),
+    'Ne-22': _iso('Ne-22', 'neon-22', 10, 22, 21.991385114, True,
+                  isotopic_abundance=0.0925),
+    'Ne-23': _iso('Ne-23', 'neon-23', 10, 23, 22.99446691, False),
+    'Ne-24': _iso('Ne-24', 'neon-24', 10, 24, 23.99361065, False),
+    'Ne-25': _iso('Ne-25', 'neon-25', 10, 25, 24.997789, False),
+    'Ne-26': _iso('Ne-26', 'neon-26', 10, 26, 26.000515, False),
+    'Ne-27': _iso('Ne-27', 'neon-27', 10, 27, 27.007553, False),
+    'Ne-28': _iso('Ne-28', 'neon-28', 10, 28, 28.01212, False),
+    'Ne-29': _iso('Ne-29', 'neon-29', 10, 29, 29.01975, False),
+    'Ne-30': _iso('Ne-30', 'neon-30', 10, 30, 30.02473, False),
+    'Ne-31': _iso('Ne-31', 'neon-31', 10, 31, 31.0331, False),
+    'Ne-32': _iso('Ne-32', 'neon-32', 10, 32, 32.03972, False),
+    'Ne-33': _iso('Ne-33', 'neon-33', 10, 33, 33.04938, False),
+    'Ne-34': _iso('Ne-34', 'neon-34', 10, 34, 34.05673, False),
+    'Na-18': _iso('Na-18', 'sodium-18', 11, 18, 18.02688, False),
+    'Na-19': _iso('Na-19', 'sodium-19', 11, 19, 19.013880, False),
+    'Na-20': _iso('Na-20', 'sodium-20', 11, 20, 20.0073544, False),
+    'Na-21': _iso('Na-21', 'sodium-21', 11, 21, 20.99765469, False),
+    'Na-22': _iso('Na-22', 'sodium-22', 11, 22, 21.99443741, False,
+                  half_life=82163808.0),
+    'Na-23': _iso('Na-23', 'sodium-23', 11, 23, 22.9897692820, True,
                   isotopic_abundance=1),
-    'Rh-104': Iso('Rh-104', 'rhodium-104', 45, 104, 103.9066492, False),
-    'Rh-105': Iso('Rh-105', 'rhodium-105', 45, 105, 104.9056885, False),
-    'Rh-106': Iso('Rh-106', 'rhodium-106', 45, 106, 105.9072868, False),
-    'Rh-107': Iso('Rh-107', 'rhodium-107', 45, 107, 106.906748, False),
-    'Rh-108': Iso('Rh-108', 'rhodium-108', 45, 108, 107.908714, False),
-    'Rh-109': Iso('Rh-109', 'rhodium-109', 45, 109, 108.9087488, False),
-    'Rh-110': Iso('Rh-110', 'rhodium-110', 45, 110, 109.911079, False),
-    'Rh-111': Iso('Rh-111', 'rhodium-111', 45, 111, 110.9116423, False),
-    'Rh-112': Iso('Rh-112', 'rhodium-112', 45, 112, 111.914403, False),
-    'Rh-113': Iso('Rh-113', 'rhodium-113', 45, 113, 112.9154393, False),
-    'Rh-114': Iso('Rh-114', 'rhodium-114', 45, 114, 113.918718, False),
-    'Rh-115': Iso('Rh-115', 'rhodium-115', 45, 115, 114.9203116, False),
-    'Rh-116': Iso('Rh-116', 'rhodium-116', 45, 116, 115.924059, False),
-    'Rh-117': Iso('Rh-117', 'rhodium-117', 45, 117, 116.9260354, False),
-    'Rh-118': Iso('Rh-118', 'rhodium-118', 45, 118, 117.930340, False),
-    'Rh-119': Iso('Rh-119', 'rhodium-119', 45, 119, 118.932557, False),
-    'Rh-120': Iso('Rh-120', 'rhodium-120', 45, 120, 119.93686, False),
-    'Rh-121': Iso('Rh-121', 'rhodium-121', 45, 121, 120.93942, False),
-    'Rh-122': Iso('Rh-122', 'rhodium-122', 45, 122, 121.94399, False),
-    'Rh-123': Iso('Rh-123', 'rhodium-123', 45, 123, 122.94685, False),
-    'Rh-124': Iso('Rh-124', 'rhodium-124', 45, 124, 123.95151, False),
-    'Rh-125': Iso('Rh-125', 'rhodium-125', 45, 125, 124.95469, False),
-    'Rh-126': Iso('Rh-126', 'rhodium-126', 45, 126, 125.95946, False),
-    'Pd-91': Iso('Pd-91', 'palladium-91', 46, 91, 90.95032, False),
-    'Pd-92': Iso('Pd-92', 'palladium-92', 46, 92, 91.94088, False),
-    'Pd-93': Iso('Pd-93', 'palladium-93', 46, 93, 92.93651, False),
-    'Pd-94': Iso('Pd-94', 'palladium-94', 46, 94, 93.9290376, False),
-    'Pd-95': Iso('Pd-95', 'palladium-95', 46, 95, 94.9248898, False),
-    'Pd-96': Iso('Pd-96', 'palladium-96', 46, 96, 95.9182151, False),
-    'Pd-97': Iso('Pd-97', 'palladium-97', 46, 97, 96.9164720, False),
-    'Pd-98': Iso('Pd-98', 'palladium-98', 46, 98, 97.9126983, False),
-    'Pd-99': Iso('Pd-99', 'palladium-99', 46, 99, 98.9117748, False),
-    'Pd-100': Iso('Pd-100', 'palladium-100', 46, 100, 99.908505, False),
-    'Pd-101': Iso('Pd-101', 'palladium-101', 46, 101, 100.9082864, False),
-    'Pd-102': Iso('Pd-102', 'palladium-102', 46, 102, 101.9056022, True,
-                  isotopic_abundance=0.0102),
-    'Pd-103': Iso('Pd-103', 'palladium-103', 46, 103, 102.9060809, False),
-    'Pd-104': Iso('Pd-104', 'palladium-104', 46, 104, 103.9040305, True,
-                  isotopic_abundance=0.1114),
-    'Pd-105': Iso('Pd-105', 'palladium-105', 46, 105, 104.9050796, True,
-                  isotopic_abundance=0.2233),
-    'Pd-106': Iso('Pd-106', 'palladium-106', 46, 106, 105.9034804, True,
-                  isotopic_abundance=0.2733),
-    'Pd-107': Iso('Pd-107', 'palladium-107', 46, 107, 106.9051282, False),
-    'Pd-108': Iso('Pd-108', 'palladium-108', 46, 108, 107.9038916, True,
-                  isotopic_abundance=0.2646),
-    'Pd-109': Iso('Pd-109', 'palladium-109', 46, 109, 108.9059504, False),
-    'Pd-110': Iso('Pd-110', 'palladium-110', 46, 110, 109.90517220, True,
-                  isotopic_abundance=0.1172),
-    'Pd-111': Iso('Pd-111', 'palladium-111', 46, 111, 110.90768968, False),
-    'Pd-112': Iso('Pd-112', 'palladium-112', 46, 112, 111.9073297, False),
-    'Pd-113': Iso('Pd-113', 'palladium-113', 46, 113, 112.9102610, False),
-    'Pd-114': Iso('Pd-114', 'palladium-114', 46, 114, 113.9103686, False),
-    'Pd-115': Iso('Pd-115', 'palladium-115', 46, 115, 114.913659, False),
-    'Pd-116': Iso('Pd-116', 'palladium-116', 46, 116, 115.9142970, False),
-    'Pd-117': Iso('Pd-117', 'palladium-117', 46, 117, 116.9179547, False),
-    'Pd-118': Iso('Pd-118', 'palladium-118', 46, 118, 117.9190667, False),
-    'Pd-119': Iso('Pd-119', 'palladium-119', 46, 119, 118.9233402, False),
-    'Pd-120': Iso('Pd-120', 'palladium-120', 46, 120, 119.9245511, False),
-    'Pd-121': Iso('Pd-121', 'palladium-121', 46, 121, 120.9289503, False),
-    'Pd-122': Iso('Pd-122', 'palladium-122', 46, 122, 121.930632, False),
-    'Pd-123': Iso('Pd-123', 'palladium-123', 46, 123, 122.93514, False),
-    'Pd-124': Iso('Pd-124', 'palladium-124', 46, 124, 123.93714, False),
-    'Pd-125': Iso('Pd-125', 'palladium-125', 46, 125, 124.94179, False),
-    'Pd-126': Iso('Pd-126', 'palladium-126', 46, 126, 125.94416, False),
-    'Pd-127': Iso('Pd-127', 'palladium-127', 46, 127, 126.94907, False),
-    'Pd-128': Iso('Pd-128', 'palladium-128', 46, 128, 127.95183, False),
-    'Ag-93': Iso('Ag-93', 'silver-93', 47, 93, 92.95033, False),
-    'Ag-94': Iso('Ag-94', 'silver-94', 47, 94, 93.94373, False),
-    'Ag-95': Iso('Ag-95', 'silver-95', 47, 95, 94.93602, False),
-    'Ag-96': Iso('Ag-96', 'silver-96', 47, 96, 95.930744, False),
-    'Ag-97': Iso('Ag-97', 'silver-97', 47, 97, 96.92397, False),
-    'Ag-98': Iso('Ag-98', 'silver-98', 47, 98, 97.921560, False),
-    'Ag-99': Iso('Ag-99', 'silver-99', 47, 99, 98.9176458, False),
-    'Ag-100': Iso('Ag-100', 'silver-100', 47, 100, 99.9161154, False),
-    'Ag-101': Iso('Ag-101', 'silver-101', 47, 101, 100.9126840, False),
-    'Ag-102': Iso('Ag-102', 'silver-102', 47, 102, 101.9117047, False),
-    'Ag-103': Iso('Ag-103', 'silver-103', 47, 103, 102.9089631, False),
-    'Ag-104': Iso('Ag-104', 'silver-104', 47, 104, 103.9086239, False),
-    'Ag-105': Iso('Ag-105', 'silver-105', 47, 105, 104.9065256, False),
-    'Ag-106': Iso('Ag-106', 'silver-106', 47, 106, 105.9066636, False),
-    'Ag-107': Iso('Ag-107', 'silver-107', 47, 107, 106.9050916, True,
-                  isotopic_abundance=0.51839),
-    'Ag-108': Iso('Ag-108', 'silver-108', 47, 108, 107.9059503, False),
-    'Ag-109': Iso('Ag-109', 'silver-109', 47, 109, 108.9047553, True,
-                  isotopic_abundance=0.48161),
-    'Ag-110': Iso('Ag-110', 'silver-110', 47, 110, 109.9061102, False),
-    'Ag-111': Iso('Ag-111', 'silver-111', 47, 111, 110.9052959, False),
-    'Ag-112': Iso('Ag-112', 'silver-112', 47, 112, 111.9070486, False),
-    'Ag-113': Iso('Ag-113', 'silver-113', 47, 113, 112.906573, False),
-    'Ag-114': Iso('Ag-114', 'silver-114', 47, 114, 113.9088230, False),
-    'Ag-115': Iso('Ag-115', 'silver-115', 47, 115, 114.908767, False),
-    'Ag-116': Iso('Ag-116', 'silver-116', 47, 116, 115.9113868, False),
-    'Ag-117': Iso('Ag-117', 'silver-117', 47, 117, 116.911774, False),
-    'Ag-118': Iso('Ag-118', 'silver-118', 47, 118, 117.9145955, False),
-    'Ag-119': Iso('Ag-119', 'silver-119', 47, 119, 118.915570, False),
-    'Ag-120': Iso('Ag-120', 'silver-120', 47, 120, 119.9187848, False),
-    'Ag-121': Iso('Ag-121', 'silver-121', 47, 121, 120.920125, False),
-    'Ag-122': Iso('Ag-122', 'silver-122', 47, 122, 121.923664, False),
-    'Ag-123': Iso('Ag-123', 'silver-123', 47, 123, 122.925337, False),
-    'Ag-124': Iso('Ag-124', 'silver-124', 47, 124, 123.92893, False),
-    'Ag-125': Iso('Ag-125', 'silver-125', 47, 125, 124.93105, False),
-    'Ag-126': Iso('Ag-126', 'silver-126', 47, 126, 125.93475, False),
-    'Ag-127': Iso('Ag-127', 'silver-127', 47, 127, 126.93711, False),
-    'Ag-128': Iso('Ag-128', 'silver-128', 47, 128, 127.94106, False),
-    'Ag-129': Iso('Ag-129', 'silver-129', 47, 129, 128.94395, False),
-    'Ag-130': Iso('Ag-130', 'silver-130', 47, 130, 129.95070, False),
-    'Cd-95': Iso('Cd-95', 'cadmium-95', 48, 95, 94.94994, False),
-    'Cd-96': Iso('Cd-96', 'cadmium-96', 48, 96, 95.94034, False),
-    'Cd-97': Iso('Cd-97', 'cadmium-97', 48, 97, 96.93510, False),
-    'Cd-98': Iso('Cd-98', 'cadmium-98', 48, 98, 97.927389, False),
-    'Cd-99': Iso('Cd-99', 'cadmium-99', 48, 99, 98.9249258, False),
-    'Cd-100': Iso('Cd-100', 'cadmium-100', 48, 100, 99.9203488, False),
-    'Cd-101': Iso('Cd-101', 'cadmium-101', 48, 101, 100.9185862, False),
-    'Cd-102': Iso('Cd-102', 'cadmium-102', 48, 102, 101.9144820, False),
-    'Cd-103': Iso('Cd-103', 'cadmium-103', 48, 103, 102.9134165, False),
-    'Cd-104': Iso('Cd-104', 'cadmium-104', 48, 104, 103.9098564, False),
-    'Cd-105': Iso('Cd-105', 'cadmium-105', 48, 105, 104.9094639, False),
-    'Cd-106': Iso('Cd-106', 'cadmium-106', 48, 106, 105.9064599, True,
-                  isotopic_abundance=0.0125),
-    'Cd-107': Iso('Cd-107', 'cadmium-107', 48, 107, 106.9066121, False),
-    'Cd-108': Iso('Cd-108', 'cadmium-108', 48, 108, 107.9041834, True,
-                  isotopic_abundance=0.0089),
-    'Cd-109': Iso('Cd-109', 'cadmium-109', 48, 109, 108.9049867, False,
-                  half_life=40025664.0),
-    'Cd-110': Iso('Cd-110', 'cadmium-110', 48, 110, 109.90300661, True,
-                  isotopic_abundance=0.1249),
-    'Cd-111': Iso('Cd-111', 'cadmium-111', 48, 111, 110.90418287, True,
-                  isotopic_abundance=0.1280),
-    'Cd-112': Iso('Cd-112', 'cadmium-112', 48, 112, 111.90276287, True,
-                  isotopic_abundance=0.2413),
-    'Cd-113': Iso('Cd-113', 'cadmium-113', 48, 113, 112.90440813, False,
-                  isotopic_abundance=0.1222),
-    'Cd-114': Iso('Cd-114', 'cadmium-114', 48, 114, 113.90336509, True,
-                  isotopic_abundance=0.2873),
-    'Cd-115': Iso('Cd-115', 'cadmium-115', 48, 115, 114.90543751, False),
-    'Cd-116': Iso('Cd-116', 'cadmium-116', 48, 116, 115.90476315, False,
-                  isotopic_abundance=0.0749),
-    'Cd-117': Iso('Cd-117', 'cadmium-117', 48, 117, 116.9072260, False),
-    'Cd-118': Iso('Cd-118', 'cadmium-118', 48, 118, 117.906922, False),
-    'Cd-119': Iso('Cd-119', 'cadmium-119', 48, 119, 118.909847, False),
-    'Cd-120': Iso('Cd-120', 'cadmium-120', 48, 120, 119.9098681, False),
-    'Cd-121': Iso('Cd-121', 'cadmium-121', 48, 121, 120.9129637, False),
-    'Cd-122': Iso('Cd-122', 'cadmium-122', 48, 122, 121.9134591, False),
-    'Cd-123': Iso('Cd-123', 'cadmium-123', 48, 123, 122.9168925, False),
-    'Cd-124': Iso('Cd-124', 'cadmium-124', 48, 124, 123.9176574, False),
-    'Cd-125': Iso('Cd-125', 'cadmium-125', 48, 125, 124.9212576, False),
-    'Cd-126': Iso('Cd-126', 'cadmium-126', 48, 126, 125.9224291, False),
-    'Cd-127': Iso('Cd-127', 'cadmium-127', 48, 127, 126.926472, False),
-    'Cd-128': Iso('Cd-128', 'cadmium-128', 48, 128, 127.9278129, False),
-    'Cd-129': Iso('Cd-129', 'cadmium-129', 48, 129, 128.93182, False),
-    'Cd-130': Iso('Cd-130', 'cadmium-130', 48, 130, 129.93394, False),
-    'Cd-131': Iso('Cd-131', 'cadmium-131', 48, 131, 130.94060, False),
-    'Cd-132': Iso('Cd-132', 'cadmium-132', 48, 132, 131.94604, False),
-    'Cd-133': Iso('Cd-133', 'cadmium-133', 48, 133, 132.95285, False),
-    'In-97': Iso('In-97', 'indium-97', 49, 97, 96.94934, False),
-    'In-98': Iso('In-98', 'indium-98', 49, 98, 97.94214, False),
-    'In-99': Iso('In-99', 'indium-99', 49, 99, 98.93411, False),
-    'In-100': Iso('In-100', 'indium-100', 49, 100, 99.93096, False),
-    'In-101': Iso('In-101', 'indium-101', 49, 101, 100.92634, False),
-    'In-102': Iso('In-102', 'indium-102', 49, 102, 101.9241071, False),
-    'In-103': Iso('In-103', 'indium-103', 49, 103, 102.9198819, False),
-    'In-104': Iso('In-104', 'indium-104', 49, 104, 103.9182145, False),
-    'In-105': Iso('In-105', 'indium-105', 49, 105, 104.914502, False),
-    'In-106': Iso('In-106', 'indium-106', 49, 106, 105.913464, False),
-    'In-107': Iso('In-107', 'indium-107', 49, 107, 106.910290, False),
-    'In-108': Iso('In-108', 'indium-108', 49, 108, 107.9096935, False),
-    'In-109': Iso('In-109', 'indium-109', 49, 109, 108.9071514, False),
-    'In-110': Iso('In-110', 'indium-110', 49, 110, 109.907170, False),
-    'In-111': Iso('In-111', 'indium-111', 49, 111, 110.9051085, False,
-                  half_life=242332.128),
-    'In-112': Iso('In-112', 'indium-112', 49, 112, 111.9055377, False),
-    'In-113': Iso('In-113', 'indium-113', 49, 113, 112.90406184, True,
-                  isotopic_abundance=0.0429),
-    'In-114': Iso('In-114', 'indium-114', 49, 114, 113.90491791, False),
-    'In-115': Iso('In-115', 'indium-115', 49, 115, 114.903878776, False,
-                  isotopic_abundance=0.9571),
-    'In-116': Iso('In-116', 'indium-116', 49, 116, 115.90525999, False),
-    'In-117': Iso('In-117', 'indium-117', 49, 117, 116.9045157, False),
-    'In-118': Iso('In-118', 'indium-118', 49, 118, 117.9063566, False),
-    'In-119': Iso('In-119', 'indium-119', 49, 119, 118.9058507, False),
-    'In-120': Iso('In-120', 'indium-120', 49, 120, 119.907967, False),
-    'In-121': Iso('In-121', 'indium-121', 49, 121, 120.907851, False),
-    'In-122': Iso('In-122', 'indium-122', 49, 122, 121.910281, False),
-    'In-123': Iso('In-123', 'indium-123', 49, 123, 122.910434, False),
-    'In-124': Iso('In-124', 'indium-124', 49, 124, 123.913182, False),
-    'In-125': Iso('In-125', 'indium-125', 49, 125, 124.913605, False),
-    'In-126': Iso('In-126', 'indium-126', 49, 126, 125.916507, False),
-    'In-127': Iso('In-127', 'indium-127', 49, 127, 126.917446, False),
-    'In-128': Iso('In-128', 'indium-128', 49, 128, 127.92040, False),
-    'In-129': Iso('In-129', 'indium-129', 49, 129, 128.9218053, False),
-    'In-130': Iso('In-130', 'indium-130', 49, 130, 129.924977, False),
-    'In-131': Iso('In-131', 'indium-131', 49, 131, 130.9269715, False),
-    'In-132': Iso('In-132', 'indium-132', 49, 132, 131.933001, False),
-    'In-133': Iso('In-133', 'indium-133', 49, 133, 132.93831, False),
-    'In-134': Iso('In-134', 'indium-134', 49, 134, 133.94454, False),
-    'In-135': Iso('In-135', 'indium-135', 49, 135, 134.95005, False),
-    'Sn-99': Iso('Sn-99', 'tin-99', 50, 99, 98.94853, False),
-    'Sn-100': Iso('Sn-100', 'tin-100', 50, 100, 99.93850, False),
-    'Sn-101': Iso('Sn-101', 'tin-101', 50, 101, 100.93526, False),
-    'Sn-102': Iso('Sn-102', 'tin-102', 50, 102, 101.93029, False),
-    'Sn-103': Iso('Sn-103', 'tin-103', 50, 103, 102.928105, False),
-    'Sn-104': Iso('Sn-104', 'tin-104', 50, 104, 103.9231052, False),
-    'Sn-105': Iso('Sn-105', 'tin-105', 50, 105, 104.9212684, False),
-    'Sn-106': Iso('Sn-106', 'tin-106', 50, 106, 105.9169574, False),
-    'Sn-107': Iso('Sn-107', 'tin-107', 50, 107, 106.9157137, False),
-    'Sn-108': Iso('Sn-108', 'tin-108', 50, 108, 107.9118943, False),
-    'Sn-109': Iso('Sn-109', 'tin-109', 50, 109, 108.9112921, False),
-    'Sn-110': Iso('Sn-110', 'tin-110', 50, 110, 109.907845, False),
-    'Sn-111': Iso('Sn-111', 'tin-111', 50, 111, 110.9077401, False),
-    'Sn-112': Iso('Sn-112', 'tin-112', 50, 112, 111.90482387, True,
-                  isotopic_abundance=0.0097),
-    'Sn-113': Iso('Sn-113', 'tin-113', 50, 113, 112.9051757, False,
-                  half_life=9942825.6),
-    'Sn-114': Iso('Sn-114', 'tin-114', 50, 114, 113.9027827, True,
-                  isotopic_abundance=0.0066),
-    'Sn-115': Iso('Sn-115', 'tin-115', 50, 115, 114.903344699, True,
-                  isotopic_abundance=0.0034),
-    'Sn-116': Iso('Sn-116', 'tin-116', 50, 116, 115.90174280, True,
-                  isotopic_abundance=0.1454),
-    'Sn-117': Iso('Sn-117', 'tin-117', 50, 117, 116.90295398, True,
-                  isotopic_abundance=0.0768),
-    'Sn-118': Iso('Sn-118', 'tin-118', 50, 118, 117.90160657, True,
-                  isotopic_abundance=0.2422),
-    'Sn-119': Iso('Sn-119', 'tin-119', 50, 119, 118.90331117, True,
-                  isotopic_abundance=0.0859),
-    'Sn-120': Iso('Sn-120', 'tin-120', 50, 120, 119.90220163, True,
-                  isotopic_abundance=0.3258),
-    'Sn-121': Iso('Sn-121', 'tin-121', 50, 121, 120.9042426, False),
-    'Sn-122': Iso('Sn-122', 'tin-122', 50, 122, 121.9034438, True,
-                  isotopic_abundance=0.0463),
- 'Sn-123': Iso('Sn-123', 'tin-123', 50, 123, 122.9057252, False), - 'Sn-124': Iso('Sn-124', 'tin-124', 50, 124, 123.9052766, True, - isotopic_abundance=0.0579), - 'Sn-125': Iso('Sn-125', 'tin-125', 50, 125, 124.9077864, False), - 'Sn-126': Iso('Sn-126', 'tin-126', 50, 126, 125.907659, False), - 'Sn-127': Iso('Sn-127', 'tin-127', 50, 127, 126.910390, False), - 'Sn-128': Iso('Sn-128', 'tin-128', 50, 128, 127.910507, False), - 'Sn-129': Iso('Sn-129', 'tin-129', 50, 129, 128.913465, False), - 'Sn-130': Iso('Sn-130', 'tin-130', 50, 130, 129.9139738, False), - 'Sn-131': Iso('Sn-131', 'tin-131', 50, 131, 130.9170450, False), - 'Sn-132': Iso('Sn-132', 'tin-132', 50, 132, 131.9178267, False), - 'Sn-133': Iso('Sn-133', 'tin-133', 50, 133, 132.9239134, False), - 'Sn-134': Iso('Sn-134', 'tin-134', 50, 134, 133.9286821, False), - 'Sn-135': Iso('Sn-135', 'tin-135', 50, 135, 134.9349086, False), - 'Sn-136': Iso('Sn-136', 'tin-136', 50, 136, 135.93999, False), - 'Sn-137': Iso('Sn-137', 'tin-137', 50, 137, 136.94655, False), - 'Sn-138': Iso('Sn-138', 'tin-138', 50, 138, 137.95184, False), - 'Sb-103': Iso('Sb-103', 'antimony-103', 51, 103, 102.93969, False), - 'Sb-104': Iso('Sb-104', 'antimony-104', 51, 104, 103.93648, False), - 'Sb-105': Iso('Sb-105', 'antimony-105', 51, 105, 104.931276, False), - 'Sb-106': Iso('Sb-106', 'antimony-106', 51, 106, 105.9286380, False), - 'Sb-107': Iso('Sb-107', 'antimony-107', 51, 107, 106.9241506, False), - 'Sb-108': Iso('Sb-108', 'antimony-108', 51, 108, 107.9222267, False), - 'Sb-109': Iso('Sb-109', 'antimony-109', 51, 109, 108.9181411, False), - 'Sb-110': Iso('Sb-110', 'antimony-110', 51, 110, 109.9168543, False), - 'Sb-111': Iso('Sb-111', 'antimony-111', 51, 111, 110.9132182, False), - 'Sb-112': Iso('Sb-112', 'antimony-112', 51, 112, 111.912400, False), - 'Sb-113': Iso('Sb-113', 'antimony-113', 51, 113, 112.909375, False), - 'Sb-114': Iso('Sb-114', 'antimony-114', 51, 114, 113.909290, False), - 'Sb-115': Iso('Sb-115', 'antimony-115', 51, 115, 114.906598, False), - 'Sb-116': Iso('Sb-116', 'antimony-116', 51, 116, 115.9067931, False), - 'Sb-117': Iso('Sb-117', 'antimony-117', 51, 117, 116.9048415, False), - 'Sb-118': Iso('Sb-118', 'antimony-118', 51, 118, 117.9055321, False), - 'Sb-119': Iso('Sb-119', 'antimony-119', 51, 119, 118.9039455, False), - 'Sb-120': Iso('Sb-120', 'antimony-120', 51, 120, 119.9050794, False), - 'Sb-121': Iso('Sb-121', 'antimony-121', 51, 121, 120.9038120, True, - isotopic_abundance=0.5721), - 'Sb-122': Iso('Sb-122', 'antimony-122', 51, 122, 121.9051699, False), - 'Sb-123': Iso('Sb-123', 'antimony-123', 51, 123, 122.9042132, True, - isotopic_abundance=0.4279), - 'Sb-124': Iso('Sb-124', 'antimony-124', 51, 124, 123.9059350, False), - 'Sb-125': Iso('Sb-125', 'antimony-125', 51, 125, 124.9052530, False, - half_life=87053184.0), - 'Sb-126': Iso('Sb-126', 'antimony-126', 51, 126, 125.907253, False), - 'Sb-127': Iso('Sb-127', 'antimony-127', 51, 127, 126.9069243, False), - 'Sb-128': Iso('Sb-128', 'antimony-128', 51, 128, 127.909146, False), - 'Sb-129': Iso('Sb-129', 'antimony-129', 51, 129, 128.909147, False), - 'Sb-130': Iso('Sb-130', 'antimony-130', 51, 130, 129.911662, False), - 'Sb-131': Iso('Sb-131', 'antimony-131', 51, 131, 130.9119888, False), - 'Sb-132': Iso('Sb-132', 'antimony-132', 51, 132, 131.9145077, False), - 'Sb-133': Iso('Sb-133', 'antimony-133', 51, 133, 132.9152732, False), - 'Sb-134': Iso('Sb-134', 'antimony-134', 51, 134, 133.9205357, False), - 'Sb-135': Iso('Sb-135', 'antimony-135', 51, 135, 134.9251851, False), - 'Sb-136': 
Iso('Sb-136', 'antimony-136', 51, 136, 135.9307459, False), - 'Sb-137': Iso('Sb-137', 'antimony-137', 51, 137, 136.93555, False), - 'Sb-138': Iso('Sb-138', 'antimony-138', 51, 138, 137.94145, False), - 'Sb-139': Iso('Sb-139', 'antimony-139', 51, 139, 138.94655, False), - 'Sb-140': Iso('Sb-140', 'antimony-140', 51, 140, 139.95283, False), - 'Te-105': Iso('Te-105', 'tellurium-105', 52, 105, 104.94330, False), - 'Te-106': Iso('Te-106', 'tellurium-106', 52, 106, 105.93750, False), - 'Te-107': Iso('Te-107', 'tellurium-107', 52, 107, 106.935012, False), - 'Te-108': Iso('Te-108', 'tellurium-108', 52, 108, 107.9293805, False), - 'Te-109': Iso('Te-109', 'tellurium-109', 52, 109, 108.9273045, False), - 'Te-110': Iso('Te-110', 'tellurium-110', 52, 110, 109.9224581, False), - 'Te-111': Iso('Te-111', 'tellurium-111', 52, 111, 110.9210006, False), - 'Te-112': Iso('Te-112', 'tellurium-112', 52, 112, 111.9167279, False), - 'Te-113': Iso('Te-113', 'tellurium-113', 52, 113, 112.915891, False), - 'Te-114': Iso('Te-114', 'tellurium-114', 52, 114, 113.912089, False), - 'Te-115': Iso('Te-115', 'tellurium-115', 52, 115, 114.911902, False), - 'Te-116': Iso('Te-116', 'tellurium-116', 52, 116, 115.908460, False), - 'Te-117': Iso('Te-117', 'tellurium-117', 52, 117, 116.908646, False), - 'Te-118': Iso('Te-118', 'tellurium-118', 52, 118, 117.905854, False), - 'Te-119': Iso('Te-119', 'tellurium-119', 52, 119, 118.9064071, False), - 'Te-120': Iso('Te-120', 'tellurium-120', 52, 120, 119.9040593, True, - isotopic_abundance=0.0009), - 'Te-121': Iso('Te-121', 'tellurium-121', 52, 121, 120.904944, False), - 'Te-122': Iso('Te-122', 'tellurium-122', 52, 122, 121.9030435, True, - isotopic_abundance=0.0255), - 'Te-123': Iso('Te-123', 'tellurium-123', 52, 123, 122.9042698, True, - isotopic_abundance=0.0089), - 'Te-124': Iso('Te-124', 'tellurium-124', 52, 124, 123.9028171, True, - isotopic_abundance=0.0474), - 'Te-125': Iso('Te-125', 'tellurium-125', 52, 125, 124.9044299, True, - isotopic_abundance=0.0707), - 'Te-126': Iso('Te-126', 'tellurium-126', 52, 126, 125.9033109, True, - isotopic_abundance=0.1884), - 'Te-127': Iso('Te-127', 'tellurium-127', 52, 127, 126.9052257, False), - 'Te-128': Iso('Te-128', 'tellurium-128', 52, 128, 127.90446128, False, - isotopic_abundance=0.3174), - 'Te-129': Iso('Te-129', 'tellurium-129', 52, 129, 128.90659646, False), - 'Te-130': Iso('Te-130', 'tellurium-130', 52, 130, 129.906222748, False, - isotopic_abundance=0.3408), - 'Te-131': Iso('Te-131', 'tellurium-131', 52, 131, 130.908522213, False), - 'Te-132': Iso('Te-132', 'tellurium-132', 52, 132, 131.9085467, False), - 'Te-133': Iso('Te-133', 'tellurium-133', 52, 133, 132.9109688, False), - 'Te-134': Iso('Te-134', 'tellurium-134', 52, 134, 133.9113940, False), - 'Te-135': Iso('Te-135', 'tellurium-135', 52, 135, 134.9165557, False), - 'Te-136': Iso('Te-136', 'tellurium-136', 52, 136, 135.9201006, False), - 'Te-137': Iso('Te-137', 'tellurium-137', 52, 137, 136.9255989, False), - 'Te-138': Iso('Te-138', 'tellurium-138', 52, 138, 137.9294722, False), - 'Te-139': Iso('Te-139', 'tellurium-139', 52, 139, 138.9353672, False), - 'Te-140': Iso('Te-140', 'tellurium-140', 52, 140, 139.939499, False), - 'Te-141': Iso('Te-141', 'tellurium-141', 52, 141, 140.94580, False), - 'Te-142': Iso('Te-142', 'tellurium-142', 52, 142, 141.95022, False), - 'Te-143': Iso('Te-143', 'tellurium-143', 52, 143, 142.95676, False), - 'I-107': Iso('I-107', 'iodine-107', 53, 107, 106.94678, False), - 'I-108': Iso('I-108', 'iodine-108', 53, 108, 107.94348, False), - 'I-109': 
Iso('I-109', 'iodine-109', 53, 109, 108.9380853, False), - 'I-110': Iso('I-110', 'iodine-110', 53, 110, 109.935089, False), - 'I-111': Iso('I-111', 'iodine-111', 53, 111, 110.9302692, False), - 'I-112': Iso('I-112', 'iodine-112', 53, 112, 111.928005, False), - 'I-113': Iso('I-113', 'iodine-113', 53, 113, 112.9236501, False), - 'I-114': Iso('I-114', 'iodine-114', 53, 114, 113.92185, False), - 'I-115': Iso('I-115', 'iodine-115', 53, 115, 114.918048, False), - 'I-116': Iso('I-116', 'iodine-116', 53, 116, 115.91681, False), - 'I-117': Iso('I-117', 'iodine-117', 53, 117, 116.913648, False), - 'I-118': Iso('I-118', 'iodine-118', 53, 118, 117.913074, False), - 'I-119': Iso('I-119', 'iodine-119', 53, 119, 118.910074, False), - 'I-120': Iso('I-120', 'iodine-120', 53, 120, 119.910087, False), - 'I-121': Iso('I-121', 'iodine-121', 53, 121, 120.9074051, False), - 'I-122': Iso('I-122', 'iodine-122', 53, 122, 121.9075888, False), - 'I-123': Iso('I-123', 'iodine-123', 53, 123, 122.9055885, False, - half_life=47604.6), - 'I-124': Iso('I-124', 'iodine-124', 53, 124, 123.9062090, False), - 'I-125': Iso('I-125', 'iodine-125', 53, 125, 124.9046294, False, - half_life=5139936.0), - 'I-126': Iso('I-126', 'iodine-126', 53, 126, 125.9056233, False), - 'I-127': Iso('I-127', 'iodine-127', 53, 127, 126.9044719, True, - isotopic_abundance=1), - 'I-128': Iso('I-128', 'iodine-128', 53, 128, 127.9058086, False), - 'I-129': Iso('I-129', 'iodine-129', 53, 129, 128.9049837, False), - 'I-130': Iso('I-130', 'iodine-130', 53, 130, 129.9066702, False), - 'I-131': Iso('I-131', 'iodine-131', 53, 131, 130.90612630, False, - half_life=692902.0800000001), - 'I-132': Iso('I-132', 'iodine-132', 53, 132, 131.9079935, False), - 'I-133': Iso('I-133', 'iodine-133', 53, 133, 132.9077970, False), - 'I-134': Iso('I-134', 'iodine-134', 53, 134, 133.9097588, False), - 'I-135': Iso('I-135', 'iodine-135', 53, 135, 134.9100488, False), - 'I-136': Iso('I-136', 'iodine-136', 53, 136, 135.914604, False), - 'I-137': Iso('I-137', 'iodine-137', 53, 137, 136.9180282, False), - 'I-138': Iso('I-138', 'iodine-138', 53, 138, 137.9227264, False), - 'I-139': Iso('I-139', 'iodine-139', 53, 139, 138.926506, False), - 'I-140': Iso('I-140', 'iodine-140', 53, 140, 139.93173, False), - 'I-141': Iso('I-141', 'iodine-141', 53, 141, 140.93569, False), - 'I-142': Iso('I-142', 'iodine-142', 53, 142, 141.94120, False), - 'I-143': Iso('I-143', 'iodine-143', 53, 143, 142.94565, False), - 'I-144': Iso('I-144', 'iodine-144', 53, 144, 143.95139, False), - 'I-145': Iso('I-145', 'iodine-145', 53, 145, 144.95605, False), - 'Xe-109': Iso('Xe-109', 'xenon-109', 54, 109, 108.95043, False), - 'Xe-110': Iso('Xe-110', 'xenon-110', 54, 110, 109.94426, False), - 'Xe-111': Iso('Xe-111', 'xenon-111', 54, 111, 110.941607, False), - 'Xe-112': Iso('Xe-112', 'xenon-112', 54, 112, 111.9355590, False), - 'Xe-113': Iso('Xe-113', 'xenon-113', 54, 113, 112.9332217, False), - 'Xe-114': Iso('Xe-114', 'xenon-114', 54, 114, 113.927980, False), - 'Xe-115': Iso('Xe-115', 'xenon-115', 54, 115, 114.926294, False), - 'Xe-116': Iso('Xe-116', 'xenon-116', 54, 116, 115.921581, False), - 'Xe-117': Iso('Xe-117', 'xenon-117', 54, 117, 116.920359, False), - 'Xe-118': Iso('Xe-118', 'xenon-118', 54, 118, 117.916179, False), - 'Xe-119': Iso('Xe-119', 'xenon-119', 54, 119, 118.915411, False), - 'Xe-120': Iso('Xe-120', 'xenon-120', 54, 120, 119.911784, False), - 'Xe-121': Iso('Xe-121', 'xenon-121', 54, 121, 120.911453, False), - 'Xe-122': Iso('Xe-122', 'xenon-122', 54, 122, 121.908368, False), - 'Xe-123': 
Iso('Xe-123', 'xenon-123', 54, 123, 122.908482, False), - 'Xe-124': Iso('Xe-124', 'xenon-124', 54, 124, 123.9058920, True, - isotopic_abundance=0.000952), - 'Xe-125': Iso('Xe-125', 'xenon-125', 54, 125, 124.9063944, False), - 'Xe-126': Iso('Xe-126', 'xenon-126', 54, 126, 125.9042983, True, - isotopic_abundance=0.000890), - 'Xe-127': Iso('Xe-127', 'xenon-127', 54, 127, 126.9051829, False, - half_life=3140173.44), - 'Xe-128': Iso('Xe-128', 'xenon-128', 54, 128, 127.9035310, True, - isotopic_abundance=0.019102), - 'Xe-129': Iso('Xe-129', 'xenon-129', 54, 129, 128.9047808611, True, - isotopic_abundance=0.264006), - 'Xe-130': Iso('Xe-130', 'xenon-130', 54, 130, 129.903509349, True, - isotopic_abundance=0.040710), - 'Xe-131': Iso('Xe-131', 'xenon-131', 54, 131, 130.90508406, True, - isotopic_abundance=0.212324), - 'Xe-132': Iso('Xe-132', 'xenon-132', 54, 132, 131.9041550856, True, - isotopic_abundance=0.269086), - 'Xe-133': Iso('Xe-133', 'xenon-133', 54, 133, 132.9059108, False, - half_life=453381.408), - 'Xe-134': Iso('Xe-134', 'xenon-134', 54, 134, 133.90539466, True, - isotopic_abundance=0.104357), - 'Xe-135': Iso('Xe-135', 'xenon-135', 54, 135, 134.9072278, False), - 'Xe-136': Iso('Xe-136', 'xenon-136', 54, 136, 135.907214484, False, - isotopic_abundance=0.088573), - 'Xe-137': Iso('Xe-137', 'xenon-137', 54, 137, 136.91155778, False), - 'Xe-138': Iso('Xe-138', 'xenon-138', 54, 138, 137.9141463, False), - 'Xe-139': Iso('Xe-139', 'xenon-139', 54, 139, 138.9187922, False), - 'Xe-140': Iso('Xe-140', 'xenon-140', 54, 140, 139.9216458, False), - 'Xe-141': Iso('Xe-141', 'xenon-141', 54, 141, 140.9267872, False), - 'Xe-142': Iso('Xe-142', 'xenon-142', 54, 142, 141.9299731, False), - 'Xe-143': Iso('Xe-143', 'xenon-143', 54, 143, 142.9353696, False), - 'Xe-144': Iso('Xe-144', 'xenon-144', 54, 144, 143.9389451, False), - 'Xe-145': Iso('Xe-145', 'xenon-145', 54, 145, 144.944720, False), - 'Xe-146': Iso('Xe-146', 'xenon-146', 54, 146, 145.948518, False), - 'Xe-147': Iso('Xe-147', 'xenon-147', 54, 147, 146.95426, False), - 'Xe-148': Iso('Xe-148', 'xenon-148', 54, 148, 147.95813, False), - 'Cs-112': Iso('Cs-112', 'caesium-112', 55, 112, 111.950309, False), - 'Cs-113': Iso('Cs-113', 'caesium-113', 55, 113, 112.9444291, False), - 'Cs-114': Iso('Cs-114', 'caesium-114', 55, 114, 113.941296, False), - 'Cs-115': Iso('Cs-115', 'caesium-115', 55, 115, 114.93591, False), - 'Cs-116': Iso('Cs-116', 'caesium-116', 55, 116, 115.93337, False), - 'Cs-117': Iso('Cs-117', 'caesium-117', 55, 117, 116.928617, False), - 'Cs-118': Iso('Cs-118', 'caesium-118', 55, 118, 117.926560, False), - 'Cs-119': Iso('Cs-119', 'caesium-119', 55, 119, 118.922377, False), - 'Cs-120': Iso('Cs-120', 'caesium-120', 55, 120, 119.920677, False), - 'Cs-121': Iso('Cs-121', 'caesium-121', 55, 121, 120.917227, False), - 'Cs-122': Iso('Cs-122', 'caesium-122', 55, 122, 121.916108, False), - 'Cs-123': Iso('Cs-123', 'caesium-123', 55, 123, 122.912996, False), - 'Cs-124': Iso('Cs-124', 'caesium-124', 55, 124, 123.9122578, False), - 'Cs-125': Iso('Cs-125', 'caesium-125', 55, 125, 124.9097280, False), - 'Cs-126': Iso('Cs-126', 'caesium-126', 55, 126, 125.909446, False), - 'Cs-127': Iso('Cs-127', 'caesium-127', 55, 127, 126.9074174, False), - 'Cs-128': Iso('Cs-128', 'caesium-128', 55, 128, 127.9077487, False), - 'Cs-129': Iso('Cs-129', 'caesium-129', 55, 129, 128.9060657, False), - 'Cs-130': Iso('Cs-130', 'caesium-130', 55, 130, 129.9067093, False), - 'Cs-131': Iso('Cs-131', 'caesium-131', 55, 131, 130.9054649, False), - 'Cs-132': Iso('Cs-132', 'caesium-132', 
-    'Cs-132': Iso('Cs-132', 'caesium-132', 55, 132, 131.9064339, False),
-    'Cs-133': Iso('Cs-133', 'caesium-133', 55, 133, 132.9054519610, True,
-        isotopic_abundance=1),
-    'Cs-134': Iso('Cs-134', 'caesium-134', 55, 134, 133.906718503, False,
-        half_life=65135232.0),
-    'Cs-135': Iso('Cs-135', 'caesium-135', 55, 135, 134.9059770, False),
-    'Cs-136': Iso('Cs-136', 'caesium-136', 55, 136, 135.9073114, False),
-    'Cs-137': Iso('Cs-137', 'caesium-137', 55, 137, 136.90708923, False,
-        half_life=951981119.9999999),
-    'Cs-138': Iso('Cs-138', 'caesium-138', 55, 138, 137.9110171, False),
-    'Cs-139': Iso('Cs-139', 'caesium-139', 55, 139, 138.9133638, False),
-    'Cs-140': Iso('Cs-140', 'caesium-140', 55, 140, 139.9172831, False),
-    'Cs-141': Iso('Cs-141', 'caesium-141', 55, 141, 140.9200455, False),
-    'Cs-142': Iso('Cs-142', 'caesium-142', 55, 142, 141.9242960, False),
-    'Cs-143': Iso('Cs-143', 'caesium-143', 55, 143, 142.927349, False),
-    'Cs-144': Iso('Cs-144', 'caesium-144', 55, 144, 143.932076, False),
-    'Cs-145': Iso('Cs-145', 'caesium-145', 55, 145, 144.935527, False),
-    'Cs-146': Iso('Cs-146', 'caesium-146', 55, 146, 145.940344, False),
-    'Cs-147': Iso('Cs-147', 'caesium-147', 55, 147, 146.944156, False),
-    'Cs-148': Iso('Cs-148', 'caesium-148', 55, 148, 147.94923, False),
-    'Cs-149': Iso('Cs-149', 'caesium-149', 55, 149, 148.95302, False),
-    'Cs-150': Iso('Cs-150', 'caesium-150', 55, 150, 149.95833, False),
-    'Cs-151': Iso('Cs-151', 'caesium-151', 55, 151, 150.96258, False),
-    'Ba-114': Iso('Ba-114', 'barium-114', 56, 114, 113.95066, False),
-    'Ba-115': Iso('Ba-115', 'barium-115', 56, 115, 114.94737, False),
-    'Ba-116': Iso('Ba-116', 'barium-116', 56, 116, 115.94128, False),
-    'Ba-117': Iso('Ba-117', 'barium-117', 56, 117, 116.93814, False),
-    'Ba-118': Iso('Ba-118', 'barium-118', 56, 118, 117.93306, False),
-    'Ba-119': Iso('Ba-119', 'barium-119', 56, 119, 118.93066, False),
-    'Ba-120': Iso('Ba-120', 'barium-120', 56, 120, 119.92605, False),
-    'Ba-121': Iso('Ba-121', 'barium-121', 56, 121, 120.92405, False),
-    'Ba-122': Iso('Ba-122', 'barium-122', 56, 122, 121.919904, False),
-    'Ba-123': Iso('Ba-123', 'barium-123', 56, 123, 122.918781, False),
-    'Ba-124': Iso('Ba-124', 'barium-124', 56, 124, 123.915094, False),
-    'Ba-125': Iso('Ba-125', 'barium-125', 56, 125, 124.914472, False),
-    'Ba-126': Iso('Ba-126', 'barium-126', 56, 126, 125.911250, False),
-    'Ba-127': Iso('Ba-127', 'barium-127', 56, 127, 126.911091, False),
-    'Ba-128': Iso('Ba-128', 'barium-128', 56, 128, 127.9083420, False),
-    'Ba-129': Iso('Ba-129', 'barium-129', 56, 129, 128.908681, False),
-    'Ba-130': Iso('Ba-130', 'barium-130', 56, 130, 129.9063207, False,
-        isotopic_abundance=0.00106),
-    'Ba-131': Iso('Ba-131', 'barium-131', 56, 131, 130.9069410, False),
-    'Ba-132': Iso('Ba-132', 'barium-132', 56, 132, 131.9050611, True,
-        isotopic_abundance=0.00101),
-    'Ba-133': Iso('Ba-133', 'barium-133', 56, 133, 132.9060074, False,
-        half_life=333046080.0),
-    'Ba-134': Iso('Ba-134', 'barium-134', 56, 134, 133.90450818, True,
-        isotopic_abundance=0.02417),
-    'Ba-135': Iso('Ba-135', 'barium-135', 56, 135, 134.90568838, True,
-        isotopic_abundance=0.06592),
-    'Ba-136': Iso('Ba-136', 'barium-136', 56, 136, 135.90457573, True,
-        isotopic_abundance=0.07854),
-    'Ba-137': Iso('Ba-137', 'barium-137', 56, 137, 136.90582714, True,
-        isotopic_abundance=0.11232),
-    'Ba-138': Iso('Ba-138', 'barium-138', 56, 138, 137.90524700, True,
-        isotopic_abundance=0.71698),
-    'Ba-139': Iso('Ba-139', 'barium-139', 56, 139, 138.90884110, False),
-    'Ba-140': Iso('Ba-140', 'barium-140', 56, 140, 139.9106057, False,
-        half_life=1101833.28),
-    'Ba-141': Iso('Ba-141', 'barium-141', 56, 141, 140.9144033, False),
-    'Ba-142': Iso('Ba-142', 'barium-142', 56, 142, 141.9164324, False),
-    'Ba-143': Iso('Ba-143', 'barium-143', 56, 143, 142.9206253, False),
-    'Ba-144': Iso('Ba-144', 'barium-144', 56, 144, 143.9229549, False),
-    'Ba-145': Iso('Ba-145', 'barium-145', 56, 145, 144.9275184, False),
-    'Ba-146': Iso('Ba-146', 'barium-146', 56, 146, 145.930284, False),
-    'Ba-147': Iso('Ba-147', 'barium-147', 56, 147, 146.935304, False),
-    'Ba-148': Iso('Ba-148', 'barium-148', 56, 148, 147.938171, False),
-    'Ba-149': Iso('Ba-149', 'barium-149', 56, 149, 148.94308, False),
-    'Ba-150': Iso('Ba-150', 'barium-150', 56, 150, 149.94605, False),
-    'Ba-151': Iso('Ba-151', 'barium-151', 56, 151, 150.95127, False),
-    'Ba-152': Iso('Ba-152', 'barium-152', 56, 152, 151.95481, False),
-    'Ba-153': Iso('Ba-153', 'barium-153', 56, 153, 152.96036, False),
-    'La-116': Iso('La-116', 'lanthanum-116', 57, 116, 115.95630, False),
-    'La-117': Iso('La-117', 'lanthanum-117', 57, 117, 116.94999, False),
-    'La-118': Iso('La-118', 'lanthanum-118', 57, 118, 117.94673, False),
-    'La-119': Iso('La-119', 'lanthanum-119', 57, 119, 118.94099, False),
-    'La-120': Iso('La-120', 'lanthanum-120', 57, 120, 119.93807, False),
-    'La-121': Iso('La-121', 'lanthanum-121', 57, 121, 120.93315, False),
-    'La-122': Iso('La-122', 'lanthanum-122', 57, 122, 121.93071, False),
-    'La-123': Iso('La-123', 'lanthanum-123', 57, 123, 122.92630, False),
-    'La-124': Iso('La-124', 'lanthanum-124', 57, 124, 123.924574, False),
-    'La-125': Iso('La-125', 'lanthanum-125', 57, 125, 124.920816, False),
-    'La-126': Iso('La-126', 'lanthanum-126', 57, 126, 125.919513, False),
-    'La-127': Iso('La-127', 'lanthanum-127', 57, 127, 126.916375, False),
-    'La-128': Iso('La-128', 'lanthanum-128', 57, 128, 127.915592, False),
-    'La-129': Iso('La-129', 'lanthanum-129', 57, 129, 128.912694, False),
-    'La-130': Iso('La-130', 'lanthanum-130', 57, 130, 129.912369, False),
-    'La-131': Iso('La-131', 'lanthanum-131', 57, 131, 130.910070, False),
-    'La-132': Iso('La-132', 'lanthanum-132', 57, 132, 131.910119, False),
-    'La-133': Iso('La-133', 'lanthanum-133', 57, 133, 132.908218, False),
-    'La-134': Iso('La-134', 'lanthanum-134', 57, 134, 133.908514, False),
-    'La-135': Iso('La-135', 'lanthanum-135', 57, 135, 134.906984, False),
-    'La-136': Iso('La-136', 'lanthanum-136', 57, 136, 135.907635, False),
-    'La-137': Iso('La-137', 'lanthanum-137', 57, 137, 136.9064504, False),
-    'La-138': Iso('La-138', 'lanthanum-138', 57, 138, 137.9071149, False,
-        isotopic_abundance=0.0008881),
-    'La-139': Iso('La-139', 'lanthanum-139', 57, 139, 138.9063563, True,
-        isotopic_abundance=0.9991119),
-    'La-140': Iso('La-140', 'lanthanum-140', 57, 140, 139.9094806, False,
-        half_life=145054.8),
-    'La-141': Iso('La-141', 'lanthanum-141', 57, 141, 140.9109660, False),
-    'La-142': Iso('La-142', 'lanthanum-142', 57, 142, 141.9140909, False),
-    'La-143': Iso('La-143', 'lanthanum-143', 57, 143, 142.9160795, False),
-    'La-144': Iso('La-144', 'lanthanum-144', 57, 144, 143.919646, False),
-    'La-145': Iso('La-145', 'lanthanum-145', 57, 145, 144.921808, False),
-    'La-146': Iso('La-146', 'lanthanum-146', 57, 146, 145.925875, False),
-    'La-147': Iso('La-147', 'lanthanum-147', 57, 147, 146.928418, False),
-    'La-148': Iso('La-148', 'lanthanum-148', 57, 148, 147.932679, False),
-    'La-149': Iso('La-149', 'lanthanum-149', 57, 149, 148.93535, False),
-    'La-150': Iso('La-150', 'lanthanum-150', 57, 150, 149.93947, False),
-    'La-151': Iso('La-151', 'lanthanum-151', 57, 151, 150.94232, False),
-    'La-152': Iso('La-152', 'lanthanum-152', 57, 152, 151.94682, False),
-    'La-153': Iso('La-153', 'lanthanum-153', 57, 153, 152.95036, False),
-    'La-154': Iso('La-154', 'lanthanum-154', 57, 154, 153.95517, False),
-    'La-155': Iso('La-155', 'lanthanum-155', 57, 155, 154.95901, False),
-    'Ce-119': Iso('Ce-119', 'cerium-119', 58, 119, 118.95271, False),
-    'Ce-120': Iso('Ce-120', 'cerium-120', 58, 120, 119.94654, False),
-    'Ce-121': Iso('Ce-121', 'cerium-121', 58, 121, 120.94335, False),
-    'Ce-122': Iso('Ce-122', 'cerium-122', 58, 122, 121.93787, False),
-    'Ce-123': Iso('Ce-123', 'cerium-123', 58, 123, 122.93528, False),
-    'Ce-124': Iso('Ce-124', 'cerium-124', 58, 124, 123.93031, False),
-    'Ce-125': Iso('Ce-125', 'cerium-125', 58, 125, 124.92844, False),
-    'Ce-126': Iso('Ce-126', 'cerium-126', 58, 126, 125.923971, False),
-    'Ce-127': Iso('Ce-127', 'cerium-127', 58, 127, 126.922727, False),
-    'Ce-128': Iso('Ce-128', 'cerium-128', 58, 128, 127.918911, False),
-    'Ce-129': Iso('Ce-129', 'cerium-129', 58, 129, 128.918102, False),
-    'Ce-130': Iso('Ce-130', 'cerium-130', 58, 130, 129.914736, False),
-    'Ce-131': Iso('Ce-131', 'cerium-131', 58, 131, 130.914429, False),
-    'Ce-132': Iso('Ce-132', 'cerium-132', 58, 132, 131.911464, False),
-    'Ce-133': Iso('Ce-133', 'cerium-133', 58, 133, 132.911520, False),
-    'Ce-134': Iso('Ce-134', 'cerium-134', 58, 134, 133.908928, False),
-    'Ce-135': Iso('Ce-135', 'cerium-135', 58, 135, 134.909161, False),
-    'Ce-136': Iso('Ce-136', 'cerium-136', 58, 136, 135.90712921, True,
-        isotopic_abundance=0.00185),
-    'Ce-137': Iso('Ce-137', 'cerium-137', 58, 137, 136.90776236, False),
-    'Ce-138': Iso('Ce-138', 'cerium-138', 58, 138, 137.905991, True,
-        isotopic_abundance=0.00251),
-    'Ce-139': Iso('Ce-139', 'cerium-139', 58, 139, 138.9066551, False,
-        half_life=11900217.600000001),
-    'Ce-140': Iso('Ce-140', 'cerium-140', 58, 140, 139.9054431, True,
-        isotopic_abundance=0.88450),
-    'Ce-141': Iso('Ce-141', 'cerium-141', 58, 141, 140.9082807, False,
-        half_life=2808864.0),
-    'Ce-142': Iso('Ce-142', 'cerium-142', 58, 142, 141.9092504, True,
-        isotopic_abundance=0.11114),
-    'Ce-143': Iso('Ce-143', 'cerium-143', 58, 143, 142.9123921, False),
-    'Ce-144': Iso('Ce-144', 'cerium-144', 58, 144, 143.9136529, False,
-        half_life=24583737.599999998),
-    'Ce-145': Iso('Ce-145', 'cerium-145', 58, 145, 144.917265, False),
-    'Ce-146': Iso('Ce-146', 'cerium-146', 58, 146, 145.918802, False),
-    'Ce-147': Iso('Ce-147', 'cerium-147', 58, 147, 146.9226899, False),
-    'Ce-148': Iso('Ce-148', 'cerium-148', 58, 148, 147.924424, False),
-    'Ce-149': Iso('Ce-149', 'cerium-149', 58, 149, 148.928427, False),
-    'Ce-150': Iso('Ce-150', 'cerium-150', 58, 150, 149.930384, False),
-    'Ce-151': Iso('Ce-151', 'cerium-151', 58, 151, 150.934272, False),
-    'Ce-152': Iso('Ce-152', 'cerium-152', 58, 152, 151.93660, False),
-    'Ce-153': Iso('Ce-153', 'cerium-153', 58, 153, 152.94093, False),
-    'Ce-154': Iso('Ce-154', 'cerium-154', 58, 154, 153.94380, False),
-    'Ce-155': Iso('Ce-155', 'cerium-155', 58, 155, 154.94855, False),
-    'Ce-156': Iso('Ce-156', 'cerium-156', 58, 156, 155.95183, False),
-    'Ce-157': Iso('Ce-157', 'cerium-157', 58, 157, 156.95705, False),
-    'Pr-121': Iso('Pr-121', 'praseodymium-121', 59, 121, 120.95532, False),
-    'Pr-122': Iso('Pr-122', 'praseodymium-122', 59, 122, 121.95175, False),
-    'Pr-123': Iso('Pr-123', 'praseodymium-123', 59, 123, 122.94596, False),
-    'Pr-124': Iso('Pr-124', 'praseodymium-124', 59, 124, 123.94294, False),
-    'Pr-125': Iso('Pr-125', 'praseodymium-125', 59, 125, 124.93770, False),
-    'Pr-126': Iso('Pr-126', 'praseodymium-126', 59, 126, 125.93524, False),
-    'Pr-127': Iso('Pr-127', 'praseodymium-127', 59, 127, 126.93071, False),
-    'Pr-128': Iso('Pr-128', 'praseodymium-128', 59, 128, 127.928791, False),
-    'Pr-129': Iso('Pr-129', 'praseodymium-129', 59, 129, 128.925095, False),
-    'Pr-130': Iso('Pr-130', 'praseodymium-130', 59, 130, 129.923590, False),
-    'Pr-131': Iso('Pr-131', 'praseodymium-131', 59, 131, 130.920235, False),
-    'Pr-132': Iso('Pr-132', 'praseodymium-132', 59, 132, 131.919255, False),
-    'Pr-133': Iso('Pr-133', 'praseodymium-133', 59, 133, 132.916331, False),
-    'Pr-134': Iso('Pr-134', 'praseodymium-134', 59, 134, 133.915697, False),
-    'Pr-135': Iso('Pr-135', 'praseodymium-135', 59, 135, 134.913112, False),
-    'Pr-136': Iso('Pr-136', 'praseodymium-136', 59, 136, 135.912677, False),
-    'Pr-137': Iso('Pr-137', 'praseodymium-137', 59, 137, 136.9106792, False),
-    'Pr-138': Iso('Pr-138', 'praseodymium-138', 59, 138, 137.910754, False),
-    'Pr-139': Iso('Pr-139', 'praseodymium-139', 59, 139, 138.9089408, False),
-    'Pr-140': Iso('Pr-140', 'praseodymium-140', 59, 140, 139.9090803, False),
-    'Pr-141': Iso('Pr-141', 'praseodymium-141', 59, 141, 140.9076576, True,
-        isotopic_abundance=1),
-    'Pr-142': Iso('Pr-142', 'praseodymium-142', 59, 142, 141.9100496, False),
-    'Pr-143': Iso('Pr-143', 'praseodymium-143', 59, 143, 142.9108228, False),
-    'Pr-144': Iso('Pr-144', 'praseodymium-144', 59, 144, 143.9133109, False),
-    'Pr-145': Iso('Pr-145', 'praseodymium-145', 59, 145, 144.9145182, False),
-    'Pr-146': Iso('Pr-146', 'praseodymium-146', 59, 146, 145.917680, False),
-    'Pr-147': Iso('Pr-147', 'praseodymium-147', 59, 147, 146.919008, False),
-    'Pr-148': Iso('Pr-148', 'praseodymium-148', 59, 148, 147.922130, False),
-    'Pr-149': Iso('Pr-149', 'praseodymium-149', 59, 149, 148.923736, False),
-    'Pr-150': Iso('Pr-150', 'praseodymium-150', 59, 150, 149.9266765, False),
-    'Pr-151': Iso('Pr-151', 'praseodymium-151', 59, 151, 150.928309, False),
-    'Pr-152': Iso('Pr-152', 'praseodymium-152', 59, 152, 151.931553, False),
-    'Pr-153': Iso('Pr-153', 'praseodymium-153', 59, 153, 152.933904, False),
-    'Pr-154': Iso('Pr-154', 'praseodymium-154', 59, 154, 153.93753, False),
-    'Pr-155': Iso('Pr-155', 'praseodymium-155', 59, 155, 154.940509, False),
-    'Pr-156': Iso('Pr-156', 'praseodymium-156', 59, 156, 155.94464, False),
-    'Pr-157': Iso('Pr-157', 'praseodymium-157', 59, 157, 156.94789, False),
-    'Pr-158': Iso('Pr-158', 'praseodymium-158', 59, 158, 157.95241, False),
-    'Pr-159': Iso('Pr-159', 'praseodymium-159', 59, 159, 158.95589, False),
-    'Nd-124': Iso('Nd-124', 'neodymium-124', 60, 124, 123.95220, False),
-    'Nd-125': Iso('Nd-125', 'neodymium-125', 60, 125, 124.94890, False),
-    'Nd-126': Iso('Nd-126', 'neodymium-126', 60, 126, 125.94311, False),
-    'Nd-127': Iso('Nd-127', 'neodymium-127', 60, 127, 126.94038, False),
-    'Nd-128': Iso('Nd-128', 'neodymium-128', 60, 128, 127.93525, False),
-    'Nd-129': Iso('Nd-129', 'neodymium-129', 60, 129, 128.93310, False),
-    'Nd-130': Iso('Nd-130', 'neodymium-130', 60, 130, 129.928506, False),
-    'Nd-131': Iso('Nd-131', 'neodymium-131', 60, 131, 130.927248, False),
-    'Nd-132': Iso('Nd-132', 'neodymium-132', 60, 132, 131.923321, False),
-    'Nd-133': Iso('Nd-133', 'neodymium-133', 60, 133, 132.922348, False),
-    'Nd-134': Iso('Nd-134', 'neodymium-134', 60, 134, 133.918790, False),
-    'Nd-135': Iso('Nd-135', 'neodymium-135', 60, 135, 134.918181, False),
-    'Nd-136': Iso('Nd-136', 'neodymium-136', 60, 136, 135.914976, False),
-    'Nd-137': Iso('Nd-137', 'neodymium-137', 60, 137, 136.914562, False),
-    'Nd-138': Iso('Nd-138', 'neodymium-138', 60, 138, 137.911950, False),
-    'Nd-139': Iso('Nd-139', 'neodymium-139', 60, 139, 138.911954, False),
-    'Nd-140': Iso('Nd-140', 'neodymium-140', 60, 140, 139.909550, False),
-    'Nd-141': Iso('Nd-141', 'neodymium-141', 60, 141, 140.9096147, False),
-    'Nd-142': Iso('Nd-142', 'neodymium-142', 60, 142, 141.9077290, True,
-        isotopic_abundance=0.27152),
-    'Nd-143': Iso('Nd-143', 'neodymium-143', 60, 143, 142.9098200, True,
-        isotopic_abundance=0.12174),
-    'Nd-144': Iso('Nd-144', 'neodymium-144', 60, 144, 143.9100930, False,
-        isotopic_abundance=0.23798),
-    'Nd-145': Iso('Nd-145', 'neodymium-145', 60, 145, 144.9125793, True,
-        isotopic_abundance=0.08293),
-    'Nd-146': Iso('Nd-146', 'neodymium-146', 60, 146, 145.9131226, True,
-        isotopic_abundance=0.17189),
-    'Nd-147': Iso('Nd-147', 'neodymium-147', 60, 147, 146.9161061, False),
-    'Nd-148': Iso('Nd-148', 'neodymium-148', 60, 148, 147.9168993, True,
-        isotopic_abundance=0.05756),
-    'Nd-149': Iso('Nd-149', 'neodymium-149', 60, 149, 148.9201548, False),
-    'Nd-150': Iso('Nd-150', 'neodymium-150', 60, 150, 149.9209022, False,
-        isotopic_abundance=0.05638),
-    'Nd-151': Iso('Nd-151', 'neodymium-151', 60, 151, 150.9238403, False),
-    'Nd-152': Iso('Nd-152', 'neodymium-152', 60, 152, 151.924692, False),
-    'Nd-153': Iso('Nd-153', 'neodymium-153', 60, 153, 152.9277180, False),
-    'Nd-154': Iso('Nd-154', 'neodymium-154', 60, 154, 153.92948, False),
-    'Nd-155': Iso('Nd-155', 'neodymium-155', 60, 155, 154.9331357, False),
-    'Nd-156': Iso('Nd-156', 'neodymium-156', 60, 156, 155.93508, False),
-    'Nd-157': Iso('Nd-157', 'neodymium-157', 60, 157, 156.939386, False),
-    'Nd-158': Iso('Nd-158', 'neodymium-158', 60, 158, 157.94197, False),
-    'Nd-159': Iso('Nd-159', 'neodymium-159', 60, 159, 158.94653, False),
-    'Nd-160': Iso('Nd-160', 'neodymium-160', 60, 160, 159.94940, False),
-    'Nd-161': Iso('Nd-161', 'neodymium-161', 60, 161, 160.95428, False),
-    'Pm-126': Iso('Pm-126', 'promethium-126', 61, 126, 125.95792, False),
-    'Pm-127': Iso('Pm-127', 'promethium-127', 61, 127, 126.95192, False),
-    'Pm-128': Iso('Pm-128', 'promethium-128', 61, 128, 127.94870, False),
-    'Pm-129': Iso('Pm-129', 'promethium-129', 61, 129, 128.94323, False),
-    'Pm-130': Iso('Pm-130', 'promethium-130', 61, 130, 129.94053, False),
-    'Pm-131': Iso('Pm-131', 'promethium-131', 61, 131, 130.93567, False),
-    'Pm-132': Iso('Pm-132', 'promethium-132', 61, 132, 131.93384, False),
-    'Pm-133': Iso('Pm-133', 'promethium-133', 61, 133, 132.929782, False),
-    'Pm-134': Iso('Pm-134', 'promethium-134', 61, 134, 133.928353, False),
-    'Pm-135': Iso('Pm-135', 'promethium-135', 61, 135, 134.924823, False),
-    'Pm-136': Iso('Pm-136', 'promethium-136', 61, 136, 135.923585, False),
-    'Pm-137': Iso('Pm-137', 'promethium-137', 61, 137, 136.920480, False),
-    'Pm-138': Iso('Pm-138', 'promethium-138', 61, 138, 137.919548, False),
-    'Pm-139': Iso('Pm-139', 'promethium-139', 61, 139, 138.916800, False),
-    'Pm-140': Iso('Pm-140', 'promethium-140', 61, 140, 139.916040, False),
-    'Pm-141': Iso('Pm-141', 'promethium-141', 61, 141, 140.913555, False),
-    'Pm-142': Iso('Pm-142', 'promethium-142', 61, 142, 141.912890, False),
-    'Pm-143': Iso('Pm-143', 'promethium-143', 61, 143, 142.9109383, False),
-    'Pm-144': Iso('Pm-144', 'promethium-144', 61, 144, 143.9125964, False),
-    'Pm-145': Iso('Pm-145', 'promethium-145', 61, 145, 144.9127559, False),
-    'Pm-146': Iso('Pm-146', 'promethium-146', 61, 146, 145.9147024, False),
-    'Pm-147': Iso('Pm-147', 'promethium-147', 61, 147, 146.9151450, False),
-    'Pm-148': Iso('Pm-148', 'promethium-148', 61, 148, 147.9174819, False),
-    'Pm-149': Iso('Pm-149', 'promethium-149', 61, 149, 148.9183423, False),
-    'Pm-150': Iso('Pm-150', 'promethium-150', 61, 150, 149.920991, False),
-    'Pm-151': Iso('Pm-151', 'promethium-151', 61, 151, 150.9212175, False),
-    'Pm-152': Iso('Pm-152', 'promethium-152', 61, 152, 151.923506, False),
-    'Pm-153': Iso('Pm-153', 'promethium-153', 61, 153, 152.9241567, False),
-    'Pm-154': Iso('Pm-154', 'promethium-154', 61, 154, 153.926472, False),
-    'Pm-155': Iso('Pm-155', 'promethium-155', 61, 155, 154.9281370, False),
-    'Pm-156': Iso('Pm-156', 'promethium-156', 61, 156, 155.9311175, False),
-    'Pm-157': Iso('Pm-157', 'promethium-157', 61, 157, 156.9331214, False),
-    'Pm-158': Iso('Pm-158', 'promethium-158', 61, 158, 157.936565, False),
-    'Pm-159': Iso('Pm-159', 'promethium-159', 61, 159, 158.939287, False),
-    'Pm-160': Iso('Pm-160', 'promethium-160', 61, 160, 159.94310, False),
-    'Pm-161': Iso('Pm-161', 'promethium-161', 61, 161, 160.94607, False),
-    'Pm-162': Iso('Pm-162', 'promethium-162', 61, 162, 161.95022, False),
-    'Pm-163': Iso('Pm-163', 'promethium-163', 61, 163, 162.95357, False),
-    'Sm-128': Iso('Sm-128', 'samarium-128', 62, 128, 127.95842, False),
-    'Sm-129': Iso('Sm-129', 'samarium-129', 62, 129, 128.95476, False),
-    'Sm-130': Iso('Sm-130', 'samarium-130', 62, 130, 129.94900, False),
-    'Sm-131': Iso('Sm-131', 'samarium-131', 62, 131, 130.94618, False),
-    'Sm-132': Iso('Sm-132', 'samarium-132', 62, 132, 131.94087, False),
-    'Sm-133': Iso('Sm-133', 'samarium-133', 62, 133, 132.93856, False),
-    'Sm-134': Iso('Sm-134', 'samarium-134', 62, 134, 133.93411, False),
-    'Sm-135': Iso('Sm-135', 'samarium-135', 62, 135, 134.93252, False),
-    'Sm-136': Iso('Sm-136', 'samarium-136', 62, 136, 135.928276, False),
-    'Sm-137': Iso('Sm-137', 'samarium-137', 62, 137, 136.926971, False),
-    'Sm-138': Iso('Sm-138', 'samarium-138', 62, 138, 137.923244, False),
-    'Sm-139': Iso('Sm-139', 'samarium-139', 62, 139, 138.922297, False),
-    'Sm-140': Iso('Sm-140', 'samarium-140', 62, 140, 139.918995, False),
-    'Sm-141': Iso('Sm-141', 'samarium-141', 62, 141, 140.9184816, False),
-    'Sm-142': Iso('Sm-142', 'samarium-142', 62, 142, 141.9152044, False),
-    'Sm-143': Iso('Sm-143', 'samarium-143', 62, 143, 142.9146353, False),
-    'Sm-144': Iso('Sm-144', 'samarium-144', 62, 144, 143.9120065, True,
-        isotopic_abundance=0.0307),
-    'Sm-145': Iso('Sm-145', 'samarium-145', 62, 145, 144.9134173, False),
-    'Sm-146': Iso('Sm-146', 'samarium-146', 62, 146, 145.9130470, False),
-    'Sm-147': Iso('Sm-147', 'samarium-147', 62, 147, 146.9149044, False,
-        isotopic_abundance=0.1499),
-    'Sm-148': Iso('Sm-148', 'samarium-148', 62, 148, 147.9148292, False,
-        isotopic_abundance=0.1124),
-    'Sm-149': Iso('Sm-149', 'samarium-149', 62, 149, 148.9171921, True,
-        isotopic_abundance=0.1382),
-    'Sm-150': Iso('Sm-150', 'samarium-150', 62, 150, 149.9172829, True,
-        isotopic_abundance=0.0738),
-    'Sm-151': Iso('Sm-151', 'samarium-151', 62, 151, 150.9199398, False),
-    'Sm-152': Iso('Sm-152', 'samarium-152', 62, 152, 151.9197397, True,
-        isotopic_abundance=0.2675),
-    'Sm-153': Iso('Sm-153', 'samarium-153', 62, 153, 152.9221047, False,
-        half_life=166627.08),
-    'Sm-154': Iso('Sm-154', 'samarium-154', 62, 154, 153.9222169, True,
-        isotopic_abundance=0.2275),
-    'Sm-155': Iso('Sm-155', 'samarium-155', 62, 155, 154.9246477, False),
-    'Sm-156': Iso('Sm-156', 'samarium-156', 62, 156, 155.925536, False),
-    'Sm-157': Iso('Sm-157', 'samarium-157', 62, 157, 156.9284187, False),
-    'Sm-158': Iso('Sm-158', 'samarium-158', 62, 158, 157.9299510, False),
-    'Sm-159': Iso('Sm-159', 'samarium-159', 62, 159, 158.9332172, False),
-    'Sm-160': Iso('Sm-160', 'samarium-160', 62, 160, 159.9353353, False),
-    'Sm-161': Iso('Sm-161', 'samarium-161', 62, 161, 160.9391602, False),
-    'Sm-162': Iso('Sm-162', 'samarium-162', 62, 162, 161.94146, False),
-    'Sm-163': Iso('Sm-163', 'samarium-163', 62, 163, 162.94555, False),
-    'Sm-164': Iso('Sm-164', 'samarium-164', 62, 164, 163.94836, False),
-    'Sm-165': Iso('Sm-165', 'samarium-165', 62, 165, 164.95297, False),
-    'Eu-130': Iso('Eu-130', 'europium-130', 63, 130, 129.96369, False),
-    'Eu-131': Iso('Eu-131', 'europium-131', 63, 131, 130.95784, False),
-    'Eu-132': Iso('Eu-132', 'europium-132', 63, 132, 131.95467, False),
-    'Eu-133': Iso('Eu-133', 'europium-133', 63, 133, 132.94929, False),
-    'Eu-134': Iso('Eu-134', 'europium-134', 63, 134, 133.94640, False),
-    'Eu-135': Iso('Eu-135', 'europium-135', 63, 135, 134.94187, False),
-    'Eu-136': Iso('Eu-136', 'europium-136', 63, 136, 135.93962, False),
-    'Eu-137': Iso('Eu-137', 'europium-137', 63, 137, 136.93546, False),
-    'Eu-138': Iso('Eu-138', 'europium-138', 63, 138, 137.933709, False),
-    'Eu-139': Iso('Eu-139', 'europium-139', 63, 139, 138.929792, False),
-    'Eu-140': Iso('Eu-140', 'europium-140', 63, 140, 139.928088, False),
-    'Eu-141': Iso('Eu-141', 'europium-141', 63, 141, 140.924932, False),
-    'Eu-142': Iso('Eu-142', 'europium-142', 63, 142, 141.923442, False),
-    'Eu-143': Iso('Eu-143', 'europium-143', 63, 143, 142.920299, False),
-    'Eu-144': Iso('Eu-144', 'europium-144', 63, 144, 143.918820, False),
-    'Eu-145': Iso('Eu-145', 'europium-145', 63, 145, 144.9162726, False),
-    'Eu-146': Iso('Eu-146', 'europium-146', 63, 146, 145.9172110, False),
-    'Eu-147': Iso('Eu-147', 'europium-147', 63, 147, 146.9167527, False),
-    'Eu-148': Iso('Eu-148', 'europium-148', 63, 148, 147.918089, False),
-    'Eu-149': Iso('Eu-149', 'europium-149', 63, 149, 148.9179378, False),
-    'Eu-150': Iso('Eu-150', 'europium-150', 63, 150, 149.9197077, False),
-    'Eu-151': Iso('Eu-151', 'europium-151', 63, 151, 150.9198578, False,
-        isotopic_abundance=0.4781),
-    'Eu-152': Iso('Eu-152', 'europium-152', 63, 152, 151.9217522, False,
-        half_life=427438080.0),
-    'Eu-153': Iso('Eu-153', 'europium-153', 63, 153, 152.9212380, True,
-        isotopic_abundance=0.5219),
-    'Eu-154': Iso('Eu-154', 'europium-154', 63, 154, 153.9229870, False,
-        half_life=271745280.0),
-    'Eu-155': Iso('Eu-155', 'europium-155', 63, 155, 154.9229011, False,
-        half_life=150254784.0),
-    'Eu-156': Iso('Eu-156', 'europium-156', 63, 156, 155.9247605, False),
-    'Eu-157': Iso('Eu-157', 'europium-157', 63, 157, 156.9254334, False),
-    'Eu-158': Iso('Eu-158', 'europium-158', 63, 158, 157.927799, False),
-    'Eu-159': Iso('Eu-159', 'europium-159', 63, 159, 158.9291001, False),
-    'Eu-160': Iso('Eu-160', 'europium-160', 63, 160, 159.931851, False),
-    'Eu-161': Iso('Eu-161', 'europium-161', 63, 161, 160.933664, False),
-    'Eu-162': Iso('Eu-162', 'europium-162', 63, 162, 161.936989, False),
-    'Eu-163': Iso('Eu-163', 'europium-163', 63, 163, 162.939196, False),
-    'Eu-164': Iso('Eu-164', 'europium-164', 63, 164, 163.94274, False),
-    'Eu-165': Iso('Eu-165', 'europium-165', 63, 165, 164.94559, False),
-    'Eu-166': Iso('Eu-166', 'europium-166', 63, 166, 165.94962, False),
-    'Eu-167': Iso('Eu-167', 'europium-167', 63, 167, 166.95289, False),
-    'Gd-133': Iso('Gd-133', 'gadolinium-133', 64, 133, 132.96133, False),
-    'Gd-134': Iso('Gd-134', 'gadolinium-134', 64, 134, 133.95566, False),
-    'Gd-135': Iso('Gd-135', 'gadolinium-135', 64, 135, 134.95245, False),
-    'Gd-136': Iso('Gd-136', 'gadolinium-136', 64, 136, 135.94730, False),
-    'Gd-137': Iso('Gd-137', 'gadolinium-137', 64, 137, 136.94502, False),
-    'Gd-138': Iso('Gd-138', 'gadolinium-138', 64, 138, 137.94025, False),
-    'Gd-139': Iso('Gd-139', 'gadolinium-139', 64, 139, 138.93813, False),
-    'Gd-140': Iso('Gd-140', 'gadolinium-140', 64, 140, 139.933674, False),
-    'Gd-141': Iso('Gd-141', 'gadolinium-141', 64, 141, 140.932126, False),
-    'Gd-142': Iso('Gd-142', 'gadolinium-142', 64, 142, 141.928116, False),
-    'Gd-143': Iso('Gd-143', 'gadolinium-143', 64, 143, 142.92675, False),
-    'Gd-144': Iso('Gd-144', 'gadolinium-144', 64, 144, 143.922963, False),
-    'Gd-145': Iso('Gd-145', 'gadolinium-145', 64, 145, 144.921713, False),
-    'Gd-146': Iso('Gd-146', 'gadolinium-146', 64, 146, 145.9183188, False),
-    'Gd-147': Iso('Gd-147', 'gadolinium-147', 64, 147, 146.9191014, False),
-    'Gd-148': Iso('Gd-148', 'gadolinium-148', 64, 148, 147.9181215, False),
-    'Gd-149': Iso('Gd-149', 'gadolinium-149', 64, 149, 148.9193481, False),
-    'Gd-150': Iso('Gd-150', 'gadolinium-150', 64, 150, 149.9186644, False),
-    'Gd-151': Iso('Gd-151', 'gadolinium-151', 64, 151, 150.9203560, False),
-    'Gd-152': Iso('Gd-152', 'gadolinium-152', 64, 152, 151.9197995, False,
-        isotopic_abundance=0.0020),
-    'Gd-153': Iso('Gd-153', 'gadolinium-153', 64, 153, 152.9217580, False,
-        half_life=20690380.8),
-    'Gd-154': Iso('Gd-154', 'gadolinium-154', 64, 154, 153.9208741, True,
-        isotopic_abundance=0.0218),
-    'Gd-155': Iso('Gd-155', 'gadolinium-155', 64, 155, 154.9226305, True,
-        isotopic_abundance=0.1480),
-    'Gd-156': Iso('Gd-156', 'gadolinium-156', 64, 156, 155.9221312, True,
-        isotopic_abundance=0.2047),
-    'Gd-157': Iso('Gd-157', 'gadolinium-157', 64, 157, 156.9239686, True,
-        isotopic_abundance=0.1565),
-    'Gd-158': Iso('Gd-158', 'gadolinium-158', 64, 158, 157.9241123, True,
-        isotopic_abundance=0.2484),
-    'Gd-159': Iso('Gd-159', 'gadolinium-159', 64, 159, 158.9263970, False),
-    'Gd-160': Iso('Gd-160', 'gadolinium-160', 64, 160, 159.9270624, True,
-        isotopic_abundance=0.2186),
-    'Gd-161': Iso('Gd-161', 'gadolinium-161', 64, 161, 160.9296775, False),
-    'Gd-162': Iso('Gd-162', 'gadolinium-162', 64, 162, 161.9309930, False),
-    'Gd-163': Iso('Gd-163', 'gadolinium-163', 64, 163, 162.9341769, False),
-    'Gd-164': Iso('Gd-164', 'gadolinium-164', 64, 164, 163.93583, False),
-    'Gd-165': Iso('Gd-165', 'gadolinium-165', 64, 165, 164.93936, False),
-    'Gd-166': Iso('Gd-166', 'gadolinium-166', 64, 166, 165.94146, False),
-    'Gd-167': Iso('Gd-167', 'gadolinium-167', 64, 167, 166.94545, False),
-    'Gd-168': Iso('Gd-168', 'gadolinium-168', 64, 168, 167.94808, False),
-    'Gd-169': Iso('Gd-169', 'gadolinium-169', 64, 169, 168.95260, False),
-    'Tb-135': Iso('Tb-135', 'terbium-135', 65, 135, 134.96476, False),
-    'Tb-136': Iso('Tb-136', 'terbium-136', 65, 136, 135.96129, False),
-    'Tb-137': Iso('Tb-137', 'terbium-137', 65, 137, 136.95602, False),
-    'Tb-138': Iso('Tb-138', 'terbium-138', 65, 138, 137.95312, False),
-    'Tb-139': Iso('Tb-139', 'terbium-139', 65, 139, 138.94833, False),
-    'Tb-140': Iso('Tb-140', 'terbium-140', 65, 140, 139.94581, False),
-    'Tb-141': Iso('Tb-141', 'terbium-141', 65, 141, 140.94145, False),
-    'Tb-142': Iso('Tb-142', 'terbium-142', 65, 142, 141.93928, False),
-    'Tb-143': Iso('Tb-143', 'terbium-143', 65, 143, 142.935137, False),
-    'Tb-144': Iso('Tb-144', 'terbium-144', 65, 144, 143.933045, False),
-    'Tb-145': Iso('Tb-145', 'terbium-145', 65, 145, 144.92882, False),
-    'Tb-146': Iso('Tb-146', 'terbium-146', 65, 146, 145.927253, False),
-    'Tb-147': Iso('Tb-147', 'terbium-147', 65, 147, 146.9240548, False),
-    'Tb-148': Iso('Tb-148', 'terbium-148', 65, 148, 147.924282, False),
-    'Tb-149': Iso('Tb-149', 'terbium-149', 65, 149, 148.9232535, False),
-    'Tb-150': Iso('Tb-150', 'terbium-150', 65, 150, 149.9236649, False),
-    'Tb-151': Iso('Tb-151', 'terbium-151', 65, 151, 150.9231096, False),
-    'Tb-152': Iso('Tb-152', 'terbium-152', 65, 152, 151.924083, False),
-    'Tb-153': Iso('Tb-153', 'terbium-153', 65, 153, 152.9234424, False),
-    'Tb-154': Iso('Tb-154', 'terbium-154', 65, 154, 153.924685, False),
-    'Tb-155': Iso('Tb-155', 'terbium-155', 65, 155, 154.923511, False),
-    'Tb-156': Iso('Tb-156', 'terbium-156', 65, 156, 155.9247552, False),
-    'Tb-157': Iso('Tb-157', 'terbium-157', 65, 157, 156.9240330, False),
-    'Tb-158': Iso('Tb-158', 'terbium-158', 65, 158, 157.9254209, False),
-    'Tb-159': Iso('Tb-159', 'terbium-159', 65, 159, 158.9253547, True,
+    'Na-24': _iso('Na-24', 'sodium-24', 11, 24, 23.990962950, False,
+        half_life=53824.32),
+    'Na-25': _iso('Na-25', 'sodium-25', 11, 25, 24.9899540, False),
+    'Na-26': _iso('Na-26', 'sodium-26', 11, 26, 25.9926346, False),
+    'Na-27': _iso('Na-27', 'sodium-27', 11, 27, 26.9940765, False),
+    'Na-28': _iso('Na-28', 'sodium-28', 11, 28, 27.998939, False),
+    'Na-29': _iso('Na-29', 'sodium-29', 11, 29, 29.0028771, False),
+    'Na-30': _iso('Na-30', 'sodium-30', 11, 30, 30.0090979, False),
+    'Na-31': _iso('Na-31', 'sodium-31', 11, 31, 31.013163, False),
+    'Na-32': _iso('Na-32', 'sodium-32', 11, 32, 32.02019, False),
+    'Na-33': _iso('Na-33', 'sodium-33', 11, 33, 33.02573, False),
+    'Na-34': _iso('Na-34', 'sodium-34', 11, 34, 34.03359, False),
+    'Na-35': _iso('Na-35', 'sodium-35', 11, 35, 35.04062, False),
+    'Na-36': _iso('Na-36', 'sodium-36', 11, 36, 36.04929, False),
+    'Na-37': _iso('Na-37', 'sodium-37', 11, 37, 37.05705, False),
+    'Mg-19': _iso('Mg-19', 'magnesium-19', 12, 19, 19.034169, False),
+    'Mg-20': _iso('Mg-20', 'magnesium-20', 12, 20, 20.018850, False),
+    'Mg-21': _iso('Mg-21', 'magnesium-21', 12, 21, 21.011716, False),
+    'Mg-22': _iso('Mg-22', 'magnesium-22', 12, 22, 21.99957065, False),
+    'Mg-23': _iso('Mg-23', 'magnesium-23', 12, 23, 22.99412421, False),
+    'Mg-24': _iso('Mg-24', 'magnesium-24', 12, 24, 23.985041697, True,
+        isotopic_abundance=0.7899),
+    'Mg-25': _iso('Mg-25', 'magnesium-25', 12, 25, 24.985836976, True,
+        isotopic_abundance=0.1000),
+    'Mg-26': _iso('Mg-26', 'magnesium-26', 12, 26, 25.982592968, True,
+        isotopic_abundance=0.1101),
+    'Mg-27': _iso('Mg-27', 'magnesium-27', 12, 27, 26.984340624, False),
+    'Mg-28': _iso('Mg-28', 'magnesium-28', 12, 28, 27.9838767, False),
+    'Mg-29': _iso('Mg-29', 'magnesium-29', 12, 29, 28.988617, False),
+    'Mg-30': _iso('Mg-30', 'magnesium-30', 12, 30, 29.9904629, False),
+    'Mg-31': _iso('Mg-31', 'magnesium-31', 12, 31, 30.9966480, False),
+    'Mg-32': _iso('Mg-32', 'magnesium-32', 12, 32, 31.9991102, False),
+    'Mg-33': _iso('Mg-33', 'magnesium-33', 12, 33, 33.0053271, False),
+    'Mg-34': _iso('Mg-34', 'magnesium-34', 12, 34, 34.008935, False),
+    'Mg-35': _iso('Mg-35', 'magnesium-35', 12, 35, 35.01679, False),
+    'Mg-36': _iso('Mg-36', 'magnesium-36', 12, 36, 36.02188, False),
+    'Mg-37': _iso('Mg-37', 'magnesium-37', 12, 37, 37.03037, False),
+    'Mg-38': _iso('Mg-38', 'magnesium-38', 12, 38, 38.03658, False),
+    'Mg-39': _iso('Mg-39', 'magnesium-39', 12, 39, 39.04538, False),
+    'Mg-40': _iso('Mg-40', 'magnesium-40', 12, 40, 40.05218, False),
+    'Al-21': _iso('Al-21', 'aluminium-21', 13, 21, 21.02897, False),
+    'Al-22': _iso('Al-22', 'aluminium-22', 13, 22, 22.01954, False),
+    'Al-23': _iso('Al-23', 'aluminium-23', 13, 23, 23.00724435, False),
+    'Al-24': _iso('Al-24', 'aluminium-24', 13, 24, 23.9999489, False),
+    'Al-25': _iso('Al-25', 'aluminium-25', 13, 25, 24.99042810, False),
+    'Al-26': _iso('Al-26', 'aluminium-26', 13, 26, 25.986891904, False),
+    'Al-27': _iso('Al-27', 'aluminium-27', 13, 27, 26.98153853, True,
         isotopic_abundance=1),
-    'Tb-160': Iso('Tb-160', 'terbium-160', 65, 160, 159.9271756, False),
-    'Tb-161': Iso('Tb-161', 'terbium-161', 65, 161, 160.9275778, False),
-    'Tb-162': Iso('Tb-162', 'terbium-162', 65, 162, 161.929495, False),
-    'Tb-163': Iso('Tb-163', 'terbium-163', 65, 163, 162.9306547, False),
-    'Tb-164': Iso('Tb-164', 'terbium-164', 65, 164, 163.93336, False),
-    'Tb-165': Iso('Tb-165', 'terbium-165', 65, 165, 164.93498, False),
-    'Tb-166': Iso('Tb-166', 'terbium-166', 65, 166, 165.937860, False),
-    'Tb-167': Iso('Tb-167', 'terbium-167', 65, 167, 166.93996, False),
-    'Tb-168': Iso('Tb-168', 'terbium-168', 65, 168, 167.94340, False),
-    'Tb-169': Iso('Tb-169', 'terbium-169', 65, 169, 168.94597, False),
-    'Tb-170': Iso('Tb-170', 'terbium-170', 65, 170, 169.94984, False),
-    'Tb-171': Iso('Tb-171', 'terbium-171', 65, 171, 170.95273, False),
-    'Dy-138': Iso('Dy-138', 'dysprosium-138', 66, 138, 137.96250, False),
-    'Dy-139': Iso('Dy-139', 'dysprosium-139', 66, 139, 138.95959, False),
-    'Dy-140': Iso('Dy-140', 'dysprosium-140', 66, 140, 139.95402, False),
-    'Dy-141': Iso('Dy-141', 'dysprosium-141', 66, 141, 140.95128, False),
-    'Dy-142': Iso('Dy-142', 'dysprosium-142', 66, 142, 141.94619, False),
-    'Dy-143': Iso('Dy-143', 'dysprosium-143', 66, 143, 142.943994, False),
-    'Dy-144': Iso('Dy-144', 'dysprosium-144', 66, 144, 143.9392695, False),
-    'Dy-145': Iso('Dy-145', 'dysprosium-145', 66, 145, 144.9374740, False),
-    'Dy-146': Iso('Dy-146', 'dysprosium-146', 66, 146, 145.9328445, False),
-    'Dy-147': Iso('Dy-147', 'dysprosium-147', 66, 147, 146.9310827, False),
-    'Dy-148': Iso('Dy-148', 'dysprosium-148', 66, 148, 147.927157, False),
-    'Dy-149': Iso('Dy-149', 'dysprosium-149', 66, 149, 148.927322, False),
-    'Dy-150': Iso('Dy-150', 'dysprosium-150', 66, 150, 149.9255933, False),
-    'Dy-151': Iso('Dy-151', 'dysprosium-151', 66, 151, 150.9261916, False),
-    'Dy-152': Iso('Dy-152', 'dysprosium-152', 66, 152, 151.9247253, False),
-    'Dy-153': Iso('Dy-153', 'dysprosium-153', 66, 153, 152.9257724, False),
-    'Dy-154': Iso('Dy-154', 'dysprosium-154', 66, 154, 153.9244293, False),
-    'Dy-155': Iso('Dy-155', 'dysprosium-155', 66, 155, 154.925759, False),
-    'Dy-156': Iso('Dy-156', 'dysprosium-156', 66, 156, 155.9242847, True,
-        isotopic_abundance=0.00056),
-    'Dy-157': Iso('Dy-157', 'dysprosium-157', 66, 157, 156.9254707, False),
-    'Dy-158': Iso('Dy-158', 'dysprosium-158', 66, 158, 157.9244159, True,
-        isotopic_abundance=0.00095),
-    'Dy-159': Iso('Dy-159', 'dysprosium-159', 66, 159, 158.9257470, False),
-    'Dy-160': Iso('Dy-160', 'dysprosium-160', 66, 160, 159.9252046, True,
-        isotopic_abundance=0.02329),
-    'Dy-161': Iso('Dy-161', 'dysprosium-161', 66, 161, 160.9269405, True,
-        isotopic_abundance=0.18889),
-    'Dy-162': Iso('Dy-162', 'dysprosium-162', 66, 162, 161.9268056, True,
-        isotopic_abundance=0.25475),
-    'Dy-163': Iso('Dy-163', 'dysprosium-163', 66, 163, 162.9287383, True,
-        isotopic_abundance=0.24896),
-    'Dy-164': Iso('Dy-164', 'dysprosium-164', 66, 164, 163.9291819, True,
-        isotopic_abundance=0.28260),
-    'Dy-165': Iso('Dy-165', 'dysprosium-165', 66, 165, 164.9317105, False),
-    'Dy-166': Iso('Dy-166', 'dysprosium-166', 66, 166, 165.9328139, False),
-    'Dy-167': Iso('Dy-167', 'dysprosium-167', 66, 167, 166.935661, False),
-    'Dy-168': Iso('Dy-168', 'dysprosium-168', 66, 168, 167.93713, False),
-    'Dy-169': Iso('Dy-169', 'dysprosium-169', 66, 169, 168.94031, False),
-    'Dy-170': Iso('Dy-170', 'dysprosium-170', 66, 170, 169.94239, False),
-    'Dy-171': Iso('Dy-171', 'dysprosium-171', 66, 171, 170.94612, False),
-    'Dy-172': Iso('Dy-172', 'dysprosium-172', 66, 172, 171.94846, False),
-    'Dy-173': Iso('Dy-173', 'dysprosium-173', 66, 173, 172.95283, False),
-    'Ho-140': Iso('Ho-140', 'holmium-140', 67, 140, 139.96859, False),
-    'Ho-141': Iso('Ho-141', 'holmium-141', 67, 141, 140.96311, False),
-    'Ho-142': Iso('Ho-142', 'holmium-142', 67, 142, 141.96001, False),
-    'Ho-143': Iso('Ho-143', 'holmium-143', 67, 143, 142.95486, False),
-    'Ho-144': Iso('Ho-144', 'holmium-144', 67, 144, 143.9521097, False),
-    'Ho-145': Iso('Ho-145', 'holmium-145', 67, 145, 144.9472674, False),
-    'Ho-146': Iso('Ho-146', 'holmium-146', 67, 146, 145.9449935, False),
-    'Ho-147': Iso('Ho-147', 'holmium-147', 67, 147, 146.9401423, False),
-    'Ho-148': Iso('Ho-148', 'holmium-148', 67, 148, 147.937744, False),
-    'Ho-149': Iso('Ho-149', 'holmium-149', 67, 149, 148.933803, False),
-    'Ho-150': Iso('Ho-150', 'holmium-150', 67, 150, 149.933498, False),
-    'Ho-151': Iso('Ho-151', 'holmium-151', 67, 151, 150.9316983, False),
-    'Ho-152': Iso('Ho-152', 'holmium-152', 67, 152, 151.931724, False),
-    'Ho-153': Iso('Ho-153', 'holmium-153', 67, 153, 152.9302064, False),
-    'Ho-154': Iso('Ho-154', 'holmium-154', 67, 154, 153.9306068, False),
-    'Ho-155': Iso('Ho-155', 'holmium-155', 67, 155, 154.929104, False),
-    'Ho-156': Iso('Ho-156', 'holmium-156', 67, 156, 155.929706, False),
-    'Ho-157': Iso('Ho-157', 'holmium-157', 67, 157, 156.928254, False),
-    'Ho-158': Iso('Ho-158', 'holmium-158', 67, 158, 157.928946, False),
-    'Ho-159': Iso('Ho-159', 'holmium-159', 67, 159, 158.9277197, False),
-    'Ho-160': Iso('Ho-160', 'holmium-160', 67, 160, 159.928737, False),
-    'Ho-161': Iso('Ho-161', 'holmium-161', 67, 161, 160.9278615, False),
-    'Ho-162': Iso('Ho-162', 'holmium-162', 67, 162, 161.9291023, False),
-    'Ho-163': Iso('Ho-163', 'holmium-163', 67, 163, 162.9287410, False),
-    'Ho-164': Iso('Ho-164', 'holmium-164', 67, 164, 163.9302403, False),
-    'Ho-165': Iso('Ho-165', 'holmium-165', 67, 165, 164.9303288, True,
+    'Al-28': _iso('Al-28', 'aluminium-28', 13, 28, 27.98191021, False),
+    'Al-29': _iso('Al-29', 'aluminium-29', 13, 29, 28.9804565, False),
+    'Al-30': _iso('Al-30', 'aluminium-30', 13, 30, 29.982960, False),
+    'Al-31': _iso('Al-31', 'aluminium-31', 13, 31, 30.983945, False),
+    'Al-32': _iso('Al-32', 'aluminium-32', 13, 32, 31.988085, False),
+    'Al-33': _iso('Al-33', 'aluminium-33', 13, 33, 32.990909, False),
+    'Al-34': _iso('Al-34', 'aluminium-34', 13, 34, 33.996705, False),
+    'Al-35': _iso('Al-35', 'aluminium-35', 13, 35, 34.999764, False),
+    'Al-36': _iso('Al-36', 'aluminium-36', 13, 36, 36.00639, False),
+    'Al-37': _iso('Al-37', 'aluminium-37', 13, 37, 37.01053, False),
+    'Al-38': _iso('Al-38', 'aluminium-38', 13, 38, 38.01740, False),
+    'Al-39': _iso('Al-39', 'aluminium-39', 13, 39, 39.02254, False),
+    'Al-40': _iso('Al-40', 'aluminium-40', 13, 40, 40.03003, False),
+    'Al-41': _iso('Al-41', 'aluminium-41', 13, 41, 41.03638, False),
+    'Al-42': _iso('Al-42', 'aluminium-42', 13, 42, 42.04384, False),
+    'Al-43': _iso('Al-43', 'aluminium-43', 13, 43, 43.05147, False),
+    'Si-22': _iso('Si-22', 'silicon-22', 14, 22, 22.03579, False),
+    'Si-23': _iso('Si-23', 'silicon-23', 14, 23, 23.02544, False),
+    'Si-24': _iso('Si-24', 'silicon-24', 14, 24, 24.011535, False),
+    'Si-25': _iso('Si-25', 'silicon-25', 14, 25, 25.004109, False),
+    'Si-26': _iso('Si-26', 'silicon-26', 14, 26, 25.99233384, False),
+    'Si-27': _iso('Si-27', 'silicon-27', 14, 27, 26.98670481, False),
+    'Si-28': _iso('Si-28', 'silicon-28', 14, 28, 27.97692653465, True,
+        isotopic_abundance=0.92223),
+    'Si-29': _iso('Si-29', 'silicon-29', 14, 29, 28.97649466490, True,
+        isotopic_abundance=0.04685),
+    'Si-30': _iso('Si-30', 'silicon-30', 14, 30, 29.973770136, True,
+        isotopic_abundance=0.03092),
+    'Si-31': _iso('Si-31', 'silicon-31', 14, 31, 30.975363194, False),
+    'Si-32': _iso('Si-32', 'silicon-32', 14, 32, 31.97415154, False),
+    'Si-33': _iso('Si-33', 'silicon-33', 14, 33, 32.97797696, False),
+    'Si-34': _iso('Si-34', 'silicon-34', 14, 34, 33.978576, False),
+    'Si-35': _iso('Si-35', 'silicon-35', 14, 35, 34.984583, False),
+    'Si-36': _iso('Si-36', 'silicon-36', 14, 36, 35.986695, False),
+    'Si-37': _iso('Si-37', 'silicon-37', 14, 37, 36.992921, False),
+    'Si-38': _iso('Si-38', 'silicon-38', 14, 38, 37.995523, False),
+    'Si-39': _iso('Si-39', 'silicon-39', 14, 39, 39.002491, False),
+    'Si-40': _iso('Si-40', 'silicon-40', 14, 40, 40.00583, False),
+    'Si-41': _iso('Si-41', 'silicon-41', 14, 41, 41.01301, False),
+    'Si-42': _iso('Si-42', 'silicon-42', 14, 42, 42.01778, False),
+    'Si-43': _iso('Si-43', 'silicon-43', 14, 43, 43.02480, False),
+    'Si-44': _iso('Si-44', 'silicon-44', 14, 44, 44.03061, False),
+    'Si-45': _iso('Si-45', 'silicon-45', 14, 45, 45.03995, False),
+    'P-24': _iso('P-24', 'phosphorus-24', 15, 24, 24.03577, False),
+    'P-25': _iso('P-25', 'phosphorus-25', 15, 25, 25.02119, False),
+    'P-26': _iso('P-26', 'phosphorus-26', 15, 26, 26.01178, False),
+    'P-27': _iso('P-27', 'phosphorus-27', 15, 27, 26.999224, False),
+    'P-28': _iso('P-28', 'phosphorus-28', 15, 28, 27.9923266, False),
+    'P-29': _iso('P-29', 'phosphorus-29', 15, 29, 28.98180079, False),
+    'P-30': _iso('P-30', 'phosphorus-30', 15, 30, 29.97831375, False),
+    'P-31': _iso('P-31', 'phosphorus-31', 15, 31, 30.97376199842, True,
+        isotopic_abundance=1),
+    'P-32': _iso('P-32', 'phosphorus-32', 15, 32, 31.973907643, False,
+        half_life=1232323.2),
+    'P-33': _iso('P-33', 'phosphorus-33', 15, 33, 32.9717257, False),
+    'P-34': _iso('P-34', 'phosphorus-34', 15, 34, 33.97364589, False),
+    'P-35': _iso('P-35', 'phosphorus-35', 15, 35, 34.9733141, False),
+    'P-36': _iso('P-36', 'phosphorus-36', 15, 36, 35.978260, False),
+    'P-37': _iso('P-37', 'phosphorus-37', 15, 37, 36.979607, False),
+    'P-38': _iso('P-38', 'phosphorus-38', 15, 38, 37.984252, False),
+    'P-39': _iso('P-39', 'phosphorus-39', 15, 39, 38.986227, False),
+    'P-40': _iso('P-40', 'phosphorus-40', 15, 40, 39.99133, False),
+    'P-41': _iso('P-41', 'phosphorus-41', 15, 41, 40.994654, False),
+    'P-42': _iso('P-42', 'phosphorus-42', 15, 42, 42.00108, False),
+    'P-43': _iso('P-43', 'phosphorus-43', 15, 43, 43.00502, False),
+    'P-44': _iso('P-44', 'phosphorus-44', 15, 44, 44.01121, False),
+    'P-45': _iso('P-45', 'phosphorus-45', 15, 45, 45.01645, False),
+    'P-46': _iso('P-46', 'phosphorus-46', 15, 46, 46.02446, False),
+    'P-47': _iso('P-47', 'phosphorus-47', 15, 47, 47.03139, False),
+    'S-26': _iso('S-26', 'sulfur-26', 16, 26, 26.02907, False),
+    'S-27': _iso('S-27', 'sulfur-27', 16, 27, 27.01828, False),
+    'S-28': _iso('S-28', 'sulfur-28', 16, 28, 28.00437, False),
+    'S-29': _iso('S-29', 'sulfur-29', 16, 29, 28.996611, False),
+    'S-30': _iso('S-30', 'sulfur-30', 16, 30, 29.98490703, False),
+    'S-31': _iso('S-31', 'sulfur-31', 16, 31, 30.97955701, False),
+    'S-32': _iso('S-32', 'sulfur-32', 16, 32, 31.9720711744, True,
+        isotopic_abundance=0.9499),
+    'S-33': _iso('S-33', 'sulfur-33', 16, 33, 32.9714589098, True,
+        isotopic_abundance=0.0075),
+    'S-34': _iso('S-34', 'sulfur-34', 16, 34, 33.967867004, True,
+        isotopic_abundance=0.0425),
+    'S-35': _iso('S-35', 'sulfur-35', 16, 35, 34.969032310, False),
+    'S-36': _iso('S-36', 'sulfur-36', 16, 36, 35.96708071, True,
+        isotopic_abundance=0.0001),
+    'S-37': _iso('S-37', 'sulfur-37', 16, 37, 36.97112551, False),
+    'S-38': _iso('S-38', 'sulfur-38', 16, 38, 37.9711633, False),
+    'S-39': _iso('S-39', 'sulfur-39', 16, 39, 38.975134, False),
+    'S-40': _iso('S-40', 'sulfur-40', 16, 40, 39.9754826, False),
+    'S-41': _iso('S-41', 'sulfur-41', 16, 41, 40.9795935, False),
+    'S-42': _iso('S-42', 'sulfur-42', 16, 42, 41.9810651, False),
+    'S-43': _iso('S-43', 'sulfur-43', 16, 43, 42.9869076, False),
+    'S-44': _iso('S-44', 'sulfur-44', 16, 44, 43.9901188, False),
+    'S-45': _iso('S-45', 'sulfur-45', 16, 45, 44.99572, False),
+    'S-46': _iso('S-46', 'sulfur-46', 16, 46, 46.00004, False),
+    'S-47': _iso('S-47', 'sulfur-47', 16, 47, 47.00795, False),
+    'S-48': _iso('S-48', 'sulfur-48', 16, 48, 48.01370, False),
+    'S-49': _iso('S-49', 'sulfur-49', 16, 49, 49.02276, False),
+    'Cl-28': _iso('Cl-28', 'chlorine-28', 17, 28, 28.02954, False),
+    'Cl-29': _iso('Cl-29', 'chlorine-29', 17, 29, 29.01478, False),
+    'Cl-30': _iso('Cl-30', 'chlorine-30', 17, 30, 30.00477, False),
+    'Cl-31': _iso('Cl-31', 'chlorine-31', 17, 31, 30.992414, False),
+    'Cl-32': _iso('Cl-32', 'chlorine-32', 17, 32, 31.98568464, False),
+    'Cl-33': _iso('Cl-33', 'chlorine-33', 17, 33, 32.97745199, False),
+    'Cl-34': _iso('Cl-34', 'chlorine-34', 17, 34, 33.973762485, False),
+    'Cl-35': _iso('Cl-35', 'chlorine-35', 17, 35, 34.968852682, True,
+        isotopic_abundance=0.7576),
+    'Cl-36': _iso('Cl-36', 'chlorine-36', 17, 36, 35.968306809, False),
+    'Cl-37': _iso('Cl-37', 'chlorine-37', 17, 37, 36.965902602, True,
+        isotopic_abundance=0.2424),
+    'Cl-38': _iso('Cl-38', 'chlorine-38', 17, 38, 37.96801044, False),
+    'Cl-39': _iso('Cl-39', 'chlorine-39', 17, 39, 38.9680082, False),
+    'Cl-40': _iso('Cl-40', 'chlorine-40', 17, 40, 39.970415, False),
+    'Cl-41': _iso('Cl-41', 'chlorine-41', 17, 41, 40.970685, False),
+    'Cl-42': _iso('Cl-42', 'chlorine-42', 17, 42, 41.97325, False),
+    'Cl-43': _iso('Cl-43', 'chlorine-43', 17, 43, 42.97389, False),
+    'Cl-44': _iso('Cl-44', 'chlorine-44', 17, 44, 43.97787, False),
+    'Cl-45': _iso('Cl-45', 'chlorine-45', 17, 45, 44.98029, False),
+    'Cl-46': _iso('Cl-46', 'chlorine-46', 17, 46, 45.98517, False),
+    'Cl-47': _iso('Cl-47', 'chlorine-47', 17, 47, 46.98916, False),
+    'Cl-48': _iso('Cl-48', 'chlorine-48', 17, 48, 47.99564, False),
+    'Cl-49': _iso('Cl-49', 'chlorine-49', 17, 49, 49.00123, False),
+    'Cl-50': _iso('Cl-50', 'chlorine-50', 17, 50, 50.00905, False),
+    'Cl-51': _iso('Cl-51', 'chlorine-51', 17, 51, 51.01554, False),
+    'Ar-30': _iso('Ar-30', 'argon-30', 18, 30, 30.02307, False),
+    'Ar-31': _iso('Ar-31', 'argon-31', 18, 31, 31.01212, False),
+    'Ar-32': _iso('Ar-32', 'argon-32', 18, 32, 31.9976378, False),
+    'Ar-33': _iso('Ar-33', 'argon-33', 18, 33, 32.98992555, False),
+    'Ar-34': _iso('Ar-34', 'argon-34', 18, 34, 33.980270090, False),
+    'Ar-35': _iso('Ar-35', 'argon-35', 18, 35, 34.97525759, False),
+    'Ar-36': _iso('Ar-36', 'argon-36', 18, 36, 35.967545105, True,
+        isotopic_abundance=0.003336),
+    'Ar-37': _iso('Ar-37', 'argon-37', 18, 37, 36.96677633, False),
+    'Ar-38': _iso('Ar-38', 'argon-38', 18, 38, 37.96273211, True,
+        isotopic_abundance=0.000629),
+    'Ar-39': _iso('Ar-39', 'argon-39', 18, 39, 38.9643130, False),
+    'Ar-40': _iso('Ar-40', 'argon-40', 18, 40, 39.9623831237, True,
+        isotopic_abundance=0.996035),
+    'Ar-41': _iso('Ar-41', 'argon-41', 18, 41, 40.96450057, False),
+    'Ar-42': _iso('Ar-42', 'argon-42', 18, 42, 41.9630457, False),
+    'Ar-43': _iso('Ar-43', 'argon-43', 18, 43, 42.9656361, False),
+    'Ar-44': _iso('Ar-44', 'argon-44', 18, 44, 43.9649238, False),
+    'Ar-45': _iso('Ar-45', 'argon-45', 18, 45, 44.96803973, False),
+    'Ar-46': _iso('Ar-46', 'argon-46', 18, 46, 45.968083, False),
+    'Ar-47': _iso('Ar-47', 'argon-47', 18, 47, 46.972935, False),
+    'Ar-48': _iso('Ar-48', 'argon-48', 18, 48, 47.97591, False),
+    'Ar-49': _iso('Ar-49', 'argon-49', 18, 49, 48.98190, False),
+    'Ar-50': _iso('Ar-50', 'argon-50', 18, 50, 49.98613, False),
+    'Ar-51': _iso('Ar-51', 'argon-51', 18, 51, 50.99370, False),
+    'Ar-52': _iso('Ar-52', 'argon-52', 18, 52, 51.99896, False),
+    'Ar-53': _iso('Ar-53', 'argon-53', 18, 53, 53.00729, False),
+    'K-32': _iso('K-32', 'potassium-32', 19, 32, 32.02265, False),
+    'K-33': _iso('K-33', 'potassium-33', 19, 33, 33.00756, False),
+    'K-34': _iso('K-34', 'potassium-34', 19, 34, 33.99869, False),
+    'K-35': _iso('K-35', 'potassium-35', 19, 35, 34.98800541, False),
+    'K-36': _iso('K-36', 'potassium-36', 19, 36, 35.98130201, False),
+    'K-37': _iso('K-37', 'potassium-37', 19, 37, 36.97337589, False),
+    'K-38': _iso('K-38', 'potassium-38', 19, 38, 37.96908112, False),
+    'K-39': _iso('K-39', 'potassium-39', 19, 39, 38.9637064864, True,
+        isotopic_abundance=0.932581),
+    'K-40': _iso('K-40', 'potassium-40', 19, 40, 39.963998166, False,
+        isotopic_abundance=0.000117),
+    'K-41': _iso('K-41', 'potassium-41', 19, 41, 40.9618252579, True,
+        isotopic_abundance=0.067302),
+    'K-42': _iso('K-42', 'potassium-42', 19, 42, 41.96240231, False),
+    'K-43': _iso('K-43', 'potassium-43', 19, 43, 42.96073470, False),
+    'K-44': _iso('K-44', 'potassium-44', 19, 44, 43.96158699, False),
+    'K-45': _iso('K-45', 'potassium-45', 19, 45, 44.96069149, False),
+    'K-46': _iso('K-46', 'potassium-46', 19, 46, 45.96198159, False),
+    'K-47': _iso('K-47', 'potassium-47', 19, 47, 46.9616616, False),
+    'K-48': _iso('K-48', 'potassium-48', 19, 48, 47.96534119, False),
+    'K-49': _iso('K-49', 'potassium-49', 19, 49, 48.96821075, False),
+    'K-50': _iso('K-50', 'potassium-50', 19, 50, 49.9723800, False),
+    'K-51': _iso('K-51', 'potassium-51', 19, 51, 50.975828, False),
+    'K-52': _iso('K-52', 'potassium-52', 19, 52, 51.98224, False),
+    'K-53': _iso('K-53', 'potassium-53', 19, 53, 52.98746, False),
+    'K-54': _iso('K-54', 'potassium-54', 19, 54, 53.99463, False),
+    'K-55': _iso('K-55', 'potassium-55', 19, 55, 55.00076, False),
+    'K-56': _iso('K-56', 'potassium-56', 19, 56, 56.00851, False),
+    'Ca-34': _iso('Ca-34', 'calcium-34', 20, 34, 34.01487, False),
+    'Ca-35': _iso('Ca-35', 'calcium-35', 20, 35, 35.00514, False),
+    'Ca-36': _iso('Ca-36', 'calcium-36', 20, 36, 35.993074, False),
+    'Ca-37': _iso('Ca-37', 'calcium-37', 20, 37, 36.98589785, False),
+    'Ca-38': _iso('Ca-38', 'calcium-38', 20, 38, 37.97631922, False),
+    'Ca-39': _iso('Ca-39', 'calcium-39', 20, 39, 38.97071081, False),
+    'Ca-40': _iso('Ca-40', 'calcium-40', 20, 40, 39.962590863, True,
+        isotopic_abundance=0.96941),
+    'Ca-41': _iso('Ca-41', 'calcium-41', 20, 41, 40.96227792, False),
+    'Ca-42': _iso('Ca-42',
+    'Ca-42': _iso('Ca-42', 'calcium-42', 20, 42, 41.95861783, True,
+                  isotopic_abundance=0.00647),
+    'Ca-43': _iso('Ca-43', 'calcium-43', 20, 43, 42.95876644, True,
+                  isotopic_abundance=0.00135),
+    'Ca-44': _iso('Ca-44', 'calcium-44', 20, 44, 43.95548156, True,
+                  isotopic_abundance=0.02086),
+    'Ca-45': _iso('Ca-45', 'calcium-45', 20, 45, 44.95618635, False),
+    'Ca-46': _iso('Ca-46', 'calcium-46', 20, 46, 45.9536890, True,
+                  isotopic_abundance=0.00004),
+    'Ca-47': _iso('Ca-47', 'calcium-47', 20, 47, 46.9545424, False),
+    'Ca-48': _iso('Ca-48', 'calcium-48', 20, 48, 47.95252276, False,
+                  isotopic_abundance=0.00187),
+    'Ca-49': _iso('Ca-49', 'calcium-49', 20, 49, 48.95566274, False),
+    'Ca-50': _iso('Ca-50', 'calcium-50', 20, 50, 49.9574992, False),
+    'Ca-51': _iso('Ca-51', 'calcium-51', 20, 51, 50.960989, False),
+    'Ca-52': _iso('Ca-52', 'calcium-52', 20, 52, 51.963217, False),
+    'Ca-53': _iso('Ca-53', 'calcium-53', 20, 53, 52.96945, False),
+    'Ca-54': _iso('Ca-54', 'calcium-54', 20, 54, 53.97340, False),
+    'Ca-55': _iso('Ca-55', 'calcium-55', 20, 55, 54.98030, False),
+    'Ca-56': _iso('Ca-56', 'calcium-56', 20, 56, 55.98508, False),
+    'Ca-57': _iso('Ca-57', 'calcium-57', 20, 57, 56.99262, False),
+    'Ca-58': _iso('Ca-58', 'calcium-58', 20, 58, 57.99794, False),
+    'Sc-36': _iso('Sc-36', 'scandium-36', 21, 36, 36.01648, False),
+    'Sc-37': _iso('Sc-37', 'scandium-37', 21, 37, 37.00374, False),
+    'Sc-38': _iso('Sc-38', 'scandium-38', 21, 38, 37.99512, False),
+    'Sc-39': _iso('Sc-39', 'scandium-39', 21, 39, 38.984785, False),
+    'Sc-40': _iso('Sc-40', 'scandium-40', 21, 40, 39.9779673, False),
+    'Sc-41': _iso('Sc-41', 'scandium-41', 21, 41, 40.969251105, False),
+    'Sc-42': _iso('Sc-42', 'scandium-42', 21, 42, 41.96551653, False),
+    'Sc-43': _iso('Sc-43', 'scandium-43', 21, 43, 42.9611505, False),
+    'Sc-44': _iso('Sc-44', 'scandium-44', 21, 44, 43.9594029, False),
+    'Sc-45': _iso('Sc-45', 'scandium-45', 21, 45, 44.95590828, True,
                   isotopic_abundance=1),
-    'Ho-166': Iso('Ho-166', 'holmium-166', 67, 166, 165.9322909, False,
-                  half_life=96458.40000000001),
-    'Ho-167': Iso('Ho-167', 'holmium-167', 67, 167, 166.9331385, False),
-    'Ho-168': Iso('Ho-168', 'holmium-168', 67, 168, 167.935522, False),
-    'Ho-169': Iso('Ho-169', 'holmium-169', 67, 169, 168.936878, False),
-    'Ho-170': Iso('Ho-170', 'holmium-170', 67, 170, 169.939625, False),
-    'Ho-171': Iso('Ho-171', 'holmium-171', 67, 171, 170.94147, False),
-    'Ho-172': Iso('Ho-172', 'holmium-172', 67, 172, 171.94473, False),
-    'Ho-173': Iso('Ho-173', 'holmium-173', 67, 173, 172.94702, False),
-    'Ho-174': Iso('Ho-174', 'holmium-174', 67, 174, 173.95095, False),
-    'Ho-175': Iso('Ho-175', 'holmium-175', 67, 175, 174.95362, False),
-    'Er-142': Iso('Er-142', 'erbium-142', 68, 142, 141.97010, False),
-    'Er-143': Iso('Er-143', 'erbium-143', 68, 143, 142.96662, False),
-    'Er-144': Iso('Er-144', 'erbium-144', 68, 144, 143.96070, False),
-    'Er-145': Iso('Er-145', 'erbium-145', 68, 145, 144.95805, False),
-    'Er-146': Iso('Er-146', 'erbium-146', 68, 146, 145.9524184, False),
-    'Er-147': Iso('Er-147', 'erbium-147', 68, 147, 146.949964, False),
-    'Er-148': Iso('Er-148', 'erbium-148', 68, 148, 147.944735, False),
-    'Er-149': Iso('Er-149', 'erbium-149', 68, 149, 148.942306, False),
-    'Er-150': Iso('Er-150', 'erbium-150', 68, 150, 149.937916, False),
-    'Er-151': Iso('Er-151', 'erbium-151', 68, 151, 150.937449, False),
-    'Er-152': Iso('Er-152', 'erbium-152', 68, 152, 151.935057, False),
-    'Er-153': Iso('Er-153', 'erbium-153', 68, 153, 152.935080, False),
-    'Er-154': Iso('Er-154', 'erbium-154', 68, 154, 153.9327908, False),
-    'Er-155': Iso('Er-155', 'erbium-155', 68, 155, 154.9332159, False),
-    'Er-156': Iso('Er-156', 'erbium-156', 68, 156, 155.931067, False),
-    'Er-157': Iso('Er-157', 'erbium-157', 68, 157, 156.931949, False),
-    'Er-158': Iso('Er-158', 'erbium-158', 68, 158, 157.929893, False),
-    'Er-159': Iso('Er-159', 'erbium-159', 68, 159, 158.9306918, False),
-    'Er-160': Iso('Er-160', 'erbium-160', 68, 160, 159.929077, False),
-    'Er-161': Iso('Er-161', 'erbium-161', 68, 161, 160.9300046, False),
-    'Er-162': Iso('Er-162', 'erbium-162', 68, 162, 161.9287884, True,
-                  isotopic_abundance=0.00139),
-    'Er-163': Iso('Er-163', 'erbium-163', 68, 163, 162.9300408, False),
-    'Er-164': Iso('Er-164', 'erbium-164', 68, 164, 163.9292088, True,
-                  isotopic_abundance=0.01601),
-    'Er-165': Iso('Er-165', 'erbium-165', 68, 165, 164.9307345, False),
-    'Er-166': Iso('Er-166', 'erbium-166', 68, 166, 165.9302995, True,
-                  isotopic_abundance=0.33503),
-    'Er-167': Iso('Er-167', 'erbium-167', 68, 167, 166.9320546, True,
-                  isotopic_abundance=0.22869),
-    'Er-168': Iso('Er-168', 'erbium-168', 68, 168, 167.9323767, True,
-                  isotopic_abundance=0.26978),
-    'Er-169': Iso('Er-169', 'erbium-169', 68, 169, 168.9345968, False),
-    'Er-170': Iso('Er-170', 'erbium-170', 68, 170, 169.9354702, True,
-                  isotopic_abundance=0.14910),
-    'Er-171': Iso('Er-171', 'erbium-171', 68, 171, 170.9380357, False),
-    'Er-172': Iso('Er-172', 'erbium-172', 68, 172, 171.9393619, False),
-    'Er-173': Iso('Er-173', 'erbium-173', 68, 173, 172.94240, False),
-    'Er-174': Iso('Er-174', 'erbium-174', 68, 174, 173.94423, False),
-    'Er-175': Iso('Er-175', 'erbium-175', 68, 175, 174.94777, False),
-    'Er-176': Iso('Er-176', 'erbium-176', 68, 176, 175.94994, False),
-    'Er-177': Iso('Er-177', 'erbium-177', 68, 177, 176.95399, False),
-    'Tm-144': Iso('Tm-144', 'thulium-144', 69, 144, 143.97628, False),
-    'Tm-145': Iso('Tm-145', 'thulium-145', 69, 145, 144.97039, False),
-    'Tm-146': Iso('Tm-146', 'thulium-146', 69, 146, 145.96684, False),
-    'Tm-147': Iso('Tm-147', 'thulium-147', 69, 147, 146.9613799, False),
-    'Tm-148': Iso('Tm-148', 'thulium-148', 69, 148, 147.958384, False),
-    'Tm-149': Iso('Tm-149', 'thulium-149', 69, 149, 148.95289, False),
-    'Tm-150': Iso('Tm-150', 'thulium-150', 69, 150, 149.95009, False),
-    'Tm-151': Iso('Tm-151', 'thulium-151', 69, 151, 150.945488, False),
-    'Tm-152': Iso('Tm-152', 'thulium-152', 69, 152, 151.944422, False),
-    'Tm-153': Iso('Tm-153', 'thulium-153', 69, 153, 152.942040, False),
-    'Tm-154': Iso('Tm-154', 'thulium-154', 69, 154, 153.941570, False),
-    'Tm-155': Iso('Tm-155', 'thulium-155', 69, 155, 154.939210, False),
-    'Tm-156': Iso('Tm-156', 'thulium-156', 69, 156, 155.938992, False),
-    'Tm-157': Iso('Tm-157', 'thulium-157', 69, 157, 156.936944, False),
-    'Tm-158': Iso('Tm-158', 'thulium-158', 69, 158, 157.936980, False),
-    'Tm-159': Iso('Tm-159', 'thulium-159', 69, 159, 158.934975, False),
-    'Tm-160': Iso('Tm-160', 'thulium-160', 69, 160, 159.935263, False),
-    'Tm-161': Iso('Tm-161', 'thulium-161', 69, 161, 160.933549, False),
-    'Tm-162': Iso('Tm-162', 'thulium-162', 69, 162, 161.934002, False),
-    'Tm-163': Iso('Tm-163', 'thulium-163', 69, 163, 162.9326592, False),
-    'Tm-164': Iso('Tm-164', 'thulium-164', 69, 164, 163.933544, False),
-    'Tm-165': Iso('Tm-165', 'thulium-165', 69, 165, 164.9324431, False),
-    'Tm-166': Iso('Tm-166', 'thulium-166', 69, 166, 165.933561, False),
-    'Tm-167': Iso('Tm-167', 'thulium-167', 69, 167, 166.9328562, False),
-    'Tm-168': Iso('Tm-168', 'thulium-168', 69, 168, 167.9341774, False),
-    'Tm-169': Iso('Tm-169', 'thulium-169', 69, 169, 168.9342179, True,
+    'Sc-46': _iso('Sc-46', 'scandium-46', 21, 46, 45.95516826, False,
+                  half_life=7242998.4),
+    'Sc-47': _iso('Sc-47', 'scandium-47', 21, 47, 46.9524037, False),
+    'Sc-48': _iso('Sc-48', 'scandium-48', 21, 48, 47.9522236, False),
+    'Sc-49': _iso('Sc-49', 'scandium-49', 21, 49, 48.9500146, False),
+    'Sc-50': _iso('Sc-50', 'scandium-50', 21, 50, 49.952176, False),
+    'Sc-51': _iso('Sc-51', 'scandium-51', 21, 51, 50.953592, False),
+    'Sc-52': _iso('Sc-52', 'scandium-52', 21, 52, 51.95688, False),
+    'Sc-53': _iso('Sc-53', 'scandium-53', 21, 53, 52.95909, False),
+    'Sc-54': _iso('Sc-54', 'scandium-54', 21, 54, 53.96393, False),
+    'Sc-55': _iso('Sc-55', 'scandium-55', 21, 55, 54.96782, False),
+    'Sc-56': _iso('Sc-56', 'scandium-56', 21, 56, 55.97345, False),
+    'Sc-57': _iso('Sc-57', 'scandium-57', 21, 57, 56.97777, False),
+    'Sc-58': _iso('Sc-58', 'scandium-58', 21, 58, 57.98403, False),
+    'Sc-59': _iso('Sc-59', 'scandium-59', 21, 59, 58.98894, False),
+    'Sc-60': _iso('Sc-60', 'scandium-60', 21, 60, 59.99565, False),
+    'Sc-61': _iso('Sc-61', 'scandium-61', 21, 61, 61.00100, False),
+    'Ti-38': _iso('Ti-38', 'titanium-38', 22, 38, 38.01145, False),
+    'Ti-39': _iso('Ti-39', 'titanium-39', 22, 39, 39.00236, False),
+    'Ti-40': _iso('Ti-40', 'titanium-40', 22, 40, 39.99050, False),
+    'Ti-41': _iso('Ti-41', 'titanium-41', 22, 41, 40.983148, False),
+    'Ti-42': _iso('Ti-42', 'titanium-42', 22, 42, 41.97304903, False),
+    'Ti-43': _iso('Ti-43', 'titanium-43', 22, 43, 42.9685225, False),
+    'Ti-44': _iso('Ti-44', 'titanium-44', 22, 44, 43.95968995, False,
+                  half_life=1914105600.0),
+    'Ti-45': _iso('Ti-45', 'titanium-45', 22, 45, 44.95812198, False),
+    'Ti-46': _iso('Ti-46', 'titanium-46', 22, 46, 45.95262772, True,
+                  isotopic_abundance=0.0825),
+    'Ti-47': _iso('Ti-47', 'titanium-47', 22, 47, 46.95175879, True,
+                  isotopic_abundance=0.0744),
+    'Ti-48': _iso('Ti-48', 'titanium-48', 22, 48, 47.94794198, True,
+                  isotopic_abundance=0.7372),
+    'Ti-49': _iso('Ti-49', 'titanium-49', 22, 49, 48.94786568, True,
+                  isotopic_abundance=0.0541),
+    'Ti-50': _iso('Ti-50', 'titanium-50', 22, 50, 49.94478689, True,
+                  isotopic_abundance=0.0518),
+    'Ti-51': _iso('Ti-51', 'titanium-51', 22, 51, 50.94661065, False),
+    'Ti-52': _iso('Ti-52', 'titanium-52', 22, 52, 51.9468930, False),
+    'Ti-53': _iso('Ti-53', 'titanium-53', 22, 53, 52.94973, False),
+    'Ti-54': _iso('Ti-54', 'titanium-54', 22, 54, 53.95105, False),
+    'Ti-55': _iso('Ti-55', 'titanium-55', 22, 55, 54.95527, False),
+    'Ti-56': _iso('Ti-56', 'titanium-56', 22, 56, 55.95791, False),
+    'Ti-57': _iso('Ti-57', 'titanium-57', 22, 57, 56.96364, False),
+    'Ti-58': _iso('Ti-58', 'titanium-58', 22, 58, 57.96660, False),
+    'Ti-59': _iso('Ti-59', 'titanium-59', 22, 59, 58.97247, False),
+    'Ti-60': _iso('Ti-60', 'titanium-60', 22, 60, 59.97603, False),
+    'Ti-61': _iso('Ti-61', 'titanium-61', 22, 61, 60.98245, False),
+    'Ti-62': _iso('Ti-62', 'titanium-62', 22, 62, 61.98651, False),
+    'Ti-63': _iso('Ti-63', 'titanium-63', 22, 63, 62.99375, False),
+    'V-40': _iso('V-40', 'vanadium-40', 23, 40, 40.01276, False),
+    'V-41': _iso('V-41', 'vanadium-41', 23, 41, 41.00021, False),
+    'V-42': _iso('V-42', 'vanadium-42', 23, 42, 41.99182, False),
+    'V-43': _iso('V-43', 'vanadium-43', 23, 43, 42.980766, False),
+    'V-44': _iso('V-44', 'vanadium-44', 23, 44, 43.97411, False),
+    'V-45': _iso('V-45', 'vanadium-45', 23, 45, 44.9657748, False),
+    'V-46': _iso('V-46', 'vanadium-46', 23, 46, 45.96019878, False),
+    'V-47': _iso('V-47', 'vanadium-47', 23, 47, 46.95490491, False),
+    'V-48': _iso('V-48', 'vanadium-48', 23, 48, 47.9522522, False),
+    'V-49': _iso('V-49', 'vanadium-49', 23, 49, 48.94851180, False),
+    'V-50': _iso('V-50', 'vanadium-50', 23, 50, 49.94715601, False,
+                 isotopic_abundance=0.00250),
+    'V-51': _iso('V-51', 'vanadium-51', 23, 51, 50.94395704, True,
+                 isotopic_abundance=0.99750),
+    'V-52': _iso('V-52', 'vanadium-52', 23, 52, 51.94477301, False),
+    'V-53': _iso('V-53', 'vanadium-53', 23, 53, 52.9443367, False),
+    'V-54': _iso('V-54', 'vanadium-54', 23, 54, 53.946439, False),
+    'V-55': _iso('V-55', 'vanadium-55', 23, 55, 54.94724, False),
+    'V-56': _iso('V-56', 'vanadium-56', 23, 56, 55.95048, False),
+    'V-57': _iso('V-57', 'vanadium-57', 23, 57, 56.95252, False),
+    'V-58': _iso('V-58', 'vanadium-58', 23, 58, 57.95672, False),
+    'V-59': _iso('V-59', 'vanadium-59', 23, 59, 58.95939, False),
+    'V-60': _iso('V-60', 'vanadium-60', 23, 60, 59.96431, False),
+    'V-61': _iso('V-61', 'vanadium-61', 23, 61, 60.96725, False),
+    'V-62': _iso('V-62', 'vanadium-62', 23, 62, 61.97265, False),
+    'V-63': _iso('V-63', 'vanadium-63', 23, 63, 62.97639, False),
+    'V-64': _iso('V-64', 'vanadium-64', 23, 64, 63.98264, False),
+    'V-65': _iso('V-65', 'vanadium-65', 23, 65, 64.98750, False),
+    'V-66': _iso('V-66', 'vanadium-66', 23, 66, 65.99398, False),
+    'Cr-42': _iso('Cr-42', 'chromium-42', 24, 42, 42.00670, False),
+    'Cr-43': _iso('Cr-43', 'chromium-43', 24, 43, 42.99753, False),
+    'Cr-44': _iso('Cr-44', 'chromium-44', 24, 44, 43.98536, False),
+    'Cr-45': _iso('Cr-45', 'chromium-45', 24, 45, 44.979050, False),
+    'Cr-46': _iso('Cr-46', 'chromium-46', 24, 46, 45.968359, False),
+    'Cr-47': _iso('Cr-47', 'chromium-47', 24, 47, 46.9628974, False),
+    'Cr-48': _iso('Cr-48', 'chromium-48', 24, 48, 47.9540291, False),
+    'Cr-49': _iso('Cr-49', 'chromium-49', 24, 49, 48.9513333, False),
+    'Cr-50': _iso('Cr-50', 'chromium-50', 24, 50, 49.94604183, True,
+                  isotopic_abundance=0.04345),
+    'Cr-51': _iso('Cr-51', 'chromium-51', 24, 51, 50.94476502, False,
+                  half_life=2393366.4),
+    'Cr-52': _iso('Cr-52', 'chromium-52', 24, 52, 51.94050623, True,
+                  isotopic_abundance=0.83789),
+    'Cr-53': _iso('Cr-53', 'chromium-53', 24, 53, 52.94064815, True,
+                  isotopic_abundance=0.09501),
+    'Cr-54': _iso('Cr-54', 'chromium-54', 24, 54, 53.93887916, True,
+                  isotopic_abundance=0.02365),
+    'Cr-55': _iso('Cr-55', 'chromium-55', 24, 55, 54.94083843, False),
+    'Cr-56': _iso('Cr-56', 'chromium-56', 24, 56, 55.9406531, False),
+    'Cr-57': _iso('Cr-57', 'chromium-57', 24, 57, 56.9436130, False),
+    'Cr-58': _iso('Cr-58', 'chromium-58', 24, 58, 57.94435, False),
+    'Cr-59': _iso('Cr-59', 'chromium-59', 24, 59, 58.94859, False),
+    'Cr-60': _iso('Cr-60', 'chromium-60', 24, 60, 59.95008, False),
+    'Cr-61': _iso('Cr-61', 'chromium-61', 24, 61, 60.95442, False),
+    'Cr-62': _iso('Cr-62', 'chromium-62', 24, 62, 61.95610, False),
+    'Cr-63': _iso('Cr-63', 'chromium-63', 24, 63, 62.96165, False),
+    'Cr-64': _iso('Cr-64', 'chromium-64', 24, 64, 63.96408, False),
+    'Cr-65': _iso('Cr-65', 'chromium-65', 24, 65, 64.96996, False),
+    'Cr-66': _iso('Cr-66', 'chromium-66', 24, 66, 65.97366, False),
+    'Cr-67': _iso('Cr-67', 'chromium-67', 24, 67, 66.98016, False),
+    'Cr-68': _iso('Cr-68', 'chromium-68', 24, 68, 67.98403, False),
+    'Mn-44': _iso('Mn-44', 'manganese-44', 25, 44, 44.00715, False),
+    'Mn-45': _iso('Mn-45', 'manganese-45', 25, 45, 44.99449, False),
+    'Mn-46': _iso('Mn-46', 'manganese-46', 25, 46, 45.98609, False),
+    'Mn-47': _iso('Mn-47', 'manganese-47', 25, 47, 46.975775, False),
+    'Mn-48': _iso('Mn-48', 'manganese-48', 25, 48, 47.96852, False),
+    'Mn-49': _iso('Mn-49', 'manganese-49', 25, 49, 48.959595, False),
+    'Mn-50': _iso('Mn-50', 'manganese-50', 25, 50, 49.95423778, False),
+    'Mn-51': _iso('Mn-51', 'manganese-51', 25, 51, 50.94820847, False),
+    'Mn-52': _iso('Mn-52', 'manganese-52', 25, 52, 51.9455639, False),
+    'Mn-53': _iso('Mn-53', 'manganese-53', 25, 53, 52.94128889, False),
+    'Mn-54': _iso('Mn-54', 'manganese-54', 25, 54, 53.9403576, False,
+                  half_life=26959219.200000003),
+    'Mn-55': _iso('Mn-55', 'manganese-55', 25, 55, 54.93804391, True,
                   isotopic_abundance=1),
-    'Tm-170': Iso('Tm-170', 'thulium-170', 69, 170, 169.9358060, False),
-    'Tm-171': Iso('Tm-171', 'thulium-171', 69, 171, 170.9364339, False),
-    'Tm-172': Iso('Tm-172', 'thulium-172', 69, 172, 171.9384055, False),
-    'Tm-173': Iso('Tm-173', 'thulium-173', 69, 173, 172.9396084, False),
-    'Tm-174': Iso('Tm-174', 'thulium-174', 69, 174, 173.942173, False),
-    'Tm-175': Iso('Tm-175', 'thulium-175', 69, 175, 174.943841, False),
-    'Tm-176': Iso('Tm-176', 'thulium-176', 69, 176, 175.94700, False),
-    'Tm-177': Iso('Tm-177', 'thulium-177', 69, 177, 176.94904, False),
-    'Tm-178': Iso('Tm-178', 'thulium-178', 69, 178, 177.95264, False),
-    'Tm-179': Iso('Tm-179', 'thulium-179', 69, 179, 178.95534, False),
-    'Yb-148': Iso('Yb-148', 'ytterbium-148', 70, 148, 147.96758, False),
-    'Yb-149': Iso('Yb-149', 'ytterbium-149', 70, 149, 148.96436, False),
-    'Yb-150': Iso('Yb-150', 'ytterbium-150', 70, 150, 149.95852, False),
-    'Yb-151': Iso('Yb-151', 'ytterbium-151', 70, 151, 150.95540, False),
-    'Yb-152': Iso('Yb-152', 'ytterbium-152', 70, 152, 151.95027, False),
-    'Yb-153': Iso('Yb-153', 'ytterbium-153', 70, 153, 152.94932, False),
-    'Yb-154': Iso('Yb-154', 'ytterbium-154', 70, 154, 153.946396, False),
-    'Yb-155': Iso('Yb-155', 'ytterbium-155', 70, 155, 154.945783, False),
-    'Yb-156': Iso('Yb-156', 'ytterbium-156', 70, 156, 155.942825, False),
-    'Yb-157': Iso('Yb-157', 'ytterbium-157', 70, 157, 156.942645, False),
-    'Yb-158': Iso('Yb-158', 'ytterbium-158', 70, 158, 157.9398705, False),
-    'Yb-159': Iso('Yb-159', 'ytterbium-159', 70, 159, 158.940055, False),
-    'Yb-160': Iso('Yb-160', 'ytterbium-160', 70, 160, 159.937557, False),
-    'Yb-161': Iso('Yb-161', 'ytterbium-161', 70, 161, 160.937907, False),
-    'Yb-162': Iso('Yb-162', 'ytterbium-162', 70, 162, 161.935774, False),
-    'Yb-163': Iso('Yb-163', 'ytterbium-163', 70, 163, 162.936340, False),
-    'Yb-164': Iso('Yb-164', 'ytterbium-164', 70, 164, 163.934495, False),
-    'Yb-165': Iso('Yb-165', 'ytterbium-165', 70, 165, 164.935270, False),
-    'Yb-166': Iso('Yb-166', 'ytterbium-166', 70, 166, 165.9338747, False),
-    'Yb-167': Iso('Yb-167', 'ytterbium-167', 70, 167, 166.9349530, False),
-    'Yb-168': Iso('Yb-168', 'ytterbium-168', 70, 168, 167.9338896, True,
-                  isotopic_abundance=0.00123),
-    'Yb-169': Iso('Yb-169', 'ytterbium-169', 70, 169, 168.9351825, False,
-                  half_life=2766070.0799999996),
-    'Yb-170': Iso('Yb-170', 'ytterbium-170', 70, 170, 169.9347664, True,
-                  isotopic_abundance=0.02982),
-    'Yb-171': Iso('Yb-171', 'ytterbium-171', 70, 171, 170.9363302, True,
-                  isotopic_abundance=0.1409),
-    'Yb-172': Iso('Yb-172', 'ytterbium-172', 70, 172, 171.9363859, True,
-                  isotopic_abundance=0.2168),
-    'Yb-173': Iso('Yb-173', 'ytterbium-173', 70, 173, 172.9382151, True,
-                  isotopic_abundance=0.16103),
-    'Yb-174': Iso('Yb-174', 'ytterbium-174', 70, 174, 173.9388664, True,
-                  isotopic_abundance=0.32026),
-    'Yb-175': Iso('Yb-175', 'ytterbium-175', 70, 175, 174.9412808, False),
-    'Yb-176': Iso('Yb-176', 'ytterbium-176', 70, 176, 175.9425764, True,
-                  isotopic_abundance=0.12996),
-    'Yb-177': Iso('Yb-177', 'ytterbium-177', 70, 177, 176.9452656, False),
-    'Yb-178': Iso('Yb-178', 'ytterbium-178', 70, 178, 177.946651, False),
-    'Yb-179': Iso('Yb-179', 'ytterbium-179', 70, 179, 178.95004, False),
-    'Yb-180': Iso('Yb-180', 'ytterbium-180', 70, 180, 179.95212, False),
-    'Yb-181': Iso('Yb-181', 'ytterbium-181', 70, 181, 180.95589, False),
-    'Lu-150': Iso('Lu-150', 'lutetium-150', 71, 150, 149.97355, False),
-    'Lu-151': Iso('Lu-151', 'lutetium-151', 71, 151, 150.96768, False),
-    'Lu-152': Iso('Lu-152', 'lutetium-152', 71, 152, 151.96412, False),
-    'Lu-153': Iso('Lu-153', 'lutetium-153', 71, 153, 152.95875, False),
-    'Lu-154': Iso('Lu-154', 'lutetium-154', 71, 154, 153.95736, False),
-    'Lu-155': Iso('Lu-155', 'lutetium-155', 71, 155, 154.954321, False),
-    'Lu-156': Iso('Lu-156', 'lutetium-156', 71, 156, 155.953033, False),
-    'Lu-157': Iso('Lu-157', 'lutetium-157', 71, 157, 156.950127, False),
-    'Lu-158': Iso('Lu-158', 'lutetium-158', 71, 158, 157.949316, False),
-    'Lu-159': Iso('Lu-159', 'lutetium-159', 71, 159, 158.946636, False),
-    'Lu-160': Iso('Lu-160', 'lutetium-160', 71, 160, 159.946033, False),
-    'Lu-161': Iso('Lu-161', 'lutetium-161', 71, 161, 160.943572, False),
-    'Lu-162': Iso('Lu-162', 'lutetium-162', 71, 162, 161.943283, False),
-    'Lu-163': Iso('Lu-163', 'lutetium-163', 71, 163, 162.941179, False),
-    'Lu-164': Iso('Lu-164', 'lutetium-164', 71, 164, 163.941339, False),
-    'Lu-165': Iso('Lu-165', 'lutetium-165', 71, 165, 164.939407, False),
-    'Lu-166': Iso('Lu-166', 'lutetium-166', 71, 166, 165.939859, False),
-    'Lu-167': Iso('Lu-167', 'lutetium-167', 71, 167, 166.938270, False),
-    'Lu-168': Iso('Lu-168', 'lutetium-168', 71, 168, 167.938736, False),
-    'Lu-169': Iso('Lu-169', 'lutetium-169', 71, 169, 168.9376441, False),
-    'Lu-170': Iso('Lu-170', 'lutetium-170', 71, 170, 169.938478, False),
-    'Lu-171': Iso('Lu-171', 'lutetium-171', 71, 171, 170.9379170, False),
-    'Lu-172': Iso('Lu-172', 'lutetium-172', 71, 172, 171.9390891, False),
-    'Lu-173': Iso('Lu-173', 'lutetium-173', 71, 173, 172.9389340, False),
-    'Lu-174': Iso('Lu-174', 'lutetium-174', 71, 174, 173.9403409, False),
-    'Lu-175': Iso('Lu-175', 'lutetium-175', 71, 175, 174.9407752, True,
-                  isotopic_abundance=0.97401),
-    'Lu-176': Iso('Lu-176', 'lutetium-176', 71, 176, 175.9426897, False,
-                  isotopic_abundance=0.02599),
-    'Lu-177': Iso('Lu-177', 'lutetium-177', 71, 177, 176.9437615, False,
-                  half_life=573696.0),
-    'Lu-178': Iso('Lu-178', 'lutetium-178', 71, 178, 177.9459580, False),
-    'Lu-179': Iso('Lu-179', 'lutetium-179', 71, 179, 178.9473309, False),
-    'Lu-180': Iso('Lu-180', 'lutetium-180', 71, 180, 179.949888, False),
-    'Lu-181': Iso('Lu-181', 'lutetium-181', 71, 181, 180.95191, False),
-    'Lu-182': Iso('Lu-182', 'lutetium-182', 71, 182, 181.95504, False),
-    'Lu-183': Iso('Lu-183', 'lutetium-183', 71, 183, 182.957363, False),
-    'Lu-184': Iso('Lu-184', 'lutetium-184', 71, 184, 183.96091, False),
-    'Lu-185': Iso('Lu-185', 'lutetium-185', 71, 185, 184.96362, False),
-    'Hf-153': Iso('Hf-153', 'hafnium-153', 72, 153, 152.97069, False),
-    'Hf-154': Iso('Hf-154', 'hafnium-154', 72, 154, 153.96486, False),
-    'Hf-155': Iso('Hf-155', 'hafnium-155', 72, 155, 154.96311, False),
-    'Hf-156': Iso('Hf-156', 'hafnium-156', 72, 156, 155.95935, False),
-    'Hf-157': Iso('Hf-157', 'hafnium-157', 72, 157, 156.95824, False),
-    'Hf-158': Iso('Hf-158', 'hafnium-158', 72, 158, 157.954801, False),
-    'Hf-159': Iso('Hf-159', 'hafnium-159', 72, 159, 158.953996, False),
-    'Hf-160': Iso('Hf-160', 'hafnium-160', 72, 160, 159.950691, False),
-    'Hf-161': Iso('Hf-161', 'hafnium-161', 72, 161, 160.950278, False),
-    'Hf-162': Iso('Hf-162', 'hafnium-162', 72, 162, 161.9472148, False),
-    'Hf-163': Iso('Hf-163', 'hafnium-163', 72, 163, 162.947113, False),
-    'Hf-164': Iso('Hf-164', 'hafnium-164', 72, 164, 163.944371, False),
-    'Hf-165': Iso('Hf-165', 'hafnium-165', 72, 165, 164.944567, False),
-    'Hf-166': Iso('Hf-166', 'hafnium-166', 72, 166, 165.942180, False),
-    'Hf-167': Iso('Hf-167', 'hafnium-167', 72, 167, 166.942600, False),
-    'Hf-168': Iso('Hf-168', 'hafnium-168', 72, 168, 167.940568, False),
-    'Hf-169': Iso('Hf-169', 'hafnium-169', 72, 169, 168.941259, False),
-    'Hf-170': Iso('Hf-170', 'hafnium-170', 72, 170, 169.939609, False),
-    'Hf-171': Iso('Hf-171', 'hafnium-171', 72, 171, 170.940492, False),
-    'Hf-172': Iso('Hf-172', 'hafnium-172', 72, 172, 171.939450, False),
-    'Hf-173': Iso('Hf-173', 'hafnium-173', 72, 173, 172.940513, False),
-    'Hf-174': Iso('Hf-174', 'hafnium-174', 72, 174, 173.9400461, False,
-                  isotopic_abundance=0.0016),
-    'Hf-175': Iso('Hf-175', 'hafnium-175', 72, 175, 174.9415092, False),
-    'Hf-176': Iso('Hf-176', 'hafnium-176', 72, 176, 175.9414076, True,
-                  isotopic_abundance=0.0526),
-    'Hf-177': Iso('Hf-177', 'hafnium-177', 72, 177, 176.9432277, True,
-                  isotopic_abundance=0.1860),
-    'Hf-178': Iso('Hf-178', 'hafnium-178', 72, 178, 177.9437058, True,
-                  isotopic_abundance=0.2728),
-    'Hf-179': Iso('Hf-179', 'hafnium-179', 72, 179, 178.9458232, True,
-                  isotopic_abundance=0.1362),
-    'Hf-180': Iso('Hf-180', 'hafnium-180', 72, 180, 179.9465570, True,
-                  isotopic_abundance=0.3508),
-    'Hf-181': Iso('Hf-181', 'hafnium-181', 72, 181, 180.9491083, False),
-    'Hf-182': Iso('Hf-182', 'hafnium-182', 72, 182, 181.9505612, False),
-    'Hf-183': Iso('Hf-183', 'hafnium-183', 72, 183, 182.953530, False),
-    'Hf-184': Iso('Hf-184', 'hafnium-184', 72, 184, 183.955446, False),
-    'Hf-185': Iso('Hf-185', 'hafnium-185', 72, 185, 184.958862, False),
-    'Hf-186': Iso('Hf-186', 'hafnium-186', 72, 186, 185.960897, False),
-    'Hf-187': Iso('Hf-187', 'hafnium-187', 72, 187, 186.96477, False),
-    'Hf-188': Iso('Hf-188', 'hafnium-188', 72, 188, 187.96685, False),
-    'Hf-189': Iso('Hf-189', 'hafnium-189', 72, 189, 188.97084, False),
-    'Ta-155': Iso('Ta-155', 'tantalum-155', 73, 155, 154.97424, False),
-    'Ta-156': Iso('Ta-156', 'tantalum-156', 73, 156, 155.97203, False),
-    'Ta-157': Iso('Ta-157', 'tantalum-157', 73, 157, 156.96818, False),
-    'Ta-158': Iso('Ta-158', 'tantalum-158', 73, 158, 157.96654, False),
-    'Ta-159': Iso('Ta-159', 'tantalum-159', 73, 159, 158.963023, False),
-    'Ta-160': Iso('Ta-160', 'tantalum-160', 73, 160, 159.961488, False),
-    'Ta-161': Iso('Ta-161', 'tantalum-161', 73, 161, 160.958452, False),
-    'Ta-162': Iso('Ta-162', 'tantalum-162', 73, 162, 161.957294, False),
-    'Ta-163': Iso('Ta-163', 'tantalum-163', 73, 163, 162.954337, False),
-    'Ta-164': Iso('Ta-164', 'tantalum-164', 73, 164, 163.953534, False),
-    'Ta-165': Iso('Ta-165', 'tantalum-165', 73, 165, 164.950781, False),
-    'Ta-166': Iso('Ta-166', 'tantalum-166', 73, 166, 165.950512, False),
-    'Ta-167': Iso('Ta-167', 'tantalum-167', 73, 167, 166.948093, False),
-    'Ta-168': Iso('Ta-168', 'tantalum-168', 73, 168, 167.948047, False),
-    'Ta-169': Iso('Ta-169', 'tantalum-169', 73, 169, 168.946011, False),
-    'Ta-170': Iso('Ta-170', 'tantalum-170', 73, 170, 169.946175, False),
-    'Ta-171': Iso('Ta-171', 'tantalum-171', 73, 171, 170.944476, False),
-    'Ta-172': Iso('Ta-172', 'tantalum-172', 73, 172, 171.944895, False),
-    'Ta-173': Iso('Ta-173', 'tantalum-173', 73, 173, 172.943750, False),
-    'Ta-174': Iso('Ta-174', 'tantalum-174', 73, 174, 173.944454, False),
-    'Ta-175': Iso('Ta-175', 'tantalum-175', 73, 175, 174.943737, False),
-    'Ta-176': Iso('Ta-176', 'tantalum-176', 73, 176, 175.944857, False),
-    'Ta-177': Iso('Ta-177', 'tantalum-177', 73, 177, 176.9444795, False),
-    'Ta-178': Iso('Ta-178', 'tantalum-178', 73, 178, 177.945678, False),
-    'Ta-179': Iso('Ta-179', 'tantalum-179', 73, 179, 178.9459366, False),
-    'Ta-180': Iso('Ta-180', 'tantalum-180', 73, 180, 179.9474648, True,
-                  isotopic_abundance=0.0001201),
-    'Ta-181': Iso('Ta-181', 'tantalum-181', 73, 181, 180.9479958, True,
-                  isotopic_abundance=0.9998799),
-    'Ta-182': Iso('Ta-182', 'tantalum-182', 73, 182, 181.9501519, False),
-    'Ta-183': Iso('Ta-183', 'tantalum-183', 73, 183, 182.9513726, False),
-    'Ta-184': Iso('Ta-184', 'tantalum-184', 73, 184, 183.954008, False),
-    'Ta-185': Iso('Ta-185', 'tantalum-185', 73, 185, 184.955559, False),
-    'Ta-186': Iso('Ta-186', 'tantalum-186', 73, 186, 185.958551, False),
-    'Ta-187': Iso('Ta-187', 'tantalum-187', 73, 187, 186.960386, False),
-    'Ta-188': Iso('Ta-188', 'tantalum-188', 73, 188, 187.963916, False),
-    'Ta-189': Iso('Ta-189', 'tantalum-189', 73, 189, 188.96583, False),
-    'Ta-190': Iso('Ta-190', 'tantalum-190', 73, 190, 189.96939, False),
-    'Ta-191': Iso('Ta-191', 'tantalum-191', 73, 191, 190.97156, False),
-    'Ta-192': Iso('Ta-192', 'tantalum-192', 73, 192, 191.97514, False),
-    'W-157': Iso('W-157', 'tungsten-157', 74, 157, 156.97884, False),
-    'W-158': Iso('W-158', 'tungsten-158', 74, 158, 157.97456, False),
-    'W-159': Iso('W-159', 'tungsten-159', 74, 159, 158.97264, False),
-    'W-160': Iso('W-160', 'tungsten-160', 74, 160, 159.96846, False),
-    'W-161': Iso('W-161', 'tungsten-161', 74, 161, 160.96720, False),
-    'W-162': Iso('W-162', 'tungsten-162', 74, 162, 161.963499, False),
-    'W-163': Iso('W-163', 'tungsten-163', 74, 163, 162.962524, False),
-    'W-164': Iso('W-164', 'tungsten-164', 74, 164, 163.958961, False),
-    'W-165': Iso('W-165', 'tungsten-165', 74, 165, 164.958281, False),
-    'W-166': Iso('W-166', 'tungsten-166', 74, 166, 165.955031, False),
-    'W-167': Iso('W-167', 'tungsten-167', 74, 167, 166.954805, False),
-    'W-168': Iso('W-168', 'tungsten-168', 74, 168, 167.951806, False),
-    'W-169': Iso('W-169', 'tungsten-169', 74, 169, 168.951779, False),
-    'W-170': Iso('W-170', 'tungsten-170', 74, 170, 169.949232, False),
-    'W-171': Iso('W-171', 'tungsten-171', 74, 171, 170.949451, False),
-    'W-172': Iso('W-172', 'tungsten-172', 74, 172, 171.947292, False),
-    'W-173': Iso('W-173', 'tungsten-173', 74, 173, 172.947689, False),
-    'W-174': Iso('W-174', 'tungsten-174', 74, 174, 173.946079, False),
-    'W-175': Iso('W-175', 'tungsten-175', 74, 175, 174.946717, False),
-    'W-176': Iso('W-176', 'tungsten-176', 74, 176, 175.945634, False),
-    'W-177': Iso('W-177', 'tungsten-177', 74, 177, 176.946643, False),
-    'W-178': Iso('W-178', 'tungsten-178', 74, 178, 177.945883, False),
-    'W-179': Iso('W-179', 'tungsten-179', 74, 179, 178.947077, False),
-    'W-180': Iso('W-180', 'tungsten-180', 74, 180, 179.9467108, False,
-                 isotopic_abundance=0.0012),
-    'W-181': Iso('W-181', 'tungsten-181', 74, 181, 180.9481978, False,
-                 half_life=10462608.0),
-    'W-182': Iso('W-182', 'tungsten-182', 74, 182, 181.94820394, True,
-                 isotopic_abundance=0.2650),
-    'W-183': Iso('W-183', 'tungsten-183', 74, 183, 182.95022275, True,
-                 isotopic_abundance=0.1431),
-    'W-184': Iso('W-184', 'tungsten-184', 74, 184, 183.95093092, True,
-                 isotopic_abundance=0.3064),
-    'W-185': Iso('W-185', 'tungsten-185', 74, 185, 184.95341897, False),
-    'W-186': Iso('W-186', 'tungsten-186', 74, 186, 185.9543628, True,
-                 isotopic_abundance=0.2843),
-    'W-187': Iso('W-187', 'tungsten-187', 74, 187, 186.9571588, False),
-    'W-188': Iso('W-188', 'tungsten-188', 74, 188, 187.9584862, False,
-                 half_life=6029251.2),
-    'W-189': Iso('W-189', 'tungsten-189', 74, 189, 188.961763, False),
-    'W-190': Iso('W-190', 'tungsten-190', 74, 190, 189.963091, False),
-    'W-191': Iso('W-191', 'tungsten-191', 74, 191, 190.966531, False),
-    'W-192': Iso('W-192', 'tungsten-192', 74, 192, 191.96817, False),
-    'W-193': Iso('W-193', 'tungsten-193', 74, 193, 192.97178, False),
-    'W-194': Iso('W-194', 'tungsten-194', 74, 194, 193.97367, False),
-    'Re-159': Iso('Re-159', 'rhenium-159', 75, 159, 158.98418, False),
-    'Re-160': Iso('Re-160', 'rhenium-160', 75, 160, 159.98182, False),
-    'Re-161': Iso('Re-161', 'rhenium-161', 75, 161, 160.97757, False),
-    'Re-162': Iso('Re-162', 'rhenium-162', 75, 162, 161.97584, False),
-    'Re-163': Iso('Re-163', 'rhenium-163', 75, 163, 162.972080, False),
-    'Re-164': Iso('Re-164', 'rhenium-164', 75, 164, 163.970453, False),
-    'Re-165': Iso('Re-165', 'rhenium-165', 75, 165, 164.967103, False),
-    'Re-166': Iso('Re-166', 'rhenium-166', 75, 166, 165.965761, False),
-    'Re-167': Iso('Re-167', 'rhenium-167', 75, 167, 166.962595, False),
-    'Re-168': Iso('Re-168', 'rhenium-168', 75, 168, 167.961573, False),
-    'Re-169': Iso('Re-169', 'rhenium-169', 75, 169, 168.958766, False),
-    'Re-170': Iso('Re-170', 'rhenium-170', 75, 170, 169.958220, False),
-    'Re-171': Iso('Re-171', 'rhenium-171', 75, 171, 170.955716, False),
-    'Re-172': Iso('Re-172', 'rhenium-172', 75, 172, 171.955420, False),
-    'Re-173': Iso('Re-173', 'rhenium-173', 75, 173, 172.953243, False),
-    'Re-174': Iso('Re-174', 'rhenium-174', 75, 174, 173.953115, False),
-    'Re-175': Iso('Re-175', 'rhenium-175', 75, 175, 174.951381, False),
-    'Re-176': Iso('Re-176', 'rhenium-176', 75, 176, 175.951623, False),
-    'Re-177': Iso('Re-177', 'rhenium-177', 75, 177, 176.950328, False),
-    'Re-178': Iso('Re-178', 'rhenium-178', 75, 178, 177.950989, False),
-    'Re-179': Iso('Re-179', 'rhenium-179', 75, 179, 178.949989, False),
-    'Re-180': Iso('Re-180', 'rhenium-180', 75, 180, 179.950792, False),
-    'Re-181': Iso('Re-181', 'rhenium-181', 75, 181, 180.950058, False),
-    'Re-182': Iso('Re-182', 'rhenium-182', 75, 182, 181.95121, False),
-    'Re-183': Iso('Re-183', 'rhenium-183', 75, 183, 182.9508196, False),
-    'Re-184': Iso('Re-184', 'rhenium-184', 75, 184, 183.9525228, False),
-    'Re-185': Iso('Re-185', 'rhenium-185', 75, 185, 184.9529545, True,
-                  isotopic_abundance=0.3740),
-    'Re-186': Iso('Re-186', 'rhenium-186', 75, 186, 185.9549856, False,
-                  half_life=321292.8),
-    'Re-187': Iso('Re-187', 'rhenium-187', 75, 187, 186.9557501, False,
-                  isotopic_abundance=0.6260),
-    'Re-188': Iso('Re-188', 'rhenium-188', 75, 188, 187.9581115, False,
-                  half_life=61203.600000000006),
-    'Re-189': Iso('Re-189', 'rhenium-189', 75, 189, 188.9592260, False),
-    'Re-190': Iso('Re-190', 'rhenium-190', 75, 190, 189.961744, False),
-    'Re-191': Iso('Re-191', 'rhenium-191', 75, 191, 190.963122, False),
-    'Re-192': Iso('Re-192', 'rhenium-192', 75, 192, 191.966088, False),
-    'Re-193': Iso('Re-193', 'rhenium-193', 75, 193, 192.967541, False),
-    'Re-194': Iso('Re-194', 'rhenium-194', 75, 194, 193.97076, False),
-    'Re-195': Iso('Re-195', 'rhenium-195', 75, 195, 194.97254, False),
-    'Re-196': Iso('Re-196', 'rhenium-196', 75, 196, 195.97580, False),
-    'Re-197': Iso('Re-197', 'rhenium-197', 75, 197, 196.97799, False),
-    'Re-198': Iso('Re-198', 'rhenium-198', 75, 198, 197.98160, False),
-    'Os-161': Iso('Os-161', 'osmium-161', 76, 161, 160.98903, False),
-    'Os-162': Iso('Os-162', 'osmium-162', 76, 162, 161.98443, False),
-    'Os-163': Iso('Os-163', 'osmium-163', 76, 163, 162.98241, False),
-    'Os-164': Iso('Os-164', 'osmium-164', 76, 164, 163.97802, False),
-    'Os-165': Iso('Os-165', 'osmium-165', 76, 165, 164.97660, False),
-    'Os-166': Iso('Os-166', 'osmium-166', 76, 166, 165.972692, False),
-    'Os-167': Iso('Os-167', 'osmium-167', 76, 167, 166.971549, False),
-    'Os-168': Iso('Os-168', 'osmium-168', 76, 168, 167.967808, False),
-    'Os-169': Iso('Os-169', 'osmium-169', 76, 169, 168.967018, False),
-    'Os-170': Iso('Os-170', 'osmium-170', 76, 170, 169.963578, False),
-    'Os-171': Iso('Os-171', 'osmium-171', 76, 171, 170.963174, False),
-    'Os-172': Iso('Os-172', 'osmium-172', 76, 172, 171.960017, False),
-    'Os-173': Iso('Os-173', 'osmium-173', 76, 173, 172.959808, False),
-    'Os-174': Iso('Os-174', 'osmium-174', 76, 174, 173.957064, False),
-    'Os-175': Iso('Os-175', 'osmium-175', 76, 175, 174.956945, False),
-    'Os-176': Iso('Os-176', 'osmium-176', 76, 176, 175.954806, False),
-    'Os-177': Iso('Os-177', 'osmium-177', 76, 177, 176.954966, False),
-    'Os-178': Iso('Os-178', 'osmium-178', 76, 178, 177.953254, False),
-    'Os-179': Iso('Os-179', 'osmium-179', 76, 179, 178.953817, False),
-    'Os-180': Iso('Os-180', 'osmium-180', 76, 180, 179.952375, False),
-    'Os-181': Iso('Os-181', 'osmium-181', 76, 181, 180.953247, False),
-    'Os-182': Iso('Os-182', 'osmium-182', 76, 182, 181.952110, False),
-    'Os-183': Iso('Os-183', 'osmium-183', 76, 183, 182.953125, False),
-    'Os-184': Iso('Os-184', 'osmium-184', 76, 184, 183.9524885, True,
-                  isotopic_abundance=0.0002),
-    'Os-185': Iso('Os-185', 'osmium-185', 76, 185, 184.9540417, False),
-    'Os-186': Iso('Os-186', 'osmium-186', 76, 186, 185.9538350, False,
-                  isotopic_abundance=0.0159),
-    'Os-187': Iso('Os-187', 'osmium-187', 76, 187, 186.9557474, True,
-                  isotopic_abundance=0.0196),
-    'Os-188': Iso('Os-188', 'osmium-188', 76, 188, 187.9558352, True,
-                  isotopic_abundance=0.1324),
-    'Os-189': Iso('Os-189', 'osmium-189', 76, 189, 188.9581442, True,
-                  isotopic_abundance=0.1615),
-    'Os-190': Iso('Os-190', 'osmium-190', 76, 190, 189.9584437, True,
-                  isotopic_abundance=0.2626),
-    'Os-191': Iso('Os-191', 'osmium-191', 76, 191, 190.9609264, False),
-    'Os-192': Iso('Os-192', 'osmium-192', 76, 192, 191.9614770, True,
-                  isotopic_abundance=0.4078),
-    'Os-193': Iso('Os-193', 'osmium-193', 76, 193, 192.9641479, False),
-    'Os-194': Iso('Os-194', 'osmium-194', 76, 194, 193.9651772, False),
-    'Os-195': Iso('Os-195', 'osmium-195', 76, 195, 194.968318, False),
-    'Os-196': Iso('Os-196', 'osmium-196', 76, 196, 195.969641, False),
-    'Os-197': Iso('Os-197', 'osmium-197', 76, 197, 196.97283, False),
-    'Os-198': Iso('Os-198', 'osmium-198', 76, 198, 197.97441, False),
-    'Os-199': Iso('Os-199', 'osmium-199', 76, 199, 198.97801, False),
-    'Os-200': Iso('Os-200', 'osmium-200', 76, 200, 199.97984, False),
-    'Os-201': Iso('Os-201', 'osmium-201', 76, 201, 200.98364, False),
-    'Os-202': Iso('Os-202', 'osmium-202', 76, 202, 201.98595, False),
-    'Ir-164': Iso('Ir-164', 'iridium-164', 77, 164, 163.99191, False),
-    'Ir-165': Iso('Ir-165', 'iridium-165', 77, 165, 164.98750, False),
-    'Ir-166': Iso('Ir-166', 'iridium-166', 77, 166, 165.98566, False),
-    'Ir-167': Iso('Ir-167', 'iridium-167', 77, 167, 166.981666, False),
-    'Ir-168': Iso('Ir-168', 'iridium-168', 77, 168, 167.979907, False),
-    'Ir-169': Iso('Ir-169', 'iridium-169', 77, 169, 168.976298, False),
-    'Ir-170': Iso('Ir-170', 'iridium-170', 77, 170, 169.974922, False),
-    'Ir-171': Iso('Ir-171', 'iridium-171', 77, 171, 170.971640, False),
-    'Ir-172': Iso('Ir-172', 'iridium-172', 77, 172, 171.970607, False),
-    'Ir-173': Iso('Ir-173', 'iridium-173', 77, 173, 172.967506, False),
-    'Ir-174': Iso('Ir-174', 'iridium-174', 77, 174, 173.966861, False),
-    'Ir-175': Iso('Ir-175', 'iridium-175', 77, 175, 174.964150, False),
-    'Ir-176': Iso('Ir-176', 'iridium-176', 77, 176, 175.963650, False),
-    'Ir-177': Iso('Ir-177', 'iridium-177', 77, 177, 176.961301, False),
-    'Ir-178': Iso('Ir-178', 'iridium-178', 77, 178, 177.961082, False),
-    'Ir-179': Iso('Ir-179', 'iridium-179', 77, 179, 178.959120, False),
-    'Ir-180': Iso('Ir-180', 'iridium-180', 77, 180, 179.959229, False),
-    'Ir-181': Iso('Ir-181', 'iridium-181', 77, 181, 180.957625, False),
-    'Ir-182': Iso('Ir-182', 'iridium-182', 77, 182, 181.958076, False),
-    'Ir-183': Iso('Ir-183', 'iridium-183', 77, 183, 182.956840, False),
-    'Ir-184': Iso('Ir-184', 'iridium-184', 77, 184, 183.957476, False),
-    'Ir-185': Iso('Ir-185', 'iridium-185', 77, 185, 184.956698, False),
-    'Ir-186': Iso('Ir-186', 'iridium-186', 77, 186, 185.957944, False),
-    'Ir-187': Iso('Ir-187', 'iridium-187', 77, 187, 186.957542, False),
-    'Ir-188': Iso('Ir-188', 'iridium-188', 77, 188, 187.958828, False),
-    'Ir-189': Iso('Ir-189', 'iridium-189', 77, 189, 188.958715, False),
-    'Ir-190': Iso('Ir-190', 'iridium-190', 77, 190, 189.9605412, False),
-    'Ir-191': Iso('Ir-191', 'iridium-191', 77, 191, 190.9605893, True,
-                  isotopic_abundance=0.373),
-    'Ir-192': Iso('Ir-192', 'iridium-192', 77, 192, 191.9626002, False,
-                  half_life=6377184.0),
-    'Ir-193': Iso('Ir-193', 'iridium-193', 77, 193, 192.9629216, True,
-                  isotopic_abundance=0.627),
-    'Ir-194': Iso('Ir-194', 'iridium-194', 77, 194, 193.9650735, False),
-    'Ir-195': Iso('Ir-195', 'iridium-195', 77, 195, 194.9659747, False),
-    'Ir-196': Iso('Ir-196', 'iridium-196', 77, 196, 195.968397, False),
-    'Ir-197': Iso('Ir-197', 'iridium-197', 77, 197, 196.969655, False),
-    'Ir-198': Iso('Ir-198', 'iridium-198', 77, 198, 197.97228, False),
-    'Ir-199': Iso('Ir-199', 'iridium-199', 77, 199, 198.973805, False),
-    'Ir-200': Iso('Ir-200', 'iridium-200', 77, 200, 199.97680, False),
-    'Ir-201': Iso('Ir-201', 'iridium-201', 77, 201, 200.97864, False),
-    'Ir-202': Iso('Ir-202', 'iridium-202', 77, 202, 201.98199, False),
-    'Ir-203': Iso('Ir-203', 'iridium-203', 77, 203, 202.98423, False),
-    'Ir-204': Iso('Ir-204', 'iridium-204', 77, 204, 203.98960, False),
-    'Pt-166': Iso('Pt-166', 'platinum-166', 78, 166, 165.99486, False),
-    'Pt-167': Iso('Pt-167', 'platinum-167', 78, 167, 166.99269, False),
-    'Pt-168': Iso('Pt-168', 'platinum-168', 78, 168, 167.98813, False),
-    'Pt-169': Iso('Pt-169', 'platinum-169', 78, 169, 168.98657, False),
-    'Pt-170': Iso('Pt-170', 'platinum-170', 78, 170, 169.982496, False),
-    'Pt-171': Iso('Pt-171', 'platinum-171', 78, 171, 170.981245, False),
-    'Pt-172': Iso('Pt-172', 'platinum-172', 78, 172, 171.977351, False),
-    'Pt-173': Iso('Pt-173', 'platinum-173', 78, 173, 172.976443, False),
-    'Pt-174': Iso('Pt-174', 'platinum-174', 78, 174, 173.972820, False),
-    'Pt-175': Iso('Pt-175', 'platinum-175', 78, 175, 174.972410, False),
-    'Pt-176': Iso('Pt-176', 'platinum-176', 78, 176, 175.968938, False),
-    'Pt-177': Iso('Pt-177', 'platinum-177', 78, 177, 176.968470, False),
-    'Pt-178': Iso('Pt-178', 'platinum-178', 78, 178, 177.965650, False),
-    'Pt-179': Iso('Pt-179', 'platinum-179', 78, 179, 178.9653590, False),
-    'Pt-180': Iso('Pt-180', 'platinum-180', 78, 180, 179.963032, False),
-    'Pt-181': Iso('Pt-181', 'platinum-181', 78, 181, 180.963098, False),
-    'Pt-182': Iso('Pt-182', 'platinum-182', 78, 182, 181.961172, False),
-    'Pt-183': Iso('Pt-183', 'platinum-183', 78, 183, 182.961597, False),
-    'Pt-184': Iso('Pt-184', 'platinum-184', 78, 184, 183.959915, False),
-    'Pt-185': Iso('Pt-185', 'platinum-185', 78, 185, 184.960614, False),
-    'Pt-186': Iso('Pt-186', 'platinum-186', 78, 186, 185.959351, False),
-    'Pt-187': Iso('Pt-187', 'platinum-187', 78, 187, 186.960617, False),
-    'Pt-188': Iso('Pt-188', 'platinum-188', 78, 188, 187.9593889, False),
-    'Pt-189': Iso('Pt-189', 'platinum-189', 78, 189, 188.960831, False),
-    'Pt-190': Iso('Pt-190', 'platinum-190', 78, 190, 189.9599297, False,
-                  isotopic_abundance=0.00012),
-    'Pt-191': Iso('Pt-191', 'platinum-191', 78, 191, 190.9616729, False),
-    'Pt-192': Iso('Pt-192', 'platinum-192', 78, 192, 191.9610387, True,
-                  isotopic_abundance=0.00782),
-    'Pt-193': Iso('Pt-193', 'platinum-193', 78, 193, 192.9629824, False),
-    'Pt-194': Iso('Pt-194', 'platinum-194', 78, 194, 193.9626809, True,
-                  isotopic_abundance=0.3286),
-    'Pt-195': Iso('Pt-195', 'platinum-195', 78, 195, 194.9647917, True,
-                  isotopic_abundance=0.3378),
-    'Pt-196': Iso('Pt-196', 'platinum-196', 78, 196, 195.96495209, True,
-                  isotopic_abundance=0.2521),
-    'Pt-197': Iso('Pt-197', 'platinum-197', 78, 197, 196.96734069, False),
-    'Pt-198': Iso('Pt-198', 'platinum-198', 78, 198, 197.9678949, True,
-                  isotopic_abundance=0.07356),
-    'Pt-199': Iso('Pt-199', 'platinum-199', 78, 199, 198.9705952, False),
-    'Pt-200': Iso('Pt-200', 'platinum-200', 78, 200, 199.971443, False),
-    'Pt-201': Iso('Pt-201', 'platinum-201', 78, 201, 200.974513, False),
-    'Pt-202': Iso('Pt-202', 'platinum-202', 78, 202, 201.975639, False),
-    'Pt-203': Iso('Pt-203', 'platinum-203', 78, 203, 202.97893, False),
-    'Pt-204': Iso('Pt-204', 'platinum-204', 78, 204, 203.98076, False),
-    'Pt-205': Iso('Pt-205', 'platinum-205', 78, 205, 204.98608, False),
-    'Pt-206': Iso('Pt-206', 'platinum-206', 78, 206, 205.98966, False),
-    'Au-169': Iso('Au-169', 'gold-169', 79, 169, 168.99808, False),
-    'Au-170': Iso('Au-170', 'gold-170', 79, 170, 169.99597, False),
-    'Au-171': Iso('Au-171', 'gold-171', 79, 171, 170.991876, False),
-    'Au-172': Iso('Au-172', 'gold-172', 79, 172, 171.989942, False),
-    'Au-173': Iso('Au-173', 'gold-173', 79, 173, 172.986241, False),
-    'Au-174': Iso('Au-174', 'gold-174', 79, 174, 173.984717, False),
-    'Au-175': Iso('Au-175', 'gold-175', 79, 175, 174.981304, False),
-    'Au-176': Iso('Au-176', 'gold-176', 79, 176, 175.980250, False),
-    'Au-177': Iso('Au-177', 'gold-177', 79, 177, 176.976870, False),
-    'Au-178': Iso('Au-178', 'gold-178', 79, 178, 177.976032, False),
-    'Au-179': Iso('Au-179', 'gold-179', 79, 179, 178.973174, False),
-    'Au-180': Iso('Au-180', 'gold-180', 79, 180, 179.972523, False),
-    'Au-181': Iso('Au-181', 'gold-181', 79, 181, 180.970079, False),
-    'Au-182': Iso('Au-182', 'gold-182', 79, 182, 181.969618, False),
-    'Au-183': Iso('Au-183', 'gold-183', 79, 183, 182.967591, False),
-    'Au-184': Iso('Au-184', 'gold-184', 79, 184, 183.967452, False),
-    'Au-185': Iso('Au-185', 'gold-185', 79, 185, 184.965790, False),
-    'Au-186': Iso('Au-186', 'gold-186', 79, 186, 185.965953, False),
-    'Au-187': Iso('Au-187', 'gold-187', 79, 187, 186.964543, False),
-    'Au-188': Iso('Au-188', 'gold-188', 79, 188, 187.965349, False),
-    'Au-189': Iso('Au-189', 'gold-189', 79, 189, 188.963948, False),
-    'Au-190': Iso('Au-190', 'gold-190', 79, 190, 189.964698, False),
-    'Au-191': Iso('Au-191', 'gold-191', 79, 191, 190.963702, False),
-    'Au-192': Iso('Au-192', 'gold-192', 79, 192, 191.964814, False),
-    'Au-193': Iso('Au-193', 'gold-193', 79, 193, 192.9641373, False),
-    'Au-194': Iso('Au-194', 'gold-194', 79, 194, 193.9654178, False),
-    'Au-195': Iso('Au-195', 'gold-195', 79, 195, 194.9650352, False,
-                  half_life=16078867.200000001),
-    'Au-196': Iso('Au-196', 'gold-196', 79, 196, 195.9665699, False),
-    'Au-197': Iso('Au-197', 'gold-197', 79, 197, 196.96656879, True,
+    'Mn-56': _iso('Mn-56', 'manganese-56', 25, 56, 55.93890369, False),
+    'Mn-57': _iso('Mn-57', 'manganese-57', 25, 57, 56.9382861, False),
+    'Mn-58': _iso('Mn-58', 'manganese-58', 25, 58, 57.9400666, False),
+    'Mn-59': _iso('Mn-59', 'manganese-59', 25, 59, 58.9403911, False),
+    'Mn-60': _iso('Mn-60', 'manganese-60', 25, 60, 59.9431366, False),
+    'Mn-61': _iso('Mn-61', 'manganese-61', 25, 61, 60.9444525, False),
+    'Mn-62': _iso('Mn-62', 'manganese-62', 25, 62, 61.94795, False),
+    'Mn-63': _iso('Mn-63', 'manganese-63', 25, 63, 62.9496647, False),
+    'Mn-64': _iso('Mn-64', 'manganese-64', 25, 64, 63.9538494, False),
+    'Mn-65': _iso('Mn-65', 'manganese-65', 25, 65, 64.9560198, False),
+    'Mn-66': _iso('Mn-66', 'manganese-66', 25, 66, 65.960547, False),
+    'Mn-67': _iso('Mn-67', 'manganese-67', 25, 67, 66.96424, False),
+    'Mn-68': _iso('Mn-68', 'manganese-68', 25, 68, 67.96962, False),
+    'Mn-69': _iso('Mn-69', 'manganese-69', 25, 69, 68.97366, False),
+    'Mn-70': _iso('Mn-70', 'manganese-70', 25, 70, 69.97937, False),
+    'Mn-71': _iso('Mn-71', 'manganese-71', 25, 71, 70.98368, False),
+    'Fe-45': _iso('Fe-45', 'iron-45', 26, 45, 45.01442, False),
+    'Fe-46': _iso('Fe-46', 'iron-46', 26, 46, 46.00063, False),
+    'Fe-47': _iso('Fe-47', 'iron-47', 26, 47, 46.99185, False),
+    'Fe-48': _iso('Fe-48', 'iron-48', 26, 48, 47.98023, False),
+    'Fe-49': _iso('Fe-49', 'iron-49', 26, 49, 48.973429, False),
+    'Fe-50': _iso('Fe-50', 'iron-50', 26, 50, 49.962975, False),
+    'Fe-51': _iso('Fe-51', 'iron-51', 26, 51, 50.9568410, False),
+    'Fe-52': _iso('Fe-52', 'iron-52', 26, 52, 51.9481131, False),
+    'Fe-53': _iso('Fe-53', 'iron-53', 26, 53, 52.9453064, False),
+    'Fe-54': _iso('Fe-54', 'iron-54', 26, 54, 53.93960899, True,
+                  isotopic_abundance=0.05845),
+    'Fe-55': _iso('Fe-55', 'iron-55', 26, 55, 54.93829199, False),
+    'Fe-56': _iso('Fe-56', 'iron-56', 26, 56, 55.93493633, True,
+                  isotopic_abundance=0.91754),
+    'Fe-57': _iso('Fe-57', 'iron-57', 26, 57, 56.93539284, True,
+                  isotopic_abundance=0.02119),
+    'Fe-58': _iso('Fe-58', 'iron-58', 26, 58, 57.93327443, True,
+                  isotopic_abundance=0.00282),
+    'Fe-59': _iso('Fe-59', 'iron-59', 26, 59, 58.93487434, False,
+                  half_life=3845439.36),
+    'Fe-60': _iso('Fe-60', 'iron-60', 26, 60, 59.9340711, False),
+    'Fe-61': _iso('Fe-61', 'iron-61', 26, 61, 60.9367462, False),
+    'Fe-62': _iso('Fe-62', 'iron-62', 26, 62, 61.9367918, False),
+    'Fe-63': _iso('Fe-63', 'iron-63', 26, 63, 62.9402727, False),
+    'Fe-64': _iso('Fe-64', 'iron-64', 26, 64, 63.9409878, False),
+    'Fe-65': _iso('Fe-65', 'iron-65', 26, 65, 64.9450115, False),
+    'Fe-66': _iso('Fe-66', 'iron-66', 26, 66, 65.9462500, False),
+    'Fe-67': _iso('Fe-67', 'iron-67', 26, 67, 66.95054, False),
+    'Fe-68': _iso('Fe-68', 'iron-68', 26, 68, 67.95295, False),
+    'Fe-69': _iso('Fe-69', 'iron-69', 26, 69, 68.95807, False),
+    'Fe-70': _iso('Fe-70', 'iron-70', 26, 70, 69.96102, False),
+    'Fe-71': _iso('Fe-71', 'iron-71', 26, 71, 70.96672, False),
+    'Fe-72': _iso('Fe-72', 'iron-72', 26, 72, 71.96983, False),
+    'Fe-73': _iso('Fe-73', 'iron-73', 26, 73, 72.97572, False),
+    'Fe-74': _iso('Fe-74', 'iron-74', 26, 74, 73.97935, False),
+    'Co-47': _iso('Co-47', 'cobalt-47', 27, 47, 47.01057, False),
+    'Co-48': _iso('Co-48', 'cobalt-48', 27, 48, 48.00093, False),
+    'Co-49': _iso('Co-49', 'cobalt-49', 27, 49, 48.98891, False),
+    'Co-50': _iso('Co-50', 'cobalt-50', 27, 50, 49.98091, False),
+    'Co-51': _iso('Co-51', 'cobalt-51', 27, 51, 50.970647, False),
+    'Co-52': _iso('Co-52', 'cobalt-52', 27, 52, 51.96351, False),
+    'Co-53': _iso('Co-53', 'cobalt-53', 27, 53, 52.9542041, False),
+    'Co-54': _iso('Co-54', 'cobalt-54', 27, 54, 53.94845987, False),
+    'Co-55': _iso('Co-55', 'cobalt-55', 27, 55, 54.94199720, False),
+    'Co-56': _iso('Co-56', 'cobalt-56', 27, 56, 55.93983880, False),
+    'Co-57': _iso('Co-57', 'cobalt-57', 27, 57, 56.93629057, False,
+                  half_life=23510304.0),
+    'Co-58': _iso('Co-58', 'cobalt-58', 27, 58, 57.9357521, False,
+                  half_life=6114528.0),
+    'Co-59': _iso('Co-59', 'cobalt-59', 27, 59, 58.93319429, True,
                   isotopic_abundance=1),
-    'Au-198': Iso('Au-198', 'gold-198', 79, 198, 197.96824242, False,
-                  half_life=232862.688),
-    'Au-199': Iso('Au-199', 'gold-199', 79, 199, 198.96876528, False),
-    'Au-200': Iso('Au-200', 'gold-200', 79, 200, 199.970756, False),
-    'Au-201': Iso('Au-201', 'gold-201', 79, 201, 200.9716575, False),
-    'Au-202': Iso('Au-202', 'gold-202', 79, 202, 201.973856, False),
-    'Au-203': Iso('Au-203', 'gold-203', 79, 203, 202.9751544, False),
-    'Au-204': Iso('Au-204', 'gold-204', 79, 204, 203.97783, False),
-    'Au-205': Iso('Au-205', 'gold-205', 79, 205, 204.97985, False),
-    'Au-206': Iso('Au-206', 'gold-206', 79, 206, 205.98474, False),
-    'Au-207': Iso('Au-207', 'gold-207', 79, 207, 206.98840, False),
-    'Au-208': Iso('Au-208', 'gold-208', 79, 208, 207.99345, False),
-    'Au-209': Iso('Au-209', 'gold-209', 79, 209, 208.99735, False),
-    'Au-210': Iso('Au-210', 'gold-210', 79, 210, 210.00250, False),
-    'Hg-171': Iso('Hg-171', 'mercury-171', 80, 171, 171.00353, False),
-    'Hg-172': Iso('Hg-172', 'mercury-172', 80, 172, 171.99881, False),
-    'Hg-173': Iso('Hg-173', 'mercury-173', 80, 173, 172.99709, False),
-    'Hg-174': Iso('Hg-174', 'mercury-174', 80, 174, 173.992865, False),
-    'Hg-175': Iso('Hg-175', 'mercury-175', 80, 175, 174.991441, False),
-    'Hg-176': Iso('Hg-176', 'mercury-176', 80, 176, 175.987361, False),
-    'Hg-177': Iso('Hg-177', 'mercury-177', 80, 177, 176.986277, False),
-    'Hg-178': Iso('Hg-178', 'mercury-178', 80, 178, 177.982484, False),
-    'Hg-179': Iso('Hg-179', 'mercury-179', 80, 179, 178.981831, False),
-    'Hg-180': Iso('Hg-180', 'mercury-180', 80, 180, 179.978260, False),
-    'Hg-181': Iso('Hg-181', 'mercury-181', 80, 181, 180.977819, False),
-    'Hg-182': Iso('Hg-182', 'mercury-182', 80, 182, 181.974689, False),
-    'Hg-183': Iso('Hg-183', 'mercury-183', 80, 183, 182.9744448, False),
-    'Hg-184': Iso('Hg-184', 'mercury-184', 80, 184, 183.971714, False),
-    'Hg-185': Iso('Hg-185', 'mercury-185', 80, 185, 184.971899, False),
-    'Hg-186': Iso('Hg-186', 'mercury-186', 80, 186, 185.969362, False),
-    'Hg-187': Iso('Hg-187', 'mercury-187', 80, 187, 186.969814, False),
-    'Hg-188': Iso('Hg-188', 'mercury-188', 80, 188, 187.967567, False),
-    'Hg-189': Iso('Hg-189', 'mercury-189', 80, 189, 188.968195, False),
-    'Hg-190': Iso('Hg-190', 'mercury-190', 80, 190, 189.966323, False),
-    'Hg-191': Iso('Hg-191', 'mercury-191', 80, 191, 190.967157, False),
-    'Hg-192': Iso('Hg-192', 'mercury-192', 80, 192, 191.965635, False),
-    'Hg-193': Iso('Hg-193', 'mercury-193', 80, 193, 192.966653, False),
-    'Hg-194': Iso('Hg-194', 'mercury-194', 80, 194, 193.9654491, False),
-    'Hg-195': Iso('Hg-195', 'mercury-195', 80, 195, 194.966721, False),
-    'Hg-196': Iso('Hg-196', 'mercury-196', 80, 196, 195.9658326, True,
-                  isotopic_abundance=0.0015),
-    'Hg-197': Iso('Hg-197', 'mercury-197', 80, 197, 196.9672128, False),
-    'Hg-198': Iso('Hg-198', 'mercury-198', 80, 198, 197.96676860, True,
-                  isotopic_abundance=0.0997),
-    'Hg-199': Iso('Hg-199', 'mercury-199', 80, 199, 198.96828064, True,
-                  isotopic_abundance=0.1687),
-    'Hg-200': Iso('Hg-200', 'mercury-200', 80, 200, 199.96832659, True,
-                  isotopic_abundance=0.2310),
-    'Hg-201': Iso('Hg-201', 'mercury-201', 80, 201, 200.97030284, True,
-                  isotopic_abundance=0.1318),
-    'Hg-202': Iso('Hg-202', 'mercury-202', 80, 202, 201.97064340, True,
-                  isotopic_abundance=0.2986),
-    'Hg-203': Iso('Hg-203', 'mercury-203', 80, 203, 202.9728728, False,
-                  half_life=4027881.6),
-    'Hg-204': Iso('Hg-204', 'mercury-204', 80, 204, 203.97349398, True,
-                  isotopic_abundance=0.0687),
-    'Hg-205': Iso('Hg-205', 'mercury-205', 80, 205, 204.9760734, False),
-    'Hg-206': Iso('Hg-206', 'mercury-206', 80, 206, 205.977514, False),
-    'Hg-207': Iso('Hg-207', 'mercury-207', 80, 207, 206.982300, False),
-    'Hg-208': Iso('Hg-208', 'mercury-208', 80, 208, 207.985759, False),
-    'Hg-209': Iso('Hg-209', 'mercury-209', 80, 209, 208.99072, False),
-    'Hg-210': Iso('Hg-210', 'mercury-210', 80, 210, 209.99424, False),
-    'Hg-211': Iso('Hg-211', 'mercury-211', 80, 211, 210.99933, False),
-    'Hg-212': Iso('Hg-212', 'mercury-212', 80, 212, 212.00296, False),
-    'Hg-213': Iso('Hg-213', 'mercury-213', 80, 213, 213.00823, False),
-    'Hg-214': Iso('Hg-214', 'mercury-214', 80, 214, 214.01200, False),
-    'Hg-215': Iso('Hg-215', 'mercury-215', 80, 215, 215.01740, False),
-    'Hg-216': Iso('Hg-216', 'mercury-216', 80, 216, 216.02132, False),
-    'Tl-176': Iso('Tl-176', 'thallium-176', 81, 176, 176.000624, False),
-    'Tl-177': Iso('Tl-177', 'thallium-177', 81, 177, 176.996431, False),
-    'Tl-178': Iso('Tl-178', 'thallium-178', 81, 178, 177.99485, False),
-    'Tl-179': Iso('Tl-179', 'thallium-179', 81, 179, 178.991111, False),
-    'Tl-180': Iso('Tl-180', 'thallium-180', 81, 180, 179.990057, False),
-    'Tl-181': Iso('Tl-181', 'thallium-181', 81, 181, 180.9862600, False),
-    'Tl-182': Iso('Tl-182', 'thallium-182', 81, 182, 181.985713, False),
-    'Tl-183': Iso('Tl-183', 'thallium-183', 81, 183, 182.982193, False),
-    'Tl-184': Iso('Tl-184', 'thallium-184', 81, 184, 183.981886, False),
-    'Tl-185': Iso('Tl-185', 'thallium-185', 81, 185, 184.978789, False),
-    'Tl-186': Iso('Tl-186', 'thallium-186', 81, 186, 185.978651, False),
-    'Tl-187': Iso('Tl-187', 'thallium-187', 81, 187, 186.9759063, False),
-    'Tl-188': Iso('Tl-188', 'thallium-188', 81, 188, 187.976021, False),
-    'Tl-189': Iso('Tl-189', 'thallium-189', 81, 189, 188.973588, False),
-    'Tl-190': Iso('Tl-190', 'thallium-190', 81, 190, 189.973828, False),
-    'Tl-191': Iso('Tl-191', 'thallium-191', 81, 191, 190.9717842, False),
-    'Tl-192': Iso('Tl-192', 'thallium-192', 81, 192, 191.972225, False),
-    'Tl-193': Iso('Tl-193', 'thallium-193', 81, 193, 192.9705020, False),
-    'Tl-194': Iso('Tl-194', 'thallium-194', 81, 194, 193.971081, False),
-    'Tl-195': Iso('Tl-195', 'thallium-195', 81, 195, 194.969774, False),
-    'Tl-196': Iso('Tl-196', 'thallium-196', 81, 196, 195.970481, False),
-    'Tl-197': Iso('Tl-197', 'thallium-197', 81, 197, 196.969576, False),
-    'Tl-198': Iso('Tl-198', 'thallium-198', 81, 198, 197.970483, False),
-    'Tl-199': Iso('Tl-199', 'thallium-199', 81, 199, 198.969877, False),
-    'Tl-200': Iso('Tl-200', 'thallium-200', 81, 200, 199.9709633, False),
-    'Tl-201': Iso('Tl-201', 'thallium-201', 81, 201, 200.970822, False,
-                  half_life=263139.83999999997),
-    'Tl-202': Iso('Tl-202', 'thallium-202', 81, 202, 201.972102, False,
-                  half_life=1077062.4),
-    'Tl-203': Iso('Tl-203', 'thallium-203', 81, 203, 202.9723446, True,
-                  isotopic_abundance=0.2952),
-    'Tl-204': Iso('Tl-204', 'thallium-204', 81, 204, 203.9738639, False),
-    'Tl-205': Iso('Tl-205', 'thallium-205', 81, 205, 204.9744278, True,
-                  isotopic_abundance=0.7048),
-    'Tl-206': Iso('Tl-206', 'thallium-206', 81, 206, 205.9761106, False),
-    'Tl-207': Iso('Tl-207', 'thallium-207', 81, 207, 206.9774197, False),
-    'Tl-208': Iso('Tl-208', 'thallium-208', 81, 208, 207.9820190, False),
-    'Tl-209': Iso('Tl-209', 'thallium-209', 81, 209, 208.9853594, False),
-    'Tl-210': Iso('Tl-210', 'thallium-210', 81, 210, 209.990074, False),
-    'Tl-211': Iso('Tl-211', 'thallium-211', 81, 211, 210.993475, False),
-    'Tl-212': Iso('Tl-212', 'thallium-212', 81, 212, 211.99834, False),
-    'Tl-213': Iso('Tl-213', 'thallium-213', 81, 213, 213.001915, False),
-    'Tl-214': Iso('Tl-214', 'thallium-214', 81, 214, 214.00694, False),
-    'Tl-215': Iso('Tl-215', 'thallium-215', 81, 215, 215.01064, False),
-    'Tl-216': Iso('Tl-216', 'thallium-216', 81, 216, 216.01580, False),
-    'Tl-217': Iso('Tl-217', 'thallium-217', 81, 217, 217.01966, False),
-    'Tl-218': Iso('Tl-218', 'thallium-218', 81, 218, 218.02479, False),
-    'Pb-178': Iso('Pb-178', 'lead-178', 82, 178, 178.003831, False),
-    'Pb-179': Iso('Pb-179', 'lead-179', 82, 179, 179.002201, False),
-    'Pb-180': Iso('Pb-180', 'lead-180', 82, 180, 179.997928, False),
-    'Pb-181': Iso('Pb-181', 'lead-181', 82, 181, 180.996653, False),
-    'Pb-182': Iso('Pb-182', 'lead-182', 82, 182, 181.992672, False),
-    'Pb-183': Iso('Pb-183', 'lead-183', 82, 183, 182.991872, False),
-    'Pb-184': Iso('Pb-184', 'lead-184', 82, 184, 183.988136, False),
-    'Pb-185': Iso('Pb-185', 'lead-185', 82, 185, 184.987610, False),
-    'Pb-186': Iso('Pb-186', 'lead-186', 82, 186, 185.984238, False),
-    'Pb-187': Iso('Pb-187', 'lead-187', 82, 187, 186.9839109, False),
-    'Pb-188': Iso('Pb-188', 'lead-188', 82, 188, 187.980875, False),
-    'Pb-189': Iso('Pb-189', 'lead-189', 82, 189, 188.980807, False),
-    'Pb-190': Iso('Pb-190', 'lead-190', 82, 190, 189.978082, False),
-    'Pb-191': Iso('Pb-191', 'lead-191', 82, 191, 190.978276, False),
-    'Pb-192': Iso('Pb-192', 'lead-192', 82, 192, 191.975775, False),
-    'Pb-193': Iso('Pb-193', 'lead-193', 82, 193, 192.976173, False),
-    'Pb-194': Iso('Pb-194', 'lead-194', 82, 194, 193.974012, False),
-    'Pb-195': Iso('Pb-195', 'lead-195', 82, 195, 194.974543, False),
-    'Pb-196': Iso('Pb-196', 'lead-196', 82, 196, 195.972774, False),
-    'Pb-197': Iso('Pb-197', 'lead-197', 82, 197, 196.9734312, False),
-    'Pb-198': Iso('Pb-198', 'lead-198', 82, 198, 197.972034, False),
-    'Pb-199': Iso('Pb-199', 'lead-199', 82, 199, 198.972913, False),
-    'Pb-200': Iso('Pb-200', 'lead-200', 82, 200, 199.971819, False),
-    'Pb-201': Iso('Pb-201', 'lead-201', 82, 201, 200.972883, False),
-    'Pb-202': Iso('Pb-202', 'lead-202', 82, 202, 201.9721520, False),
-    'Pb-203': Iso('Pb-203', 'lead-203', 82, 203, 202.9733911, False,
-                  half_life=186922.80000000002),
-    'Pb-204': Iso('Pb-204', 'lead-204', 82, 204, 203.9730440, True,
-                  isotopic_abundance=0.014),
-    'Pb-205': Iso('Pb-205', 'lead-205', 82, 205, 204.9744822, False),
-    'Pb-206': Iso('Pb-206', 'lead-206', 82, 206, 205.9744657, True,
-                  isotopic_abundance=0.241),
'lead-207', 82, 207, 206.9758973, True, - isotopic_abundance=0.221), - 'Pb-208': Iso('Pb-208', 'lead-208', 82, 208, 207.9766525, True, - isotopic_abundance=0.524), - 'Pb-209': Iso('Pb-209', 'lead-209', 82, 209, 208.9810905, False), - 'Pb-210': Iso('Pb-210', 'lead-210', 82, 210, 209.9841889, False), - 'Pb-211': Iso('Pb-211', 'lead-211', 82, 211, 210.9887371, False), - 'Pb-212': Iso('Pb-212', 'lead-212', 82, 212, 211.9918977, False), - 'Pb-213': Iso('Pb-213', 'lead-213', 82, 213, 212.9965629, False), - 'Pb-214': Iso('Pb-214', 'lead-214', 82, 214, 213.9998059, False), - 'Pb-215': Iso('Pb-215', 'lead-215', 82, 215, 215.00474, False), - 'Pb-216': Iso('Pb-216', 'lead-216', 82, 216, 216.00803, False), - 'Pb-217': Iso('Pb-217', 'lead-217', 82, 217, 217.01314, False), - 'Pb-218': Iso('Pb-218', 'lead-218', 82, 218, 218.01659, False), - 'Pb-219': Iso('Pb-219', 'lead-219', 82, 219, 219.02177, False), - 'Pb-220': Iso('Pb-220', 'lead-220', 82, 220, 220.02541, False), - 'Bi-184': Iso('Bi-184', 'bismuth-184', 83, 184, 184.001275, False), - 'Bi-185': Iso('Bi-185', 'bismuth-185', 83, 185, 184.997600, False), - 'Bi-186': Iso('Bi-186', 'bismuth-186', 83, 186, 185.996644, False), - 'Bi-187': Iso('Bi-187', 'bismuth-187', 83, 187, 186.993147, False), - 'Bi-188': Iso('Bi-188', 'bismuth-188', 83, 188, 187.992287, False), - 'Bi-189': Iso('Bi-189', 'bismuth-189', 83, 189, 188.989195, False), - 'Bi-190': Iso('Bi-190', 'bismuth-190', 83, 190, 189.988622, False), - 'Bi-191': Iso('Bi-191', 'bismuth-191', 83, 191, 190.9857866, False), - 'Bi-192': Iso('Bi-192', 'bismuth-192', 83, 192, 191.985469, False), - 'Bi-193': Iso('Bi-193', 'bismuth-193', 83, 193, 192.982960, False), - 'Bi-194': Iso('Bi-194', 'bismuth-194', 83, 194, 193.982785, False), - 'Bi-195': Iso('Bi-195', 'bismuth-195', 83, 195, 194.9806488, False), - 'Bi-196': Iso('Bi-196', 'bismuth-196', 83, 196, 195.980667, False), - 'Bi-197': Iso('Bi-197', 'bismuth-197', 83, 197, 196.9788651, False), - 'Bi-198': Iso('Bi-198', 'bismuth-198', 83, 198, 197.979206, False), - 'Bi-199': Iso('Bi-199', 'bismuth-199', 83, 199, 198.977673, False), - 'Bi-200': Iso('Bi-200', 'bismuth-200', 83, 200, 199.978131, False), - 'Bi-201': Iso('Bi-201', 'bismuth-201', 83, 201, 200.977010, False), - 'Bi-202': Iso('Bi-202', 'bismuth-202', 83, 202, 201.977734, False), - 'Bi-203': Iso('Bi-203', 'bismuth-203', 83, 203, 202.976893, False), - 'Bi-204': Iso('Bi-204', 'bismuth-204', 83, 204, 203.9778361, False), - 'Bi-205': Iso('Bi-205', 'bismuth-205', 83, 205, 204.9773867, False), - 'Bi-206': Iso('Bi-206', 'bismuth-206', 83, 206, 205.9784993, False), - 'Bi-207': Iso('Bi-207', 'bismuth-207', 83, 207, 206.9784710, False, - half_life=995587200.0), - 'Bi-208': Iso('Bi-208', 'bismuth-208', 83, 208, 207.9797425, False), - 'Bi-209': Iso('Bi-209', 'bismuth-209', 83, 209, 208.9803991, False, + 'Co-60': _iso('Co-60', 'cobalt-60', 27, 60, 59.93381630, False, + half_life=166337280.0), + 'Co-61': _iso('Co-61', 'cobalt-61', 27, 61, 60.93247662, False), + 'Co-62': _iso('Co-62', 'cobalt-62', 27, 62, 61.934059, False), + 'Co-63': _iso('Co-63', 'cobalt-63', 27, 63, 62.933600, False), + 'Co-64': _iso('Co-64', 'cobalt-64', 27, 64, 63.935811, False), + 'Co-65': _iso('Co-65', 'cobalt-65', 27, 65, 64.9364621, False), + 'Co-66': _iso('Co-66', 'cobalt-66', 27, 66, 65.939443, False), + 'Co-67': _iso('Co-67', 'cobalt-67', 27, 67, 66.9406096, False), + 'Co-68': _iso('Co-68', 'cobalt-68', 27, 68, 67.94426, False), + 'Co-69': _iso('Co-69', 'cobalt-69', 27, 69, 68.94614, False), + 'Co-70': _iso('Co-70', 'cobalt-70', 27, 70, 
69.94963, False), + 'Co-71': _iso('Co-71', 'cobalt-71', 27, 71, 70.95237, False), + 'Co-72': _iso('Co-72', 'cobalt-72', 27, 72, 71.95729, False), + 'Co-73': _iso('Co-73', 'cobalt-73', 27, 73, 72.96039, False), + 'Co-74': _iso('Co-74', 'cobalt-74', 27, 74, 73.96515, False), + 'Co-75': _iso('Co-75', 'cobalt-75', 27, 75, 74.96876, False), + 'Co-76': _iso('Co-76', 'cobalt-76', 27, 76, 75.97413, False), + 'Ni-48': _iso('Ni-48', 'nickel-48', 28, 48, 48.01769, False), + 'Ni-49': _iso('Ni-49', 'nickel-49', 28, 49, 49.00770, False), + 'Ni-50': _iso('Ni-50', 'nickel-50', 28, 50, 49.99474, False), + 'Ni-51': _iso('Ni-51', 'nickel-51', 28, 51, 50.98611, False), + 'Ni-52': _iso('Ni-52', 'nickel-52', 28, 52, 51.97480, False), + 'Ni-53': _iso('Ni-53', 'nickel-53', 28, 53, 52.968190, False), + 'Ni-54': _iso('Ni-54', 'nickel-54', 28, 54, 53.957892, False), + 'Ni-55': _iso('Ni-55', 'nickel-55', 28, 55, 54.95133063, False), + 'Ni-56': _iso('Ni-56', 'nickel-56', 28, 56, 55.94212855, False), + 'Ni-57': _iso('Ni-57', 'nickel-57', 28, 57, 56.93979218, False), + 'Ni-58': _iso('Ni-58', 'nickel-58', 28, 58, 57.93534241, True, + isotopic_abundance=0.68077), + 'Ni-59': _iso('Ni-59', 'nickel-59', 28, 59, 58.93434620, False), + 'Ni-60': _iso('Ni-60', 'nickel-60', 28, 60, 59.93078588, True, + isotopic_abundance=0.26223), + 'Ni-61': _iso('Ni-61', 'nickel-61', 28, 61, 60.93105557, True, + isotopic_abundance=0.011399), + 'Ni-62': _iso('Ni-62', 'nickel-62', 28, 62, 61.92834537, True, + isotopic_abundance=0.036346), + 'Ni-63': _iso('Ni-63', 'nickel-63', 28, 63, 62.92966963, False), + 'Ni-64': _iso('Ni-64', 'nickel-64', 28, 64, 63.92796682, True, + isotopic_abundance=0.009255), + 'Ni-65': _iso('Ni-65', 'nickel-65', 28, 65, 64.93008517, False), + 'Ni-66': _iso('Ni-66', 'nickel-66', 28, 66, 65.9291393, False), + 'Ni-67': _iso('Ni-67', 'nickel-67', 28, 67, 66.9315694, False), + 'Ni-68': _iso('Ni-68', 'nickel-68', 28, 68, 67.9318688, False), + 'Ni-69': _iso('Ni-69', 'nickel-69', 28, 69, 68.9356103, False), + 'Ni-70': _iso('Ni-70', 'nickel-70', 28, 70, 69.9364313, False), + 'Ni-71': _iso('Ni-71', 'nickel-71', 28, 71, 70.9405190, False), + 'Ni-72': _iso('Ni-72', 'nickel-72', 28, 72, 71.9417859, False), + 'Ni-73': _iso('Ni-73', 'nickel-73', 28, 73, 72.9462067, False), + 'Ni-74': _iso('Ni-74', 'nickel-74', 28, 74, 73.94798, False), + 'Ni-75': _iso('Ni-75', 'nickel-75', 28, 75, 74.95250, False), + 'Ni-76': _iso('Ni-76', 'nickel-76', 28, 76, 75.95533, False), + 'Ni-77': _iso('Ni-77', 'nickel-77', 28, 77, 76.96055, False), + 'Ni-78': _iso('Ni-78', 'nickel-78', 28, 78, 77.96336, False), + 'Ni-79': _iso('Ni-79', 'nickel-79', 28, 79, 78.97025, False), + 'Cu-52': _iso('Cu-52', 'copper-52', 29, 52, 51.99671, False), + 'Cu-53': _iso('Cu-53', 'copper-53', 29, 53, 52.98459, False), + 'Cu-54': _iso('Cu-54', 'copper-54', 29, 54, 53.97666, False), + 'Cu-55': _iso('Cu-55', 'copper-55', 29, 55, 54.96604, False), + 'Cu-56': _iso('Cu-56', 'copper-56', 29, 56, 55.95895, False), + 'Cu-57': _iso('Cu-57', 'copper-57', 29, 57, 56.94921250, False), + 'Cu-58': _iso('Cu-58', 'copper-58', 29, 58, 57.94453305, False), + 'Cu-59': _iso('Cu-59', 'copper-59', 29, 59, 58.93949748, False), + 'Cu-60': _iso('Cu-60', 'copper-60', 29, 60, 59.9373645, False), + 'Cu-61': _iso('Cu-61', 'copper-61', 29, 61, 60.9334576, False), + 'Cu-62': _iso('Cu-62', 'copper-62', 29, 62, 61.93259541, False), + 'Cu-63': _iso('Cu-63', 'copper-63', 29, 63, 62.92959772, True, + isotopic_abundance=0.6915), + 'Cu-64': _iso('Cu-64', 'copper-64', 29, 64, 63.92976434, False), + 'Cu-65': 
_iso('Cu-65', 'copper-65', 29, 65, 64.92778970, True, + isotopic_abundance=0.3085), + 'Cu-66': _iso('Cu-66', 'copper-66', 29, 66, 65.92886903, False), + 'Cu-67': _iso('Cu-67', 'copper-67', 29, 67, 66.9277303, False), + 'Cu-68': _iso('Cu-68', 'copper-68', 29, 68, 67.9296109, False), + 'Cu-69': _iso('Cu-69', 'copper-69', 29, 69, 68.9294293, False), + 'Cu-70': _iso('Cu-70', 'copper-70', 29, 70, 69.9323921, False), + 'Cu-71': _iso('Cu-71', 'copper-71', 29, 71, 70.9326768, False), + 'Cu-72': _iso('Cu-72', 'copper-72', 29, 72, 71.9358203, False), + 'Cu-73': _iso('Cu-73', 'copper-73', 29, 73, 72.9366744, False), + 'Cu-74': _iso('Cu-74', 'copper-74', 29, 74, 73.9398749, False), + 'Cu-75': _iso('Cu-75', 'copper-75', 29, 75, 74.9415226, False), + 'Cu-76': _iso('Cu-76', 'copper-76', 29, 76, 75.9452750, False), + 'Cu-77': _iso('Cu-77', 'copper-77', 29, 77, 76.94792, False), + 'Cu-78': _iso('Cu-78', 'copper-78', 29, 78, 77.95223, False), + 'Cu-79': _iso('Cu-79', 'copper-79', 29, 79, 78.95502, False), + 'Cu-80': _iso('Cu-80', 'copper-80', 29, 80, 79.96089, False), + 'Cu-81': _iso('Cu-81', 'copper-81', 29, 81, 80.96587, False), + 'Cu-82': _iso('Cu-82', 'copper-82', 29, 82, 81.97244, False), + 'Zn-54': _iso('Zn-54', 'zinc-54', 30, 54, 53.99204, False), + 'Zn-55': _iso('Zn-55', 'zinc-55', 30, 55, 54.98398, False), + 'Zn-56': _iso('Zn-56', 'zinc-56', 30, 56, 55.97254, False), + 'Zn-57': _iso('Zn-57', 'zinc-57', 30, 57, 56.96506, False), + 'Zn-58': _iso('Zn-58', 'zinc-58', 30, 58, 57.954591, False), + 'Zn-59': _iso('Zn-59', 'zinc-59', 30, 59, 58.94931266, False), + 'Zn-60': _iso('Zn-60', 'zinc-60', 30, 60, 59.94184210, False), + 'Zn-61': _iso('Zn-61', 'zinc-61', 30, 61, 60.939507, False), + 'Zn-62': _iso('Zn-62', 'zinc-62', 30, 62, 61.93433397, False), + 'Zn-63': _iso('Zn-63', 'zinc-63', 30, 63, 62.9332115, False), + 'Zn-64': _iso('Zn-64', 'zinc-64', 30, 64, 63.92914201, True, + isotopic_abundance=0.4917), + 'Zn-65': _iso('Zn-65', 'zinc-65', 30, 65, 64.92924077, False, + half_life=21095769.599999998), + 'Zn-66': _iso('Zn-66', 'zinc-66', 30, 66, 65.92603381, True, + isotopic_abundance=0.2773), + 'Zn-67': _iso('Zn-67', 'zinc-67', 30, 67, 66.92712775, True, + isotopic_abundance=0.0404), + 'Zn-68': _iso('Zn-68', 'zinc-68', 30, 68, 67.92484455, True, + isotopic_abundance=0.1845), + 'Zn-69': _iso('Zn-69', 'zinc-69', 30, 69, 68.9265507, False), + 'Zn-70': _iso('Zn-70', 'zinc-70', 30, 70, 69.9253192, True, + isotopic_abundance=0.0061), + 'Zn-71': _iso('Zn-71', 'zinc-71', 30, 71, 70.9277196, False), + 'Zn-72': _iso('Zn-72', 'zinc-72', 30, 72, 71.9268428, False), + 'Zn-73': _iso('Zn-73', 'zinc-73', 30, 73, 72.9295826, False), + 'Zn-74': _iso('Zn-74', 'zinc-74', 30, 74, 73.9294073, False), + 'Zn-75': _iso('Zn-75', 'zinc-75', 30, 75, 74.9328402, False), + 'Zn-76': _iso('Zn-76', 'zinc-76', 30, 76, 75.9331150, False), + 'Zn-77': _iso('Zn-77', 'zinc-77', 30, 77, 76.9368872, False), + 'Zn-78': _iso('Zn-78', 'zinc-78', 30, 78, 77.9382892, False), + 'Zn-79': _iso('Zn-79', 'zinc-79', 30, 79, 78.9426381, False), + 'Zn-80': _iso('Zn-80', 'zinc-80', 30, 80, 79.9445529, False), + 'Zn-81': _iso('Zn-81', 'zinc-81', 30, 81, 80.9504026, False), + 'Zn-82': _iso('Zn-82', 'zinc-82', 30, 82, 81.95426, False), + 'Zn-83': _iso('Zn-83', 'zinc-83', 30, 83, 82.96056, False), + 'Zn-84': _iso('Zn-84', 'zinc-84', 30, 84, 83.96521, False), + 'Zn-85': _iso('Zn-85', 'zinc-85', 30, 85, 84.97226, False), + 'Ga-56': _iso('Ga-56', 'gallium-56', 31, 56, 55.99536, False), + 'Ga-57': _iso('Ga-57', 'gallium-57', 31, 57, 56.98320, False), + 'Ga-58': 
_iso('Ga-58', 'gallium-58', 31, 58, 57.97478, False), + 'Ga-59': _iso('Ga-59', 'gallium-59', 31, 59, 58.96353, False), + 'Ga-60': _iso('Ga-60', 'gallium-60', 31, 60, 59.95729, False), + 'Ga-61': _iso('Ga-61', 'gallium-61', 31, 61, 60.949399, False), + 'Ga-62': _iso('Ga-62', 'gallium-62', 31, 62, 61.94419025, False), + 'Ga-63': _iso('Ga-63', 'gallium-63', 31, 63, 62.9392942, False), + 'Ga-64': _iso('Ga-64', 'gallium-64', 31, 64, 63.9368404, False), + 'Ga-65': _iso('Ga-65', 'gallium-65', 31, 65, 64.93273459, False), + 'Ga-66': _iso('Ga-66', 'gallium-66', 31, 66, 65.9315894, False), + 'Ga-67': _iso('Ga-67', 'gallium-67', 31, 67, 66.9282025, False, + half_life=281797.056), + 'Ga-68': _iso('Ga-68', 'gallium-68', 31, 68, 67.9279805, False), + 'Ga-69': _iso('Ga-69', 'gallium-69', 31, 69, 68.9255735, True, + isotopic_abundance=0.60108), + 'Ga-70': _iso('Ga-70', 'gallium-70', 31, 70, 69.9260219, False), + 'Ga-71': _iso('Ga-71', 'gallium-71', 31, 71, 70.92470258, True, + isotopic_abundance=0.39892), + 'Ga-72': _iso('Ga-72', 'gallium-72', 31, 72, 71.92636747, False), + 'Ga-73': _iso('Ga-73', 'gallium-73', 31, 73, 72.9251747, False), + 'Ga-74': _iso('Ga-74', 'gallium-74', 31, 74, 73.9269457, False), + 'Ga-75': _iso('Ga-75', 'gallium-75', 31, 75, 74.9265002, False), + 'Ga-76': _iso('Ga-76', 'gallium-76', 31, 76, 75.9288276, False), + 'Ga-77': _iso('Ga-77', 'gallium-77', 31, 77, 76.9291543, False), + 'Ga-78': _iso('Ga-78', 'gallium-78', 31, 78, 77.9316088, False), + 'Ga-79': _iso('Ga-79', 'gallium-79', 31, 79, 78.9328523, False), + 'Ga-80': _iso('Ga-80', 'gallium-80', 31, 80, 79.9364208, False), + 'Ga-81': _iso('Ga-81', 'gallium-81', 31, 81, 80.9381338, False), + 'Ga-82': _iso('Ga-82', 'gallium-82', 31, 82, 81.9431765, False), + 'Ga-83': _iso('Ga-83', 'gallium-83', 31, 83, 82.9471203, False), + 'Ga-84': _iso('Ga-84', 'gallium-84', 31, 84, 83.95246, False), + 'Ga-85': _iso('Ga-85', 'gallium-85', 31, 85, 84.95699, False), + 'Ga-86': _iso('Ga-86', 'gallium-86', 31, 86, 85.96301, False), + 'Ga-87': _iso('Ga-87', 'gallium-87', 31, 87, 86.96824, False), + 'Ge-58': _iso('Ge-58', 'germanium-58', 32, 58, 57.99172, False), + 'Ge-59': _iso('Ge-59', 'germanium-59', 32, 59, 58.98249, False), + 'Ge-60': _iso('Ge-60', 'germanium-60', 32, 60, 59.97036, False), + 'Ge-61': _iso('Ge-61', 'germanium-61', 32, 61, 60.96379, False), + 'Ge-62': _iso('Ge-62', 'germanium-62', 32, 62, 61.95502, False), + 'Ge-63': _iso('Ge-63', 'germanium-63', 32, 63, 62.949628, False), + 'Ge-64': _iso('Ge-64', 'germanium-64', 32, 64, 63.9416899, False), + 'Ge-65': _iso('Ge-65', 'germanium-65', 32, 65, 64.9393681, False), + 'Ge-66': _iso('Ge-66', 'germanium-66', 32, 66, 65.9338621, False), + 'Ge-67': _iso('Ge-67', 'germanium-67', 32, 67, 66.9327339, False), + 'Ge-68': _iso('Ge-68', 'germanium-68', 32, 68, 67.9280953, False), + 'Ge-69': _iso('Ge-69', 'germanium-69', 32, 69, 68.9279645, False), + 'Ge-70': _iso('Ge-70', 'germanium-70', 32, 70, 69.92424875, True, + isotopic_abundance=0.2057), + 'Ge-71': _iso('Ge-71', 'germanium-71', 32, 71, 70.92495233, False), + 'Ge-72': _iso('Ge-72', 'germanium-72', 32, 72, 71.922075826, True, + isotopic_abundance=0.2745), + 'Ge-73': _iso('Ge-73', 'germanium-73', 32, 73, 72.923458956, True, + isotopic_abundance=0.0775), + 'Ge-74': _iso('Ge-74', 'germanium-74', 32, 74, 73.921177761, True, + isotopic_abundance=0.3650), + 'Ge-75': _iso('Ge-75', 'germanium-75', 32, 75, 74.922858370, False), + 'Ge-76': _iso('Ge-76', 'germanium-76', 32, 76, 75.921402726, False, + isotopic_abundance=0.0773), + 'Ge-77': _iso('Ge-77', 
'germanium-77', 32, 77, 76.923549843, False), + 'Ge-78': _iso('Ge-78', 'germanium-78', 32, 78, 77.9228529, False), + 'Ge-79': _iso('Ge-79', 'germanium-79', 32, 79, 78.925360, False), + 'Ge-80': _iso('Ge-80', 'germanium-80', 32, 80, 79.9253508, False), + 'Ge-81': _iso('Ge-81', 'germanium-81', 32, 81, 80.9288329, False), + 'Ge-82': _iso('Ge-82', 'germanium-82', 32, 82, 81.9297740, False), + 'Ge-83': _iso('Ge-83', 'germanium-83', 32, 83, 82.9345391, False), + 'Ge-84': _iso('Ge-84', 'germanium-84', 32, 84, 83.9375751, False), + 'Ge-85': _iso('Ge-85', 'germanium-85', 32, 85, 84.9429697, False), + 'Ge-86': _iso('Ge-86', 'germanium-86', 32, 86, 85.94658, False), + 'Ge-87': _iso('Ge-87', 'germanium-87', 32, 87, 86.95268, False), + 'Ge-88': _iso('Ge-88', 'germanium-88', 32, 88, 87.95691, False), + 'Ge-89': _iso('Ge-89', 'germanium-89', 32, 89, 88.96379, False), + 'Ge-90': _iso('Ge-90', 'germanium-90', 32, 90, 89.96863, False), + 'As-60': _iso('As-60', 'arsenic-60', 33, 60, 59.99388, False), + 'As-61': _iso('As-61', 'arsenic-61', 33, 61, 60.98112, False), + 'As-62': _iso('As-62', 'arsenic-62', 33, 62, 61.97361, False), + 'As-63': _iso('As-63', 'arsenic-63', 33, 63, 62.96390, False), + 'As-64': _iso('As-64', 'arsenic-64', 33, 64, 63.95743, False), + 'As-65': _iso('As-65', 'arsenic-65', 33, 65, 64.949611, False), + 'As-66': _iso('As-66', 'arsenic-66', 33, 66, 65.9441488, False), + 'As-67': _iso('As-67', 'arsenic-67', 33, 67, 66.93925111, False), + 'As-68': _iso('As-68', 'arsenic-68', 33, 68, 67.9367741, False), + 'As-69': _iso('As-69', 'arsenic-69', 33, 69, 68.932246, False), + 'As-70': _iso('As-70', 'arsenic-70', 33, 70, 69.930926, False), + 'As-71': _iso('As-71', 'arsenic-71', 33, 71, 70.9271138, False), + 'As-72': _iso('As-72', 'arsenic-72', 33, 72, 71.9267523, False), + 'As-73': _iso('As-73', 'arsenic-73', 33, 73, 72.9238291, False), + 'As-74': _iso('As-74', 'arsenic-74', 33, 74, 73.9239286, False), + 'As-75': _iso('As-75', 'arsenic-75', 33, 75, 74.92159457, True, isotopic_abundance=1), - 'Bi-210': Iso('Bi-210', 'bismuth-210', 83, 210, 209.9841207, False), - 'Bi-211': Iso('Bi-211', 'bismuth-211', 83, 211, 210.9872697, False), - 'Bi-212': Iso('Bi-212', 'bismuth-212', 83, 212, 211.9912860, False), - 'Bi-213': Iso('Bi-213', 'bismuth-213', 83, 213, 212.9943851, False), - 'Bi-214': Iso('Bi-214', 'bismuth-214', 83, 214, 213.998712, False), - 'Bi-215': Iso('Bi-215', 'bismuth-215', 83, 215, 215.001770, False), - 'Bi-216': Iso('Bi-216', 'bismuth-216', 83, 216, 216.006306, False), - 'Bi-217': Iso('Bi-217', 'bismuth-217', 83, 217, 217.009372, False), - 'Bi-218': Iso('Bi-218', 'bismuth-218', 83, 218, 218.014188, False), - 'Bi-219': Iso('Bi-219', 'bismuth-219', 83, 219, 219.01748, False), - 'Bi-220': Iso('Bi-220', 'bismuth-220', 83, 220, 220.02235, False), - 'Bi-221': Iso('Bi-221', 'bismuth-221', 83, 221, 221.02587, False), - 'Bi-222': Iso('Bi-222', 'bismuth-222', 83, 222, 222.03078, False), - 'Bi-223': Iso('Bi-223', 'bismuth-223', 83, 223, 223.03450, False), - 'Bi-224': Iso('Bi-224', 'bismuth-224', 83, 224, 224.03947, False), - 'Po-186': Iso('Po-186', 'polonium-186', 84, 186, 186.004393, False), - 'Po-187': Iso('Po-187', 'polonium-187', 84, 187, 187.003041, False), - 'Po-188': Iso('Po-188', 'polonium-188', 84, 188, 187.999416, False), - 'Po-189': Iso('Po-189', 'polonium-189', 84, 189, 188.998473, False), - 'Po-190': Iso('Po-190', 'polonium-190', 84, 190, 189.995101, False), - 'Po-191': Iso('Po-191', 'polonium-191', 84, 191, 190.9945585, False), - 'Po-192': Iso('Po-192', 'polonium-192', 84, 192, 191.991336, 
False), - 'Po-193': Iso('Po-193', 'polonium-193', 84, 193, 192.991026, False), - 'Po-194': Iso('Po-194', 'polonium-194', 84, 194, 193.988186, False), - 'Po-195': Iso('Po-195', 'polonium-195', 84, 195, 194.988126, False), - 'Po-196': Iso('Po-196', 'polonium-196', 84, 196, 195.985526, False), - 'Po-197': Iso('Po-197', 'polonium-197', 84, 197, 196.985660, False), - 'Po-198': Iso('Po-198', 'polonium-198', 84, 198, 197.983389, False), - 'Po-199': Iso('Po-199', 'polonium-199', 84, 199, 198.983667, False), - 'Po-200': Iso('Po-200', 'polonium-200', 84, 200, 199.981799, False), - 'Po-201': Iso('Po-201', 'polonium-201', 84, 201, 200.9822598, False), - 'Po-202': Iso('Po-202', 'polonium-202', 84, 202, 201.980758, False), - 'Po-203': Iso('Po-203', 'polonium-203', 84, 203, 202.9814161, False), - 'Po-204': Iso('Po-204', 'polonium-204', 84, 204, 203.980310, False), - 'Po-205': Iso('Po-205', 'polonium-205', 84, 205, 204.981203, False), - 'Po-206': Iso('Po-206', 'polonium-206', 84, 206, 205.9804740, False), - 'Po-207': Iso('Po-207', 'polonium-207', 84, 207, 206.9815938, False), - 'Po-208': Iso('Po-208', 'polonium-208', 84, 208, 207.9812461, False), - 'Po-209': Iso('Po-209', 'polonium-209', 84, 209, 208.9824308, False), - 'Po-210': Iso('Po-210', 'polonium-210', 84, 210, 209.9828741, False), - 'Po-211': Iso('Po-211', 'polonium-211', 84, 211, 210.9866536, False), - 'Po-212': Iso('Po-212', 'polonium-212', 84, 212, 211.9888684, False), - 'Po-213': Iso('Po-213', 'polonium-213', 84, 213, 212.9928576, False), - 'Po-214': Iso('Po-214', 'polonium-214', 84, 214, 213.9952017, False), - 'Po-215': Iso('Po-215', 'polonium-215', 84, 215, 214.9994201, False), - 'Po-216': Iso('Po-216', 'polonium-216', 84, 216, 216.0019152, False), - 'Po-217': Iso('Po-217', 'polonium-217', 84, 217, 217.0063182, False), - 'Po-218': Iso('Po-218', 'polonium-218', 84, 218, 218.0089735, False), - 'Po-219': Iso('Po-219', 'polonium-219', 84, 219, 219.013614, False), - 'Po-220': Iso('Po-220', 'polonium-220', 84, 220, 220.016386, False), - 'Po-221': Iso('Po-221', 'polonium-221', 84, 221, 221.021228, False), - 'Po-222': Iso('Po-222', 'polonium-222', 84, 222, 222.024140, False), - 'Po-223': Iso('Po-223', 'polonium-223', 84, 223, 223.02907, False), - 'Po-224': Iso('Po-224', 'polonium-224', 84, 224, 224.03211, False), - 'Po-225': Iso('Po-225', 'polonium-225', 84, 225, 225.03707, False), - 'Po-226': Iso('Po-226', 'polonium-226', 84, 226, 226.04031, False), - 'Po-227': Iso('Po-227', 'polonium-227', 84, 227, 227.04539, False), - 'At-191': Iso('At-191', 'astatine-191', 85, 191, 191.004148, False), - 'At-192': Iso('At-192', 'astatine-192', 85, 192, 192.003152, False), - 'At-193': Iso('At-193', 'astatine-193', 85, 193, 192.999927, False), - 'At-194': Iso('At-194', 'astatine-194', 85, 194, 193.999236, False), - 'At-195': Iso('At-195', 'astatine-195', 85, 195, 194.9962685, False), - 'At-196': Iso('At-196', 'astatine-196', 85, 196, 195.995800, False), - 'At-197': Iso('At-197', 'astatine-197', 85, 197, 196.993189, False), - 'At-198': Iso('At-198', 'astatine-198', 85, 198, 197.992784, False), - 'At-199': Iso('At-199', 'astatine-199', 85, 199, 198.9905277, False), - 'At-200': Iso('At-200', 'astatine-200', 85, 200, 199.990351, False), - 'At-201': Iso('At-201', 'astatine-201', 85, 201, 200.9884171, False), - 'At-202': Iso('At-202', 'astatine-202', 85, 202, 201.988630, False), - 'At-203': Iso('At-203', 'astatine-203', 85, 203, 202.986943, False), - 'At-204': Iso('At-204', 'astatine-204', 85, 204, 203.987251, False), - 'At-205': Iso('At-205', 'astatine-205', 85, 205, 
204.986076, False), - 'At-206': Iso('At-206', 'astatine-206', 85, 206, 205.986657, False), - 'At-207': Iso('At-207', 'astatine-207', 85, 207, 206.985800, False), - 'At-208': Iso('At-208', 'astatine-208', 85, 208, 207.9866133, False), - 'At-209': Iso('At-209', 'astatine-209', 85, 209, 208.9861702, False), - 'At-210': Iso('At-210', 'astatine-210', 85, 210, 209.9871479, False), - 'At-211': Iso('At-211', 'astatine-211', 85, 211, 210.9874966, False), - 'At-212': Iso('At-212', 'astatine-212', 85, 212, 211.9907377, False), - 'At-213': Iso('At-213', 'astatine-213', 85, 213, 212.9929370, False), - 'At-214': Iso('At-214', 'astatine-214', 85, 214, 213.9963721, False), - 'At-215': Iso('At-215', 'astatine-215', 85, 215, 214.9986528, False), - 'At-216': Iso('At-216', 'astatine-216', 85, 216, 216.0024236, False), - 'At-217': Iso('At-217', 'astatine-217', 85, 217, 217.0047192, False), - 'At-218': Iso('At-218', 'astatine-218', 85, 218, 218.008695, False), - 'At-219': Iso('At-219', 'astatine-219', 85, 219, 219.0111618, False), - 'At-220': Iso('At-220', 'astatine-220', 85, 220, 220.015433, False), - 'At-221': Iso('At-221', 'astatine-221', 85, 221, 221.018017, False), - 'At-222': Iso('At-222', 'astatine-222', 85, 222, 222.022494, False), - 'At-223': Iso('At-223', 'astatine-223', 85, 223, 223.025151, False), - 'At-224': Iso('At-224', 'astatine-224', 85, 224, 224.029749, False), - 'At-225': Iso('At-225', 'astatine-225', 85, 225, 225.03263, False), - 'At-226': Iso('At-226', 'astatine-226', 85, 226, 226.03716, False), - 'At-227': Iso('At-227', 'astatine-227', 85, 227, 227.04024, False), - 'At-228': Iso('At-228', 'astatine-228', 85, 228, 228.04475, False), - 'At-229': Iso('At-229', 'astatine-229', 85, 229, 229.04812, False), - 'Rn-193': Iso('Rn-193', 'radon-193', 86, 193, 193.009708, False), - 'Rn-194': Iso('Rn-194', 'radon-194', 86, 194, 194.006144, False), - 'Rn-195': Iso('Rn-195', 'radon-195', 86, 195, 195.005422, False), - 'Rn-196': Iso('Rn-196', 'radon-196', 86, 196, 196.002116, False), - 'Rn-197': Iso('Rn-197', 'radon-197', 86, 197, 197.001585, False), - 'Rn-198': Iso('Rn-198', 'radon-198', 86, 198, 197.998679, False), - 'Rn-199': Iso('Rn-199', 'radon-199', 86, 199, 198.998390, False), - 'Rn-200': Iso('Rn-200', 'radon-200', 86, 200, 199.995690, False), - 'Rn-201': Iso('Rn-201', 'radon-201', 86, 201, 200.995628, False), - 'Rn-202': Iso('Rn-202', 'radon-202', 86, 202, 201.993264, False), - 'Rn-203': Iso('Rn-203', 'radon-203', 86, 203, 202.993388, False), - 'Rn-204': Iso('Rn-204', 'radon-204', 86, 204, 203.991430, False), - 'Rn-205': Iso('Rn-205', 'radon-205', 86, 205, 204.991719, False), - 'Rn-206': Iso('Rn-206', 'radon-206', 86, 206, 205.990214, False), - 'Rn-207': Iso('Rn-207', 'radon-207', 86, 207, 206.9907303, False), - 'Rn-208': Iso('Rn-208', 'radon-208', 86, 208, 207.989635, False), - 'Rn-209': Iso('Rn-209', 'radon-209', 86, 209, 208.990415, False), - 'Rn-210': Iso('Rn-210', 'radon-210', 86, 210, 209.9896891, False), - 'Rn-211': Iso('Rn-211', 'radon-211', 86, 211, 210.9906011, False), - 'Rn-212': Iso('Rn-212', 'radon-212', 86, 212, 211.9907039, False), - 'Rn-213': Iso('Rn-213', 'radon-213', 86, 213, 212.9938831, False), - 'Rn-214': Iso('Rn-214', 'radon-214', 86, 214, 213.9953630, False), - 'Rn-215': Iso('Rn-215', 'radon-215', 86, 215, 214.9987459, False), - 'Rn-216': Iso('Rn-216', 'radon-216', 86, 216, 216.0002719, False), - 'Rn-217': Iso('Rn-217', 'radon-217', 86, 217, 217.0039280, False), - 'Rn-218': Iso('Rn-218', 'radon-218', 86, 218, 218.0056016, False), - 'Rn-219': Iso('Rn-219', 'radon-219', 86, 
219, 219.0094804, False), - 'Rn-220': Iso('Rn-220', 'radon-220', 86, 220, 220.0113941, False), - 'Rn-221': Iso('Rn-221', 'radon-221', 86, 221, 221.0155371, False), - 'Rn-222': Iso('Rn-222', 'radon-222', 86, 222, 222.0175782, False), - 'Rn-223': Iso('Rn-223', 'radon-223', 86, 223, 223.0218893, False), - 'Rn-224': Iso('Rn-224', 'radon-224', 86, 224, 224.024096, False), - 'Rn-225': Iso('Rn-225', 'radon-225', 86, 225, 225.028486, False), - 'Rn-226': Iso('Rn-226', 'radon-226', 86, 226, 226.030861, False), - 'Rn-227': Iso('Rn-227', 'radon-227', 86, 227, 227.035304, False), - 'Rn-228': Iso('Rn-228', 'radon-228', 86, 228, 228.037835, False), - 'Rn-229': Iso('Rn-229', 'radon-229', 86, 229, 229.042257, False), - 'Rn-230': Iso('Rn-230', 'radon-230', 86, 230, 230.04514, False), - 'Rn-231': Iso('Rn-231', 'radon-231', 86, 231, 231.04987, False), - 'Fr-199': Iso('Fr-199', 'francium-199', 87, 199, 199.007259, False), - 'Fr-200': Iso('Fr-200', 'francium-200', 87, 200, 200.006586, False), - 'Fr-201': Iso('Fr-201', 'francium-201', 87, 201, 201.003867, False), - 'Fr-202': Iso('Fr-202', 'francium-202', 87, 202, 202.003320, False), - 'Fr-203': Iso('Fr-203', 'francium-203', 87, 203, 203.0009407, False), - 'Fr-204': Iso('Fr-204', 'francium-204', 87, 204, 204.000652, False), - 'Fr-205': Iso('Fr-205', 'francium-205', 87, 205, 204.9985939, False), - 'Fr-206': Iso('Fr-206', 'francium-206', 87, 206, 205.998666, False), - 'Fr-207': Iso('Fr-207', 'francium-207', 87, 207, 206.996946, False), - 'Fr-208': Iso('Fr-208', 'francium-208', 87, 208, 207.997138, False), - 'Fr-209': Iso('Fr-209', 'francium-209', 87, 209, 208.995955, False), - 'Fr-210': Iso('Fr-210', 'francium-210', 87, 210, 209.996422, False), - 'Fr-211': Iso('Fr-211', 'francium-211', 87, 211, 210.995556, False), - 'Fr-212': Iso('Fr-212', 'francium-212', 87, 212, 211.9962257, False), - 'Fr-213': Iso('Fr-213', 'francium-213', 87, 213, 212.9961860, False), - 'Fr-214': Iso('Fr-214', 'francium-214', 87, 214, 213.9989713, False), - 'Fr-215': Iso('Fr-215', 'francium-215', 87, 215, 215.0003418, False), - 'Fr-216': Iso('Fr-216', 'francium-216', 87, 216, 216.0031899, False), - 'Fr-217': Iso('Fr-217', 'francium-217', 87, 217, 217.0046323, False), - 'Fr-218': Iso('Fr-218', 'francium-218', 87, 218, 218.0075787, False), - 'Fr-219': Iso('Fr-219', 'francium-219', 87, 219, 219.0092524, False), - 'Fr-220': Iso('Fr-220', 'francium-220', 87, 220, 220.0123277, False), - 'Fr-221': Iso('Fr-221', 'francium-221', 87, 221, 221.0142552, False), - 'Fr-222': Iso('Fr-222', 'francium-222', 87, 222, 222.017552, False), - 'Fr-223': Iso('Fr-223', 'francium-223', 87, 223, 223.0197360, False), - 'Fr-224': Iso('Fr-224', 'francium-224', 87, 224, 224.023398, False), - 'Fr-225': Iso('Fr-225', 'francium-225', 87, 225, 225.025573, False), - 'Fr-226': Iso('Fr-226', 'francium-226', 87, 226, 226.029566, False), - 'Fr-227': Iso('Fr-227', 'francium-227', 87, 227, 227.031869, False), - 'Fr-228': Iso('Fr-228', 'francium-228', 87, 228, 228.035823, False), - 'Fr-229': Iso('Fr-229', 'francium-229', 87, 229, 229.038298, False), - 'Fr-230': Iso('Fr-230', 'francium-230', 87, 230, 230.042416, False), - 'Fr-231': Iso('Fr-231', 'francium-231', 87, 231, 231.045158, False), - 'Fr-232': Iso('Fr-232', 'francium-232', 87, 232, 232.04937, False), - 'Fr-233': Iso('Fr-233', 'francium-233', 87, 233, 233.05264, False), - 'Ra-201': Iso('Ra-201', 'radium-201', 88, 201, 201.01271, False), - 'Ra-202': Iso('Ra-202', 'radium-202', 88, 202, 202.009760, False), - 'Ra-203': Iso('Ra-203', 'radium-203', 88, 203, 203.009304, False), - 
'Ra-204': Iso('Ra-204', 'radium-204', 88, 204, 204.006492, False), - 'Ra-205': Iso('Ra-205', 'radium-205', 88, 205, 205.006268, False), - 'Ra-206': Iso('Ra-206', 'radium-206', 88, 206, 206.003828, False), - 'Ra-207': Iso('Ra-207', 'radium-207', 88, 207, 207.003799, False), - 'Ra-208': Iso('Ra-208', 'radium-208', 88, 208, 208.001841, False), - 'Ra-209': Iso('Ra-209', 'radium-209', 88, 209, 209.001990, False), - 'Ra-210': Iso('Ra-210', 'radium-210', 88, 210, 210.000494, False), - 'Ra-211': Iso('Ra-211', 'radium-211', 88, 211, 211.0008932, False), - 'Ra-212': Iso('Ra-212', 'radium-212', 88, 212, 211.999787, False), - 'Ra-213': Iso('Ra-213', 'radium-213', 88, 213, 213.000384, False), - 'Ra-214': Iso('Ra-214', 'radium-214', 88, 214, 214.0000997, False), - 'Ra-215': Iso('Ra-215', 'radium-215', 88, 215, 215.0027204, False), - 'Ra-216': Iso('Ra-216', 'radium-216', 88, 216, 216.0035334, False), - 'Ra-217': Iso('Ra-217', 'radium-217', 88, 217, 217.0063207, False), - 'Ra-218': Iso('Ra-218', 'radium-218', 88, 218, 218.007141, False), - 'Ra-219': Iso('Ra-219', 'radium-219', 88, 219, 219.0100855, False), - 'Ra-220': Iso('Ra-220', 'radium-220', 88, 220, 220.0110259, False), - 'Ra-221': Iso('Ra-221', 'radium-221', 88, 221, 221.0139177, False), - 'Ra-222': Iso('Ra-222', 'radium-222', 88, 222, 222.0153748, False), - 'Ra-223': Iso('Ra-223', 'radium-223', 88, 223, 223.0185023, False), - 'Ra-224': Iso('Ra-224', 'radium-224', 88, 224, 224.0202120, False), - 'Ra-225': Iso('Ra-225', 'radium-225', 88, 225, 225.0236119, False), - 'Ra-226': Iso('Ra-226', 'radium-226', 88, 226, 226.0254103, False), - 'Ra-227': Iso('Ra-227', 'radium-227', 88, 227, 227.0291783, False), - 'Ra-228': Iso('Ra-228', 'radium-228', 88, 228, 228.0310707, False), - 'Ra-229': Iso('Ra-229', 'radium-229', 88, 229, 229.034942, False), - 'Ra-230': Iso('Ra-230', 'radium-230', 88, 230, 230.037055, False), - 'Ra-231': Iso('Ra-231', 'radium-231', 88, 231, 231.041027, False), - 'Ra-232': Iso('Ra-232', 'radium-232', 88, 232, 232.0434753, False), - 'Ra-233': Iso('Ra-233', 'radium-233', 88, 233, 233.047582, False), - 'Ra-234': Iso('Ra-234', 'radium-234', 88, 234, 234.050342, False), - 'Ra-235': Iso('Ra-235', 'radium-235', 88, 235, 235.05497, False), - 'Ac-206': Iso('Ac-206', 'actinium-206', 89, 206, 206.014452, False), - 'Ac-207': Iso('Ac-207', 'actinium-207', 89, 207, 207.011966, False), - 'Ac-208': Iso('Ac-208', 'actinium-208', 89, 208, 208.011550, False), - 'Ac-209': Iso('Ac-209', 'actinium-209', 89, 209, 209.009495, False), - 'Ac-210': Iso('Ac-210', 'actinium-210', 89, 210, 210.009436, False), - 'Ac-211': Iso('Ac-211', 'actinium-211', 89, 211, 211.007732, False), - 'Ac-212': Iso('Ac-212', 'actinium-212', 89, 212, 212.007813, False), - 'Ac-213': Iso('Ac-213', 'actinium-213', 89, 213, 213.006609, False), - 'Ac-214': Iso('Ac-214', 'actinium-214', 89, 214, 214.006918, False), - 'Ac-215': Iso('Ac-215', 'actinium-215', 89, 215, 215.006475, False), - 'Ac-216': Iso('Ac-216', 'actinium-216', 89, 216, 216.008743, False), - 'Ac-217': Iso('Ac-217', 'actinium-217', 89, 217, 217.009344, False), - 'Ac-218': Iso('Ac-218', 'actinium-218', 89, 218, 218.011642, False), - 'Ac-219': Iso('Ac-219', 'actinium-219', 89, 219, 219.012421, False), - 'Ac-220': Iso('Ac-220', 'actinium-220', 89, 220, 220.0147549, False), - 'Ac-221': Iso('Ac-221', 'actinium-221', 89, 221, 221.015592, False), - 'Ac-222': Iso('Ac-222', 'actinium-222', 89, 222, 222.0178442, False), - 'Ac-223': Iso('Ac-223', 'actinium-223', 89, 223, 223.0191377, False), - 'Ac-224': Iso('Ac-224', 'actinium-224', 89, 224, 
224.0217232, False), - 'Ac-225': Iso('Ac-225', 'actinium-225', 89, 225, 225.0232300, False), - 'Ac-226': Iso('Ac-226', 'actinium-226', 89, 226, 226.0260984, False), - 'Ac-227': Iso('Ac-227', 'actinium-227', 89, 227, 227.0277523, False), - 'Ac-228': Iso('Ac-228', 'actinium-228', 89, 228, 228.0310215, False), - 'Ac-229': Iso('Ac-229', 'actinium-229', 89, 229, 229.032956, False), - 'Ac-230': Iso('Ac-230', 'actinium-230', 89, 230, 230.036327, False), - 'Ac-231': Iso('Ac-231', 'actinium-231', 89, 231, 231.038393, False), - 'Ac-232': Iso('Ac-232', 'actinium-232', 89, 232, 232.042034, False), - 'Ac-233': Iso('Ac-233', 'actinium-233', 89, 233, 233.044346, False), - 'Ac-234': Iso('Ac-234', 'actinium-234', 89, 234, 234.048139, False), - 'Ac-235': Iso('Ac-235', 'actinium-235', 89, 235, 235.050840, False), - 'Ac-236': Iso('Ac-236', 'actinium-236', 89, 236, 236.054988, False), - 'Ac-237': Iso('Ac-237', 'actinium-237', 89, 237, 237.05827, False), - 'Th-208': Iso('Th-208', 'thorium-208', 90, 208, 208.017900, False), - 'Th-209': Iso('Th-209', 'thorium-209', 90, 209, 209.017753, False), - 'Th-210': Iso('Th-210', 'thorium-210', 90, 210, 210.015094, False), - 'Th-211': Iso('Th-211', 'thorium-211', 90, 211, 211.014929, False), - 'Th-212': Iso('Th-212', 'thorium-212', 90, 212, 212.012988, False), - 'Th-213': Iso('Th-213', 'thorium-213', 90, 213, 213.013009, False), - 'Th-214': Iso('Th-214', 'thorium-214', 90, 214, 214.011500, False), - 'Th-215': Iso('Th-215', 'thorium-215', 90, 215, 215.0117248, False), - 'Th-216': Iso('Th-216', 'thorium-216', 90, 216, 216.011056, False), - 'Th-217': Iso('Th-217', 'thorium-217', 90, 217, 217.013117, False), - 'Th-218': Iso('Th-218', 'thorium-218', 90, 218, 218.013276, False), - 'Th-219': Iso('Th-219', 'thorium-219', 90, 219, 219.015537, False), - 'Th-220': Iso('Th-220', 'thorium-220', 90, 220, 220.015748, False), - 'Th-221': Iso('Th-221', 'thorium-221', 90, 221, 221.018184, False), - 'Th-222': Iso('Th-222', 'thorium-222', 90, 222, 222.018469, False), - 'Th-223': Iso('Th-223', 'thorium-223', 90, 223, 223.0208119, False), - 'Th-224': Iso('Th-224', 'thorium-224', 90, 224, 224.021464, False), - 'Th-225': Iso('Th-225', 'thorium-225', 90, 225, 225.0239514, False), - 'Th-226': Iso('Th-226', 'thorium-226', 90, 226, 226.0249034, False), - 'Th-227': Iso('Th-227', 'thorium-227', 90, 227, 227.0277042, False), - 'Th-228': Iso('Th-228', 'thorium-228', 90, 228, 228.0287413, False, - half_life=60359040.0), - 'Th-229': Iso('Th-229', 'thorium-229', 90, 229, 229.0317627, False), - 'Th-230': Iso('Th-230', 'thorium-230', 90, 230, 230.0331341, False), - 'Th-231': Iso('Th-231', 'thorium-231', 90, 231, 231.0363046, False), - 'Th-232': Iso('Th-232', 'thorium-232', 90, 232, 232.0380558, False, + 'As-76': _iso('As-76', 'arsenic-76', 33, 76, 75.92239202, False), + 'As-77': _iso('As-77', 'arsenic-77', 33, 77, 76.9206476, False), + 'As-78': _iso('As-78', 'arsenic-78', 33, 78, 77.921828, False), + 'As-79': _iso('As-79', 'arsenic-79', 33, 79, 78.9209484, False), + 'As-80': _iso('As-80', 'arsenic-80', 33, 80, 79.9224746, False), + 'As-81': _iso('As-81', 'arsenic-81', 33, 81, 80.9221323, False), + 'As-82': _iso('As-82', 'arsenic-82', 33, 82, 81.9247412, False), + 'As-83': _iso('As-83', 'arsenic-83', 33, 83, 82.9252069, False), + 'As-84': _iso('As-84', 'arsenic-84', 33, 84, 83.9293033, False), + 'As-85': _iso('As-85', 'arsenic-85', 33, 85, 84.9321637, False), + 'As-86': _iso('As-86', 'arsenic-86', 33, 86, 85.9367015, False), + 'As-87': _iso('As-87', 'arsenic-87', 33, 87, 86.9402917, False), + 'As-88': 
_iso('As-88', 'arsenic-88', 33, 88, 87.94555, False), + 'As-89': _iso('As-89', 'arsenic-89', 33, 89, 88.94976, False), + 'As-90': _iso('As-90', 'arsenic-90', 33, 90, 89.95563, False), + 'As-91': _iso('As-91', 'arsenic-91', 33, 91, 90.96039, False), + 'As-92': _iso('As-92', 'arsenic-92', 33, 92, 91.96674, False), + 'Se-64': _iso('Se-64', 'selenium-64', 34, 64, 63.97109, False), + 'Se-65': _iso('Se-65', 'selenium-65', 34, 65, 64.96440, False), + 'Se-66': _iso('Se-66', 'selenium-66', 34, 66, 65.95559, False), + 'Se-67': _iso('Se-67', 'selenium-67', 34, 67, 66.949994, False), + 'Se-68': _iso('Se-68', 'selenium-68', 34, 68, 67.94182524, False), + 'Se-69': _iso('Se-69', 'selenium-69', 34, 69, 68.9394148, False), + 'Se-70': _iso('Se-70', 'selenium-70', 34, 70, 69.9335155, False), + 'Se-71': _iso('Se-71', 'selenium-71', 34, 71, 70.9322094, False), + 'Se-72': _iso('Se-72', 'selenium-72', 34, 72, 71.9271405, False), + 'Se-73': _iso('Se-73', 'selenium-73', 34, 73, 72.9267549, False), + 'Se-74': _iso('Se-74', 'selenium-74', 34, 74, 73.922475934, True, + isotopic_abundance=0.0089), + 'Se-75': _iso('Se-75', 'selenium-75', 34, 75, 74.922522870, False, + half_life=10351497.6), + 'Se-76': _iso('Se-76', 'selenium-76', 34, 76, 75.919213704, True, + isotopic_abundance=0.0937), + 'Se-77': _iso('Se-77', 'selenium-77', 34, 77, 76.919914154, True, + isotopic_abundance=0.0763), + 'Se-78': _iso('Se-78', 'selenium-78', 34, 78, 77.91730928, True, + isotopic_abundance=0.2377), + 'Se-79': _iso('Se-79', 'selenium-79', 34, 79, 78.91849929, False), + 'Se-80': _iso('Se-80', 'selenium-80', 34, 80, 79.9165218, True, + isotopic_abundance=0.4961), + 'Se-81': _iso('Se-81', 'selenium-81', 34, 81, 80.9179930, False), + 'Se-82': _iso('Se-82', 'selenium-82', 34, 82, 81.9166995, False, + isotopic_abundance=0.0873), + 'Se-83': _iso('Se-83', 'selenium-83', 34, 83, 82.9191186, False), + 'Se-84': _iso('Se-84', 'selenium-84', 34, 84, 83.9184668, False), + 'Se-85': _iso('Se-85', 'selenium-85', 34, 85, 84.9222608, False), + 'Se-86': _iso('Se-86', 'selenium-86', 34, 86, 85.9243117, False), + 'Se-87': _iso('Se-87', 'selenium-87', 34, 87, 86.9286886, False), + 'Se-88': _iso('Se-88', 'selenium-88', 34, 88, 87.9314175, False), + 'Se-89': _iso('Se-89', 'selenium-89', 34, 89, 88.9366691, False), + 'Se-90': _iso('Se-90', 'selenium-90', 34, 90, 89.94010, False), + 'Se-91': _iso('Se-91', 'selenium-91', 34, 91, 90.94596, False), + 'Se-92': _iso('Se-92', 'selenium-92', 34, 92, 91.94984, False), + 'Se-93': _iso('Se-93', 'selenium-93', 34, 93, 92.95629, False), + 'Se-94': _iso('Se-94', 'selenium-94', 34, 94, 93.96049, False), + 'Se-95': _iso('Se-95', 'selenium-95', 34, 95, 94.96730, False), + 'Br-67': _iso('Br-67', 'bromine-67', 35, 67, 66.96465, False), + 'Br-68': _iso('Br-68', 'bromine-68', 35, 68, 67.95873, False), + 'Br-69': _iso('Br-69', 'bromine-69', 35, 69, 68.950497, False), + 'Br-70': _iso('Br-70', 'bromine-70', 35, 70, 69.944792, False), + 'Br-71': _iso('Br-71', 'bromine-71', 35, 71, 70.9393422, False), + 'Br-72': _iso('Br-72', 'bromine-72', 35, 72, 71.9365886, False), + 'Br-73': _iso('Br-73', 'bromine-73', 35, 73, 72.9316715, False), + 'Br-74': _iso('Br-74', 'bromine-74', 35, 74, 73.9299102, False), + 'Br-75': _iso('Br-75', 'bromine-75', 35, 75, 74.9258105, False), + 'Br-76': _iso('Br-76', 'bromine-76', 35, 76, 75.924542, False), + 'Br-77': _iso('Br-77', 'bromine-77', 35, 77, 76.9213792, False), + 'Br-78': _iso('Br-78', 'bromine-78', 35, 78, 77.9211459, False), + 'Br-79': _iso('Br-79', 'bromine-79', 35, 79, 78.9183376, True, + 
isotopic_abundance=0.5069), + 'Br-80': _iso('Br-80', 'bromine-80', 35, 80, 79.9185298, False), + 'Br-81': _iso('Br-81', 'bromine-81', 35, 81, 80.9162897, True, + isotopic_abundance=0.4931), + 'Br-82': _iso('Br-82', 'bromine-82', 35, 82, 81.9168032, False), + 'Br-83': _iso('Br-83', 'bromine-83', 35, 83, 82.9151756, False), + 'Br-84': _iso('Br-84', 'bromine-84', 35, 84, 83.916496, False), + 'Br-85': _iso('Br-85', 'bromine-85', 35, 85, 84.9156458, False), + 'Br-86': _iso('Br-86', 'bromine-86', 35, 86, 85.9188054, False), + 'Br-87': _iso('Br-87', 'bromine-87', 35, 87, 86.9206740, False), + 'Br-88': _iso('Br-88', 'bromine-88', 35, 88, 87.9240833, False), + 'Br-89': _iso('Br-89', 'bromine-89', 35, 89, 88.9267046, False), + 'Br-90': _iso('Br-90', 'bromine-90', 35, 90, 89.9312928, False), + 'Br-91': _iso('Br-91', 'bromine-91', 35, 91, 90.9343986, False), + 'Br-92': _iso('Br-92', 'bromine-92', 35, 92, 91.9396316, False), + 'Br-93': _iso('Br-93', 'bromine-93', 35, 93, 92.94313, False), + 'Br-94': _iso('Br-94', 'bromine-94', 35, 94, 93.94890, False), + 'Br-95': _iso('Br-95', 'bromine-95', 35, 95, 94.95301, False), + 'Br-96': _iso('Br-96', 'bromine-96', 35, 96, 95.95903, False), + 'Br-97': _iso('Br-97', 'bromine-97', 35, 97, 96.96344, False), + 'Br-98': _iso('Br-98', 'bromine-98', 35, 98, 97.96946, False), + 'Kr-69': _iso('Kr-69', 'krypton-69', 36, 69, 68.96518, False), + 'Kr-70': _iso('Kr-70', 'krypton-70', 36, 70, 69.95604, False), + 'Kr-71': _iso('Kr-71', 'krypton-71', 36, 71, 70.95027, False), + 'Kr-72': _iso('Kr-72', 'krypton-72', 36, 72, 71.9420924, False), + 'Kr-73': _iso('Kr-73', 'krypton-73', 36, 73, 72.9392892, False), + 'Kr-74': _iso('Kr-74', 'krypton-74', 36, 74, 73.9330840, False), + 'Kr-75': _iso('Kr-75', 'krypton-75', 36, 75, 74.9309457, False), + 'Kr-76': _iso('Kr-76', 'krypton-76', 36, 76, 75.9259103, False), + 'Kr-77': _iso('Kr-77', 'krypton-77', 36, 77, 76.9246700, False), + 'Kr-78': _iso('Kr-78', 'krypton-78', 36, 78, 77.92036494, True, + isotopic_abundance=0.00355), + 'Kr-79': _iso('Kr-79', 'krypton-79', 36, 79, 78.9200829, False), + 'Kr-80': _iso('Kr-80', 'krypton-80', 36, 80, 79.91637808, True, + isotopic_abundance=0.02286), + 'Kr-81': _iso('Kr-81', 'krypton-81', 36, 81, 80.9165912, False), + 'Kr-82': _iso('Kr-82', 'krypton-82', 36, 82, 81.91348273, True, + isotopic_abundance=0.11593), + 'Kr-83': _iso('Kr-83', 'krypton-83', 36, 83, 82.91412716, True, + isotopic_abundance=0.11500), + 'Kr-84': _iso('Kr-84', 'krypton-84', 36, 84, 83.9114977282, True, + isotopic_abundance=0.56987), + 'Kr-85': _iso('Kr-85', 'krypton-85', 36, 85, 84.9125273, False, + half_life=340044480.0), + 'Kr-86': _iso('Kr-86', 'krypton-86', 36, 86, 85.9106106269, True, + isotopic_abundance=0.17279), + 'Kr-87': _iso('Kr-87', 'krypton-87', 36, 87, 86.91335476, False), + 'Kr-88': _iso('Kr-88', 'krypton-88', 36, 88, 87.9144479, False), + 'Kr-89': _iso('Kr-89', 'krypton-89', 36, 89, 88.9178355, False), + 'Kr-90': _iso('Kr-90', 'krypton-90', 36, 90, 89.9195279, False), + 'Kr-91': _iso('Kr-91', 'krypton-91', 36, 91, 90.9238063, False), + 'Kr-92': _iso('Kr-92', 'krypton-92', 36, 92, 91.9261731, False), + 'Kr-93': _iso('Kr-93', 'krypton-93', 36, 93, 92.9311472, False), + 'Kr-94': _iso('Kr-94', 'krypton-94', 36, 94, 93.934140, False), + 'Kr-95': _iso('Kr-95', 'krypton-95', 36, 95, 94.939711, False), + 'Kr-96': _iso('Kr-96', 'krypton-96', 36, 96, 95.943017, False), + 'Kr-97': _iso('Kr-97', 'krypton-97', 36, 97, 96.94909, False), + 'Kr-98': _iso('Kr-98', 'krypton-98', 36, 98, 97.95243, False), + 'Kr-99': _iso('Kr-99', 
'krypton-99', 36, 99, 98.95839, False), + 'Kr-100': _iso('Kr-100', 'krypton-100', 36, 100, 99.96237, False), + 'Kr-101': _iso('Kr-101', 'krypton-101', 36, 101, 100.96873, False), + 'Rb-71': _iso('Rb-71', 'rubidium-71', 37, 71, 70.96532, False), + 'Rb-72': _iso('Rb-72', 'rubidium-72', 37, 72, 71.95908, False), + 'Rb-73': _iso('Rb-73', 'rubidium-73', 37, 73, 72.95053, False), + 'Rb-74': _iso('Rb-74', 'rubidium-74', 37, 74, 73.9442659, False), + 'Rb-75': _iso('Rb-75', 'rubidium-75', 37, 75, 74.9385732, False), + 'Rb-76': _iso('Rb-76', 'rubidium-76', 37, 76, 75.9350730, False), + 'Rb-77': _iso('Rb-77', 'rubidium-77', 37, 77, 76.9304016, False), + 'Rb-78': _iso('Rb-78', 'rubidium-78', 37, 78, 77.9281419, False), + 'Rb-79': _iso('Rb-79', 'rubidium-79', 37, 79, 78.9239899, False), + 'Rb-80': _iso('Rb-80', 'rubidium-80', 37, 80, 79.9225164, False), + 'Rb-81': _iso('Rb-81', 'rubidium-81', 37, 81, 80.9189939, False), + 'Rb-82': _iso('Rb-82', 'rubidium-82', 37, 82, 81.9182090, False), + 'Rb-83': _iso('Rb-83', 'rubidium-83', 37, 83, 82.9151142, False), + 'Rb-84': _iso('Rb-84', 'rubidium-84', 37, 84, 83.9143752, False), + 'Rb-85': _iso('Rb-85', 'rubidium-85', 37, 85, 84.9117897379, True, + isotopic_abundance=0.7217), + 'Rb-86': _iso('Rb-86', 'rubidium-86', 37, 86, 85.91116743, False), + 'Rb-87': _iso('Rb-87', 'rubidium-87', 37, 87, 86.9091805310, False, + isotopic_abundance=0.2783), + 'Rb-88': _iso('Rb-88', 'rubidium-88', 37, 88, 87.91131559, False), + 'Rb-89': _iso('Rb-89', 'rubidium-89', 37, 89, 88.9122783, False), + 'Rb-90': _iso('Rb-90', 'rubidium-90', 37, 90, 89.9147985, False), + 'Rb-91': _iso('Rb-91', 'rubidium-91', 37, 91, 90.9165372, False), + 'Rb-92': _iso('Rb-92', 'rubidium-92', 37, 92, 91.9197284, False), + 'Rb-93': _iso('Rb-93', 'rubidium-93', 37, 93, 92.9220393, False), + 'Rb-94': _iso('Rb-94', 'rubidium-94', 37, 94, 93.9263948, False), + 'Rb-95': _iso('Rb-95', 'rubidium-95', 37, 95, 94.929260, False), + 'Rb-96': _iso('Rb-96', 'rubidium-96', 37, 96, 95.9341334, False), + 'Rb-97': _iso('Rb-97', 'rubidium-97', 37, 97, 96.9371771, False), + 'Rb-98': _iso('Rb-98', 'rubidium-98', 37, 98, 97.9416869, False), + 'Rb-99': _iso('Rb-99', 'rubidium-99', 37, 99, 98.94503, False), + 'Rb-100': _iso('Rb-100', 'rubidium-100', 37, 100, 99.95003, False), + 'Rb-101': _iso('Rb-101', 'rubidium-101', 37, 101, 100.95404, False), + 'Rb-102': _iso('Rb-102', 'rubidium-102', 37, 102, 101.95952, False), + 'Rb-103': _iso('Rb-103', 'rubidium-103', 37, 103, 102.96392, False), + 'Sr-73': _iso('Sr-73', 'strontium-73', 38, 73, 72.96570, False), + 'Sr-74': _iso('Sr-74', 'strontium-74', 38, 74, 73.95617, False), + 'Sr-75': _iso('Sr-75', 'strontium-75', 38, 75, 74.94995, False), + 'Sr-76': _iso('Sr-76', 'strontium-76', 38, 76, 75.941763, False), + 'Sr-77': _iso('Sr-77', 'strontium-77', 38, 77, 76.9379455, False), + 'Sr-78': _iso('Sr-78', 'strontium-78', 38, 78, 77.9321800, False), + 'Sr-79': _iso('Sr-79', 'strontium-79', 38, 79, 78.9297077, False), + 'Sr-80': _iso('Sr-80', 'strontium-80', 38, 80, 79.9245175, False), + 'Sr-81': _iso('Sr-81', 'strontium-81', 38, 81, 80.9232114, False), + 'Sr-82': _iso('Sr-82', 'strontium-82', 38, 82, 81.9183999, False), + 'Sr-83': _iso('Sr-83', 'strontium-83', 38, 83, 82.9175544, False), + 'Sr-84': _iso('Sr-84', 'strontium-84', 38, 84, 83.9134191, True, + isotopic_abundance=0.0056), + 'Sr-85': _iso('Sr-85', 'strontium-85', 38, 85, 84.9129320, False, + half_life=5603299.199999999), + 'Sr-86': _iso('Sr-86', 'strontium-86', 38, 86, 85.9092606, True, + isotopic_abundance=0.0986), + 'Sr-87': 
_iso('Sr-87', 'strontium-87', 38, 87, 86.9088775, True, + isotopic_abundance=0.0700), + 'Sr-88': _iso('Sr-88', 'strontium-88', 38, 88, 87.9056125, True, + isotopic_abundance=0.8258), + 'Sr-89': _iso('Sr-89', 'strontium-89', 38, 89, 88.9074511, False), + 'Sr-90': _iso('Sr-90', 'strontium-90', 38, 90, 89.9077300, False), + 'Sr-91': _iso('Sr-91', 'strontium-91', 38, 91, 90.9101954, False), + 'Sr-92': _iso('Sr-92', 'strontium-92', 38, 92, 91.9110382, False), + 'Sr-93': _iso('Sr-93', 'strontium-93', 38, 93, 92.9140242, False), + 'Sr-94': _iso('Sr-94', 'strontium-94', 38, 94, 93.9153556, False), + 'Sr-95': _iso('Sr-95', 'strontium-95', 38, 95, 94.9193529, False), + 'Sr-96': _iso('Sr-96', 'strontium-96', 38, 96, 95.9217066, False), + 'Sr-97': _iso('Sr-97', 'strontium-97', 38, 97, 96.9263740, False), + 'Sr-98': _iso('Sr-98', 'strontium-98', 38, 98, 97.9286888, False), + 'Sr-99': _iso('Sr-99', 'strontium-99', 38, 99, 98.9328907, False), + 'Sr-100': _iso('Sr-100', 'strontium-100', 38, 100, 99.935770, False), + 'Sr-101': _iso('Sr-101', 'strontium-101', 38, 101, 100.940352, False), + 'Sr-102': _iso('Sr-102', 'strontium-102', 38, 102, 101.943791, False), + 'Sr-103': _iso('Sr-103', 'strontium-103', 38, 103, 102.94909, False), + 'Sr-104': _iso('Sr-104', 'strontium-104', 38, 104, 103.95265, False), + 'Sr-105': _iso('Sr-105', 'strontium-105', 38, 105, 104.95855, False), + 'Sr-106': _iso('Sr-106', 'strontium-106', 38, 106, 105.96265, False), + 'Sr-107': _iso('Sr-107', 'strontium-107', 38, 107, 106.96897, False), + 'Y-76': _iso('Y-76', 'yttrium-76', 39, 76, 75.95856, False), + 'Y-77': _iso('Y-77', 'yttrium-77', 39, 77, 76.949781, False), + 'Y-78': _iso('Y-78', 'yttrium-78', 39, 78, 77.94361, False), + 'Y-79': _iso('Y-79', 'yttrium-79', 39, 79, 78.93735, False), + 'Y-80': _iso('Y-80', 'yttrium-80', 39, 80, 79.9343561, False), + 'Y-81': _iso('Y-81', 'yttrium-81', 39, 81, 80.9294556, False), + 'Y-82': _iso('Y-82', 'yttrium-82', 39, 82, 81.9269314, False), + 'Y-83': _iso('Y-83', 'yttrium-83', 39, 83, 82.922485, False), + 'Y-84': _iso('Y-84', 'yttrium-84', 39, 84, 83.9206721, False), + 'Y-85': _iso('Y-85', 'yttrium-85', 39, 85, 84.916433, False), + 'Y-86': _iso('Y-86', 'yttrium-86', 39, 86, 85.914886, False), + 'Y-87': _iso('Y-87', 'yttrium-87', 39, 87, 86.9108761, False), + 'Y-88': _iso('Y-88', 'yttrium-88', 39, 88, 87.9095016, False, + half_life=9212486.4), + 'Y-89': _iso('Y-89', 'yttrium-89', 39, 89, 88.9058403, True, + isotopic_abundance=1), + 'Y-90': _iso('Y-90', 'yttrium-90', 39, 90, 89.9071439, False), + 'Y-91': _iso('Y-91', 'yttrium-91', 39, 91, 90.9072974, False), + 'Y-92': _iso('Y-92', 'yttrium-92', 39, 92, 91.9089451, False), + 'Y-93': _iso('Y-93', 'yttrium-93', 39, 93, 92.909578, False), + 'Y-94': _iso('Y-94', 'yttrium-94', 39, 94, 93.9115906, False), + 'Y-95': _iso('Y-95', 'yttrium-95', 39, 95, 94.9128161, False), + 'Y-96': _iso('Y-96', 'yttrium-96', 39, 96, 95.9158968, False), + 'Y-97': _iso('Y-97', 'yttrium-97', 39, 97, 96.9182741, False), + 'Y-98': _iso('Y-98', 'yttrium-98', 39, 98, 97.9223821, False), + 'Y-99': _iso('Y-99', 'yttrium-99', 39, 99, 98.9241480, False), + 'Y-100': _iso('Y-100', 'yttrium-100', 39, 100, 99.927715, False), + 'Y-101': _iso('Y-101', 'yttrium-101', 39, 101, 100.9301477, False), + 'Y-102': _iso('Y-102', 'yttrium-102', 39, 102, 101.9343277, False), + 'Y-103': _iso('Y-103', 'yttrium-103', 39, 103, 102.937243, False), + 'Y-104': _iso('Y-104', 'yttrium-104', 39, 104, 103.94196, False), + 'Y-105': _iso('Y-105', 'yttrium-105', 39, 105, 104.94544, False), + 'Y-106': _iso('Y-106', 
'yttrium-106', 39, 106, 105.95056, False), + 'Y-107': _iso('Y-107', 'yttrium-107', 39, 107, 106.95452, False), + 'Y-108': _iso('Y-108', 'yttrium-108', 39, 108, 107.95996, False), + 'Y-109': _iso('Y-109', 'yttrium-109', 39, 109, 108.96436, False), + 'Zr-78': _iso('Zr-78', 'zirconium-78', 40, 78, 77.95566, False), + 'Zr-79': _iso('Zr-79', 'zirconium-79', 40, 79, 78.94948, False), + 'Zr-80': _iso('Zr-80', 'zirconium-80', 40, 80, 79.9404, False), + 'Zr-81': _iso('Zr-81', 'zirconium-81', 40, 81, 80.93731, False), + 'Zr-82': _iso('Zr-82', 'zirconium-82', 40, 82, 81.93135, False), + 'Zr-83': _iso('Zr-83', 'zirconium-83', 40, 83, 82.9292421, False), + 'Zr-84': _iso('Zr-84', 'zirconium-84', 40, 84, 83.9233269, False), + 'Zr-85': _iso('Zr-85', 'zirconium-85', 40, 85, 84.9214444, False), + 'Zr-86': _iso('Zr-86', 'zirconium-86', 40, 86, 85.9162972, False), + 'Zr-87': _iso('Zr-87', 'zirconium-87', 40, 87, 86.9148180, False), + 'Zr-88': _iso('Zr-88', 'zirconium-88', 40, 88, 87.9102213, False), + 'Zr-89': _iso('Zr-89', 'zirconium-89', 40, 89, 88.9088814, False), + 'Zr-90': _iso('Zr-90', 'zirconium-90', 40, 90, 89.9046977, True, + isotopic_abundance=0.5145), + 'Zr-91': _iso('Zr-91', 'zirconium-91', 40, 91, 90.9056396, True, + isotopic_abundance=0.1122), + 'Zr-92': _iso('Zr-92', 'zirconium-92', 40, 92, 91.9050347, True, + isotopic_abundance=0.1715), + 'Zr-93': _iso('Zr-93', 'zirconium-93', 40, 93, 92.9064699, False), + 'Zr-94': _iso('Zr-94', 'zirconium-94', 40, 94, 93.9063108, True, + isotopic_abundance=0.1738), + 'Zr-95': _iso('Zr-95', 'zirconium-95', 40, 95, 94.9080385, False), + 'Zr-96': _iso('Zr-96', 'zirconium-96', 40, 96, 95.9082714, False, + isotopic_abundance=0.0280), + 'Zr-97': _iso('Zr-97', 'zirconium-97', 40, 97, 96.9109512, False), + 'Zr-98': _iso('Zr-98', 'zirconium-98', 40, 98, 97.9127289, False), + 'Zr-99': _iso('Zr-99', 'zirconium-99', 40, 99, 98.916667, False), + 'Zr-100': _iso('Zr-100', 'zirconium-100', 40, 100, 99.9180006, False), + 'Zr-101': _iso('Zr-101', 'zirconium-101', 40, 101, 100.9214480, False), + 'Zr-102': _iso('Zr-102', 'zirconium-102', 40, 102, 101.9231409, False), + 'Zr-103': _iso('Zr-103', 'zirconium-103', 40, 103, 102.927191, False), + 'Zr-104': _iso('Zr-104', 'zirconium-104', 40, 104, 103.929436, False), + 'Zr-105': _iso('Zr-105', 'zirconium-105', 40, 105, 104.934008, False), + 'Zr-106': _iso('Zr-106', 'zirconium-106', 40, 106, 105.93676, False), + 'Zr-107': _iso('Zr-107', 'zirconium-107', 40, 107, 106.94174, False), + 'Zr-108': _iso('Zr-108', 'zirconium-108', 40, 108, 107.94487, False), + 'Zr-109': _iso('Zr-109', 'zirconium-109', 40, 109, 108.95041, False), + 'Zr-110': _iso('Zr-110', 'zirconium-110', 40, 110, 109.95396, False), + 'Zr-111': _iso('Zr-111', 'zirconium-111', 40, 111, 110.95968, False), + 'Zr-112': _iso('Zr-112', 'zirconium-112', 40, 112, 111.96370, False), + 'Nb-81': _iso('Nb-81', 'niobium-81', 41, 81, 80.94960, False), + 'Nb-82': _iso('Nb-82', 'niobium-82', 41, 82, 81.94396, False), + 'Nb-83': _iso('Nb-83', 'niobium-83', 41, 83, 82.93729, False), + 'Nb-84': _iso('Nb-84', 'niobium-84', 41, 84, 83.93449, False), + 'Nb-85': _iso('Nb-85', 'niobium-85', 41, 85, 84.9288458, False), + 'Nb-86': _iso('Nb-86', 'niobium-86', 41, 86, 85.9257828, False), + 'Nb-87': _iso('Nb-87', 'niobium-87', 41, 87, 86.9206937, False), + 'Nb-88': _iso('Nb-88', 'niobium-88', 41, 88, 87.918222, False), + 'Nb-89': _iso('Nb-89', 'niobium-89', 41, 89, 88.913445, False), + 'Nb-90': _iso('Nb-90', 'niobium-90', 41, 90, 89.9112584, False), + 'Nb-91': _iso('Nb-91', 'niobium-91', 41, 91, 
90.9069897, False), + 'Nb-92': _iso('Nb-92', 'niobium-92', 41, 92, 91.9071881, False), + 'Nb-93': _iso('Nb-93', 'niobium-93', 41, 93, 92.9063730, True, isotopic_abundance=1), - 'Th-233': Iso('Th-233', 'thorium-233', 90, 233, 233.0415823, False), - 'Th-234': Iso('Th-234', 'thorium-234', 90, 234, 234.0436014, False), - 'Th-235': Iso('Th-235', 'thorium-235', 90, 235, 235.047255, False), - 'Th-236': Iso('Th-236', 'thorium-236', 90, 236, 236.049657, False), - 'Th-237': Iso('Th-237', 'thorium-237', 90, 237, 237.053629, False), - 'Th-238': Iso('Th-238', 'thorium-238', 90, 238, 238.05650, False), - 'Th-239': Iso('Th-239', 'thorium-239', 90, 239, 239.06077, False), - 'Pa-212': Iso('Pa-212', 'protactinium-212', 91, 212, 212.023203, False), - 'Pa-213': Iso('Pa-213', 'protactinium-213', 91, 213, 213.021109, False), - 'Pa-214': Iso('Pa-214', 'protactinium-214', 91, 214, 214.020918, False), - 'Pa-215': Iso('Pa-215', 'protactinium-215', 91, 215, 215.019183, False), - 'Pa-216': Iso('Pa-216', 'protactinium-216', 91, 216, 216.019109, False), - 'Pa-217': Iso('Pa-217', 'protactinium-217', 91, 217, 217.018325, False), - 'Pa-218': Iso('Pa-218', 'protactinium-218', 91, 218, 218.020059, False), - 'Pa-219': Iso('Pa-219', 'protactinium-219', 91, 219, 219.019904, False), - 'Pa-220': Iso('Pa-220', 'protactinium-220', 91, 220, 220.021705, False), - 'Pa-221': Iso('Pa-221', 'protactinium-221', 91, 221, 221.021875, False), - 'Pa-222': Iso('Pa-222', 'protactinium-222', 91, 222, 222.023784, False), - 'Pa-223': Iso('Pa-223', 'protactinium-223', 91, 223, 223.023963, False), - 'Pa-224': Iso('Pa-224', 'protactinium-224', 91, 224, 224.0256176, False), - 'Pa-225': Iso('Pa-225', 'protactinium-225', 91, 225, 225.026131, False), - 'Pa-226': Iso('Pa-226', 'protactinium-226', 91, 226, 226.027948, False), - 'Pa-227': Iso('Pa-227', 'protactinium-227', 91, 227, 227.0288054, False), - 'Pa-228': Iso('Pa-228', 'protactinium-228', 91, 228, 228.0310517, False), - 'Pa-229': Iso('Pa-229', 'protactinium-229', 91, 229, 229.0320972, False), - 'Pa-230': Iso('Pa-230', 'protactinium-230', 91, 230, 230.0345410, False), - 'Pa-231': Iso('Pa-231', 'protactinium-231', 91, 231, 231.0358842, False, + 'Nb-94': _iso('Nb-94', 'niobium-94', 41, 94, 93.9072788, False), + 'Nb-95': _iso('Nb-95', 'niobium-95', 41, 95, 94.90683240, False), + 'Nb-96': _iso('Nb-96', 'niobium-96', 41, 96, 95.9080973, False), + 'Nb-97': _iso('Nb-97', 'niobium-97', 41, 97, 96.9080959, False), + 'Nb-98': _iso('Nb-98', 'niobium-98', 41, 98, 97.9103265, False), + 'Nb-99': _iso('Nb-99', 'niobium-99', 41, 99, 98.911613, False), + 'Nb-100': _iso('Nb-100', 'niobium-100', 41, 100, 99.9143276, False), + 'Nb-101': _iso('Nb-101', 'niobium-101', 41, 101, 100.9153103, False), + 'Nb-102': _iso('Nb-102', 'niobium-102', 41, 102, 101.9180772, False), + 'Nb-103': _iso('Nb-103', 'niobium-103', 41, 103, 102.9194572, False), + 'Nb-104': _iso('Nb-104', 'niobium-104', 41, 104, 103.9228925, False), + 'Nb-105': _iso('Nb-105', 'niobium-105', 41, 105, 104.9249465, False), + 'Nb-106': _iso('Nb-106', 'niobium-106', 41, 106, 105.9289317, False), + 'Nb-107': _iso('Nb-107', 'niobium-107', 41, 107, 106.9315937, False), + 'Nb-108': _iso('Nb-108', 'niobium-108', 41, 108, 107.9360748, False), + 'Nb-109': _iso('Nb-109', 'niobium-109', 41, 109, 108.93922, False), + 'Nb-110': _iso('Nb-110', 'niobium-110', 41, 110, 109.94403, False), + 'Nb-111': _iso('Nb-111', 'niobium-111', 41, 111, 110.94753, False), + 'Nb-112': _iso('Nb-112', 'niobium-112', 41, 112, 111.95247, False), + 'Nb-113': _iso('Nb-113', 'niobium-113', 41, 113, 
112.95651, False), + 'Nb-114': _iso('Nb-114', 'niobium-114', 41, 114, 113.96201, False), + 'Nb-115': _iso('Nb-115', 'niobium-115', 41, 115, 114.96634, False), + 'Mo-83': _iso('Mo-83', 'molybdenum-83', 42, 83, 82.94988, False), + 'Mo-84': _iso('Mo-84', 'molybdenum-84', 42, 84, 83.94149, False), + 'Mo-85': _iso('Mo-85', 'molybdenum-85', 42, 85, 84.938261, False), + 'Mo-86': _iso('Mo-86', 'molybdenum-86', 42, 86, 85.9311748, False), + 'Mo-87': _iso('Mo-87', 'molybdenum-87', 42, 87, 86.9281962, False), + 'Mo-88': _iso('Mo-88', 'molybdenum-88', 42, 88, 87.9219678, False), + 'Mo-89': _iso('Mo-89', 'molybdenum-89', 42, 89, 88.9194682, False), + 'Mo-90': _iso('Mo-90', 'molybdenum-90', 42, 90, 89.9139309, False), + 'Mo-91': _iso('Mo-91', 'molybdenum-91', 42, 91, 90.9117453, False), + 'Mo-92': _iso('Mo-92', 'molybdenum-92', 42, 92, 91.90680796, True, + isotopic_abundance=0.1453), + 'Mo-93': _iso('Mo-93', 'molybdenum-93', 42, 93, 92.90680958, False), + 'Mo-94': _iso('Mo-94', 'molybdenum-94', 42, 94, 93.90508490, True, + isotopic_abundance=0.0915), + 'Mo-95': _iso('Mo-95', 'molybdenum-95', 42, 95, 94.90583877, True, + isotopic_abundance=0.1584), + 'Mo-96': _iso('Mo-96', 'molybdenum-96', 42, 96, 95.90467612, True, + isotopic_abundance=0.1667), + 'Mo-97': _iso('Mo-97', 'molybdenum-97', 42, 97, 96.90601812, True, + isotopic_abundance=0.0960), + 'Mo-98': _iso('Mo-98', 'molybdenum-98', 42, 98, 97.90540482, True, + isotopic_abundance=0.2439), + 'Mo-99': _iso('Mo-99', 'molybdenum-99', 42, 99, 98.90770851, False, + half_life=237326.04), + 'Mo-100': _iso('Mo-100', 'molybdenum-100', 42, 100, 99.9074718, False, + isotopic_abundance=0.0982), + 'Mo-101': _iso('Mo-101', 'molybdenum-101', 42, 101, 100.9103414, False), + 'Mo-102': _iso('Mo-102', 'molybdenum-102', 42, 102, 101.9102834, False), + 'Mo-103': _iso('Mo-103', 'molybdenum-103', 42, 103, 102.913079, False), + 'Mo-104': _iso('Mo-104', 'molybdenum-104', 42, 104, 103.9137344, False), + 'Mo-105': _iso('Mo-105', 'molybdenum-105', 42, 105, 104.916969, False), + 'Mo-106': _iso('Mo-106', 'molybdenum-106', 42, 106, 105.918259, False), + 'Mo-107': _iso('Mo-107', 'molybdenum-107', 42, 107, 106.922106, False), + 'Mo-108': _iso('Mo-108', 'molybdenum-108', 42, 108, 107.924033, False), + 'Mo-109': _iso('Mo-109', 'molybdenum-109', 42, 109, 108.928424, False), + 'Mo-110': _iso('Mo-110', 'molybdenum-110', 42, 110, 109.930704, False), + 'Mo-111': _iso('Mo-111', 'molybdenum-111', 42, 111, 110.935654, False), + 'Mo-112': _iso('Mo-112', 'molybdenum-112', 42, 112, 111.93831, False), + 'Mo-113': _iso('Mo-113', 'molybdenum-113', 42, 113, 112.94335, False), + 'Mo-114': _iso('Mo-114', 'molybdenum-114', 42, 114, 113.94653, False), + 'Mo-115': _iso('Mo-115', 'molybdenum-115', 42, 115, 114.95196, False), + 'Mo-116': _iso('Mo-116', 'molybdenum-116', 42, 116, 115.95545, False), + 'Mo-117': _iso('Mo-117', 'molybdenum-117', 42, 117, 116.96117, False), + 'Tc-85': _iso('Tc-85', 'technetium-85', 43, 85, 84.95058, False), + 'Tc-86': _iso('Tc-86', 'technetium-86', 43, 86, 85.94493, False), + 'Tc-87': _iso('Tc-87', 'technetium-87', 43, 87, 86.9380672, False), + 'Tc-88': _iso('Tc-88', 'technetium-88', 43, 88, 87.93378, False), + 'Tc-89': _iso('Tc-89', 'technetium-89', 43, 89, 88.9276487, False), + 'Tc-90': _iso('Tc-90', 'technetium-90', 43, 90, 89.9240739, False), + 'Tc-91': _iso('Tc-91', 'technetium-91', 43, 91, 90.9184254, False), + 'Tc-92': _iso('Tc-92', 'technetium-92', 43, 92, 91.9152698, False), + 'Tc-93': _iso('Tc-93', 'technetium-93', 43, 93, 92.9102460, False), + 'Tc-94': _iso('Tc-94', 
+                  'technetium-94', 43, 94, 93.9096536, False),
+    'Tc-95': _iso('Tc-95', 'technetium-95', 43, 95, 94.9076536, False),
+    'Tc-96': _iso('Tc-96', 'technetium-96', 43, 96, 95.9078680, False),
+    'Tc-97': _iso('Tc-97', 'technetium-97', 43, 97, 96.9063667, False),
+    'Tc-98': _iso('Tc-98', 'technetium-98', 43, 98, 97.9072124, False),
+    'Tc-99': _iso('Tc-99', 'technetium-99', 43, 99, 98.9062508, False,
+                  half_life=21636.0),
+    'Tc-100': _iso('Tc-100', 'technetium-100', 43, 100, 99.9076539, False),
+    'Tc-101': _iso('Tc-101', 'technetium-101', 43, 101, 100.907309, False),
+    'Tc-102': _iso('Tc-102', 'technetium-102', 43, 102, 101.9092097, False),
+    'Tc-103': _iso('Tc-103', 'technetium-103', 43, 103, 102.909176, False),
+    'Tc-104': _iso('Tc-104', 'technetium-104', 43, 104, 103.911425, False),
+    'Tc-105': _iso('Tc-105', 'technetium-105', 43, 105, 104.911655, False),
+    'Tc-106': _iso('Tc-106', 'technetium-106', 43, 106, 105.914358, False),
+    'Tc-107': _iso('Tc-107', 'technetium-107', 43, 107, 106.9154606, False),
+    'Tc-108': _iso('Tc-108', 'technetium-108', 43, 108, 107.9184957, False),
+    'Tc-109': _iso('Tc-109', 'technetium-109', 43, 109, 108.920256, False),
+    'Tc-110': _iso('Tc-110', 'technetium-110', 43, 110, 109.923744, False),
+    'Tc-111': _iso('Tc-111', 'technetium-111', 43, 111, 110.925901, False),
+    'Tc-112': _iso('Tc-112', 'technetium-112', 43, 112, 111.9299458, False),
+    'Tc-113': _iso('Tc-113', 'technetium-113', 43, 113, 112.9325690, False),
+    'Tc-114': _iso('Tc-114', 'technetium-114', 43, 114, 113.93691, False),
+    'Tc-115': _iso('Tc-115', 'technetium-115', 43, 115, 114.93998, False),
+    'Tc-116': _iso('Tc-116', 'technetium-116', 43, 116, 115.94476, False),
+    'Tc-117': _iso('Tc-117', 'technetium-117', 43, 117, 116.94806, False),
+    'Tc-118': _iso('Tc-118', 'technetium-118', 43, 118, 117.95299, False),
+    'Tc-119': _iso('Tc-119', 'technetium-119', 43, 119, 118.95666, False),
+    'Tc-120': _iso('Tc-120', 'technetium-120', 43, 120, 119.96187, False),
+    'Ru-87': _iso('Ru-87', 'ruthenium-87', 44, 87, 86.95069, False),
+    'Ru-88': _iso('Ru-88', 'ruthenium-88', 44, 88, 87.94160, False),
+    'Ru-89': _iso('Ru-89', 'ruthenium-89', 44, 89, 88.93762, False),
+    'Ru-90': _iso('Ru-90', 'ruthenium-90', 44, 90, 89.9303444, False),
+    'Ru-91': _iso('Ru-91', 'ruthenium-91', 44, 91, 90.9267419, False),
+    'Ru-92': _iso('Ru-92', 'ruthenium-92', 44, 92, 91.9202344, False),
+    'Ru-93': _iso('Ru-93', 'ruthenium-93', 44, 93, 92.9171044, False),
+    'Ru-94': _iso('Ru-94', 'ruthenium-94', 44, 94, 93.9113429, False),
+    'Ru-95': _iso('Ru-95', 'ruthenium-95', 44, 95, 94.910406, False),
+    'Ru-96': _iso('Ru-96', 'ruthenium-96', 44, 96, 95.90759025, True,
+                  isotopic_abundance=0.0554),
+    'Ru-97': _iso('Ru-97', 'ruthenium-97', 44, 97, 96.9075471, False),
+    'Ru-98': _iso('Ru-98', 'ruthenium-98', 44, 98, 97.9052868, True,
+                  isotopic_abundance=0.0187),
+    'Ru-99': _iso('Ru-99', 'ruthenium-99', 44, 99, 98.9059341, True,
+                  isotopic_abundance=0.1276),
+    'Ru-100': _iso('Ru-100', 'ruthenium-100', 44, 100, 99.9042143, True,
+                   isotopic_abundance=0.1260),
+    'Ru-101': _iso('Ru-101', 'ruthenium-101', 44, 101, 100.9055769, True,
+                   isotopic_abundance=0.1706),
+    'Ru-102': _iso('Ru-102', 'ruthenium-102', 44, 102, 101.9043441, True,
+                   isotopic_abundance=0.3155),
+    'Ru-103': _iso('Ru-103', 'ruthenium-103', 44, 103, 102.9063186, False,
+                   half_life=3396384.0),
+    'Ru-104': _iso('Ru-104', 'ruthenium-104', 44, 104, 103.9054275, True,
+                   isotopic_abundance=0.1862),
+    'Ru-105': _iso('Ru-105', 'ruthenium-105', 44, 105, 104.9077476, False),
+    'Ru-106': _iso('Ru-106',
+                   'ruthenium-106', 44, 106, 105.9073291, False),
+    'Ru-107': _iso('Ru-107', 'ruthenium-107', 44, 107, 106.9099720, False),
+    'Ru-108': _iso('Ru-108', 'ruthenium-108', 44, 108, 107.9101880, False),
+    'Ru-109': _iso('Ru-109', 'ruthenium-109', 44, 109, 108.9133260, False),
+    'Ru-110': _iso('Ru-110', 'ruthenium-110', 44, 110, 109.9140407, False),
+    'Ru-111': _iso('Ru-111', 'ruthenium-111', 44, 111, 110.917570, False),
+    'Ru-112': _iso('Ru-112', 'ruthenium-112', 44, 112, 111.918809, False),
+    'Ru-113': _iso('Ru-113', 'ruthenium-113', 44, 113, 112.922844, False),
+    'Ru-114': _iso('Ru-114', 'ruthenium-114', 44, 114, 113.9246136, False),
+    'Ru-115': _iso('Ru-115', 'ruthenium-115', 44, 115, 114.928820, False),
+    'Ru-116': _iso('Ru-116', 'ruthenium-116', 44, 116, 115.9312192, False),
+    'Ru-117': _iso('Ru-117', 'ruthenium-117', 44, 117, 116.93610, False),
+    'Ru-118': _iso('Ru-118', 'ruthenium-118', 44, 118, 117.93853, False),
+    'Ru-119': _iso('Ru-119', 'ruthenium-119', 44, 119, 118.94357, False),
+    'Ru-120': _iso('Ru-120', 'ruthenium-120', 44, 120, 119.94631, False),
+    'Ru-121': _iso('Ru-121', 'ruthenium-121', 44, 121, 120.95164, False),
+    'Ru-122': _iso('Ru-122', 'ruthenium-122', 44, 122, 121.95447, False),
+    'Ru-123': _iso('Ru-123', 'ruthenium-123', 44, 123, 122.95989, False),
+    'Ru-124': _iso('Ru-124', 'ruthenium-124', 44, 124, 123.96305, False),
+    'Rh-89': _iso('Rh-89', 'rhodium-89', 45, 89, 88.95058, False),
+    'Rh-90': _iso('Rh-90', 'rhodium-90', 45, 90, 89.94422, False),
+    'Rh-91': _iso('Rh-91', 'rhodium-91', 45, 91, 90.93688, False),
+    'Rh-92': _iso('Rh-92', 'rhodium-92', 45, 92, 91.9323677, False),
+    'Rh-93': _iso('Rh-93', 'rhodium-93', 45, 93, 92.9259128, False),
+    'Rh-94': _iso('Rh-94', 'rhodium-94', 45, 94, 93.9217305, False),
+    'Rh-95': _iso('Rh-95', 'rhodium-95', 45, 95, 94.9158979, False),
+    'Rh-96': _iso('Rh-96', 'rhodium-96', 45, 96, 95.914453, False),
+    'Rh-97': _iso('Rh-97', 'rhodium-97', 45, 97, 96.911329, False),
+    'Rh-98': _iso('Rh-98', 'rhodium-98', 45, 98, 97.910708, False),
+    'Rh-99': _iso('Rh-99', 'rhodium-99', 45, 99, 98.9081282, False),
+    'Rh-100': _iso('Rh-100', 'rhodium-100', 45, 100, 99.908117, False),
+    'Rh-101': _iso('Rh-101', 'rhodium-101', 45, 101, 100.9061606, False),
+    'Rh-102': _iso('Rh-102', 'rhodium-102', 45, 102, 101.9068374, False),
+    'Rh-103': _iso('Rh-103', 'rhodium-103', 45, 103, 102.9054980, True,
+                   isotopic_abundance=1),
+    'Rh-104': _iso('Rh-104', 'rhodium-104', 45, 104, 103.9066492, False),
+    'Rh-105': _iso('Rh-105', 'rhodium-105', 45, 105, 104.9056885, False),
+    'Rh-106': _iso('Rh-106', 'rhodium-106', 45, 106, 105.9072868, False),
+    'Rh-107': _iso('Rh-107', 'rhodium-107', 45, 107, 106.906748, False),
+    'Rh-108': _iso('Rh-108', 'rhodium-108', 45, 108, 107.908714, False),
+    'Rh-109': _iso('Rh-109', 'rhodium-109', 45, 109, 108.9087488, False),
+    'Rh-110': _iso('Rh-110', 'rhodium-110', 45, 110, 109.911079, False),
+    'Rh-111': _iso('Rh-111', 'rhodium-111', 45, 111, 110.9116423, False),
+    'Rh-112': _iso('Rh-112', 'rhodium-112', 45, 112, 111.914403, False),
+    'Rh-113': _iso('Rh-113', 'rhodium-113', 45, 113, 112.9154393, False),
+    'Rh-114': _iso('Rh-114', 'rhodium-114', 45, 114, 113.918718, False),
+    'Rh-115': _iso('Rh-115', 'rhodium-115', 45, 115, 114.9203116, False),
+    'Rh-116': _iso('Rh-116', 'rhodium-116', 45, 116, 115.924059, False),
+    'Rh-117': _iso('Rh-117', 'rhodium-117', 45, 117, 116.9260354, False),
+    'Rh-118': _iso('Rh-118', 'rhodium-118', 45, 118, 117.930340, False),
+    'Rh-119': _iso('Rh-119', 'rhodium-119', 45, 119, 118.932557, False),
+    'Rh-120':
+        _iso('Rh-120', 'rhodium-120', 45, 120, 119.93686, False),
+    'Rh-121': _iso('Rh-121', 'rhodium-121', 45, 121, 120.93942, False),
+    'Rh-122': _iso('Rh-122', 'rhodium-122', 45, 122, 121.94399, False),
+    'Rh-123': _iso('Rh-123', 'rhodium-123', 45, 123, 122.94685, False),
+    'Rh-124': _iso('Rh-124', 'rhodium-124', 45, 124, 123.95151, False),
+    'Rh-125': _iso('Rh-125', 'rhodium-125', 45, 125, 124.95469, False),
+    'Rh-126': _iso('Rh-126', 'rhodium-126', 45, 126, 125.95946, False),
+    'Pd-91': _iso('Pd-91', 'palladium-91', 46, 91, 90.95032, False),
+    'Pd-92': _iso('Pd-92', 'palladium-92', 46, 92, 91.94088, False),
+    'Pd-93': _iso('Pd-93', 'palladium-93', 46, 93, 92.93651, False),
+    'Pd-94': _iso('Pd-94', 'palladium-94', 46, 94, 93.9290376, False),
+    'Pd-95': _iso('Pd-95', 'palladium-95', 46, 95, 94.9248898, False),
+    'Pd-96': _iso('Pd-96', 'palladium-96', 46, 96, 95.9182151, False),
+    'Pd-97': _iso('Pd-97', 'palladium-97', 46, 97, 96.9164720, False),
+    'Pd-98': _iso('Pd-98', 'palladium-98', 46, 98, 97.9126983, False),
+    'Pd-99': _iso('Pd-99', 'palladium-99', 46, 99, 98.9117748, False),
+    'Pd-100': _iso('Pd-100', 'palladium-100', 46, 100, 99.908505, False),
+    'Pd-101': _iso('Pd-101', 'palladium-101', 46, 101, 100.9082864, False),
+    'Pd-102': _iso('Pd-102', 'palladium-102', 46, 102, 101.9056022, True,
+                   isotopic_abundance=0.0102),
+    'Pd-103': _iso('Pd-103', 'palladium-103', 46, 103, 102.9060809, False),
+    'Pd-104': _iso('Pd-104', 'palladium-104', 46, 104, 103.9040305, True,
+                   isotopic_abundance=0.1114),
+    'Pd-105': _iso('Pd-105', 'palladium-105', 46, 105, 104.9050796, True,
+                   isotopic_abundance=0.2233),
+    'Pd-106': _iso('Pd-106', 'palladium-106', 46, 106, 105.9034804, True,
+                   isotopic_abundance=0.2733),
+    'Pd-107': _iso('Pd-107', 'palladium-107', 46, 107, 106.9051282, False),
+    'Pd-108': _iso('Pd-108', 'palladium-108', 46, 108, 107.9038916, True,
+                   isotopic_abundance=0.2646),
+    'Pd-109': _iso('Pd-109', 'palladium-109', 46, 109, 108.9059504, False),
+    'Pd-110': _iso('Pd-110', 'palladium-110', 46, 110, 109.90517220, True,
+                   isotopic_abundance=0.1172),
+    'Pd-111': _iso('Pd-111', 'palladium-111', 46, 111, 110.90768968, False),
+    'Pd-112': _iso('Pd-112', 'palladium-112', 46, 112, 111.9073297, False),
+    'Pd-113': _iso('Pd-113', 'palladium-113', 46, 113, 112.9102610, False),
+    'Pd-114': _iso('Pd-114', 'palladium-114', 46, 114, 113.9103686, False),
+    'Pd-115': _iso('Pd-115', 'palladium-115', 46, 115, 114.913659, False),
+    'Pd-116': _iso('Pd-116', 'palladium-116', 46, 116, 115.9142970, False),
+    'Pd-117': _iso('Pd-117', 'palladium-117', 46, 117, 116.9179547, False),
+    'Pd-118': _iso('Pd-118', 'palladium-118', 46, 118, 117.9190667, False),
+    'Pd-119': _iso('Pd-119', 'palladium-119', 46, 119, 118.9233402, False),
+    'Pd-120': _iso('Pd-120', 'palladium-120', 46, 120, 119.9245511, False),
+    'Pd-121': _iso('Pd-121', 'palladium-121', 46, 121, 120.9289503, False),
+    'Pd-122': _iso('Pd-122', 'palladium-122', 46, 122, 121.930632, False),
+    'Pd-123': _iso('Pd-123', 'palladium-123', 46, 123, 122.93514, False),
+    'Pd-124': _iso('Pd-124', 'palladium-124', 46, 124, 123.93714, False),
+    'Pd-125': _iso('Pd-125', 'palladium-125', 46, 125, 124.94179, False),
+    'Pd-126': _iso('Pd-126', 'palladium-126', 46, 126, 125.94416, False),
+    'Pd-127': _iso('Pd-127', 'palladium-127', 46, 127, 126.94907, False),
+    'Pd-128': _iso('Pd-128', 'palladium-128', 46, 128, 127.95183, False),
+    'Ag-93': _iso('Ag-93', 'silver-93', 47, 93, 92.95033, False),
+    'Ag-94': _iso('Ag-94', 'silver-94', 47, 94, 93.94373, False),
+    'Ag-95': _iso('Ag-95',
+                  'silver-95', 47, 95, 94.93602, False),
+    'Ag-96': _iso('Ag-96', 'silver-96', 47, 96, 95.930744, False),
+    'Ag-97': _iso('Ag-97', 'silver-97', 47, 97, 96.92397, False),
+    'Ag-98': _iso('Ag-98', 'silver-98', 47, 98, 97.921560, False),
+    'Ag-99': _iso('Ag-99', 'silver-99', 47, 99, 98.9176458, False),
+    'Ag-100': _iso('Ag-100', 'silver-100', 47, 100, 99.9161154, False),
+    'Ag-101': _iso('Ag-101', 'silver-101', 47, 101, 100.9126840, False),
+    'Ag-102': _iso('Ag-102', 'silver-102', 47, 102, 101.9117047, False),
+    'Ag-103': _iso('Ag-103', 'silver-103', 47, 103, 102.9089631, False),
+    'Ag-104': _iso('Ag-104', 'silver-104', 47, 104, 103.9086239, False),
+    'Ag-105': _iso('Ag-105', 'silver-105', 47, 105, 104.9065256, False),
+    'Ag-106': _iso('Ag-106', 'silver-106', 47, 106, 105.9066636, False),
+    'Ag-107': _iso('Ag-107', 'silver-107', 47, 107, 106.9050916, True,
+                   isotopic_abundance=0.51839),
+    'Ag-108': _iso('Ag-108', 'silver-108', 47, 108, 107.9059503, False),
+    'Ag-109': _iso('Ag-109', 'silver-109', 47, 109, 108.9047553, True,
+                   isotopic_abundance=0.48161),
+    'Ag-110': _iso('Ag-110', 'silver-110', 47, 110, 109.9061102, False),
+    'Ag-111': _iso('Ag-111', 'silver-111', 47, 111, 110.9052959, False),
+    'Ag-112': _iso('Ag-112', 'silver-112', 47, 112, 111.9070486, False),
+    'Ag-113': _iso('Ag-113', 'silver-113', 47, 113, 112.906573, False),
+    'Ag-114': _iso('Ag-114', 'silver-114', 47, 114, 113.9088230, False),
+    'Ag-115': _iso('Ag-115', 'silver-115', 47, 115, 114.908767, False),
+    'Ag-116': _iso('Ag-116', 'silver-116', 47, 116, 115.9113868, False),
+    'Ag-117': _iso('Ag-117', 'silver-117', 47, 117, 116.911774, False),
+    'Ag-118': _iso('Ag-118', 'silver-118', 47, 118, 117.9145955, False),
+    'Ag-119': _iso('Ag-119', 'silver-119', 47, 119, 118.915570, False),
+    'Ag-120': _iso('Ag-120', 'silver-120', 47, 120, 119.9187848, False),
+    'Ag-121': _iso('Ag-121', 'silver-121', 47, 121, 120.920125, False),
+    'Ag-122': _iso('Ag-122', 'silver-122', 47, 122, 121.923664, False),
+    'Ag-123': _iso('Ag-123', 'silver-123', 47, 123, 122.925337, False),
+    'Ag-124': _iso('Ag-124', 'silver-124', 47, 124, 123.92893, False),
+    'Ag-125': _iso('Ag-125', 'silver-125', 47, 125, 124.93105, False),
+    'Ag-126': _iso('Ag-126', 'silver-126', 47, 126, 125.93475, False),
+    'Ag-127': _iso('Ag-127', 'silver-127', 47, 127, 126.93711, False),
+    'Ag-128': _iso('Ag-128', 'silver-128', 47, 128, 127.94106, False),
+    'Ag-129': _iso('Ag-129', 'silver-129', 47, 129, 128.94395, False),
+    'Ag-130': _iso('Ag-130', 'silver-130', 47, 130, 129.95070, False),
+    'Cd-95': _iso('Cd-95', 'cadmium-95', 48, 95, 94.94994, False),
+    'Cd-96': _iso('Cd-96', 'cadmium-96', 48, 96, 95.94034, False),
+    'Cd-97': _iso('Cd-97', 'cadmium-97', 48, 97, 96.93510, False),
+    'Cd-98': _iso('Cd-98', 'cadmium-98', 48, 98, 97.927389, False),
+    'Cd-99': _iso('Cd-99', 'cadmium-99', 48, 99, 98.9249258, False),
+    'Cd-100': _iso('Cd-100', 'cadmium-100', 48, 100, 99.9203488, False),
+    'Cd-101': _iso('Cd-101', 'cadmium-101', 48, 101, 100.9185862, False),
+    'Cd-102': _iso('Cd-102', 'cadmium-102', 48, 102, 101.9144820, False),
+    'Cd-103': _iso('Cd-103', 'cadmium-103', 48, 103, 102.9134165, False),
+    'Cd-104': _iso('Cd-104', 'cadmium-104', 48, 104, 103.9098564, False),
+    'Cd-105': _iso('Cd-105', 'cadmium-105', 48, 105, 104.9094639, False),
+    'Cd-106': _iso('Cd-106', 'cadmium-106', 48, 106, 105.9064599, True,
+                   isotopic_abundance=0.0125),
+    'Cd-107': _iso('Cd-107', 'cadmium-107', 48, 107, 106.9066121, False),
+    'Cd-108': _iso('Cd-108', 'cadmium-108', 48, 108, 107.9041834, True,
+                   isotopic_abundance=0.0089),
+    'Cd-109': _iso('Cd-109', 'cadmium-109', 48, 109, 108.9049867, False,
+                   half_life=40025664.0),
+    'Cd-110': _iso('Cd-110', 'cadmium-110', 48, 110, 109.90300661, True,
+                   isotopic_abundance=0.1249),
+    'Cd-111': _iso('Cd-111', 'cadmium-111', 48, 111, 110.90418287, True,
+                   isotopic_abundance=0.1280),
+    'Cd-112': _iso('Cd-112', 'cadmium-112', 48, 112, 111.90276287, True,
+                   isotopic_abundance=0.2413),
+    'Cd-113': _iso('Cd-113', 'cadmium-113', 48, 113, 112.90440813, False,
+                   isotopic_abundance=0.1222),
+    'Cd-114': _iso('Cd-114', 'cadmium-114', 48, 114, 113.90336509, True,
+                   isotopic_abundance=0.2873),
+    'Cd-115': _iso('Cd-115', 'cadmium-115', 48, 115, 114.90543751, False),
+    'Cd-116': _iso('Cd-116', 'cadmium-116', 48, 116, 115.90476315, False,
+                   isotopic_abundance=0.0749),
+    'Cd-117': _iso('Cd-117', 'cadmium-117', 48, 117, 116.9072260, False),
+    'Cd-118': _iso('Cd-118', 'cadmium-118', 48, 118, 117.906922, False),
+    'Cd-119': _iso('Cd-119', 'cadmium-119', 48, 119, 118.909847, False),
+    'Cd-120': _iso('Cd-120', 'cadmium-120', 48, 120, 119.9098681, False),
+    'Cd-121': _iso('Cd-121', 'cadmium-121', 48, 121, 120.9129637, False),
+    'Cd-122': _iso('Cd-122', 'cadmium-122', 48, 122, 121.9134591, False),
+    'Cd-123': _iso('Cd-123', 'cadmium-123', 48, 123, 122.9168925, False),
+    'Cd-124': _iso('Cd-124', 'cadmium-124', 48, 124, 123.9176574, False),
+    'Cd-125': _iso('Cd-125', 'cadmium-125', 48, 125, 124.9212576, False),
+    'Cd-126': _iso('Cd-126', 'cadmium-126', 48, 126, 125.9224291, False),
+    'Cd-127': _iso('Cd-127', 'cadmium-127', 48, 127, 126.926472, False),
+    'Cd-128': _iso('Cd-128', 'cadmium-128', 48, 128, 127.9278129, False),
+    'Cd-129': _iso('Cd-129', 'cadmium-129', 48, 129, 128.93182, False),
+    'Cd-130': _iso('Cd-130', 'cadmium-130', 48, 130, 129.93394, False),
+    'Cd-131': _iso('Cd-131', 'cadmium-131', 48, 131, 130.94060, False),
+    'Cd-132': _iso('Cd-132', 'cadmium-132', 48, 132, 131.94604, False),
+    'Cd-133': _iso('Cd-133', 'cadmium-133', 48, 133, 132.95285, False),
+    'In-97': _iso('In-97', 'indium-97', 49, 97, 96.94934, False),
+    'In-98': _iso('In-98', 'indium-98', 49, 98, 97.94214, False),
+    'In-99': _iso('In-99', 'indium-99', 49, 99, 98.93411, False),
+    'In-100': _iso('In-100', 'indium-100', 49, 100, 99.93096, False),
+    'In-101': _iso('In-101', 'indium-101', 49, 101, 100.92634, False),
+    'In-102': _iso('In-102', 'indium-102', 49, 102, 101.9241071, False),
+    'In-103': _iso('In-103', 'indium-103', 49, 103, 102.9198819, False),
+    'In-104': _iso('In-104', 'indium-104', 49, 104, 103.9182145, False),
+    'In-105': _iso('In-105', 'indium-105', 49, 105, 104.914502, False),
+    'In-106': _iso('In-106', 'indium-106', 49, 106, 105.913464, False),
+    'In-107': _iso('In-107', 'indium-107', 49, 107, 106.910290, False),
+    'In-108': _iso('In-108', 'indium-108', 49, 108, 107.9096935, False),
+    'In-109': _iso('In-109', 'indium-109', 49, 109, 108.9071514, False),
+    'In-110': _iso('In-110', 'indium-110', 49, 110, 109.907170, False),
+    'In-111': _iso('In-111', 'indium-111', 49, 111, 110.9051085, False,
+                   half_life=242332.128),
+    'In-112': _iso('In-112', 'indium-112', 49, 112, 111.9055377, False),
+    'In-113': _iso('In-113', 'indium-113', 49, 113, 112.90406184, True,
+                   isotopic_abundance=0.0429),
+    'In-114': _iso('In-114', 'indium-114', 49, 114, 113.90491791, False),
+    'In-115': _iso('In-115', 'indium-115', 49, 115, 114.903878776, False,
+                   isotopic_abundance=0.9571),
+    'In-116': _iso('In-116', 'indium-116', 49, 116, 115.90525999, False),
+    'In-117': _iso('In-117', 'indium-117', 49, 117, 116.9045157,
+                   False),
+    'In-118': _iso('In-118', 'indium-118', 49, 118, 117.9063566, False),
+    'In-119': _iso('In-119', 'indium-119', 49, 119, 118.9058507, False),
+    'In-120': _iso('In-120', 'indium-120', 49, 120, 119.907967, False),
+    'In-121': _iso('In-121', 'indium-121', 49, 121, 120.907851, False),
+    'In-122': _iso('In-122', 'indium-122', 49, 122, 121.910281, False),
+    'In-123': _iso('In-123', 'indium-123', 49, 123, 122.910434, False),
+    'In-124': _iso('In-124', 'indium-124', 49, 124, 123.913182, False),
+    'In-125': _iso('In-125', 'indium-125', 49, 125, 124.913605, False),
+    'In-126': _iso('In-126', 'indium-126', 49, 126, 125.916507, False),
+    'In-127': _iso('In-127', 'indium-127', 49, 127, 126.917446, False),
+    'In-128': _iso('In-128', 'indium-128', 49, 128, 127.92040, False),
+    'In-129': _iso('In-129', 'indium-129', 49, 129, 128.9218053, False),
+    'In-130': _iso('In-130', 'indium-130', 49, 130, 129.924977, False),
+    'In-131': _iso('In-131', 'indium-131', 49, 131, 130.9269715, False),
+    'In-132': _iso('In-132', 'indium-132', 49, 132, 131.933001, False),
+    'In-133': _iso('In-133', 'indium-133', 49, 133, 132.93831, False),
+    'In-134': _iso('In-134', 'indium-134', 49, 134, 133.94454, False),
+    'In-135': _iso('In-135', 'indium-135', 49, 135, 134.95005, False),
+    'Sn-99': _iso('Sn-99', 'tin-99', 50, 99, 98.94853, False),
+    'Sn-100': _iso('Sn-100', 'tin-100', 50, 100, 99.93850, False),
+    'Sn-101': _iso('Sn-101', 'tin-101', 50, 101, 100.93526, False),
+    'Sn-102': _iso('Sn-102', 'tin-102', 50, 102, 101.93029, False),
+    'Sn-103': _iso('Sn-103', 'tin-103', 50, 103, 102.928105, False),
+    'Sn-104': _iso('Sn-104', 'tin-104', 50, 104, 103.9231052, False),
+    'Sn-105': _iso('Sn-105', 'tin-105', 50, 105, 104.9212684, False),
+    'Sn-106': _iso('Sn-106', 'tin-106', 50, 106, 105.9169574, False),
+    'Sn-107': _iso('Sn-107', 'tin-107', 50, 107, 106.9157137, False),
+    'Sn-108': _iso('Sn-108', 'tin-108', 50, 108, 107.9118943, False),
+    'Sn-109': _iso('Sn-109', 'tin-109', 50, 109, 108.9112921, False),
+    'Sn-110': _iso('Sn-110', 'tin-110', 50, 110, 109.907845, False),
+    'Sn-111': _iso('Sn-111', 'tin-111', 50, 111, 110.9077401, False),
+    'Sn-112': _iso('Sn-112', 'tin-112', 50, 112, 111.90482387, True,
+                   isotopic_abundance=0.0097),
+    'Sn-113': _iso('Sn-113', 'tin-113', 50, 113, 112.9051757, False,
+                   half_life=9942825.6),
+    'Sn-114': _iso('Sn-114', 'tin-114', 50, 114, 113.9027827, True,
+                   isotopic_abundance=0.0066),
+    'Sn-115': _iso('Sn-115', 'tin-115', 50, 115, 114.903344699, True,
+                   isotopic_abundance=0.0034),
+    'Sn-116': _iso('Sn-116', 'tin-116', 50, 116, 115.90174280, True,
+                   isotopic_abundance=0.1454),
+    'Sn-117': _iso('Sn-117', 'tin-117', 50, 117, 116.90295398, True,
+                   isotopic_abundance=0.0768),
+    'Sn-118': _iso('Sn-118', 'tin-118', 50, 118, 117.90160657, True,
+                   isotopic_abundance=0.2422),
+    'Sn-119': _iso('Sn-119', 'tin-119', 50, 119, 118.90331117, True,
+                   isotopic_abundance=0.0859),
+    'Sn-120': _iso('Sn-120', 'tin-120', 50, 120, 119.90220163, True,
+                   isotopic_abundance=0.3258),
+    'Sn-121': _iso('Sn-121', 'tin-121', 50, 121, 120.9042426, False),
+    'Sn-122': _iso('Sn-122', 'tin-122', 50, 122, 121.9034438, True,
+                   isotopic_abundance=0.0463),
+    'Sn-123': _iso('Sn-123', 'tin-123', 50, 123, 122.9057252, False),
+    'Sn-124': _iso('Sn-124', 'tin-124', 50, 124, 123.9052766, True,
+                   isotopic_abundance=0.0579),
+    'Sn-125': _iso('Sn-125', 'tin-125', 50, 125, 124.9077864, False),
+    'Sn-126': _iso('Sn-126', 'tin-126', 50, 126, 125.907659, False),
+    'Sn-127': _iso('Sn-127', 'tin-127', 50, 127, 126.910390, False),
+    'Sn-128':
+        _iso('Sn-128', 'tin-128', 50, 128, 127.910507, False),
+    'Sn-129': _iso('Sn-129', 'tin-129', 50, 129, 128.913465, False),
+    'Sn-130': _iso('Sn-130', 'tin-130', 50, 130, 129.9139738, False),
+    'Sn-131': _iso('Sn-131', 'tin-131', 50, 131, 130.9170450, False),
+    'Sn-132': _iso('Sn-132', 'tin-132', 50, 132, 131.9178267, False),
+    'Sn-133': _iso('Sn-133', 'tin-133', 50, 133, 132.9239134, False),
+    'Sn-134': _iso('Sn-134', 'tin-134', 50, 134, 133.9286821, False),
+    'Sn-135': _iso('Sn-135', 'tin-135', 50, 135, 134.9349086, False),
+    'Sn-136': _iso('Sn-136', 'tin-136', 50, 136, 135.93999, False),
+    'Sn-137': _iso('Sn-137', 'tin-137', 50, 137, 136.94655, False),
+    'Sn-138': _iso('Sn-138', 'tin-138', 50, 138, 137.95184, False),
+    'Sb-103': _iso('Sb-103', 'antimony-103', 51, 103, 102.93969, False),
+    'Sb-104': _iso('Sb-104', 'antimony-104', 51, 104, 103.93648, False),
+    'Sb-105': _iso('Sb-105', 'antimony-105', 51, 105, 104.931276, False),
+    'Sb-106': _iso('Sb-106', 'antimony-106', 51, 106, 105.9286380, False),
+    'Sb-107': _iso('Sb-107', 'antimony-107', 51, 107, 106.9241506, False),
+    'Sb-108': _iso('Sb-108', 'antimony-108', 51, 108, 107.9222267, False),
+    'Sb-109': _iso('Sb-109', 'antimony-109', 51, 109, 108.9181411, False),
+    'Sb-110': _iso('Sb-110', 'antimony-110', 51, 110, 109.9168543, False),
+    'Sb-111': _iso('Sb-111', 'antimony-111', 51, 111, 110.9132182, False),
+    'Sb-112': _iso('Sb-112', 'antimony-112', 51, 112, 111.912400, False),
+    'Sb-113': _iso('Sb-113', 'antimony-113', 51, 113, 112.909375, False),
+    'Sb-114': _iso('Sb-114', 'antimony-114', 51, 114, 113.909290, False),
+    'Sb-115': _iso('Sb-115', 'antimony-115', 51, 115, 114.906598, False),
+    'Sb-116': _iso('Sb-116', 'antimony-116', 51, 116, 115.9067931, False),
+    'Sb-117': _iso('Sb-117', 'antimony-117', 51, 117, 116.9048415, False),
+    'Sb-118': _iso('Sb-118', 'antimony-118', 51, 118, 117.9055321, False),
+    'Sb-119': _iso('Sb-119', 'antimony-119', 51, 119, 118.9039455, False),
+    'Sb-120': _iso('Sb-120', 'antimony-120', 51, 120, 119.9050794, False),
+    'Sb-121': _iso('Sb-121', 'antimony-121', 51, 121, 120.9038120, True,
+                   isotopic_abundance=0.5721),
+    'Sb-122': _iso('Sb-122', 'antimony-122', 51, 122, 121.9051699, False),
+    'Sb-123': _iso('Sb-123', 'antimony-123', 51, 123, 122.9042132, True,
+                   isotopic_abundance=0.4279),
+    'Sb-124': _iso('Sb-124', 'antimony-124', 51, 124, 123.9059350, False),
+    'Sb-125': _iso('Sb-125', 'antimony-125', 51, 125, 124.9052530, False,
+                   half_life=87053184.0),
+    'Sb-126': _iso('Sb-126', 'antimony-126', 51, 126, 125.907253, False),
+    'Sb-127': _iso('Sb-127', 'antimony-127', 51, 127, 126.9069243, False),
+    'Sb-128': _iso('Sb-128', 'antimony-128', 51, 128, 127.909146, False),
+    'Sb-129': _iso('Sb-129', 'antimony-129', 51, 129, 128.909147, False),
+    'Sb-130': _iso('Sb-130', 'antimony-130', 51, 130, 129.911662, False),
+    'Sb-131': _iso('Sb-131', 'antimony-131', 51, 131, 130.9119888, False),
+    'Sb-132': _iso('Sb-132', 'antimony-132', 51, 132, 131.9145077, False),
+    'Sb-133': _iso('Sb-133', 'antimony-133', 51, 133, 132.9152732, False),
+    'Sb-134': _iso('Sb-134', 'antimony-134', 51, 134, 133.9205357, False),
+    'Sb-135': _iso('Sb-135', 'antimony-135', 51, 135, 134.9251851, False),
+    'Sb-136': _iso('Sb-136', 'antimony-136', 51, 136, 135.9307459, False),
+    'Sb-137': _iso('Sb-137', 'antimony-137', 51, 137, 136.93555, False),
+    'Sb-138': _iso('Sb-138', 'antimony-138', 51, 138, 137.94145, False),
+    'Sb-139': _iso('Sb-139', 'antimony-139', 51, 139, 138.94655, False),
+    'Sb-140': _iso('Sb-140', 'antimony-140', 51, 140, 139.95283,
+                   False),
+    'Te-105': _iso('Te-105', 'tellurium-105', 52, 105, 104.94330, False),
+    'Te-106': _iso('Te-106', 'tellurium-106', 52, 106, 105.93750, False),
+    'Te-107': _iso('Te-107', 'tellurium-107', 52, 107, 106.935012, False),
+    'Te-108': _iso('Te-108', 'tellurium-108', 52, 108, 107.9293805, False),
+    'Te-109': _iso('Te-109', 'tellurium-109', 52, 109, 108.9273045, False),
+    'Te-110': _iso('Te-110', 'tellurium-110', 52, 110, 109.9224581, False),
+    'Te-111': _iso('Te-111', 'tellurium-111', 52, 111, 110.9210006, False),
+    'Te-112': _iso('Te-112', 'tellurium-112', 52, 112, 111.9167279, False),
+    'Te-113': _iso('Te-113', 'tellurium-113', 52, 113, 112.915891, False),
+    'Te-114': _iso('Te-114', 'tellurium-114', 52, 114, 113.912089, False),
+    'Te-115': _iso('Te-115', 'tellurium-115', 52, 115, 114.911902, False),
+    'Te-116': _iso('Te-116', 'tellurium-116', 52, 116, 115.908460, False),
+    'Te-117': _iso('Te-117', 'tellurium-117', 52, 117, 116.908646, False),
+    'Te-118': _iso('Te-118', 'tellurium-118', 52, 118, 117.905854, False),
+    'Te-119': _iso('Te-119', 'tellurium-119', 52, 119, 118.9064071, False),
+    'Te-120': _iso('Te-120', 'tellurium-120', 52, 120, 119.9040593, True,
+                   isotopic_abundance=0.0009),
+    'Te-121': _iso('Te-121', 'tellurium-121', 52, 121, 120.904944, False),
+    'Te-122': _iso('Te-122', 'tellurium-122', 52, 122, 121.9030435, True,
+                   isotopic_abundance=0.0255),
+    'Te-123': _iso('Te-123', 'tellurium-123', 52, 123, 122.9042698, True,
+                   isotopic_abundance=0.0089),
+    'Te-124': _iso('Te-124', 'tellurium-124', 52, 124, 123.9028171, True,
+                   isotopic_abundance=0.0474),
+    'Te-125': _iso('Te-125', 'tellurium-125', 52, 125, 124.9044299, True,
+                   isotopic_abundance=0.0707),
+    'Te-126': _iso('Te-126', 'tellurium-126', 52, 126, 125.9033109, True,
+                   isotopic_abundance=0.1884),
+    'Te-127': _iso('Te-127', 'tellurium-127', 52, 127, 126.9052257, False),
+    'Te-128': _iso('Te-128', 'tellurium-128', 52, 128, 127.90446128, False,
+                   isotopic_abundance=0.3174),
+    'Te-129': _iso('Te-129', 'tellurium-129', 52, 129, 128.90659646, False),
+    'Te-130': _iso('Te-130', 'tellurium-130', 52, 130, 129.906222748, False,
+                   isotopic_abundance=0.3408),
+    'Te-131': _iso('Te-131', 'tellurium-131', 52, 131, 130.908522213, False),
+    'Te-132': _iso('Te-132', 'tellurium-132', 52, 132, 131.9085467, False),
+    'Te-133': _iso('Te-133', 'tellurium-133', 52, 133, 132.9109688, False),
+    'Te-134': _iso('Te-134', 'tellurium-134', 52, 134, 133.9113940, False),
+    'Te-135': _iso('Te-135', 'tellurium-135', 52, 135, 134.9165557, False),
+    'Te-136': _iso('Te-136', 'tellurium-136', 52, 136, 135.9201006, False),
+    'Te-137': _iso('Te-137', 'tellurium-137', 52, 137, 136.9255989, False),
+    'Te-138': _iso('Te-138', 'tellurium-138', 52, 138, 137.9294722, False),
+    'Te-139': _iso('Te-139', 'tellurium-139', 52, 139, 138.9353672, False),
+    'Te-140': _iso('Te-140', 'tellurium-140', 52, 140, 139.939499, False),
+    'Te-141': _iso('Te-141', 'tellurium-141', 52, 141, 140.94580, False),
+    'Te-142': _iso('Te-142', 'tellurium-142', 52, 142, 141.95022, False),
+    'Te-143': _iso('Te-143', 'tellurium-143', 52, 143, 142.95676, False),
+    'I-107': _iso('I-107', 'iodine-107', 53, 107, 106.94678, False),
+    'I-108': _iso('I-108', 'iodine-108', 53, 108, 107.94348, False),
+    'I-109': _iso('I-109', 'iodine-109', 53, 109, 108.9380853, False),
+    'I-110': _iso('I-110', 'iodine-110', 53, 110, 109.935089, False),
+    'I-111': _iso('I-111', 'iodine-111', 53, 111, 110.9302692, False),
+    'I-112': _iso('I-112', 'iodine-112', 53, 112, 111.928005, False),
+    'I-113': _iso('I-113', 'iodine-113',
+                  53, 113, 112.9236501, False),
+    'I-114': _iso('I-114', 'iodine-114', 53, 114, 113.92185, False),
+    'I-115': _iso('I-115', 'iodine-115', 53, 115, 114.918048, False),
+    'I-116': _iso('I-116', 'iodine-116', 53, 116, 115.91681, False),
+    'I-117': _iso('I-117', 'iodine-117', 53, 117, 116.913648, False),
+    'I-118': _iso('I-118', 'iodine-118', 53, 118, 117.913074, False),
+    'I-119': _iso('I-119', 'iodine-119', 53, 119, 118.910074, False),
+    'I-120': _iso('I-120', 'iodine-120', 53, 120, 119.910087, False),
+    'I-121': _iso('I-121', 'iodine-121', 53, 121, 120.9074051, False),
+    'I-122': _iso('I-122', 'iodine-122', 53, 122, 121.9075888, False),
+    'I-123': _iso('I-123', 'iodine-123', 53, 123, 122.9055885, False,
+                  half_life=47604.6),
+    'I-124': _iso('I-124', 'iodine-124', 53, 124, 123.9062090, False),
+    'I-125': _iso('I-125', 'iodine-125', 53, 125, 124.9046294, False,
+                  half_life=5139936.0),
+    'I-126': _iso('I-126', 'iodine-126', 53, 126, 125.9056233, False),
+    'I-127': _iso('I-127', 'iodine-127', 53, 127, 126.9044719, True,
+                  isotopic_abundance=1),
-    'Pa-232': Iso('Pa-232', 'protactinium-232', 91, 232, 232.0385917, False),
-    'Pa-233': Iso('Pa-233', 'protactinium-233', 91, 233, 233.0402472, False),
-    'Pa-234': Iso('Pa-234', 'protactinium-234', 91, 234, 234.0433072, False),
-    'Pa-235': Iso('Pa-235', 'protactinium-235', 91, 235, 235.045399, False),
-    'Pa-236': Iso('Pa-236', 'protactinium-236', 91, 236, 236.048668, False),
-    'Pa-237': Iso('Pa-237', 'protactinium-237', 91, 237, 237.051023, False),
-    'Pa-238': Iso('Pa-238', 'protactinium-238', 91, 238, 238.054637, False),
-    'Pa-239': Iso('Pa-239', 'protactinium-239', 91, 239, 239.05726, False),
-    'Pa-240': Iso('Pa-240', 'protactinium-240', 91, 240, 240.06098, False),
-    'Pa-241': Iso('Pa-241', 'protactinium-241', 91, 241, 241.06408, False),
-    'U-217': Iso('U-217', 'uranium-217', 92, 217, 217.02466, False),
-    'U-218': Iso('U-218', 'uranium-218', 92, 218, 218.023523, False),
-    'U-219': Iso('U-219', 'uranium-219', 92, 219, 219.024999, False),
-    'U-220': Iso('U-220', 'uranium-220', 92, 220, 220.02462, False),
-    'U-221': Iso('U-221', 'uranium-221', 92, 221, 221.02628, False),
-    'U-222': Iso('U-222', 'uranium-222', 92, 222, 222.02600, False),
-    'U-223': Iso('U-223', 'uranium-223', 92, 223, 223.027739, False),
-    'U-224': Iso('U-224', 'uranium-224', 92, 224, 224.027605, False),
-    'U-225': Iso('U-225', 'uranium-225', 92, 225, 225.029391, False),
-    'U-226': Iso('U-226', 'uranium-226', 92, 226, 226.029339, False),
-    'U-227': Iso('U-227', 'uranium-227', 92, 227, 227.031157, False),
-    'U-228': Iso('U-228', 'uranium-228', 92, 228, 228.031371, False),
-    'U-229': Iso('U-229', 'uranium-229', 92, 229, 229.0335063, False),
-    'U-230': Iso('U-230', 'uranium-230', 92, 230, 230.0339401, False),
-    'U-231': Iso('U-231', 'uranium-231', 92, 231, 231.0362939, False),
-    'U-232': Iso('U-232', 'uranium-232', 92, 232, 232.0371563, False),
-    'U-233': Iso('U-233', 'uranium-233', 92, 233, 233.0396355, False),
-    'U-234': Iso('U-234', 'uranium-234', 92, 234, 234.0409523, False,
-                 isotopic_abundance=0.000054),
-    'U-235': Iso('U-235', 'uranium-235', 92, 235, 235.0439301, False,
-                 isotopic_abundance=0.007204),
-    'U-236': Iso('U-236', 'uranium-236', 92, 236, 236.0455682, False),
-    'U-237': Iso('U-237', 'uranium-237', 92, 237, 237.0487304, False),
-    'U-238': Iso('U-238', 'uranium-238', 92, 238, 238.0507884, False,
-                 isotopic_abundance=0.992742),
-    'U-239': Iso('U-239', 'uranium-239', 92, 239, 239.0542935, False),
-    'U-240': Iso('U-240', 'uranium-240', 92, 240, 240.0565934, False),
-    'U-241':
-        Iso('U-241', 'uranium-241', 92, 241, 241.06033, False),
-    'U-242': Iso('U-242', 'uranium-242', 92, 242, 242.06293, False),
-    'U-243': Iso('U-243', 'uranium-243', 92, 243, 243.06699, False),
-    'Np-219': Iso('Np-219', 'neptunium-219', 93, 219, 219.03143, False),
-    'Np-220': Iso('Np-220', 'neptunium-220', 93, 220, 220.03254, False),
-    'Np-221': Iso('Np-221', 'neptunium-221', 93, 221, 221.03204, False),
-    'Np-222': Iso('Np-222', 'neptunium-222', 93, 222, 222.03330, False),
-    'Np-223': Iso('Np-223', 'neptunium-223', 93, 223, 223.03285, False),
-    'Np-224': Iso('Np-224', 'neptunium-224', 93, 224, 224.03422, False),
-    'Np-225': Iso('Np-225', 'neptunium-225', 93, 225, 225.033911, False),
-    'Np-226': Iso('Np-226', 'neptunium-226', 93, 226, 226.035188, False),
-    'Np-227': Iso('Np-227', 'neptunium-227', 93, 227, 227.034957, False),
-    'Np-228': Iso('Np-228', 'neptunium-228', 93, 228, 228.036067, False),
-    'Np-229': Iso('Np-229', 'neptunium-229', 93, 229, 229.036264, False),
-    'Np-230': Iso('Np-230', 'neptunium-230', 93, 230, 230.037828, False),
-    'Np-231': Iso('Np-231', 'neptunium-231', 93, 231, 231.038245, False),
-    'Np-232': Iso('Np-232', 'neptunium-232', 93, 232, 232.04011, False),
-    'Np-233': Iso('Np-233', 'neptunium-233', 93, 233, 233.040741, False),
-    'Np-234': Iso('Np-234', 'neptunium-234', 93, 234, 234.0428953, False),
-    'Np-235': Iso('Np-235', 'neptunium-235', 93, 235, 235.0440635, False),
-    'Np-236': Iso('Np-236', 'neptunium-236', 93, 236, 236.046570, False),
-    'Np-237': Iso('Np-237', 'neptunium-237', 93, 237, 237.0481736, False),
-    'Np-238': Iso('Np-238', 'neptunium-238', 93, 238, 238.0509466, False),
-    'Np-239': Iso('Np-239', 'neptunium-239', 93, 239, 239.0529392, False),
-    'Np-240': Iso('Np-240', 'neptunium-240', 93, 240, 240.056165, False),
-    'Np-241': Iso('Np-241', 'neptunium-241', 93, 241, 241.058253, False),
-    'Np-242': Iso('Np-242', 'neptunium-242', 93, 242, 242.06164, False),
-    'Np-243': Iso('Np-243', 'neptunium-243', 93, 243, 243.064280, False),
-    'Np-244': Iso('Np-244', 'neptunium-244', 93, 244, 244.06785, False),
-    'Np-245': Iso('Np-245', 'neptunium-245', 93, 245, 245.07080, False),
-    'Pu-228': Iso('Pu-228', 'plutonium-228', 94, 228, 228.038732, False),
-    'Pu-229': Iso('Pu-229', 'plutonium-229', 94, 229, 229.040144, False),
-    'Pu-230': Iso('Pu-230', 'plutonium-230', 94, 230, 230.039650, False),
-    'Pu-231': Iso('Pu-231', 'plutonium-231', 94, 231, 231.041102, False),
-    'Pu-232': Iso('Pu-232', 'plutonium-232', 94, 232, 232.041185, False),
-    'Pu-233': Iso('Pu-233', 'plutonium-233', 94, 233, 233.042998, False),
-    'Pu-234': Iso('Pu-234', 'plutonium-234', 94, 234, 234.0433174, False),
-    'Pu-235': Iso('Pu-235', 'plutonium-235', 94, 235, 235.045286, False),
-    'Pu-236': Iso('Pu-236', 'plutonium-236', 94, 236, 236.0460581, False),
-    'Pu-237': Iso('Pu-237', 'plutonium-237', 94, 237, 237.0484098, False),
-    'Pu-238': Iso('Pu-238', 'plutonium-238', 94, 238, 238.0495601, False),
-    'Pu-239': Iso('Pu-239', 'plutonium-239', 94, 239, 239.0521636, False),
-    'Pu-240': Iso('Pu-240', 'plutonium-240', 94, 240, 240.0538138, False),
-    'Pu-241': Iso('Pu-241', 'plutonium-241', 94, 241, 241.0568517, False),
-    'Pu-242': Iso('Pu-242', 'plutonium-242', 94, 242, 242.0587428, False),
-    'Pu-243': Iso('Pu-243', 'plutonium-243', 94, 243, 243.0620036, False),
-    'Pu-244': Iso('Pu-244', 'plutonium-244', 94, 244, 244.0642053, False),
-    'Pu-245': Iso('Pu-245', 'plutonium-245', 94, 245, 245.067826, False),
-    'Pu-246': Iso('Pu-246', 'plutonium-246', 94, 246, 246.070205, False),
-    'Pu-247': Iso('Pu-247', 'plutonium-247', 94,
-                  247, 247.07419, False),
-    'Am-230': Iso('Am-230', 'americium-230', 95, 230, 230.04609, False),
-    'Am-231': Iso('Am-231', 'americium-231', 95, 231, 231.04556, False),
-    'Am-232': Iso('Am-232', 'americium-232', 95, 232, 232.04645, False),
-    'Am-233': Iso('Am-233', 'americium-233', 95, 233, 233.04644, False),
-    'Am-234': Iso('Am-234', 'americium-234', 95, 234, 234.04773, False),
-    'Am-235': Iso('Am-235', 'americium-235', 95, 235, 235.047908, False),
-    'Am-236': Iso('Am-236', 'americium-236', 95, 236, 236.04943, False),
-    'Am-237': Iso('Am-237', 'americium-237', 95, 237, 237.049996, False),
-    'Am-238': Iso('Am-238', 'americium-238', 95, 238, 238.051985, False),
-    'Am-239': Iso('Am-239', 'americium-239', 95, 239, 239.0530247, False),
-    'Am-240': Iso('Am-240', 'americium-240', 95, 240, 240.055300, False),
-    'Am-241': Iso('Am-241', 'americium-241', 95, 241, 241.0568293, False),
-    'Am-242': Iso('Am-242', 'americium-242', 95, 242, 242.0595494, False),
-    'Am-243': Iso('Am-243', 'americium-243', 95, 243, 243.0613813, False),
-    'Am-244': Iso('Am-244', 'americium-244', 95, 244, 244.0642851, False),
-    'Am-245': Iso('Am-245', 'americium-245', 95, 245, 245.0664548, False),
-    'Am-246': Iso('Am-246', 'americium-246', 95, 246, 246.069775, False),
-    'Am-247': Iso('Am-247', 'americium-247', 95, 247, 247.07209, False),
-    'Am-248': Iso('Am-248', 'americium-248', 95, 248, 248.07575, False),
-    'Am-249': Iso('Am-249', 'americium-249', 95, 249, 249.07848, False),
-    'Cm-232': Iso('Cm-232', 'curium-232', 96, 232, 232.04982, False),
-    'Cm-233': Iso('Cm-233', 'curium-233', 96, 233, 233.050770, False),
-    'Cm-234': Iso('Cm-234', 'curium-234', 96, 234, 234.050160, False),
-    'Cm-235': Iso('Cm-235', 'curium-235', 96, 235, 235.05154, False),
-    'Cm-236': Iso('Cm-236', 'curium-236', 96, 236, 236.051374, False),
-    'Cm-237': Iso('Cm-237', 'curium-237', 96, 237, 237.052869, False),
-    'Cm-238': Iso('Cm-238', 'curium-238', 96, 238, 238.053081, False),
-    'Cm-239': Iso('Cm-239', 'curium-239', 96, 239, 239.054910, False),
-    'Cm-240': Iso('Cm-240', 'curium-240', 96, 240, 240.0555297, False),
-    'Cm-241': Iso('Cm-241', 'curium-241', 96, 241, 241.0576532, False),
-    'Cm-242': Iso('Cm-242', 'curium-242', 96, 242, 242.0588360, False),
-    'Cm-243': Iso('Cm-243', 'curium-243', 96, 243, 243.0613893, False),
-    'Cm-244': Iso('Cm-244', 'curium-244', 96, 244, 244.0627528, False),
-    'Cm-245': Iso('Cm-245', 'curium-245', 96, 245, 245.0654915, False),
-    'Cm-246': Iso('Cm-246', 'curium-246', 96, 246, 246.0672238, False),
-    'Cm-247': Iso('Cm-247', 'curium-247', 96, 247, 247.0703541, False),
-    'Cm-248': Iso('Cm-248', 'curium-248', 96, 248, 248.0723499, False),
-    'Cm-249': Iso('Cm-249', 'curium-249', 96, 249, 249.0759548, False),
-    'Cm-250': Iso('Cm-250', 'curium-250', 96, 250, 250.078358, False),
-    'Cm-251': Iso('Cm-251', 'curium-251', 96, 251, 251.082286, False),
-    'Cm-252': Iso('Cm-252', 'curium-252', 96, 252, 252.08487, False),
-    'Bk-234': Iso('Bk-234', 'berkelium-234', 97, 234, 234.05727, False),
-    'Bk-235': Iso('Bk-235', 'berkelium-235', 97, 235, 235.05658, False),
-    'Bk-236': Iso('Bk-236', 'berkelium-236', 97, 236, 236.05748, False),
-    'Bk-237': Iso('Bk-237', 'berkelium-237', 97, 237, 237.05710, False),
-    'Bk-238': Iso('Bk-238', 'berkelium-238', 97, 238, 238.05820, False),
-    'Bk-239': Iso('Bk-239', 'berkelium-239', 97, 239, 239.05824, False),
-    'Bk-240': Iso('Bk-240', 'berkelium-240', 97, 240, 240.05976, False),
-    'Bk-241': Iso('Bk-241', 'berkelium-241', 97, 241, 241.06016, False),
-    'Bk-242': Iso('Bk-242', 'berkelium-242', 97, 242, 242.06198,
-                  False),
-    'Bk-243': Iso('Bk-243', 'berkelium-243', 97, 243, 243.0630078, False),
-    'Bk-244': Iso('Bk-244', 'berkelium-244', 97, 244, 244.065181, False),
-    'Bk-245': Iso('Bk-245', 'berkelium-245', 97, 245, 245.0663618, False),
-    'Bk-246': Iso('Bk-246', 'berkelium-246', 97, 246, 246.068673, False),
-    'Bk-247': Iso('Bk-247', 'berkelium-247', 97, 247, 247.0703073, False),
-    'Bk-248': Iso('Bk-248', 'berkelium-248', 97, 248, 248.073088, False),
-    'Bk-249': Iso('Bk-249', 'berkelium-249', 97, 249, 249.0749877, False),
-    'Bk-250': Iso('Bk-250', 'berkelium-250', 97, 250, 250.0783167, False),
-    'Bk-251': Iso('Bk-251', 'berkelium-251', 97, 251, 251.080762, False),
-    'Bk-252': Iso('Bk-252', 'berkelium-252', 97, 252, 252.08431, False),
-    'Bk-253': Iso('Bk-253', 'berkelium-253', 97, 253, 253.08688, False),
-    'Bk-254': Iso('Bk-254', 'berkelium-254', 97, 254, 254.09060, False),
-    'Cf-237': Iso('Cf-237', 'californium-237', 98, 237, 237.062198, False),
-    'Cf-238': Iso('Cf-238', 'californium-238', 98, 238, 238.06149, False),
-    'Cf-239': Iso('Cf-239', 'californium-239', 98, 239, 239.06253, False),
-    'Cf-240': Iso('Cf-240', 'californium-240', 98, 240, 240.062256, False),
-    'Cf-241': Iso('Cf-241', 'californium-241', 98, 241, 241.06369, False),
-    'Cf-242': Iso('Cf-242', 'californium-242', 98, 242, 242.063754, False),
-    'Cf-243': Iso('Cf-243', 'californium-243', 98, 243, 243.06548, False),
-    'Cf-244': Iso('Cf-244', 'californium-244', 98, 244, 244.0660008, False),
-    'Cf-245': Iso('Cf-245', 'californium-245', 98, 245, 245.0680487, False),
-    'Cf-246': Iso('Cf-246', 'californium-246', 98, 246, 246.0688055, False),
-    'Cf-247': Iso('Cf-247', 'californium-247', 98, 247, 247.070965, False),
-    'Cf-248': Iso('Cf-248', 'californium-248', 98, 248, 248.0721851, False),
-    'Cf-249': Iso('Cf-249', 'californium-249', 98, 249, 249.0748539, False),
-    'Cf-250': Iso('Cf-250', 'californium-250', 98, 250, 250.0764062, False),
-    'Cf-251': Iso('Cf-251', 'californium-251', 98, 251, 251.0795886, False),
-    'Cf-252': Iso('Cf-252', 'californium-252', 98, 252, 252.0816272, False),
-    'Cf-253': Iso('Cf-253', 'californium-253', 98, 253, 253.0851345, False),
-    'Cf-254': Iso('Cf-254', 'californium-254', 98, 254, 254.087324, False),
-    'Cf-255': Iso('Cf-255', 'californium-255', 98, 255, 255.09105, False),
-    'Cf-256': Iso('Cf-256', 'californium-256', 98, 256, 256.09344, False),
-    'Es-239': Iso('Es-239', 'einsteinium-239', 99, 239, 239.06823, False),
-    'Es-240': Iso('Es-240', 'einsteinium-240', 99, 240, 240.06892, False),
-    'Es-241': Iso('Es-241', 'einsteinium-241', 99, 241, 241.06856, False),
-    'Es-242': Iso('Es-242', 'einsteinium-242', 99, 242, 242.06957, False),
-    'Es-243': Iso('Es-243', 'einsteinium-243', 99, 243, 243.06951, False),
-    'Es-244': Iso('Es-244', 'einsteinium-244', 99, 244, 244.07088, False),
-    'Es-245': Iso('Es-245', 'einsteinium-245', 99, 245, 245.07125, False),
-    'Es-246': Iso('Es-246', 'einsteinium-246', 99, 246, 246.07290, False),
-    'Es-247': Iso('Es-247', 'einsteinium-247', 99, 247, 247.073622, False),
-    'Es-248': Iso('Es-248', 'einsteinium-248', 99, 248, 248.075471, False),
-    'Es-249': Iso('Es-249', 'einsteinium-249', 99, 249, 249.076411, False),
-    'Es-250': Iso('Es-250', 'einsteinium-250', 99, 250, 250.07861, False),
-    'Es-251': Iso('Es-251', 'einsteinium-251', 99, 251, 251.0799936, False),
-    'Es-252': Iso('Es-252', 'einsteinium-252', 99, 252, 252.082980, False),
-    'Es-253': Iso('Es-253', 'einsteinium-253', 99, 253, 253.0848257, False),
-    'Es-254': Iso('Es-254', 'einsteinium-254', 99, 254, 254.0880222, False),
-    'Es-255':
-        Iso('Es-255', 'einsteinium-255', 99, 255, 255.090275, False),
-    'Es-256': Iso('Es-256', 'einsteinium-256', 99, 256, 256.09360, False),
-    'Es-257': Iso('Es-257', 'einsteinium-257', 99, 257, 257.09598, False),
-    'Es-258': Iso('Es-258', 'einsteinium-258', 99, 258, 258.09952, False),
-    'Fm-241': Iso('Fm-241', 'fermium-241', 100, 241, 241.07421, False),
-    'Fm-242': Iso('Fm-242', 'fermium-242', 100, 242, 242.07343, False),
-    'Fm-243': Iso('Fm-243', 'fermium-243', 100, 243, 243.07446, False),
-    'Fm-244': Iso('Fm-244', 'fermium-244', 100, 244, 244.07404, False),
-    'Fm-245': Iso('Fm-245', 'fermium-245', 100, 245, 245.07535, False),
-    'Fm-246': Iso('Fm-246', 'fermium-246', 100, 246, 246.075350, False),
-    'Fm-247': Iso('Fm-247', 'fermium-247', 100, 247, 247.07694, False),
-    'Fm-248': Iso('Fm-248', 'fermium-248', 100, 248, 248.0771865, False),
-    'Fm-249': Iso('Fm-249', 'fermium-249', 100, 249, 249.0789275, False),
-    'Fm-250': Iso('Fm-250', 'fermium-250', 100, 250, 250.0795210, False),
-    'Fm-251': Iso('Fm-251', 'fermium-251', 100, 251, 251.081540, False),
-    'Fm-252': Iso('Fm-252', 'fermium-252', 100, 252, 252.0824671, False),
-    'Fm-253': Iso('Fm-253', 'fermium-253', 100, 253, 253.0851846, False),
-    'Fm-254': Iso('Fm-254', 'fermium-254', 100, 254, 254.0868544, False),
-    'Fm-255': Iso('Fm-255', 'fermium-255', 100, 255, 255.0899640, False),
-    'Fm-256': Iso('Fm-256', 'fermium-256', 100, 256, 256.0917745, False),
-    'Fm-257': Iso('Fm-257', 'fermium-257', 100, 257, 257.0951061, False),
-    'Fm-258': Iso('Fm-258', 'fermium-258', 100, 258, 258.09708, False),
-    'Fm-259': Iso('Fm-259', 'fermium-259', 100, 259, 259.10060, False),
-    'Fm-260': Iso('Fm-260', 'fermium-260', 100, 260, 260.10281, False),
-    'Md-245': Iso('Md-245', 'mendelevium-245', 101, 245, 245.08081, False),
-    'Md-246': Iso('Md-246', 'mendelevium-246', 101, 246, 246.08171, False),
-    'Md-247': Iso('Md-247', 'mendelevium-247', 101, 247, 247.08152, False),
-    'Md-248': Iso('Md-248', 'mendelevium-248', 101, 248, 248.08282, False),
-    'Md-249': Iso('Md-249', 'mendelevium-249', 101, 249, 249.08291, False),
-    'Md-250': Iso('Md-250', 'mendelevium-250', 101, 250, 250.08441, False),
-    'Md-251': Iso('Md-251', 'mendelevium-251', 101, 251, 251.084774, False),
-    'Md-252': Iso('Md-252', 'mendelevium-252', 101, 252, 252.08643, False),
-    'Md-253': Iso('Md-253', 'mendelevium-253', 101, 253, 253.087144, False),
-    'Md-254': Iso('Md-254', 'mendelevium-254', 101, 254, 254.08959, False),
-    'Md-255': Iso('Md-255', 'mendelevium-255', 101, 255, 255.0910841, False),
-    'Md-256': Iso('Md-256', 'mendelevium-256', 101, 256, 256.09389, False),
-    'Md-257': Iso('Md-257', 'mendelevium-257', 101, 257, 257.0955424, False),
-    'Md-258': Iso('Md-258', 'mendelevium-258', 101, 258, 258.0984315, False),
-    'Md-259': Iso('Md-259', 'mendelevium-259', 101, 259, 259.10051, False),
-    'Md-260': Iso('Md-260', 'mendelevium-260', 101, 260, 260.10365, False),
-    'Md-261': Iso('Md-261', 'mendelevium-261', 101, 261, 261.10583, False),
-    'Md-262': Iso('Md-262', 'mendelevium-262', 101, 262, 262.10910, False),
-    'No-248': Iso('No-248', 'nobelium-248', 102, 248, 248.08655, False),
-    'No-249': Iso('No-249', 'nobelium-249', 102, 249, 249.08780, False),
-    'No-250': Iso('No-250', 'nobelium-250', 102, 250, 250.08756, False),
-    'No-251': Iso('No-251', 'nobelium-251', 102, 251, 251.08894, False),
-    'No-252': Iso('No-252', 'nobelium-252', 102, 252, 252.088967, False),
-    'No-253': Iso('No-253', 'nobelium-253', 102, 253, 253.0905641, False),
-    'No-254': Iso('No-254', 'nobelium-254', 102, 254, 254.090956, False),
-    'No-255':
-        Iso('No-255', 'nobelium-255', 102, 255, 255.093191, False),
-    'No-256': Iso('No-256', 'nobelium-256', 102, 256, 256.0942829, False),
-    'No-257': Iso('No-257', 'nobelium-257', 102, 257, 257.0968878, False),
-    'No-258': Iso('No-258', 'nobelium-258', 102, 258, 258.09821, False),
-    'No-259': Iso('No-259', 'nobelium-259', 102, 259, 259.10103, False),
-    'No-260': Iso('No-260', 'nobelium-260', 102, 260, 260.10264, False),
-    'No-261': Iso('No-261', 'nobelium-261', 102, 261, 261.10570, False),
-    'No-262': Iso('No-262', 'nobelium-262', 102, 262, 262.10746, False),
-    'No-263': Iso('No-263', 'nobelium-263', 102, 263, 263.11071, False),
-    'No-264': Iso('No-264', 'nobelium-264', 102, 264, 264.11273, False),
-    'Lr-251': Iso('Lr-251', 'lawrencium-251', 103, 251, 251.09418, False),
-    'Lr-252': Iso('Lr-252', 'lawrencium-252', 103, 252, 252.09526, False),
-    'Lr-253': Iso('Lr-253', 'lawrencium-253', 103, 253, 253.09509, False),
-    'Lr-254': Iso('Lr-254', 'lawrencium-254', 103, 254, 254.09648, False),
-    'Lr-255': Iso('Lr-255', 'lawrencium-255', 103, 255, 255.096562, False),
-    'Lr-256': Iso('Lr-256', 'lawrencium-256', 103, 256, 256.098494, False),
-    'Lr-257': Iso('Lr-257', 'lawrencium-257', 103, 257, 257.099418, False),
-    'Lr-258': Iso('Lr-258', 'lawrencium-258', 103, 258, 258.10176, False),
-    'Lr-259': Iso('Lr-259', 'lawrencium-259', 103, 259, 259.102902, False),
-    'Lr-260': Iso('Lr-260', 'lawrencium-260', 103, 260, 260.10550, False),
-    'Lr-261': Iso('Lr-261', 'lawrencium-261', 103, 261, 261.10688, False),
-    'Lr-262': Iso('Lr-262', 'lawrencium-262', 103, 262, 262.10961, False),
-    'Lr-263': Iso('Lr-263', 'lawrencium-263', 103, 263, 263.11136, False),
-    'Lr-264': Iso('Lr-264', 'lawrencium-264', 103, 264, 264.11420, False),
-    'Lr-265': Iso('Lr-265', 'lawrencium-265', 103, 265, 265.11619, False),
-    'Lr-266': Iso('Lr-266', 'lawrencium-266', 103, 266, 266.11983, False),
-    'Rf-253': Iso('Rf-253', 'rutherfordium-253', 104, 253, 253.10044, False),
-    'Rf-254': Iso('Rf-254', 'rutherfordium-254', 104, 254, 254.10005, False),
-    'Rf-255': Iso('Rf-255', 'rutherfordium-255', 104, 255, 255.10127, False),
-    'Rf-256': Iso('Rf-256', 'rutherfordium-256', 104, 256, 256.101152, False),
-    'Rf-257': Iso('Rf-257', 'rutherfordium-257', 104, 257, 257.102918, False),
-    'Rf-258': Iso('Rf-258', 'rutherfordium-258', 104, 258, 258.103428, False),
-    'Rf-259': Iso('Rf-259', 'rutherfordium-259', 104, 259, 259.105596, False),
-    'Rf-260': Iso('Rf-260', 'rutherfordium-260', 104, 260, 260.10644, False),
-    'Rf-261': Iso('Rf-261', 'rutherfordium-261', 104, 261, 261.108773, False),
-    'Rf-262': Iso('Rf-262', 'rutherfordium-262', 104, 262, 262.10992, False),
-    'Rf-263': Iso('Rf-263', 'rutherfordium-263', 104, 263, 263.11249, False),
-    'Rf-264': Iso('Rf-264', 'rutherfordium-264', 104, 264, 264.11388, False),
-    'Rf-265': Iso('Rf-265', 'rutherfordium-265', 104, 265, 265.11668, False),
-    'Rf-266': Iso('Rf-266', 'rutherfordium-266', 104, 266, 266.11817, False),
-    'Rf-267': Iso('Rf-267', 'rutherfordium-267', 104, 267, 267.12179, False),
-    'Rf-268': Iso('Rf-268', 'rutherfordium-268', 104, 268, 268.12397, False),
-    'Db-255': Iso('Db-255', 'dubnium-255', 105, 255, 255.10707, False),
-    'Db-256': Iso('Db-256', 'dubnium-256', 105, 256, 256.10789, False),
-    'Db-257': Iso('Db-257', 'dubnium-257', 105, 257, 257.10758, False),
-    'Db-258': Iso('Db-258', 'dubnium-258', 105, 258, 258.10928, False),
-    'Db-259': Iso('Db-259', 'dubnium-259', 105, 259, 259.109492, False),
-    'Db-260': Iso('Db-260', 'dubnium-260', 105, 260, 260.11130, False),
-    'Db-261': Iso('Db-261',
-                  'dubnium-261', 105, 261, 261.11192, False),
-    'Db-262': Iso('Db-262', 'dubnium-262', 105, 262, 262.11407, False),
-    'Db-263': Iso('Db-263', 'dubnium-263', 105, 263, 263.11499, False),
-    'Db-264': Iso('Db-264', 'dubnium-264', 105, 264, 264.11741, False),
-    'Db-265': Iso('Db-265', 'dubnium-265', 105, 265, 265.11861, False),
-    'Db-266': Iso('Db-266', 'dubnium-266', 105, 266, 266.12103, False),
-    'Db-267': Iso('Db-267', 'dubnium-267', 105, 267, 267.12247, False),
-    'Db-268': Iso('Db-268', 'dubnium-268', 105, 268, 268.12567, False),
-    'Db-269': Iso('Db-269', 'dubnium-269', 105, 269, 269.12791, False),
-    'Db-270': Iso('Db-270', 'dubnium-270', 105, 270, 270.13136, False),
-    'Sg-258': Iso('Sg-258', 'seaborgium-258', 106, 258, 258.11298, False),
-    'Sg-259': Iso('Sg-259', 'seaborgium-259', 106, 259, 259.11440, False),
-    'Sg-260': Iso('Sg-260', 'seaborgium-260', 106, 260, 260.114384, False),
-    'Sg-261': Iso('Sg-261', 'seaborgium-261', 106, 261, 261.115949, False),
-    'Sg-262': Iso('Sg-262', 'seaborgium-262', 106, 262, 262.116337, False),
-    'Sg-263': Iso('Sg-263', 'seaborgium-263', 106, 263, 263.11829, False),
-    'Sg-264': Iso('Sg-264', 'seaborgium-264', 106, 264, 264.11893, False),
-    'Sg-265': Iso('Sg-265', 'seaborgium-265', 106, 265, 265.12109, False),
-    'Sg-266': Iso('Sg-266', 'seaborgium-266', 106, 266, 266.12198, False),
-    'Sg-267': Iso('Sg-267', 'seaborgium-267', 106, 267, 267.12436, False),
-    'Sg-268': Iso('Sg-268', 'seaborgium-268', 106, 268, 268.12539, False),
-    'Sg-269': Iso('Sg-269', 'seaborgium-269', 106, 269, 269.12863, False),
-    'Sg-270': Iso('Sg-270', 'seaborgium-270', 106, 270, 270.13043, False),
-    'Sg-271': Iso('Sg-271', 'seaborgium-271', 106, 271, 271.13393, False),
-    'Sg-272': Iso('Sg-272', 'seaborgium-272', 106, 272, 272.13589, False),
-    'Sg-273': Iso('Sg-273', 'seaborgium-273', 106, 273, 273.13958, False),
-    'Bh-260': Iso('Bh-260', 'bohrium-260', 107, 260, 260.12166, False),
-    'Bh-261': Iso('Bh-261', 'bohrium-261', 107, 261, 261.12145, False),
-    'Bh-262': Iso('Bh-262', 'bohrium-262', 107, 262, 262.12297, False),
-    'Bh-263': Iso('Bh-263', 'bohrium-263', 107, 263, 263.12292, False),
-    'Bh-264': Iso('Bh-264', 'bohrium-264', 107, 264, 264.12459, False),
-    'Bh-265': Iso('Bh-265', 'bohrium-265', 107, 265, 265.12491, False),
-    'Bh-266': Iso('Bh-266', 'bohrium-266', 107, 266, 266.12679, False),
-    'Bh-267': Iso('Bh-267', 'bohrium-267', 107, 267, 267.12750, False),
-    'Bh-268': Iso('Bh-268', 'bohrium-268', 107, 268, 268.12969, False),
-    'Bh-269': Iso('Bh-269', 'bohrium-269', 107, 269, 269.13042, False),
-    'Bh-270': Iso('Bh-270', 'bohrium-270', 107, 270, 270.13336, False),
-    'Bh-271': Iso('Bh-271', 'bohrium-271', 107, 271, 271.13526, False),
-    'Bh-272': Iso('Bh-272', 'bohrium-272', 107, 272, 272.13826, False),
-    'Bh-273': Iso('Bh-273', 'bohrium-273', 107, 273, 273.14024, False),
-    'Bh-274': Iso('Bh-274', 'bohrium-274', 107, 274, 274.14355, False),
-    'Bh-275': Iso('Bh-275', 'bohrium-275', 107, 275, 275.14567, False),
-    'Hs-263': Iso('Hs-263', 'hassium-263', 108, 263, 263.12852, False),
-    'Hs-264': Iso('Hs-264', 'hassium-264', 108, 264, 264.128357, False),
-    'Hs-265': Iso('Hs-265', 'hassium-265', 108, 265, 265.129793, False),
-    'Hs-266': Iso('Hs-266', 'hassium-266', 108, 266, 266.130046, False),
-    'Hs-267': Iso('Hs-267', 'hassium-267', 108, 267, 267.13167, False),
-    'Hs-268': Iso('Hs-268', 'hassium-268', 108, 268, 268.13186, False),
-    'Hs-269': Iso('Hs-269', 'hassium-269', 108, 269, 269.13375, False),
-    'Hs-270': Iso('Hs-270', 'hassium-270', 108, 270, 270.13429, False),
-    'Hs-271': Iso('Hs-271',
-                  'hassium-271', 108, 271, 271.13717, False),
-    'Hs-272': Iso('Hs-272', 'hassium-272', 108, 272, 272.13850, False),
-    'Hs-273': Iso('Hs-273', 'hassium-273', 108, 273, 273.14168, False),
-    'Hs-274': Iso('Hs-274', 'hassium-274', 108, 274, 274.14330, False),
-    'Hs-275': Iso('Hs-275', 'hassium-275', 108, 275, 275.14667, False),
-    'Hs-276': Iso('Hs-276', 'hassium-276', 108, 276, 276.14846, False),
-    'Hs-277': Iso('Hs-277', 'hassium-277', 108, 277, 277.15190, False),
-    'Mt-265': Iso('Mt-265', 'meitnerium-265', 109, 265, 265.13600, False),
-    'Mt-266': Iso('Mt-266', 'meitnerium-266', 109, 266, 266.13737, False),
-    'Mt-267': Iso('Mt-267', 'meitnerium-267', 109, 267, 267.13719, False),
-    'Mt-268': Iso('Mt-268', 'meitnerium-268', 109, 268, 268.13865, False),
-    'Mt-269': Iso('Mt-269', 'meitnerium-269', 109, 269, 269.13882, False),
-    'Mt-270': Iso('Mt-270', 'meitnerium-270', 109, 270, 270.14033, False),
-    'Mt-271': Iso('Mt-271', 'meitnerium-271', 109, 271, 271.14074, False),
-    'Mt-272': Iso('Mt-272', 'meitnerium-272', 109, 272, 272.14341, False),
-    'Mt-273': Iso('Mt-273', 'meitnerium-273', 109, 273, 273.14440, False),
-    'Mt-274': Iso('Mt-274', 'meitnerium-274', 109, 274, 274.14724, False),
-    'Mt-275': Iso('Mt-275', 'meitnerium-275', 109, 275, 275.14882, False),
-    'Mt-276': Iso('Mt-276', 'meitnerium-276', 109, 276, 276.15159, False),
-    'Mt-277': Iso('Mt-277', 'meitnerium-277', 109, 277, 277.15327, False),
-    'Mt-278': Iso('Mt-278', 'meitnerium-278', 109, 278, 278.15631, False),
-    'Mt-279': Iso('Mt-279', 'meitnerium-279', 109, 279, 279.15808, False),
-    'Ds-267': Iso('Ds-267', 'darmstadtium-267', 110, 267, 267.14377, False),
-    'Ds-268': Iso('Ds-268', 'darmstadtium-268', 110, 268, 268.14348, False),
-    'Ds-269': Iso('Ds-269', 'darmstadtium-269', 110, 269, 269.144752, False),
-    'Ds-270': Iso('Ds-270', 'darmstadtium-270', 110, 270, 270.144584, False),
-    'Ds-271': Iso('Ds-271', 'darmstadtium-271', 110, 271, 271.14595, False),
-    'Ds-272': Iso('Ds-272', 'darmstadtium-272', 110, 272, 272.14602, False),
-    'Ds-273': Iso('Ds-273', 'darmstadtium-273', 110, 273, 273.14856, False),
-    'Ds-274': Iso('Ds-274', 'darmstadtium-274', 110, 274, 274.14941, False),
-    'Ds-275': Iso('Ds-275', 'darmstadtium-275', 110, 275, 275.15203, False),
-    'Ds-276': Iso('Ds-276', 'darmstadtium-276', 110, 276, 276.15303, False),
-    'Ds-277': Iso('Ds-277', 'darmstadtium-277', 110, 277, 277.15591, False),
-    'Ds-278': Iso('Ds-278', 'darmstadtium-278', 110, 278, 278.15704, False),
-    'Ds-279': Iso('Ds-279', 'darmstadtium-279', 110, 279, 279.16010, False),
-    'Ds-280': Iso('Ds-280', 'darmstadtium-280', 110, 280, 280.16131, False),
-    'Ds-281': Iso('Ds-281', 'darmstadtium-281', 110, 281, 281.16451, False),
-    'Rg-272': Iso('Rg-272', 'roentgenium-272', 111, 272, 272.15327, False),
-    'Rg-273': Iso('Rg-273', 'roentgenium-273', 111, 273, 273.15313, False),
-    'Rg-274': Iso('Rg-274', 'roentgenium-274', 111, 274, 274.15525, False),
-    'Rg-275': Iso('Rg-275', 'roentgenium-275', 111, 275, 275.15594, False),
-    'Rg-276': Iso('Rg-276', 'roentgenium-276', 111, 276, 276.15833, False),
-    'Rg-277': Iso('Rg-277', 'roentgenium-277', 111, 277, 277.15907, False),
-    'Rg-278': Iso('Rg-278', 'roentgenium-278', 111, 278, 278.16149, False),
-    'Rg-279': Iso('Rg-279', 'roentgenium-279', 111, 279, 279.16272, False),
-    'Rg-280': Iso('Rg-280', 'roentgenium-280', 111, 280, 280.16514, False),
-    'Rg-281': Iso('Rg-281', 'roentgenium-281', 111, 281, 281.16636, False),
-    'Rg-282': Iso('Rg-282', 'roentgenium-282', 111, 282, 282.16912, False),
-    'Rg-283': Iso('Rg-283', 'roentgenium-283', 111, 283,
-                  283.17054, False),
-    'Cn-276': Iso('Cn-276', 'copernicium-276', 112, 276, 276.16141, False),
-    'Cn-277': Iso('Cn-277', 'copernicium-277', 112, 277, 277.16364, False),
-    'Cn-278': Iso('Cn-278', 'copernicium-278', 112, 278, 278.16416, False),
-    'Cn-279': Iso('Cn-279', 'copernicium-279', 112, 279, 279.16654, False),
-    'Cn-280': Iso('Cn-280', 'copernicium-280', 112, 280, 280.16715, False),
-    'Cn-281': Iso('Cn-281', 'copernicium-281', 112, 281, 281.16975, False),
-    'Cn-282': Iso('Cn-282', 'copernicium-282', 112, 282, 282.17050, False),
-    'Cn-283': Iso('Cn-283', 'copernicium-283', 112, 283, 283.17327, False),
-    'Cn-284': Iso('Cn-284', 'copernicium-284', 112, 284, 284.17416, False),
-    'Cn-285': Iso('Cn-285', 'copernicium-285', 112, 285, 285.17712, False),
-    'Nh-278': Iso('Nh-278', 'nihonium-278', 113, 278, 278.17058, False),
-    'Nh-279': Iso('Nh-279', 'nihonium-279', 113, 279, 279.17095, False),
-    'Nh-280': Iso('Nh-280', 'nihonium-280', 113, 280, 280.17293, False),
-    'Nh-281': Iso('Nh-281', 'nihonium-281', 113, 281, 281.17348, False),
-    'Nh-282': Iso('Nh-282', 'nihonium-282', 113, 282, 282.17567, False),
-    'Nh-283': Iso('Nh-283', 'nihonium-283', 113, 283, 283.17657, False),
-    'Nh-284': Iso('Nh-284', 'nihonium-284', 113, 284, 284.17873, False),
-    'Nh-285': Iso('Nh-285', 'nihonium-285', 113, 285, 285.17973, False),
-    'Nh-286': Iso('Nh-286', 'nihonium-286', 113, 286, 286.18221, False),
-    'Nh-287': Iso('Nh-287', 'nihonium-287', 113, 287, 287.18339, False),
-    'Fl-285': Iso('Fl-285', 'flerovium-285', 114, 285, 285.18364, False),
-    'Fl-286': Iso('Fl-286', 'flerovium-286', 114, 286, 286.18423, False),
-    'Fl-287': Iso('Fl-287', 'flerovium-287', 114, 287, 287.18678, False),
-    'Fl-288': Iso('Fl-288', 'flerovium-288', 114, 288, 288.18757, False),
-    'Fl-289': Iso('Fl-289', 'flerovium-289', 114, 289, 289.19042, False),
-    'Mc-287': Iso('Mc-287', 'moscovium-287', 115, 287, 287.19070, False),
-    'Mc-288': Iso('Mc-288', 'moscovium-288', 115, 288, 288.19274, False),
-    'Mc-289': Iso('Mc-289', 'moscovium-289', 115, 289, 289.19363, False),
-    'Mc-290': Iso('Mc-290', 'moscovium-290', 115, 290, 290.19598, False),
-    'Mc-291': Iso('Mc-291', 'moscovium-291', 115, 291, 291.19707, False),
-    'Lv-289': Iso('Lv-289', 'livermorium-289', 116, 289, 289.19816, False),
-    'Lv-290': Iso('Lv-290', 'livermorium-290', 116, 290, 290.19864, False),
-    'Lv-291': Iso('Lv-291', 'livermorium-291', 116, 291, 291.20108, False),
-    'Lv-292': Iso('Lv-292', 'livermorium-292', 116, 292, 292.20174, False),
-    'Lv-293': Iso('Lv-293', 'livermorium-293', 116, 293, 293.20449, False),
-    'Ts-291': Iso('Ts-291', 'tennessine-291', 117, 291, 291.20553, False),
-    'Ts-292': Iso('Ts-292', 'tennessine-292', 117, 292, 292.20746, False),
-    'Ts-293': Iso('Ts-293', 'tennessine-293', 117, 293, 293.20824, False),
-    'Ts-294': Iso('Ts-294', 'tennessine-294', 117, 294, 294.21046, False),
-    'Og-293': Iso('Og-293', 'oganesson-293', 118, 293, 293.21356, False),
-    'Og-294': Iso('Og-294', 'oganesson-294', 118, 294, 294.21392, False),
-    'Og-295': Iso('Og-295', 'oganesson-295', 118, 295, 295.21624, False),
-}
+    'I-128': _iso('I-128', 'iodine-128', 53, 128, 127.9058086, False),
+    'I-129': _iso('I-129', 'iodine-129', 53, 129, 128.9049837, False),
+    'I-130': _iso('I-130', 'iodine-130', 53, 130, 129.9066702, False),
+    'I-131': _iso('I-131', 'iodine-131', 53, 131, 130.90612630, False,
+                  half_life=692902.0800000001),
+    'I-132': _iso('I-132', 'iodine-132', 53, 132, 131.9079935, False),
+    'I-133': _iso('I-133', 'iodine-133', 53, 133, 132.9077970, False),
+    'I-134': _iso('I-134',
'iodine-134', 53, 134, 133.9097588, False), + 'I-135': _iso('I-135', 'iodine-135', 53, 135, 134.9100488, False), + 'I-136': _iso('I-136', 'iodine-136', 53, 136, 135.914604, False), + 'I-137': _iso('I-137', 'iodine-137', 53, 137, 136.9180282, False), + 'I-138': _iso('I-138', 'iodine-138', 53, 138, 137.9227264, False), + 'I-139': _iso('I-139', 'iodine-139', 53, 139, 138.926506, False), + 'I-140': _iso('I-140', 'iodine-140', 53, 140, 139.93173, False), + 'I-141': _iso('I-141', 'iodine-141', 53, 141, 140.93569, False), + 'I-142': _iso('I-142', 'iodine-142', 53, 142, 141.94120, False), + 'I-143': _iso('I-143', 'iodine-143', 53, 143, 142.94565, False), + 'I-144': _iso('I-144', 'iodine-144', 53, 144, 143.95139, False), + 'I-145': _iso('I-145', 'iodine-145', 53, 145, 144.95605, False), + 'Xe-109': _iso('Xe-109', 'xenon-109', 54, 109, 108.95043, False), + 'Xe-110': _iso('Xe-110', 'xenon-110', 54, 110, 109.94426, False), + 'Xe-111': _iso('Xe-111', 'xenon-111', 54, 111, 110.941607, False), + 'Xe-112': _iso('Xe-112', 'xenon-112', 54, 112, 111.9355590, False), + 'Xe-113': _iso('Xe-113', 'xenon-113', 54, 113, 112.9332217, False), + 'Xe-114': _iso('Xe-114', 'xenon-114', 54, 114, 113.927980, False), + 'Xe-115': _iso('Xe-115', 'xenon-115', 54, 115, 114.926294, False), + 'Xe-116': _iso('Xe-116', 'xenon-116', 54, 116, 115.921581, False), + 'Xe-117': _iso('Xe-117', 'xenon-117', 54, 117, 116.920359, False), + 'Xe-118': _iso('Xe-118', 'xenon-118', 54, 118, 117.916179, False), + 'Xe-119': _iso('Xe-119', 'xenon-119', 54, 119, 118.915411, False), + 'Xe-120': _iso('Xe-120', 'xenon-120', 54, 120, 119.911784, False), + 'Xe-121': _iso('Xe-121', 'xenon-121', 54, 121, 120.911453, False), + 'Xe-122': _iso('Xe-122', 'xenon-122', 54, 122, 121.908368, False), + 'Xe-123': _iso('Xe-123', 'xenon-123', 54, 123, 122.908482, False), + 'Xe-124': _iso('Xe-124', 'xenon-124', 54, 124, 123.9058920, True, + isotopic_abundance=0.000952), + 'Xe-125': _iso('Xe-125', 'xenon-125', 54, 125, 124.9063944, False), + 'Xe-126': _iso('Xe-126', 'xenon-126', 54, 126, 125.9042983, True, + isotopic_abundance=0.000890), + 'Xe-127': _iso('Xe-127', 'xenon-127', 54, 127, 126.9051829, False, + half_life=3140173.44), + 'Xe-128': _iso('Xe-128', 'xenon-128', 54, 128, 127.9035310, True, + isotopic_abundance=0.019102), + 'Xe-129': _iso('Xe-129', 'xenon-129', 54, 129, 128.9047808611, True, + isotopic_abundance=0.264006), + 'Xe-130': _iso('Xe-130', 'xenon-130', 54, 130, 129.903509349, True, + isotopic_abundance=0.040710), + 'Xe-131': _iso('Xe-131', 'xenon-131', 54, 131, 130.90508406, True, + isotopic_abundance=0.212324), + 'Xe-132': _iso('Xe-132', 'xenon-132', 54, 132, 131.9041550856, True, + isotopic_abundance=0.269086), + 'Xe-133': _iso('Xe-133', 'xenon-133', 54, 133, 132.9059108, False, + half_life=453381.408), + 'Xe-134': _iso('Xe-134', 'xenon-134', 54, 134, 133.90539466, True, + isotopic_abundance=0.104357), + 'Xe-135': _iso('Xe-135', 'xenon-135', 54, 135, 134.9072278, False), + 'Xe-136': _iso('Xe-136', 'xenon-136', 54, 136, 135.907214484, False, + isotopic_abundance=0.088573), + 'Xe-137': _iso('Xe-137', 'xenon-137', 54, 137, 136.91155778, False), + 'Xe-138': _iso('Xe-138', 'xenon-138', 54, 138, 137.9141463, False), + 'Xe-139': _iso('Xe-139', 'xenon-139', 54, 139, 138.9187922, False), + 'Xe-140': _iso('Xe-140', 'xenon-140', 54, 140, 139.9216458, False), + 'Xe-141': _iso('Xe-141', 'xenon-141', 54, 141, 140.9267872, False), + 'Xe-142': _iso('Xe-142', 'xenon-142', 54, 142, 141.9299731, False), + 'Xe-143': _iso('Xe-143', 'xenon-143', 54, 143, 142.9353696, 
False), + 'Xe-144': _iso('Xe-144', 'xenon-144', 54, 144, 143.9389451, False), + 'Xe-145': _iso('Xe-145', 'xenon-145', 54, 145, 144.944720, False), + 'Xe-146': _iso('Xe-146', 'xenon-146', 54, 146, 145.948518, False), + 'Xe-147': _iso('Xe-147', 'xenon-147', 54, 147, 146.95426, False), + 'Xe-148': _iso('Xe-148', 'xenon-148', 54, 148, 147.95813, False), + 'Cs-112': _iso('Cs-112', 'caesium-112', 55, 112, 111.950309, False), + 'Cs-113': _iso('Cs-113', 'caesium-113', 55, 113, 112.9444291, False), + 'Cs-114': _iso('Cs-114', 'caesium-114', 55, 114, 113.941296, False), + 'Cs-115': _iso('Cs-115', 'caesium-115', 55, 115, 114.93591, False), + 'Cs-116': _iso('Cs-116', 'caesium-116', 55, 116, 115.93337, False), + 'Cs-117': _iso('Cs-117', 'caesium-117', 55, 117, 116.928617, False), + 'Cs-118': _iso('Cs-118', 'caesium-118', 55, 118, 117.926560, False), + 'Cs-119': _iso('Cs-119', 'caesium-119', 55, 119, 118.922377, False), + 'Cs-120': _iso('Cs-120', 'caesium-120', 55, 120, 119.920677, False), + 'Cs-121': _iso('Cs-121', 'caesium-121', 55, 121, 120.917227, False), + 'Cs-122': _iso('Cs-122', 'caesium-122', 55, 122, 121.916108, False), + 'Cs-123': _iso('Cs-123', 'caesium-123', 55, 123, 122.912996, False), + 'Cs-124': _iso('Cs-124', 'caesium-124', 55, 124, 123.9122578, False), + 'Cs-125': _iso('Cs-125', 'caesium-125', 55, 125, 124.9097280, False), + 'Cs-126': _iso('Cs-126', 'caesium-126', 55, 126, 125.909446, False), + 'Cs-127': _iso('Cs-127', 'caesium-127', 55, 127, 126.9074174, False), + 'Cs-128': _iso('Cs-128', 'caesium-128', 55, 128, 127.9077487, False), + 'Cs-129': _iso('Cs-129', 'caesium-129', 55, 129, 128.9060657, False), + 'Cs-130': _iso('Cs-130', 'caesium-130', 55, 130, 129.9067093, False), + 'Cs-131': _iso('Cs-131', 'caesium-131', 55, 131, 130.9054649, False), + 'Cs-132': _iso('Cs-132', 'caesium-132', 55, 132, 131.9064339, False), + 'Cs-133': _iso('Cs-133', 'caesium-133', 55, 133, 132.9054519610, True, + isotopic_abundance=1), + 'Cs-134': _iso('Cs-134', 'caesium-134', 55, 134, 133.906718503, False, + half_life=65135232.0), + 'Cs-135': _iso('Cs-135', 'caesium-135', 55, 135, 134.9059770, False), + 'Cs-136': _iso('Cs-136', 'caesium-136', 55, 136, 135.9073114, False), + 'Cs-137': _iso('Cs-137', 'caesium-137', 55, 137, 136.90708923, False, + half_life=951981119.9999999), + 'Cs-138': _iso('Cs-138', 'caesium-138', 55, 138, 137.9110171, False), + 'Cs-139': _iso('Cs-139', 'caesium-139', 55, 139, 138.9133638, False), + 'Cs-140': _iso('Cs-140', 'caesium-140', 55, 140, 139.9172831, False), + 'Cs-141': _iso('Cs-141', 'caesium-141', 55, 141, 140.9200455, False), + 'Cs-142': _iso('Cs-142', 'caesium-142', 55, 142, 141.9242960, False), + 'Cs-143': _iso('Cs-143', 'caesium-143', 55, 143, 142.927349, False), + 'Cs-144': _iso('Cs-144', 'caesium-144', 55, 144, 143.932076, False), + 'Cs-145': _iso('Cs-145', 'caesium-145', 55, 145, 144.935527, False), + 'Cs-146': _iso('Cs-146', 'caesium-146', 55, 146, 145.940344, False), + 'Cs-147': _iso('Cs-147', 'caesium-147', 55, 147, 146.944156, False), + 'Cs-148': _iso('Cs-148', 'caesium-148', 55, 148, 147.94923, False), + 'Cs-149': _iso('Cs-149', 'caesium-149', 55, 149, 148.95302, False), + 'Cs-150': _iso('Cs-150', 'caesium-150', 55, 150, 149.95833, False), + 'Cs-151': _iso('Cs-151', 'caesium-151', 55, 151, 150.96258, False), + 'Ba-114': _iso('Ba-114', 'barium-114', 56, 114, 113.95066, False), + 'Ba-115': _iso('Ba-115', 'barium-115', 56, 115, 114.94737, False), + 'Ba-116': _iso('Ba-116', 'barium-116', 56, 116, 115.94128, False), + 'Ba-117': _iso('Ba-117', 'barium-117', 56, 117, 
116.93814, False), + 'Ba-118': _iso('Ba-118', 'barium-118', 56, 118, 117.93306, False), + 'Ba-119': _iso('Ba-119', 'barium-119', 56, 119, 118.93066, False), + 'Ba-120': _iso('Ba-120', 'barium-120', 56, 120, 119.92605, False), + 'Ba-121': _iso('Ba-121', 'barium-121', 56, 121, 120.92405, False), + 'Ba-122': _iso('Ba-122', 'barium-122', 56, 122, 121.919904, False), + 'Ba-123': _iso('Ba-123', 'barium-123', 56, 123, 122.918781, False), + 'Ba-124': _iso('Ba-124', 'barium-124', 56, 124, 123.915094, False), + 'Ba-125': _iso('Ba-125', 'barium-125', 56, 125, 124.914472, False), + 'Ba-126': _iso('Ba-126', 'barium-126', 56, 126, 125.911250, False), + 'Ba-127': _iso('Ba-127', 'barium-127', 56, 127, 126.911091, False), + 'Ba-128': _iso('Ba-128', 'barium-128', 56, 128, 127.9083420, False), + 'Ba-129': _iso('Ba-129', 'barium-129', 56, 129, 128.908681, False), + 'Ba-130': _iso('Ba-130', 'barium-130', 56, 130, 129.9063207, False, + isotopic_abundance=0.00106), + 'Ba-131': _iso('Ba-131', 'barium-131', 56, 131, 130.9069410, False), + 'Ba-132': _iso('Ba-132', 'barium-132', 56, 132, 131.9050611, True, + isotopic_abundance=0.00101), + 'Ba-133': _iso('Ba-133', 'barium-133', 56, 133, 132.9060074, False, + half_life=333046080.0), + 'Ba-134': _iso('Ba-134', 'barium-134', 56, 134, 133.90450818, True, + isotopic_abundance=0.02417), + 'Ba-135': _iso('Ba-135', 'barium-135', 56, 135, 134.90568838, True, + isotopic_abundance=0.06592), + 'Ba-136': _iso('Ba-136', 'barium-136', 56, 136, 135.90457573, True, + isotopic_abundance=0.07854), + 'Ba-137': _iso('Ba-137', 'barium-137', 56, 137, 136.90582714, True, + isotopic_abundance=0.11232), + 'Ba-138': _iso('Ba-138', 'barium-138', 56, 138, 137.90524700, True, + isotopic_abundance=0.71698), + 'Ba-139': _iso('Ba-139', 'barium-139', 56, 139, 138.90884110, False), + 'Ba-140': _iso('Ba-140', 'barium-140', 56, 140, 139.9106057, False, + half_life=1101833.28), + 'Ba-141': _iso('Ba-141', 'barium-141', 56, 141, 140.9144033, False), + 'Ba-142': _iso('Ba-142', 'barium-142', 56, 142, 141.9164324, False), + 'Ba-143': _iso('Ba-143', 'barium-143', 56, 143, 142.9206253, False), + 'Ba-144': _iso('Ba-144', 'barium-144', 56, 144, 143.9229549, False), + 'Ba-145': _iso('Ba-145', 'barium-145', 56, 145, 144.9275184, False), + 'Ba-146': _iso('Ba-146', 'barium-146', 56, 146, 145.930284, False), + 'Ba-147': _iso('Ba-147', 'barium-147', 56, 147, 146.935304, False), + 'Ba-148': _iso('Ba-148', 'barium-148', 56, 148, 147.938171, False), + 'Ba-149': _iso('Ba-149', 'barium-149', 56, 149, 148.94308, False), + 'Ba-150': _iso('Ba-150', 'barium-150', 56, 150, 149.94605, False), + 'Ba-151': _iso('Ba-151', 'barium-151', 56, 151, 150.95127, False), + 'Ba-152': _iso('Ba-152', 'barium-152', 56, 152, 151.95481, False), + 'Ba-153': _iso('Ba-153', 'barium-153', 56, 153, 152.96036, False), + 'La-116': _iso('La-116', 'lanthanum-116', 57, 116, 115.95630, False), + 'La-117': _iso('La-117', 'lanthanum-117', 57, 117, 116.94999, False), + 'La-118': _iso('La-118', 'lanthanum-118', 57, 118, 117.94673, False), + 'La-119': _iso('La-119', 'lanthanum-119', 57, 119, 118.94099, False), + 'La-120': _iso('La-120', 'lanthanum-120', 57, 120, 119.93807, False), + 'La-121': _iso('La-121', 'lanthanum-121', 57, 121, 120.93315, False), + 'La-122': _iso('La-122', 'lanthanum-122', 57, 122, 121.93071, False), + 'La-123': _iso('La-123', 'lanthanum-123', 57, 123, 122.92630, False), + 'La-124': _iso('La-124', 'lanthanum-124', 57, 124, 123.924574, False), + 'La-125': _iso('La-125', 'lanthanum-125', 57, 125, 124.920816, False), + 'La-126': _iso('La-126', 
'lanthanum-126', 57, 126, 125.919513, False), + 'La-127': _iso('La-127', 'lanthanum-127', 57, 127, 126.916375, False), + 'La-128': _iso('La-128', 'lanthanum-128', 57, 128, 127.915592, False), + 'La-129': _iso('La-129', 'lanthanum-129', 57, 129, 128.912694, False), + 'La-130': _iso('La-130', 'lanthanum-130', 57, 130, 129.912369, False), + 'La-131': _iso('La-131', 'lanthanum-131', 57, 131, 130.910070, False), + 'La-132': _iso('La-132', 'lanthanum-132', 57, 132, 131.910119, False), + 'La-133': _iso('La-133', 'lanthanum-133', 57, 133, 132.908218, False), + 'La-134': _iso('La-134', 'lanthanum-134', 57, 134, 133.908514, False), + 'La-135': _iso('La-135', 'lanthanum-135', 57, 135, 134.906984, False), + 'La-136': _iso('La-136', 'lanthanum-136', 57, 136, 135.907635, False), + 'La-137': _iso('La-137', 'lanthanum-137', 57, 137, 136.9064504, False), + 'La-138': _iso('La-138', 'lanthanum-138', 57, 138, 137.9071149, False, + isotopic_abundance=0.0008881), + 'La-139': _iso('La-139', 'lanthanum-139', 57, 139, 138.9063563, True, + isotopic_abundance=0.9991119), + 'La-140': _iso('La-140', 'lanthanum-140', 57, 140, 139.9094806, False, + half_life=145054.8), + 'La-141': _iso('La-141', 'lanthanum-141', 57, 141, 140.9109660, False), + 'La-142': _iso('La-142', 'lanthanum-142', 57, 142, 141.9140909, False), + 'La-143': _iso('La-143', 'lanthanum-143', 57, 143, 142.9160795, False), + 'La-144': _iso('La-144', 'lanthanum-144', 57, 144, 143.919646, False), + 'La-145': _iso('La-145', 'lanthanum-145', 57, 145, 144.921808, False), + 'La-146': _iso('La-146', 'lanthanum-146', 57, 146, 145.925875, False), + 'La-147': _iso('La-147', 'lanthanum-147', 57, 147, 146.928418, False), + 'La-148': _iso('La-148', 'lanthanum-148', 57, 148, 147.932679, False), + 'La-149': _iso('La-149', 'lanthanum-149', 57, 149, 148.93535, False), + 'La-150': _iso('La-150', 'lanthanum-150', 57, 150, 149.93947, False), + 'La-151': _iso('La-151', 'lanthanum-151', 57, 151, 150.94232, False), + 'La-152': _iso('La-152', 'lanthanum-152', 57, 152, 151.94682, False), + 'La-153': _iso('La-153', 'lanthanum-153', 57, 153, 152.95036, False), + 'La-154': _iso('La-154', 'lanthanum-154', 57, 154, 153.95517, False), + 'La-155': _iso('La-155', 'lanthanum-155', 57, 155, 154.95901, False), + 'Ce-119': _iso('Ce-119', 'cerium-119', 58, 119, 118.95271, False), + 'Ce-120': _iso('Ce-120', 'cerium-120', 58, 120, 119.94654, False), + 'Ce-121': _iso('Ce-121', 'cerium-121', 58, 121, 120.94335, False), + 'Ce-122': _iso('Ce-122', 'cerium-122', 58, 122, 121.93787, False), + 'Ce-123': _iso('Ce-123', 'cerium-123', 58, 123, 122.93528, False), + 'Ce-124': _iso('Ce-124', 'cerium-124', 58, 124, 123.93031, False), + 'Ce-125': _iso('Ce-125', 'cerium-125', 58, 125, 124.92844, False), + 'Ce-126': _iso('Ce-126', 'cerium-126', 58, 126, 125.923971, False), + 'Ce-127': _iso('Ce-127', 'cerium-127', 58, 127, 126.922727, False), + 'Ce-128': _iso('Ce-128', 'cerium-128', 58, 128, 127.918911, False), + 'Ce-129': _iso('Ce-129', 'cerium-129', 58, 129, 128.918102, False), + 'Ce-130': _iso('Ce-130', 'cerium-130', 58, 130, 129.914736, False), + 'Ce-131': _iso('Ce-131', 'cerium-131', 58, 131, 130.914429, False), + 'Ce-132': _iso('Ce-132', 'cerium-132', 58, 132, 131.911464, False), + 'Ce-133': _iso('Ce-133', 'cerium-133', 58, 133, 132.911520, False), + 'Ce-134': _iso('Ce-134', 'cerium-134', 58, 134, 133.908928, False), + 'Ce-135': _iso('Ce-135', 'cerium-135', 58, 135, 134.909161, False), + 'Ce-136': _iso('Ce-136', 'cerium-136', 58, 136, 135.90712921, True, + isotopic_abundance=0.00185), + 'Ce-137': 
_iso('Ce-137', 'cerium-137', 58, 137, 136.90776236, False), + 'Ce-138': _iso('Ce-138', 'cerium-138', 58, 138, 137.905991, True, + isotopic_abundance=0.00251), + 'Ce-139': _iso('Ce-139', 'cerium-139', 58, 139, 138.9066551, False, + half_life=11900217.600000001), + 'Ce-140': _iso('Ce-140', 'cerium-140', 58, 140, 139.9054431, True, + isotopic_abundance=0.88450), + 'Ce-141': _iso('Ce-141', 'cerium-141', 58, 141, 140.9082807, False, + half_life=2808864.0), + 'Ce-142': _iso('Ce-142', 'cerium-142', 58, 142, 141.9092504, True, + isotopic_abundance=0.11114), + 'Ce-143': _iso('Ce-143', 'cerium-143', 58, 143, 142.9123921, False), + 'Ce-144': _iso('Ce-144', 'cerium-144', 58, 144, 143.9136529, False, + half_life=24583737.599999998), + 'Ce-145': _iso('Ce-145', 'cerium-145', 58, 145, 144.917265, False), + 'Ce-146': _iso('Ce-146', 'cerium-146', 58, 146, 145.918802, False), + 'Ce-147': _iso('Ce-147', 'cerium-147', 58, 147, 146.9226899, False), + 'Ce-148': _iso('Ce-148', 'cerium-148', 58, 148, 147.924424, False), + 'Ce-149': _iso('Ce-149', 'cerium-149', 58, 149, 148.928427, False), + 'Ce-150': _iso('Ce-150', 'cerium-150', 58, 150, 149.930384, False), + 'Ce-151': _iso('Ce-151', 'cerium-151', 58, 151, 150.934272, False), + 'Ce-152': _iso('Ce-152', 'cerium-152', 58, 152, 151.93660, False), + 'Ce-153': _iso('Ce-153', 'cerium-153', 58, 153, 152.94093, False), + 'Ce-154': _iso('Ce-154', 'cerium-154', 58, 154, 153.94380, False), + 'Ce-155': _iso('Ce-155', 'cerium-155', 58, 155, 154.94855, False), + 'Ce-156': _iso('Ce-156', 'cerium-156', 58, 156, 155.95183, False), + 'Ce-157': _iso('Ce-157', 'cerium-157', 58, 157, 156.95705, False), + 'Pr-121': _iso('Pr-121', 'praseodymium-121', 59, 121, 120.95532, False), + 'Pr-122': _iso('Pr-122', 'praseodymium-122', 59, 122, 121.95175, False), + 'Pr-123': _iso('Pr-123', 'praseodymium-123', 59, 123, 122.94596, False), + 'Pr-124': _iso('Pr-124', 'praseodymium-124', 59, 124, 123.94294, False), + 'Pr-125': _iso('Pr-125', 'praseodymium-125', 59, 125, 124.93770, False), + 'Pr-126': _iso('Pr-126', 'praseodymium-126', 59, 126, 125.93524, False), + 'Pr-127': _iso('Pr-127', 'praseodymium-127', 59, 127, 126.93071, False), + 'Pr-128': _iso('Pr-128', 'praseodymium-128', 59, 128, 127.928791, False), + 'Pr-129': _iso('Pr-129', 'praseodymium-129', 59, 129, 128.925095, False), + 'Pr-130': _iso('Pr-130', 'praseodymium-130', 59, 130, 129.923590, False), + 'Pr-131': _iso('Pr-131', 'praseodymium-131', 59, 131, 130.920235, False), + 'Pr-132': _iso('Pr-132', 'praseodymium-132', 59, 132, 131.919255, False), + 'Pr-133': _iso('Pr-133', 'praseodymium-133', 59, 133, 132.916331, False), + 'Pr-134': _iso('Pr-134', 'praseodymium-134', 59, 134, 133.915697, False), + 'Pr-135': _iso('Pr-135', 'praseodymium-135', 59, 135, 134.913112, False), + 'Pr-136': _iso('Pr-136', 'praseodymium-136', 59, 136, 135.912677, False), + 'Pr-137': _iso('Pr-137', 'praseodymium-137', 59, 137, 136.9106792, False), + 'Pr-138': _iso('Pr-138', 'praseodymium-138', 59, 138, 137.910754, False), + 'Pr-139': _iso('Pr-139', 'praseodymium-139', 59, 139, 138.9089408, False), + 'Pr-140': _iso('Pr-140', 'praseodymium-140', 59, 140, 139.9090803, False), + 'Pr-141': _iso('Pr-141', 'praseodymium-141', 59, 141, 140.9076576, True, + isotopic_abundance=1), + 'Pr-142': _iso('Pr-142', 'praseodymium-142', 59, 142, 141.9100496, False), + 'Pr-143': _iso('Pr-143', 'praseodymium-143', 59, 143, 142.9108228, False), + 'Pr-144': _iso('Pr-144', 'praseodymium-144', 59, 144, 143.9133109, False), + 'Pr-145': _iso('Pr-145', 'praseodymium-145', 59, 145, 144.9145182, 
False), + 'Pr-146': _iso('Pr-146', 'praseodymium-146', 59, 146, 145.917680, False), + 'Pr-147': _iso('Pr-147', 'praseodymium-147', 59, 147, 146.919008, False), + 'Pr-148': _iso('Pr-148', 'praseodymium-148', 59, 148, 147.922130, False), + 'Pr-149': _iso('Pr-149', 'praseodymium-149', 59, 149, 148.923736, False), + 'Pr-150': _iso('Pr-150', 'praseodymium-150', 59, 150, 149.9266765, False), + 'Pr-151': _iso('Pr-151', 'praseodymium-151', 59, 151, 150.928309, False), + 'Pr-152': _iso('Pr-152', 'praseodymium-152', 59, 152, 151.931553, False), + 'Pr-153': _iso('Pr-153', 'praseodymium-153', 59, 153, 152.933904, False), + 'Pr-154': _iso('Pr-154', 'praseodymium-154', 59, 154, 153.93753, False), + 'Pr-155': _iso('Pr-155', 'praseodymium-155', 59, 155, 154.940509, False), + 'Pr-156': _iso('Pr-156', 'praseodymium-156', 59, 156, 155.94464, False), + 'Pr-157': _iso('Pr-157', 'praseodymium-157', 59, 157, 156.94789, False), + 'Pr-158': _iso('Pr-158', 'praseodymium-158', 59, 158, 157.95241, False), + 'Pr-159': _iso('Pr-159', 'praseodymium-159', 59, 159, 158.95589, False), + 'Nd-124': _iso('Nd-124', 'neodymium-124', 60, 124, 123.95220, False), + 'Nd-125': _iso('Nd-125', 'neodymium-125', 60, 125, 124.94890, False), + 'Nd-126': _iso('Nd-126', 'neodymium-126', 60, 126, 125.94311, False), + 'Nd-127': _iso('Nd-127', 'neodymium-127', 60, 127, 126.94038, False), + 'Nd-128': _iso('Nd-128', 'neodymium-128', 60, 128, 127.93525, False), + 'Nd-129': _iso('Nd-129', 'neodymium-129', 60, 129, 128.93310, False), + 'Nd-130': _iso('Nd-130', 'neodymium-130', 60, 130, 129.928506, False), + 'Nd-131': _iso('Nd-131', 'neodymium-131', 60, 131, 130.927248, False), + 'Nd-132': _iso('Nd-132', 'neodymium-132', 60, 132, 131.923321, False), + 'Nd-133': _iso('Nd-133', 'neodymium-133', 60, 133, 132.922348, False), + 'Nd-134': _iso('Nd-134', 'neodymium-134', 60, 134, 133.918790, False), + 'Nd-135': _iso('Nd-135', 'neodymium-135', 60, 135, 134.918181, False), + 'Nd-136': _iso('Nd-136', 'neodymium-136', 60, 136, 135.914976, False), + 'Nd-137': _iso('Nd-137', 'neodymium-137', 60, 137, 136.914562, False), + 'Nd-138': _iso('Nd-138', 'neodymium-138', 60, 138, 137.911950, False), + 'Nd-139': _iso('Nd-139', 'neodymium-139', 60, 139, 138.911954, False), + 'Nd-140': _iso('Nd-140', 'neodymium-140', 60, 140, 139.909550, False), + 'Nd-141': _iso('Nd-141', 'neodymium-141', 60, 141, 140.9096147, False), + 'Nd-142': _iso('Nd-142', 'neodymium-142', 60, 142, 141.9077290, True, + isotopic_abundance=0.27152), + 'Nd-143': _iso('Nd-143', 'neodymium-143', 60, 143, 142.9098200, True, + isotopic_abundance=0.12174), + 'Nd-144': _iso('Nd-144', 'neodymium-144', 60, 144, 143.9100930, False, + isotopic_abundance=0.23798), + 'Nd-145': _iso('Nd-145', 'neodymium-145', 60, 145, 144.9125793, True, + isotopic_abundance=0.08293), + 'Nd-146': _iso('Nd-146', 'neodymium-146', 60, 146, 145.9131226, True, + isotopic_abundance=0.17189), + 'Nd-147': _iso('Nd-147', 'neodymium-147', 60, 147, 146.9161061, False), + 'Nd-148': _iso('Nd-148', 'neodymium-148', 60, 148, 147.9168993, True, + isotopic_abundance=0.05756), + 'Nd-149': _iso('Nd-149', 'neodymium-149', 60, 149, 148.9201548, False), + 'Nd-150': _iso('Nd-150', 'neodymium-150', 60, 150, 149.9209022, False, + isotopic_abundance=0.05638), + 'Nd-151': _iso('Nd-151', 'neodymium-151', 60, 151, 150.9238403, False), + 'Nd-152': _iso('Nd-152', 'neodymium-152', 60, 152, 151.924692, False), + 'Nd-153': _iso('Nd-153', 'neodymium-153', 60, 153, 152.9277180, False), + 'Nd-154': _iso('Nd-154', 'neodymium-154', 60, 154, 153.92948, False), + 'Nd-155': 
_iso('Nd-155', 'neodymium-155', 60, 155, 154.9331357, False), + 'Nd-156': _iso('Nd-156', 'neodymium-156', 60, 156, 155.93508, False), + 'Nd-157': _iso('Nd-157', 'neodymium-157', 60, 157, 156.939386, False), + 'Nd-158': _iso('Nd-158', 'neodymium-158', 60, 158, 157.94197, False), + 'Nd-159': _iso('Nd-159', 'neodymium-159', 60, 159, 158.94653, False), + 'Nd-160': _iso('Nd-160', 'neodymium-160', 60, 160, 159.94940, False), + 'Nd-161': _iso('Nd-161', 'neodymium-161', 60, 161, 160.95428, False), + 'Pm-126': _iso('Pm-126', 'promethium-126', 61, 126, 125.95792, False), + 'Pm-127': _iso('Pm-127', 'promethium-127', 61, 127, 126.95192, False), + 'Pm-128': _iso('Pm-128', 'promethium-128', 61, 128, 127.94870, False), + 'Pm-129': _iso('Pm-129', 'promethium-129', 61, 129, 128.94323, False), + 'Pm-130': _iso('Pm-130', 'promethium-130', 61, 130, 129.94053, False), + 'Pm-131': _iso('Pm-131', 'promethium-131', 61, 131, 130.93567, False), + 'Pm-132': _iso('Pm-132', 'promethium-132', 61, 132, 131.93384, False), + 'Pm-133': _iso('Pm-133', 'promethium-133', 61, 133, 132.929782, False), + 'Pm-134': _iso('Pm-134', 'promethium-134', 61, 134, 133.928353, False), + 'Pm-135': _iso('Pm-135', 'promethium-135', 61, 135, 134.924823, False), + 'Pm-136': _iso('Pm-136', 'promethium-136', 61, 136, 135.923585, False), + 'Pm-137': _iso('Pm-137', 'promethium-137', 61, 137, 136.920480, False), + 'Pm-138': _iso('Pm-138', 'promethium-138', 61, 138, 137.919548, False), + 'Pm-139': _iso('Pm-139', 'promethium-139', 61, 139, 138.916800, False), + 'Pm-140': _iso('Pm-140', 'promethium-140', 61, 140, 139.916040, False), + 'Pm-141': _iso('Pm-141', 'promethium-141', 61, 141, 140.913555, False), + 'Pm-142': _iso('Pm-142', 'promethium-142', 61, 142, 141.912890, False), + 'Pm-143': _iso('Pm-143', 'promethium-143', 61, 143, 142.9109383, False), + 'Pm-144': _iso('Pm-144', 'promethium-144', 61, 144, 143.9125964, False), + 'Pm-145': _iso('Pm-145', 'promethium-145', 61, 145, 144.9127559, False), + 'Pm-146': _iso('Pm-146', 'promethium-146', 61, 146, 145.9147024, False), + 'Pm-147': _iso('Pm-147', 'promethium-147', 61, 147, 146.9151450, False), + 'Pm-148': _iso('Pm-148', 'promethium-148', 61, 148, 147.9174819, False), + 'Pm-149': _iso('Pm-149', 'promethium-149', 61, 149, 148.9183423, False), + 'Pm-150': _iso('Pm-150', 'promethium-150', 61, 150, 149.920991, False), + 'Pm-151': _iso('Pm-151', 'promethium-151', 61, 151, 150.9212175, False), + 'Pm-152': _iso('Pm-152', 'promethium-152', 61, 152, 151.923506, False), + 'Pm-153': _iso('Pm-153', 'promethium-153', 61, 153, 152.9241567, False), + 'Pm-154': _iso('Pm-154', 'promethium-154', 61, 154, 153.926472, False), + 'Pm-155': _iso('Pm-155', 'promethium-155', 61, 155, 154.9281370, False), + 'Pm-156': _iso('Pm-156', 'promethium-156', 61, 156, 155.9311175, False), + 'Pm-157': _iso('Pm-157', 'promethium-157', 61, 157, 156.9331214, False), + 'Pm-158': _iso('Pm-158', 'promethium-158', 61, 158, 157.936565, False), + 'Pm-159': _iso('Pm-159', 'promethium-159', 61, 159, 158.939287, False), + 'Pm-160': _iso('Pm-160', 'promethium-160', 61, 160, 159.94310, False), + 'Pm-161': _iso('Pm-161', 'promethium-161', 61, 161, 160.94607, False), + 'Pm-162': _iso('Pm-162', 'promethium-162', 61, 162, 161.95022, False), + 'Pm-163': _iso('Pm-163', 'promethium-163', 61, 163, 162.95357, False), + 'Sm-128': _iso('Sm-128', 'samarium-128', 62, 128, 127.95842, False), + 'Sm-129': _iso('Sm-129', 'samarium-129', 62, 129, 128.95476, False), + 'Sm-130': _iso('Sm-130', 'samarium-130', 62, 130, 129.94900, False), + 'Sm-131': _iso('Sm-131', 
'samarium-131', 62, 131, 130.94618, False), + 'Sm-132': _iso('Sm-132', 'samarium-132', 62, 132, 131.94087, False), + 'Sm-133': _iso('Sm-133', 'samarium-133', 62, 133, 132.93856, False), + 'Sm-134': _iso('Sm-134', 'samarium-134', 62, 134, 133.93411, False), + 'Sm-135': _iso('Sm-135', 'samarium-135', 62, 135, 134.93252, False), + 'Sm-136': _iso('Sm-136', 'samarium-136', 62, 136, 135.928276, False), + 'Sm-137': _iso('Sm-137', 'samarium-137', 62, 137, 136.926971, False), + 'Sm-138': _iso('Sm-138', 'samarium-138', 62, 138, 137.923244, False), + 'Sm-139': _iso('Sm-139', 'samarium-139', 62, 139, 138.922297, False), + 'Sm-140': _iso('Sm-140', 'samarium-140', 62, 140, 139.918995, False), + 'Sm-141': _iso('Sm-141', 'samarium-141', 62, 141, 140.9184816, False), + 'Sm-142': _iso('Sm-142', 'samarium-142', 62, 142, 141.9152044, False), + 'Sm-143': _iso('Sm-143', 'samarium-143', 62, 143, 142.9146353, False), + 'Sm-144': _iso('Sm-144', 'samarium-144', 62, 144, 143.9120065, True, + isotopic_abundance=0.0307), + 'Sm-145': _iso('Sm-145', 'samarium-145', 62, 145, 144.9134173, False), + 'Sm-146': _iso('Sm-146', 'samarium-146', 62, 146, 145.9130470, False), + 'Sm-147': _iso('Sm-147', 'samarium-147', 62, 147, 146.9149044, False, + isotopic_abundance=0.1499), + 'Sm-148': _iso('Sm-148', 'samarium-148', 62, 148, 147.9148292, False, + isotopic_abundance=0.1124), + 'Sm-149': _iso('Sm-149', 'samarium-149', 62, 149, 148.9171921, True, + isotopic_abundance=0.1382), + 'Sm-150': _iso('Sm-150', 'samarium-150', 62, 150, 149.9172829, True, + isotopic_abundance=0.0738), + 'Sm-151': _iso('Sm-151', 'samarium-151', 62, 151, 150.9199398, False), + 'Sm-152': _iso('Sm-152', 'samarium-152', 62, 152, 151.9197397, True, + isotopic_abundance=0.2675), + 'Sm-153': _iso('Sm-153', 'samarium-153', 62, 153, 152.9221047, False, + half_life=166627.08), + 'Sm-154': _iso('Sm-154', 'samarium-154', 62, 154, 153.9222169, True, + isotopic_abundance=0.2275), + 'Sm-155': _iso('Sm-155', 'samarium-155', 62, 155, 154.9246477, False), + 'Sm-156': _iso('Sm-156', 'samarium-156', 62, 156, 155.925536, False), + 'Sm-157': _iso('Sm-157', 'samarium-157', 62, 157, 156.9284187, False), + 'Sm-158': _iso('Sm-158', 'samarium-158', 62, 158, 157.9299510, False), + 'Sm-159': _iso('Sm-159', 'samarium-159', 62, 159, 158.9332172, False), + 'Sm-160': _iso('Sm-160', 'samarium-160', 62, 160, 159.9353353, False), + 'Sm-161': _iso('Sm-161', 'samarium-161', 62, 161, 160.9391602, False), + 'Sm-162': _iso('Sm-162', 'samarium-162', 62, 162, 161.94146, False), + 'Sm-163': _iso('Sm-163', 'samarium-163', 62, 163, 162.94555, False), + 'Sm-164': _iso('Sm-164', 'samarium-164', 62, 164, 163.94836, False), + 'Sm-165': _iso('Sm-165', 'samarium-165', 62, 165, 164.95297, False), + 'Eu-130': _iso('Eu-130', 'europium-130', 63, 130, 129.96369, False), + 'Eu-131': _iso('Eu-131', 'europium-131', 63, 131, 130.95784, False), + 'Eu-132': _iso('Eu-132', 'europium-132', 63, 132, 131.95467, False), + 'Eu-133': _iso('Eu-133', 'europium-133', 63, 133, 132.94929, False), + 'Eu-134': _iso('Eu-134', 'europium-134', 63, 134, 133.94640, False), + 'Eu-135': _iso('Eu-135', 'europium-135', 63, 135, 134.94187, False), + 'Eu-136': _iso('Eu-136', 'europium-136', 63, 136, 135.93962, False), + 'Eu-137': _iso('Eu-137', 'europium-137', 63, 137, 136.93546, False), + 'Eu-138': _iso('Eu-138', 'europium-138', 63, 138, 137.933709, False), + 'Eu-139': _iso('Eu-139', 'europium-139', 63, 139, 138.929792, False), + 'Eu-140': _iso('Eu-140', 'europium-140', 63, 140, 139.928088, False), + 'Eu-141': _iso('Eu-141', 'europium-141', 
63, 141, 140.924932, False), + 'Eu-142': _iso('Eu-142', 'europium-142', 63, 142, 141.923442, False), + 'Eu-143': _iso('Eu-143', 'europium-143', 63, 143, 142.920299, False), + 'Eu-144': _iso('Eu-144', 'europium-144', 63, 144, 143.918820, False), + 'Eu-145': _iso('Eu-145', 'europium-145', 63, 145, 144.9162726, False), + 'Eu-146': _iso('Eu-146', 'europium-146', 63, 146, 145.9172110, False), + 'Eu-147': _iso('Eu-147', 'europium-147', 63, 147, 146.9167527, False), + 'Eu-148': _iso('Eu-148', 'europium-148', 63, 148, 147.918089, False), + 'Eu-149': _iso('Eu-149', 'europium-149', 63, 149, 148.9179378, False), + 'Eu-150': _iso('Eu-150', 'europium-150', 63, 150, 149.9197077, False), + 'Eu-151': _iso('Eu-151', 'europium-151', 63, 151, 150.9198578, False, + isotopic_abundance=0.4781), + 'Eu-152': _iso('Eu-152', 'europium-152', 63, 152, 151.9217522, False, + half_life=427438080.0), + 'Eu-153': _iso('Eu-153', 'europium-153', 63, 153, 152.9212380, True, + isotopic_abundance=0.5219), + 'Eu-154': _iso('Eu-154', 'europium-154', 63, 154, 153.9229870, False, + half_life=271745280.0), + 'Eu-155': _iso('Eu-155', 'europium-155', 63, 155, 154.9229011, False, + half_life=150254784.0), + 'Eu-156': _iso('Eu-156', 'europium-156', 63, 156, 155.9247605, False), + 'Eu-157': _iso('Eu-157', 'europium-157', 63, 157, 156.9254334, False), + 'Eu-158': _iso('Eu-158', 'europium-158', 63, 158, 157.927799, False), + 'Eu-159': _iso('Eu-159', 'europium-159', 63, 159, 158.9291001, False), + 'Eu-160': _iso('Eu-160', 'europium-160', 63, 160, 159.931851, False), + 'Eu-161': _iso('Eu-161', 'europium-161', 63, 161, 160.933664, False), + 'Eu-162': _iso('Eu-162', 'europium-162', 63, 162, 161.936989, False), + 'Eu-163': _iso('Eu-163', 'europium-163', 63, 163, 162.939196, False), + 'Eu-164': _iso('Eu-164', 'europium-164', 63, 164, 163.94274, False), + 'Eu-165': _iso('Eu-165', 'europium-165', 63, 165, 164.94559, False), + 'Eu-166': _iso('Eu-166', 'europium-166', 63, 166, 165.94962, False), + 'Eu-167': _iso('Eu-167', 'europium-167', 63, 167, 166.95289, False), + 'Gd-133': _iso('Gd-133', 'gadolinium-133', 64, 133, 132.96133, False), + 'Gd-134': _iso('Gd-134', 'gadolinium-134', 64, 134, 133.95566, False), + 'Gd-135': _iso('Gd-135', 'gadolinium-135', 64, 135, 134.95245, False), + 'Gd-136': _iso('Gd-136', 'gadolinium-136', 64, 136, 135.94730, False), + 'Gd-137': _iso('Gd-137', 'gadolinium-137', 64, 137, 136.94502, False), + 'Gd-138': _iso('Gd-138', 'gadolinium-138', 64, 138, 137.94025, False), + 'Gd-139': _iso('Gd-139', 'gadolinium-139', 64, 139, 138.93813, False), + 'Gd-140': _iso('Gd-140', 'gadolinium-140', 64, 140, 139.933674, False), + 'Gd-141': _iso('Gd-141', 'gadolinium-141', 64, 141, 140.932126, False), + 'Gd-142': _iso('Gd-142', 'gadolinium-142', 64, 142, 141.928116, False), + 'Gd-143': _iso('Gd-143', 'gadolinium-143', 64, 143, 142.92675, False), + 'Gd-144': _iso('Gd-144', 'gadolinium-144', 64, 144, 143.922963, False), + 'Gd-145': _iso('Gd-145', 'gadolinium-145', 64, 145, 144.921713, False), + 'Gd-146': _iso('Gd-146', 'gadolinium-146', 64, 146, 145.9183188, False), + 'Gd-147': _iso('Gd-147', 'gadolinium-147', 64, 147, 146.9191014, False), + 'Gd-148': _iso('Gd-148', 'gadolinium-148', 64, 148, 147.9181215, False), + 'Gd-149': _iso('Gd-149', 'gadolinium-149', 64, 149, 148.9193481, False), + 'Gd-150': _iso('Gd-150', 'gadolinium-150', 64, 150, 149.9186644, False), + 'Gd-151': _iso('Gd-151', 'gadolinium-151', 64, 151, 150.9203560, False), + 'Gd-152': _iso('Gd-152', 'gadolinium-152', 64, 152, 151.9197995, False, + isotopic_abundance=0.0020), + 
'Gd-153': _iso('Gd-153', 'gadolinium-153', 64, 153, 152.9217580, False, + half_life=20690380.8), + 'Gd-154': _iso('Gd-154', 'gadolinium-154', 64, 154, 153.9208741, True, + isotopic_abundance=0.0218), + 'Gd-155': _iso('Gd-155', 'gadolinium-155', 64, 155, 154.9226305, True, + isotopic_abundance=0.1480), + 'Gd-156': _iso('Gd-156', 'gadolinium-156', 64, 156, 155.9221312, True, + isotopic_abundance=0.2047), + 'Gd-157': _iso('Gd-157', 'gadolinium-157', 64, 157, 156.9239686, True, + isotopic_abundance=0.1565), + 'Gd-158': _iso('Gd-158', 'gadolinium-158', 64, 158, 157.9241123, True, + isotopic_abundance=0.2484), + 'Gd-159': _iso('Gd-159', 'gadolinium-159', 64, 159, 158.9263970, False), + 'Gd-160': _iso('Gd-160', 'gadolinium-160', 64, 160, 159.9270624, True, + isotopic_abundance=0.2186), + 'Gd-161': _iso('Gd-161', 'gadolinium-161', 64, 161, 160.9296775, False), + 'Gd-162': _iso('Gd-162', 'gadolinium-162', 64, 162, 161.9309930, False), + 'Gd-163': _iso('Gd-163', 'gadolinium-163', 64, 163, 162.9341769, False), + 'Gd-164': _iso('Gd-164', 'gadolinium-164', 64, 164, 163.93583, False), + 'Gd-165': _iso('Gd-165', 'gadolinium-165', 64, 165, 164.93936, False), + 'Gd-166': _iso('Gd-166', 'gadolinium-166', 64, 166, 165.94146, False), + 'Gd-167': _iso('Gd-167', 'gadolinium-167', 64, 167, 166.94545, False), + 'Gd-168': _iso('Gd-168', 'gadolinium-168', 64, 168, 167.94808, False), + 'Gd-169': _iso('Gd-169', 'gadolinium-169', 64, 169, 168.95260, False), + 'Tb-135': _iso('Tb-135', 'terbium-135', 65, 135, 134.96476, False), + 'Tb-136': _iso('Tb-136', 'terbium-136', 65, 136, 135.96129, False), + 'Tb-137': _iso('Tb-137', 'terbium-137', 65, 137, 136.95602, False), + 'Tb-138': _iso('Tb-138', 'terbium-138', 65, 138, 137.95312, False), + 'Tb-139': _iso('Tb-139', 'terbium-139', 65, 139, 138.94833, False), + 'Tb-140': _iso('Tb-140', 'terbium-140', 65, 140, 139.94581, False), + 'Tb-141': _iso('Tb-141', 'terbium-141', 65, 141, 140.94145, False), + 'Tb-142': _iso('Tb-142', 'terbium-142', 65, 142, 141.93928, False), + 'Tb-143': _iso('Tb-143', 'terbium-143', 65, 143, 142.935137, False), + 'Tb-144': _iso('Tb-144', 'terbium-144', 65, 144, 143.933045, False), + 'Tb-145': _iso('Tb-145', 'terbium-145', 65, 145, 144.92882, False), + 'Tb-146': _iso('Tb-146', 'terbium-146', 65, 146, 145.927253, False), + 'Tb-147': _iso('Tb-147', 'terbium-147', 65, 147, 146.9240548, False), + 'Tb-148': _iso('Tb-148', 'terbium-148', 65, 148, 147.924282, False), + 'Tb-149': _iso('Tb-149', 'terbium-149', 65, 149, 148.9232535, False), + 'Tb-150': _iso('Tb-150', 'terbium-150', 65, 150, 149.9236649, False), + 'Tb-151': _iso('Tb-151', 'terbium-151', 65, 151, 150.9231096, False), + 'Tb-152': _iso('Tb-152', 'terbium-152', 65, 152, 151.924083, False), + 'Tb-153': _iso('Tb-153', 'terbium-153', 65, 153, 152.9234424, False), + 'Tb-154': _iso('Tb-154', 'terbium-154', 65, 154, 153.924685, False), + 'Tb-155': _iso('Tb-155', 'terbium-155', 65, 155, 154.923511, False), + 'Tb-156': _iso('Tb-156', 'terbium-156', 65, 156, 155.9247552, False), + 'Tb-157': _iso('Tb-157', 'terbium-157', 65, 157, 156.9240330, False), + 'Tb-158': _iso('Tb-158', 'terbium-158', 65, 158, 157.9254209, False), + 'Tb-159': _iso('Tb-159', 'terbium-159', 65, 159, 158.9253547, True, + isotopic_abundance=1), + 'Tb-160': _iso('Tb-160', 'terbium-160', 65, 160, 159.9271756, False), + 'Tb-161': _iso('Tb-161', 'terbium-161', 65, 161, 160.9275778, False), + 'Tb-162': _iso('Tb-162', 'terbium-162', 65, 162, 161.929495, False), + 'Tb-163': _iso('Tb-163', 'terbium-163', 65, 163, 162.9306547, False), + 'Tb-164': 
_iso('Tb-164', 'terbium-164', 65, 164, 163.93336, False), + 'Tb-165': _iso('Tb-165', 'terbium-165', 65, 165, 164.93498, False), + 'Tb-166': _iso('Tb-166', 'terbium-166', 65, 166, 165.937860, False), + 'Tb-167': _iso('Tb-167', 'terbium-167', 65, 167, 166.93996, False), + 'Tb-168': _iso('Tb-168', 'terbium-168', 65, 168, 167.94340, False), + 'Tb-169': _iso('Tb-169', 'terbium-169', 65, 169, 168.94597, False), + 'Tb-170': _iso('Tb-170', 'terbium-170', 65, 170, 169.94984, False), + 'Tb-171': _iso('Tb-171', 'terbium-171', 65, 171, 170.95273, False), + 'Dy-138': _iso('Dy-138', 'dysprosium-138', 66, 138, 137.96250, False), + 'Dy-139': _iso('Dy-139', 'dysprosium-139', 66, 139, 138.95959, False), + 'Dy-140': _iso('Dy-140', 'dysprosium-140', 66, 140, 139.95402, False), + 'Dy-141': _iso('Dy-141', 'dysprosium-141', 66, 141, 140.95128, False), + 'Dy-142': _iso('Dy-142', 'dysprosium-142', 66, 142, 141.94619, False), + 'Dy-143': _iso('Dy-143', 'dysprosium-143', 66, 143, 142.943994, False), + 'Dy-144': _iso('Dy-144', 'dysprosium-144', 66, 144, 143.9392695, False), + 'Dy-145': _iso('Dy-145', 'dysprosium-145', 66, 145, 144.9374740, False), + 'Dy-146': _iso('Dy-146', 'dysprosium-146', 66, 146, 145.9328445, False), + 'Dy-147': _iso('Dy-147', 'dysprosium-147', 66, 147, 146.9310827, False), + 'Dy-148': _iso('Dy-148', 'dysprosium-148', 66, 148, 147.927157, False), + 'Dy-149': _iso('Dy-149', 'dysprosium-149', 66, 149, 148.927322, False), + 'Dy-150': _iso('Dy-150', 'dysprosium-150', 66, 150, 149.9255933, False), + 'Dy-151': _iso('Dy-151', 'dysprosium-151', 66, 151, 150.9261916, False), + 'Dy-152': _iso('Dy-152', 'dysprosium-152', 66, 152, 151.9247253, False), + 'Dy-153': _iso('Dy-153', 'dysprosium-153', 66, 153, 152.9257724, False), + 'Dy-154': _iso('Dy-154', 'dysprosium-154', 66, 154, 153.9244293, False), + 'Dy-155': _iso('Dy-155', 'dysprosium-155', 66, 155, 154.925759, False), + 'Dy-156': _iso('Dy-156', 'dysprosium-156', 66, 156, 155.9242847, True, + isotopic_abundance=0.00056), + 'Dy-157': _iso('Dy-157', 'dysprosium-157', 66, 157, 156.9254707, False), + 'Dy-158': _iso('Dy-158', 'dysprosium-158', 66, 158, 157.9244159, True, + isotopic_abundance=0.00095), + 'Dy-159': _iso('Dy-159', 'dysprosium-159', 66, 159, 158.9257470, False), + 'Dy-160': _iso('Dy-160', 'dysprosium-160', 66, 160, 159.9252046, True, + isotopic_abundance=0.02329), + 'Dy-161': _iso('Dy-161', 'dysprosium-161', 66, 161, 160.9269405, True, + isotopic_abundance=0.18889), + 'Dy-162': _iso('Dy-162', 'dysprosium-162', 66, 162, 161.9268056, True, + isotopic_abundance=0.25475), + 'Dy-163': _iso('Dy-163', 'dysprosium-163', 66, 163, 162.9287383, True, + isotopic_abundance=0.24896), + 'Dy-164': _iso('Dy-164', 'dysprosium-164', 66, 164, 163.9291819, True, + isotopic_abundance=0.28260), + 'Dy-165': _iso('Dy-165', 'dysprosium-165', 66, 165, 164.9317105, False), + 'Dy-166': _iso('Dy-166', 'dysprosium-166', 66, 166, 165.9328139, False), + 'Dy-167': _iso('Dy-167', 'dysprosium-167', 66, 167, 166.935661, False), + 'Dy-168': _iso('Dy-168', 'dysprosium-168', 66, 168, 167.93713, False), + 'Dy-169': _iso('Dy-169', 'dysprosium-169', 66, 169, 168.94031, False), + 'Dy-170': _iso('Dy-170', 'dysprosium-170', 66, 170, 169.94239, False), + 'Dy-171': _iso('Dy-171', 'dysprosium-171', 66, 171, 170.94612, False), + 'Dy-172': _iso('Dy-172', 'dysprosium-172', 66, 172, 171.94846, False), + 'Dy-173': _iso('Dy-173', 'dysprosium-173', 66, 173, 172.95283, False), + 'Ho-140': _iso('Ho-140', 'holmium-140', 67, 140, 139.96859, False), + 'Ho-141': _iso('Ho-141', 'holmium-141', 67, 141, 
140.96311, False), + 'Ho-142': _iso('Ho-142', 'holmium-142', 67, 142, 141.96001, False), + 'Ho-143': _iso('Ho-143', 'holmium-143', 67, 143, 142.95486, False), + 'Ho-144': _iso('Ho-144', 'holmium-144', 67, 144, 143.9521097, False), + 'Ho-145': _iso('Ho-145', 'holmium-145', 67, 145, 144.9472674, False), + 'Ho-146': _iso('Ho-146', 'holmium-146', 67, 146, 145.9449935, False), + 'Ho-147': _iso('Ho-147', 'holmium-147', 67, 147, 146.9401423, False), + 'Ho-148': _iso('Ho-148', 'holmium-148', 67, 148, 147.937744, False), + 'Ho-149': _iso('Ho-149', 'holmium-149', 67, 149, 148.933803, False), + 'Ho-150': _iso('Ho-150', 'holmium-150', 67, 150, 149.933498, False), + 'Ho-151': _iso('Ho-151', 'holmium-151', 67, 151, 150.9316983, False), + 'Ho-152': _iso('Ho-152', 'holmium-152', 67, 152, 151.931724, False), + 'Ho-153': _iso('Ho-153', 'holmium-153', 67, 153, 152.9302064, False), + 'Ho-154': _iso('Ho-154', 'holmium-154', 67, 154, 153.9306068, False), + 'Ho-155': _iso('Ho-155', 'holmium-155', 67, 155, 154.929104, False), + 'Ho-156': _iso('Ho-156', 'holmium-156', 67, 156, 155.929706, False), + 'Ho-157': _iso('Ho-157', 'holmium-157', 67, 157, 156.928254, False), + 'Ho-158': _iso('Ho-158', 'holmium-158', 67, 158, 157.928946, False), + 'Ho-159': _iso('Ho-159', 'holmium-159', 67, 159, 158.9277197, False), + 'Ho-160': _iso('Ho-160', 'holmium-160', 67, 160, 159.928737, False), + 'Ho-161': _iso('Ho-161', 'holmium-161', 67, 161, 160.9278615, False), + 'Ho-162': _iso('Ho-162', 'holmium-162', 67, 162, 161.9291023, False), + 'Ho-163': _iso('Ho-163', 'holmium-163', 67, 163, 162.9287410, False), + 'Ho-164': _iso('Ho-164', 'holmium-164', 67, 164, 163.9302403, False), + 'Ho-165': _iso('Ho-165', 'holmium-165', 67, 165, 164.9303288, True, + isotopic_abundance=1), + 'Ho-166': _iso('Ho-166', 'holmium-166', 67, 166, 165.9322909, False, + half_life=96458.40000000001), + 'Ho-167': _iso('Ho-167', 'holmium-167', 67, 167, 166.9331385, False), + 'Ho-168': _iso('Ho-168', 'holmium-168', 67, 168, 167.935522, False), + 'Ho-169': _iso('Ho-169', 'holmium-169', 67, 169, 168.936878, False), + 'Ho-170': _iso('Ho-170', 'holmium-170', 67, 170, 169.939625, False), + 'Ho-171': _iso('Ho-171', 'holmium-171', 67, 171, 170.94147, False), + 'Ho-172': _iso('Ho-172', 'holmium-172', 67, 172, 171.94473, False), + 'Ho-173': _iso('Ho-173', 'holmium-173', 67, 173, 172.94702, False), + 'Ho-174': _iso('Ho-174', 'holmium-174', 67, 174, 173.95095, False), + 'Ho-175': _iso('Ho-175', 'holmium-175', 67, 175, 174.95362, False), + 'Er-142': _iso('Er-142', 'erbium-142', 68, 142, 141.97010, False), + 'Er-143': _iso('Er-143', 'erbium-143', 68, 143, 142.96662, False), + 'Er-144': _iso('Er-144', 'erbium-144', 68, 144, 143.96070, False), + 'Er-145': _iso('Er-145', 'erbium-145', 68, 145, 144.95805, False), + 'Er-146': _iso('Er-146', 'erbium-146', 68, 146, 145.9524184, False), + 'Er-147': _iso('Er-147', 'erbium-147', 68, 147, 146.949964, False), + 'Er-148': _iso('Er-148', 'erbium-148', 68, 148, 147.944735, False), + 'Er-149': _iso('Er-149', 'erbium-149', 68, 149, 148.942306, False), + 'Er-150': _iso('Er-150', 'erbium-150', 68, 150, 149.937916, False), + 'Er-151': _iso('Er-151', 'erbium-151', 68, 151, 150.937449, False), + 'Er-152': _iso('Er-152', 'erbium-152', 68, 152, 151.935057, False), + 'Er-153': _iso('Er-153', 'erbium-153', 68, 153, 152.935080, False), + 'Er-154': _iso('Er-154', 'erbium-154', 68, 154, 153.9327908, False), + 'Er-155': _iso('Er-155', 'erbium-155', 68, 155, 154.9332159, False), + 'Er-156': _iso('Er-156', 'erbium-156', 68, 156, 155.931067, False), + 
'Er-157': _iso('Er-157', 'erbium-157', 68, 157, 156.931949, False), + 'Er-158': _iso('Er-158', 'erbium-158', 68, 158, 157.929893, False), + 'Er-159': _iso('Er-159', 'erbium-159', 68, 159, 158.9306918, False), + 'Er-160': _iso('Er-160', 'erbium-160', 68, 160, 159.929077, False), + 'Er-161': _iso('Er-161', 'erbium-161', 68, 161, 160.9300046, False), + 'Er-162': _iso('Er-162', 'erbium-162', 68, 162, 161.9287884, True, + isotopic_abundance=0.00139), + 'Er-163': _iso('Er-163', 'erbium-163', 68, 163, 162.9300408, False), + 'Er-164': _iso('Er-164', 'erbium-164', 68, 164, 163.9292088, True, + isotopic_abundance=0.01601), + 'Er-165': _iso('Er-165', 'erbium-165', 68, 165, 164.9307345, False), + 'Er-166': _iso('Er-166', 'erbium-166', 68, 166, 165.9302995, True, + isotopic_abundance=0.33503), + 'Er-167': _iso('Er-167', 'erbium-167', 68, 167, 166.9320546, True, + isotopic_abundance=0.22869), + 'Er-168': _iso('Er-168', 'erbium-168', 68, 168, 167.9323767, True, + isotopic_abundance=0.26978), + 'Er-169': _iso('Er-169', 'erbium-169', 68, 169, 168.9345968, False), + 'Er-170': _iso('Er-170', 'erbium-170', 68, 170, 169.9354702, True, + isotopic_abundance=0.14910), + 'Er-171': _iso('Er-171', 'erbium-171', 68, 171, 170.9380357, False), + 'Er-172': _iso('Er-172', 'erbium-172', 68, 172, 171.9393619, False), + 'Er-173': _iso('Er-173', 'erbium-173', 68, 173, 172.94240, False), + 'Er-174': _iso('Er-174', 'erbium-174', 68, 174, 173.94423, False), + 'Er-175': _iso('Er-175', 'erbium-175', 68, 175, 174.94777, False), + 'Er-176': _iso('Er-176', 'erbium-176', 68, 176, 175.94994, False), + 'Er-177': _iso('Er-177', 'erbium-177', 68, 177, 176.95399, False), + 'Tm-144': _iso('Tm-144', 'thulium-144', 69, 144, 143.97628, False), + 'Tm-145': _iso('Tm-145', 'thulium-145', 69, 145, 144.97039, False), + 'Tm-146': _iso('Tm-146', 'thulium-146', 69, 146, 145.96684, False), + 'Tm-147': _iso('Tm-147', 'thulium-147', 69, 147, 146.9613799, False), + 'Tm-148': _iso('Tm-148', 'thulium-148', 69, 148, 147.958384, False), + 'Tm-149': _iso('Tm-149', 'thulium-149', 69, 149, 148.95289, False), + 'Tm-150': _iso('Tm-150', 'thulium-150', 69, 150, 149.95009, False), + 'Tm-151': _iso('Tm-151', 'thulium-151', 69, 151, 150.945488, False), + 'Tm-152': _iso('Tm-152', 'thulium-152', 69, 152, 151.944422, False), + 'Tm-153': _iso('Tm-153', 'thulium-153', 69, 153, 152.942040, False), + 'Tm-154': _iso('Tm-154', 'thulium-154', 69, 154, 153.941570, False), + 'Tm-155': _iso('Tm-155', 'thulium-155', 69, 155, 154.939210, False), + 'Tm-156': _iso('Tm-156', 'thulium-156', 69, 156, 155.938992, False), + 'Tm-157': _iso('Tm-157', 'thulium-157', 69, 157, 156.936944, False), + 'Tm-158': _iso('Tm-158', 'thulium-158', 69, 158, 157.936980, False), + 'Tm-159': _iso('Tm-159', 'thulium-159', 69, 159, 158.934975, False), + 'Tm-160': _iso('Tm-160', 'thulium-160', 69, 160, 159.935263, False), + 'Tm-161': _iso('Tm-161', 'thulium-161', 69, 161, 160.933549, False), + 'Tm-162': _iso('Tm-162', 'thulium-162', 69, 162, 161.934002, False), + 'Tm-163': _iso('Tm-163', 'thulium-163', 69, 163, 162.9326592, False), + 'Tm-164': _iso('Tm-164', 'thulium-164', 69, 164, 163.933544, False), + 'Tm-165': _iso('Tm-165', 'thulium-165', 69, 165, 164.9324431, False), + 'Tm-166': _iso('Tm-166', 'thulium-166', 69, 166, 165.933561, False), + 'Tm-167': _iso('Tm-167', 'thulium-167', 69, 167, 166.9328562, False), + 'Tm-168': _iso('Tm-168', 'thulium-168', 69, 168, 167.9341774, False), + 'Tm-169': _iso('Tm-169', 'thulium-169', 69, 169, 168.9342179, True, + isotopic_abundance=1), + 'Tm-170': _iso('Tm-170', 
'thulium-170', 69, 170, 169.9358060, False), + 'Tm-171': _iso('Tm-171', 'thulium-171', 69, 171, 170.9364339, False), + 'Tm-172': _iso('Tm-172', 'thulium-172', 69, 172, 171.9384055, False), + 'Tm-173': _iso('Tm-173', 'thulium-173', 69, 173, 172.9396084, False), + 'Tm-174': _iso('Tm-174', 'thulium-174', 69, 174, 173.942173, False), + 'Tm-175': _iso('Tm-175', 'thulium-175', 69, 175, 174.943841, False), + 'Tm-176': _iso('Tm-176', 'thulium-176', 69, 176, 175.94700, False), + 'Tm-177': _iso('Tm-177', 'thulium-177', 69, 177, 176.94904, False), + 'Tm-178': _iso('Tm-178', 'thulium-178', 69, 178, 177.95264, False), + 'Tm-179': _iso('Tm-179', 'thulium-179', 69, 179, 178.95534, False), + 'Yb-148': _iso('Yb-148', 'ytterbium-148', 70, 148, 147.96758, False), + 'Yb-149': _iso('Yb-149', 'ytterbium-149', 70, 149, 148.96436, False), + 'Yb-150': _iso('Yb-150', 'ytterbium-150', 70, 150, 149.95852, False), + 'Yb-151': _iso('Yb-151', 'ytterbium-151', 70, 151, 150.95540, False), + 'Yb-152': _iso('Yb-152', 'ytterbium-152', 70, 152, 151.95027, False), + 'Yb-153': _iso('Yb-153', 'ytterbium-153', 70, 153, 152.94932, False), + 'Yb-154': _iso('Yb-154', 'ytterbium-154', 70, 154, 153.946396, False), + 'Yb-155': _iso('Yb-155', 'ytterbium-155', 70, 155, 154.945783, False), + 'Yb-156': _iso('Yb-156', 'ytterbium-156', 70, 156, 155.942825, False), + 'Yb-157': _iso('Yb-157', 'ytterbium-157', 70, 157, 156.942645, False), + 'Yb-158': _iso('Yb-158', 'ytterbium-158', 70, 158, 157.9398705, False), + 'Yb-159': _iso('Yb-159', 'ytterbium-159', 70, 159, 158.940055, False), + 'Yb-160': _iso('Yb-160', 'ytterbium-160', 70, 160, 159.937557, False), + 'Yb-161': _iso('Yb-161', 'ytterbium-161', 70, 161, 160.937907, False), + 'Yb-162': _iso('Yb-162', 'ytterbium-162', 70, 162, 161.935774, False), + 'Yb-163': _iso('Yb-163', 'ytterbium-163', 70, 163, 162.936340, False), + 'Yb-164': _iso('Yb-164', 'ytterbium-164', 70, 164, 163.934495, False), + 'Yb-165': _iso('Yb-165', 'ytterbium-165', 70, 165, 164.935270, False), + 'Yb-166': _iso('Yb-166', 'ytterbium-166', 70, 166, 165.9338747, False), + 'Yb-167': _iso('Yb-167', 'ytterbium-167', 70, 167, 166.9349530, False), + 'Yb-168': _iso('Yb-168', 'ytterbium-168', 70, 168, 167.9338896, True, + isotopic_abundance=0.00123), + 'Yb-169': _iso('Yb-169', 'ytterbium-169', 70, 169, 168.9351825, False, + half_life=2766070.0799999996), + 'Yb-170': _iso('Yb-170', 'ytterbium-170', 70, 170, 169.9347664, True, + isotopic_abundance=0.02982), + 'Yb-171': _iso('Yb-171', 'ytterbium-171', 70, 171, 170.9363302, True, + isotopic_abundance=0.1409), + 'Yb-172': _iso('Yb-172', 'ytterbium-172', 70, 172, 171.9363859, True, + isotopic_abundance=0.2168), + 'Yb-173': _iso('Yb-173', 'ytterbium-173', 70, 173, 172.9382151, True, + isotopic_abundance=0.16103), + 'Yb-174': _iso('Yb-174', 'ytterbium-174', 70, 174, 173.9388664, True, + isotopic_abundance=0.32026), + 'Yb-175': _iso('Yb-175', 'ytterbium-175', 70, 175, 174.9412808, False), + 'Yb-176': _iso('Yb-176', 'ytterbium-176', 70, 176, 175.9425764, True, + isotopic_abundance=0.12996), + 'Yb-177': _iso('Yb-177', 'ytterbium-177', 70, 177, 176.9452656, False), + 'Yb-178': _iso('Yb-178', 'ytterbium-178', 70, 178, 177.946651, False), + 'Yb-179': _iso('Yb-179', 'ytterbium-179', 70, 179, 178.95004, False), + 'Yb-180': _iso('Yb-180', 'ytterbium-180', 70, 180, 179.95212, False), + 'Yb-181': _iso('Yb-181', 'ytterbium-181', 70, 181, 180.95589, False), + 'Lu-150': _iso('Lu-150', 'lutetium-150', 71, 150, 149.97355, False), + 'Lu-151': _iso('Lu-151', 'lutetium-151', 71, 151, 150.96768, False), + 
'Lu-152': _iso('Lu-152', 'lutetium-152', 71, 152, 151.96412, False), + 'Lu-153': _iso('Lu-153', 'lutetium-153', 71, 153, 152.95875, False), + 'Lu-154': _iso('Lu-154', 'lutetium-154', 71, 154, 153.95736, False), + 'Lu-155': _iso('Lu-155', 'lutetium-155', 71, 155, 154.954321, False), + 'Lu-156': _iso('Lu-156', 'lutetium-156', 71, 156, 155.953033, False), + 'Lu-157': _iso('Lu-157', 'lutetium-157', 71, 157, 156.950127, False), + 'Lu-158': _iso('Lu-158', 'lutetium-158', 71, 158, 157.949316, False), + 'Lu-159': _iso('Lu-159', 'lutetium-159', 71, 159, 158.946636, False), + 'Lu-160': _iso('Lu-160', 'lutetium-160', 71, 160, 159.946033, False), + 'Lu-161': _iso('Lu-161', 'lutetium-161', 71, 161, 160.943572, False), + 'Lu-162': _iso('Lu-162', 'lutetium-162', 71, 162, 161.943283, False), + 'Lu-163': _iso('Lu-163', 'lutetium-163', 71, 163, 162.941179, False), + 'Lu-164': _iso('Lu-164', 'lutetium-164', 71, 164, 163.941339, False), + 'Lu-165': _iso('Lu-165', 'lutetium-165', 71, 165, 164.939407, False), + 'Lu-166': _iso('Lu-166', 'lutetium-166', 71, 166, 165.939859, False), + 'Lu-167': _iso('Lu-167', 'lutetium-167', 71, 167, 166.938270, False), + 'Lu-168': _iso('Lu-168', 'lutetium-168', 71, 168, 167.938736, False), + 'Lu-169': _iso('Lu-169', 'lutetium-169', 71, 169, 168.9376441, False), + 'Lu-170': _iso('Lu-170', 'lutetium-170', 71, 170, 169.938478, False), + 'Lu-171': _iso('Lu-171', 'lutetium-171', 71, 171, 170.9379170, False), + 'Lu-172': _iso('Lu-172', 'lutetium-172', 71, 172, 171.9390891, False), + 'Lu-173': _iso('Lu-173', 'lutetium-173', 71, 173, 172.9389340, False), + 'Lu-174': _iso('Lu-174', 'lutetium-174', 71, 174, 173.9403409, False), + 'Lu-175': _iso('Lu-175', 'lutetium-175', 71, 175, 174.9407752, True, + isotopic_abundance=0.97401), + 'Lu-176': _iso('Lu-176', 'lutetium-176', 71, 176, 175.9426897, False, + isotopic_abundance=0.02599), + 'Lu-177': _iso('Lu-177', 'lutetium-177', 71, 177, 176.9437615, False, + half_life=573696.0), + 'Lu-178': _iso('Lu-178', 'lutetium-178', 71, 178, 177.9459580, False), + 'Lu-179': _iso('Lu-179', 'lutetium-179', 71, 179, 178.9473309, False), + 'Lu-180': _iso('Lu-180', 'lutetium-180', 71, 180, 179.949888, False), + 'Lu-181': _iso('Lu-181', 'lutetium-181', 71, 181, 180.95191, False), + 'Lu-182': _iso('Lu-182', 'lutetium-182', 71, 182, 181.95504, False), + 'Lu-183': _iso('Lu-183', 'lutetium-183', 71, 183, 182.957363, False), + 'Lu-184': _iso('Lu-184', 'lutetium-184', 71, 184, 183.96091, False), + 'Lu-185': _iso('Lu-185', 'lutetium-185', 71, 185, 184.96362, False), + 'Hf-153': _iso('Hf-153', 'hafnium-153', 72, 153, 152.97069, False), + 'Hf-154': _iso('Hf-154', 'hafnium-154', 72, 154, 153.96486, False), + 'Hf-155': _iso('Hf-155', 'hafnium-155', 72, 155, 154.96311, False), + 'Hf-156': _iso('Hf-156', 'hafnium-156', 72, 156, 155.95935, False), + 'Hf-157': _iso('Hf-157', 'hafnium-157', 72, 157, 156.95824, False), + 'Hf-158': _iso('Hf-158', 'hafnium-158', 72, 158, 157.954801, False), + 'Hf-159': _iso('Hf-159', 'hafnium-159', 72, 159, 158.953996, False), + 'Hf-160': _iso('Hf-160', 'hafnium-160', 72, 160, 159.950691, False), + 'Hf-161': _iso('Hf-161', 'hafnium-161', 72, 161, 160.950278, False), + 'Hf-162': _iso('Hf-162', 'hafnium-162', 72, 162, 161.9472148, False), + 'Hf-163': _iso('Hf-163', 'hafnium-163', 72, 163, 162.947113, False), + 'Hf-164': _iso('Hf-164', 'hafnium-164', 72, 164, 163.944371, False), + 'Hf-165': _iso('Hf-165', 'hafnium-165', 72, 165, 164.944567, False), + 'Hf-166': _iso('Hf-166', 'hafnium-166', 72, 166, 165.942180, False), + 'Hf-167': _iso('Hf-167', 
'hafnium-167', 72, 167, 166.942600, False), + 'Hf-168': _iso('Hf-168', 'hafnium-168', 72, 168, 167.940568, False), + 'Hf-169': _iso('Hf-169', 'hafnium-169', 72, 169, 168.941259, False), + 'Hf-170': _iso('Hf-170', 'hafnium-170', 72, 170, 169.939609, False), + 'Hf-171': _iso('Hf-171', 'hafnium-171', 72, 171, 170.940492, False), + 'Hf-172': _iso('Hf-172', 'hafnium-172', 72, 172, 171.939450, False), + 'Hf-173': _iso('Hf-173', 'hafnium-173', 72, 173, 172.940513, False), + 'Hf-174': _iso('Hf-174', 'hafnium-174', 72, 174, 173.9400461, False, + isotopic_abundance=0.0016), + 'Hf-175': _iso('Hf-175', 'hafnium-175', 72, 175, 174.9415092, False), + 'Hf-176': _iso('Hf-176', 'hafnium-176', 72, 176, 175.9414076, True, + isotopic_abundance=0.0526), + 'Hf-177': _iso('Hf-177', 'hafnium-177', 72, 177, 176.9432277, True, + isotopic_abundance=0.1860), + 'Hf-178': _iso('Hf-178', 'hafnium-178', 72, 178, 177.9437058, True, + isotopic_abundance=0.2728), + 'Hf-179': _iso('Hf-179', 'hafnium-179', 72, 179, 178.9458232, True, + isotopic_abundance=0.1362), + 'Hf-180': _iso('Hf-180', 'hafnium-180', 72, 180, 179.9465570, True, + isotopic_abundance=0.3508), + 'Hf-181': _iso('Hf-181', 'hafnium-181', 72, 181, 180.9491083, False), + 'Hf-182': _iso('Hf-182', 'hafnium-182', 72, 182, 181.9505612, False), + 'Hf-183': _iso('Hf-183', 'hafnium-183', 72, 183, 182.953530, False), + 'Hf-184': _iso('Hf-184', 'hafnium-184', 72, 184, 183.955446, False), + 'Hf-185': _iso('Hf-185', 'hafnium-185', 72, 185, 184.958862, False), + 'Hf-186': _iso('Hf-186', 'hafnium-186', 72, 186, 185.960897, False), + 'Hf-187': _iso('Hf-187', 'hafnium-187', 72, 187, 186.96477, False), + 'Hf-188': _iso('Hf-188', 'hafnium-188', 72, 188, 187.96685, False), + 'Hf-189': _iso('Hf-189', 'hafnium-189', 72, 189, 188.97084, False), + 'Ta-155': _iso('Ta-155', 'tantalum-155', 73, 155, 154.97424, False), + 'Ta-156': _iso('Ta-156', 'tantalum-156', 73, 156, 155.97203, False), + 'Ta-157': _iso('Ta-157', 'tantalum-157', 73, 157, 156.96818, False), + 'Ta-158': _iso('Ta-158', 'tantalum-158', 73, 158, 157.96654, False), + 'Ta-159': _iso('Ta-159', 'tantalum-159', 73, 159, 158.963023, False), + 'Ta-160': _iso('Ta-160', 'tantalum-160', 73, 160, 159.961488, False), + 'Ta-161': _iso('Ta-161', 'tantalum-161', 73, 161, 160.958452, False), + 'Ta-162': _iso('Ta-162', 'tantalum-162', 73, 162, 161.957294, False), + 'Ta-163': _iso('Ta-163', 'tantalum-163', 73, 163, 162.954337, False), + 'Ta-164': _iso('Ta-164', 'tantalum-164', 73, 164, 163.953534, False), + 'Ta-165': _iso('Ta-165', 'tantalum-165', 73, 165, 164.950781, False), + 'Ta-166': _iso('Ta-166', 'tantalum-166', 73, 166, 165.950512, False), + 'Ta-167': _iso('Ta-167', 'tantalum-167', 73, 167, 166.948093, False), + 'Ta-168': _iso('Ta-168', 'tantalum-168', 73, 168, 167.948047, False), + 'Ta-169': _iso('Ta-169', 'tantalum-169', 73, 169, 168.946011, False), + 'Ta-170': _iso('Ta-170', 'tantalum-170', 73, 170, 169.946175, False), + 'Ta-171': _iso('Ta-171', 'tantalum-171', 73, 171, 170.944476, False), + 'Ta-172': _iso('Ta-172', 'tantalum-172', 73, 172, 171.944895, False), + 'Ta-173': _iso('Ta-173', 'tantalum-173', 73, 173, 172.943750, False), + 'Ta-174': _iso('Ta-174', 'tantalum-174', 73, 174, 173.944454, False), + 'Ta-175': _iso('Ta-175', 'tantalum-175', 73, 175, 174.943737, False), + 'Ta-176': _iso('Ta-176', 'tantalum-176', 73, 176, 175.944857, False), + 'Ta-177': _iso('Ta-177', 'tantalum-177', 73, 177, 176.9444795, False), + 'Ta-178': _iso('Ta-178', 'tantalum-178', 73, 178, 177.945678, False), + 'Ta-179': _iso('Ta-179', 'tantalum-179', 73, 
179, 178.9459366, False),
+    'Ta-180': _iso('Ta-180', 'tantalum-180', 73, 180, 179.9474648, True,
+                   isotopic_abundance=0.0001201),
+    'Ta-181': _iso('Ta-181', 'tantalum-181', 73, 181, 180.9479958, True,
+                   isotopic_abundance=0.9998799),
+    'Ta-182': _iso('Ta-182', 'tantalum-182', 73, 182, 181.9501519, False),
+    'Ta-183': _iso('Ta-183', 'tantalum-183', 73, 183, 182.9513726, False),
+    'Ta-184': _iso('Ta-184', 'tantalum-184', 73, 184, 183.954008, False),
+    'Ta-185': _iso('Ta-185', 'tantalum-185', 73, 185, 184.955559, False),
+    'Ta-186': _iso('Ta-186', 'tantalum-186', 73, 186, 185.958551, False),
+    'Ta-187': _iso('Ta-187', 'tantalum-187', 73, 187, 186.960386, False),
+    'Ta-188': _iso('Ta-188', 'tantalum-188', 73, 188, 187.963916, False),
+    'Ta-189': _iso('Ta-189', 'tantalum-189', 73, 189, 188.96583, False),
+    'Ta-190': _iso('Ta-190', 'tantalum-190', 73, 190, 189.96939, False),
+    'Ta-191': _iso('Ta-191', 'tantalum-191', 73, 191, 190.97156, False),
+    'Ta-192': _iso('Ta-192', 'tantalum-192', 73, 192, 191.97514, False),
+    'W-157': _iso('W-157', 'tungsten-157', 74, 157, 156.97884, False),
+    'W-158': _iso('W-158', 'tungsten-158', 74, 158, 157.97456, False),
+    'W-159': _iso('W-159', 'tungsten-159', 74, 159, 158.97264, False),
+    'W-160': _iso('W-160', 'tungsten-160', 74, 160, 159.96846, False),
+    'W-161': _iso('W-161', 'tungsten-161', 74, 161, 160.96720, False),
+    'W-162': _iso('W-162', 'tungsten-162', 74, 162, 161.963499, False),
+    'W-163': _iso('W-163', 'tungsten-163', 74, 163, 162.962524, False),
+    'W-164': _iso('W-164', 'tungsten-164', 74, 164, 163.958961, False),
+    'W-165': _iso('W-165', 'tungsten-165', 74, 165, 164.958281, False),
+    'W-166': _iso('W-166', 'tungsten-166', 74, 166, 165.955031, False),
+    'W-167': _iso('W-167', 'tungsten-167', 74, 167, 166.954805, False),
+    'W-168': _iso('W-168', 'tungsten-168', 74, 168, 167.951806, False),
+    'W-169': _iso('W-169', 'tungsten-169', 74, 169, 168.951779, False),
+    'W-170': _iso('W-170', 'tungsten-170', 74, 170, 169.949232, False),
+    'W-171': _iso('W-171', 'tungsten-171', 74, 171, 170.949451, False),
+    'W-172': _iso('W-172', 'tungsten-172', 74, 172, 171.947292, False),
+    'W-173': _iso('W-173', 'tungsten-173', 74, 173, 172.947689, False),
+    'W-174': _iso('W-174', 'tungsten-174', 74, 174, 173.946079, False),
+    'W-175': _iso('W-175', 'tungsten-175', 74, 175, 174.946717, False),
+    'W-176': _iso('W-176', 'tungsten-176', 74, 176, 175.945634, False),
+    'W-177': _iso('W-177', 'tungsten-177', 74, 177, 176.946643, False),
+    'W-178': _iso('W-178', 'tungsten-178', 74, 178, 177.945883, False),
+    'W-179': _iso('W-179', 'tungsten-179', 74, 179, 178.947077, False),
+    'W-180': _iso('W-180', 'tungsten-180', 74, 180, 179.9467108, False,
+                  isotopic_abundance=0.0012),
+    'W-181': _iso('W-181', 'tungsten-181', 74, 181, 180.9481978, False,
+                  half_life=10462608.0),
+    'W-182': _iso('W-182', 'tungsten-182', 74, 182, 181.94820394, True,
+                  isotopic_abundance=0.2650),
+    'W-183': _iso('W-183', 'tungsten-183', 74, 183, 182.95022275, True,
+                  isotopic_abundance=0.1431),
+    'W-184': _iso('W-184', 'tungsten-184', 74, 184, 183.95093092, True,
+                  isotopic_abundance=0.3064),
+    'W-185': _iso('W-185', 'tungsten-185', 74, 185, 184.95341897, False),
+    'W-186': _iso('W-186', 'tungsten-186', 74, 186, 185.9543628, True,
+                  isotopic_abundance=0.2843),
+    'W-187': _iso('W-187', 'tungsten-187', 74, 187, 186.9571588, False),
+    'W-188': _iso('W-188', 'tungsten-188', 74, 188, 187.9584862, False,
+                  half_life=6029251.2),
+    'W-189': _iso('W-189', 'tungsten-189', 74, 189, 188.961763, False),
+    'W-190': _iso('W-190', 'tungsten-190', 74, 190, 189.963091, False),
+    'W-191': _iso('W-191', 'tungsten-191', 74, 191, 190.966531, False),
+    'W-192': _iso('W-192', 'tungsten-192', 74, 192, 191.96817, False),
+    'W-193': _iso('W-193', 'tungsten-193', 74, 193, 192.97178, False),
+    'W-194': _iso('W-194', 'tungsten-194', 74, 194, 193.97367, False),
+    'Re-159': _iso('Re-159', 'rhenium-159', 75, 159, 158.98418, False),
+    'Re-160': _iso('Re-160', 'rhenium-160', 75, 160, 159.98182, False),
+    'Re-161': _iso('Re-161', 'rhenium-161', 75, 161, 160.97757, False),
+    'Re-162': _iso('Re-162', 'rhenium-162', 75, 162, 161.97584, False),
+    'Re-163': _iso('Re-163', 'rhenium-163', 75, 163, 162.972080, False),
+    'Re-164': _iso('Re-164', 'rhenium-164', 75, 164, 163.970453, False),
+    'Re-165': _iso('Re-165', 'rhenium-165', 75, 165, 164.967103, False),
+    'Re-166': _iso('Re-166', 'rhenium-166', 75, 166, 165.965761, False),
+    'Re-167': _iso('Re-167', 'rhenium-167', 75, 167, 166.962595, False),
+    'Re-168': _iso('Re-168', 'rhenium-168', 75, 168, 167.961573, False),
+    'Re-169': _iso('Re-169', 'rhenium-169', 75, 169, 168.958766, False),
+    'Re-170': _iso('Re-170', 'rhenium-170', 75, 170, 169.958220, False),
+    'Re-171': _iso('Re-171', 'rhenium-171', 75, 171, 170.955716, False),
+    'Re-172': _iso('Re-172', 'rhenium-172', 75, 172, 171.955420, False),
+    'Re-173': _iso('Re-173', 'rhenium-173', 75, 173, 172.953243, False),
+    'Re-174': _iso('Re-174', 'rhenium-174', 75, 174, 173.953115, False),
+    'Re-175': _iso('Re-175', 'rhenium-175', 75, 175, 174.951381, False),
+    'Re-176': _iso('Re-176', 'rhenium-176', 75, 176, 175.951623, False),
+    'Re-177': _iso('Re-177', 'rhenium-177', 75, 177, 176.950328, False),
+    'Re-178': _iso('Re-178', 'rhenium-178', 75, 178, 177.950989, False),
+    'Re-179': _iso('Re-179', 'rhenium-179', 75, 179, 178.949989, False),
+    'Re-180': _iso('Re-180', 'rhenium-180', 75, 180, 179.950792, False),
+    'Re-181': _iso('Re-181', 'rhenium-181', 75, 181, 180.950058, False),
+    'Re-182': _iso('Re-182', 'rhenium-182', 75, 182, 181.95121, False),
+    'Re-183': _iso('Re-183', 'rhenium-183', 75, 183, 182.9508196, False),
+    'Re-184': _iso('Re-184', 'rhenium-184', 75, 184, 183.9525228, False),
+    'Re-185': _iso('Re-185', 'rhenium-185', 75, 185, 184.9529545, True,
+                   isotopic_abundance=0.3740),
+    'Re-186': _iso('Re-186', 'rhenium-186', 75, 186, 185.9549856, False,
+                   half_life=321292.8),
+    'Re-187': _iso('Re-187', 'rhenium-187', 75, 187, 186.9557501, False,
+                   isotopic_abundance=0.6260),
+    'Re-188': _iso('Re-188', 'rhenium-188', 75, 188, 187.9581115, False,
+                   half_life=61203.600000000006),
+    'Re-189': _iso('Re-189', 'rhenium-189', 75, 189, 188.9592260, False),
+    'Re-190': _iso('Re-190', 'rhenium-190', 75, 190, 189.961744, False),
+    'Re-191': _iso('Re-191', 'rhenium-191', 75, 191, 190.963122, False),
+    'Re-192': _iso('Re-192', 'rhenium-192', 75, 192, 191.966088, False),
+    'Re-193': _iso('Re-193', 'rhenium-193', 75, 193, 192.967541, False),
+    'Re-194': _iso('Re-194', 'rhenium-194', 75, 194, 193.97076, False),
+    'Re-195': _iso('Re-195', 'rhenium-195', 75, 195, 194.97254, False),
+    'Re-196': _iso('Re-196', 'rhenium-196', 75, 196, 195.97580, False),
+    'Re-197': _iso('Re-197', 'rhenium-197', 75, 197, 196.97799, False),
+    'Re-198': _iso('Re-198', 'rhenium-198', 75, 198, 197.98160, False),
+    'Os-161': _iso('Os-161', 'osmium-161', 76, 161, 160.98903, False),
+    'Os-162': _iso('Os-162', 'osmium-162', 76, 162, 161.98443, False),
+    'Os-163': _iso('Os-163', 'osmium-163', 76, 163, 162.98241, False),
+    'Os-164': _iso('Os-164', 'osmium-164', 76, 164, 163.97802, False),
+    'Os-165': _iso('Os-165', 'osmium-165', 76, 165, 164.97660, False),
+    'Os-166': _iso('Os-166', 'osmium-166', 76, 166, 165.972692, False),
+    'Os-167': _iso('Os-167', 'osmium-167', 76, 167, 166.971549, False),
+    'Os-168': _iso('Os-168', 'osmium-168', 76, 168, 167.967808, False),
+    'Os-169': _iso('Os-169', 'osmium-169', 76, 169, 168.967018, False),
+    'Os-170': _iso('Os-170', 'osmium-170', 76, 170, 169.963578, False),
+    'Os-171': _iso('Os-171', 'osmium-171', 76, 171, 170.963174, False),
+    'Os-172': _iso('Os-172', 'osmium-172', 76, 172, 171.960017, False),
+    'Os-173': _iso('Os-173', 'osmium-173', 76, 173, 172.959808, False),
+    'Os-174': _iso('Os-174', 'osmium-174', 76, 174, 173.957064, False),
+    'Os-175': _iso('Os-175', 'osmium-175', 76, 175, 174.956945, False),
+    'Os-176': _iso('Os-176', 'osmium-176', 76, 176, 175.954806, False),
+    'Os-177': _iso('Os-177', 'osmium-177', 76, 177, 176.954966, False),
+    'Os-178': _iso('Os-178', 'osmium-178', 76, 178, 177.953254, False),
+    'Os-179': _iso('Os-179', 'osmium-179', 76, 179, 178.953817, False),
+    'Os-180': _iso('Os-180', 'osmium-180', 76, 180, 179.952375, False),
+    'Os-181': _iso('Os-181', 'osmium-181', 76, 181, 180.953247, False),
+    'Os-182': _iso('Os-182', 'osmium-182', 76, 182, 181.952110, False),
+    'Os-183': _iso('Os-183', 'osmium-183', 76, 183, 182.953125, False),
+    'Os-184': _iso('Os-184', 'osmium-184', 76, 184, 183.9524885, True,
+                   isotopic_abundance=0.0002),
+    'Os-185': _iso('Os-185', 'osmium-185', 76, 185, 184.9540417, False),
+    'Os-186': _iso('Os-186', 'osmium-186', 76, 186, 185.9538350, False,
+                   isotopic_abundance=0.0159),
+    'Os-187': _iso('Os-187', 'osmium-187', 76, 187, 186.9557474, True,
+                   isotopic_abundance=0.0196),
+    'Os-188': _iso('Os-188', 'osmium-188', 76, 188, 187.9558352, True,
+                   isotopic_abundance=0.1324),
+    'Os-189': _iso('Os-189', 'osmium-189', 76, 189, 188.9581442, True,
+                   isotopic_abundance=0.1615),
+    'Os-190': _iso('Os-190', 'osmium-190', 76, 190, 189.9584437, True,
+                   isotopic_abundance=0.2626),
+    'Os-191': _iso('Os-191', 'osmium-191', 76, 191, 190.9609264, False),
+    'Os-192': _iso('Os-192', 'osmium-192', 76, 192, 191.9614770, True,
+                   isotopic_abundance=0.4078),
+    'Os-193': _iso('Os-193', 'osmium-193', 76, 193, 192.9641479, False),
+    'Os-194': _iso('Os-194', 'osmium-194', 76, 194, 193.9651772, False),
+    'Os-195': _iso('Os-195', 'osmium-195', 76, 195, 194.968318, False),
+    'Os-196': _iso('Os-196', 'osmium-196', 76, 196, 195.969641, False),
+    'Os-197': _iso('Os-197', 'osmium-197', 76, 197, 196.97283, False),
+    'Os-198': _iso('Os-198', 'osmium-198', 76, 198, 197.97441, False),
+    'Os-199': _iso('Os-199', 'osmium-199', 76, 199, 198.97801, False),
+    'Os-200': _iso('Os-200', 'osmium-200', 76, 200, 199.97984, False),
+    'Os-201': _iso('Os-201', 'osmium-201', 76, 201, 200.98364, False),
+    'Os-202': _iso('Os-202', 'osmium-202', 76, 202, 201.98595, False),
+    'Ir-164': _iso('Ir-164', 'iridium-164', 77, 164, 163.99191, False),
+    'Ir-165': _iso('Ir-165', 'iridium-165', 77, 165, 164.98750, False),
+    'Ir-166': _iso('Ir-166', 'iridium-166', 77, 166, 165.98566, False),
+    'Ir-167': _iso('Ir-167', 'iridium-167', 77, 167, 166.981666, False),
+    'Ir-168': _iso('Ir-168', 'iridium-168', 77, 168, 167.979907, False),
+    'Ir-169': _iso('Ir-169', 'iridium-169', 77, 169, 168.976298, False),
+    'Ir-170': _iso('Ir-170', 'iridium-170', 77, 170, 169.974922, False),
+    'Ir-171': _iso('Ir-171', 'iridium-171', 77, 171, 170.971640, False),
+    'Ir-172': _iso('Ir-172', 'iridium-172', 77, 172, 171.970607, False),
+    'Ir-173': _iso('Ir-173', 'iridium-173', 77, 173, 172.967506, False),
+    'Ir-174': _iso('Ir-174', 'iridium-174', 77, 174, 173.966861, False),
+    'Ir-175': _iso('Ir-175', 'iridium-175', 77, 175, 174.964150, False),
+    'Ir-176': _iso('Ir-176', 'iridium-176', 77, 176, 175.963650, False),
+    'Ir-177': _iso('Ir-177', 'iridium-177', 77, 177, 176.961301, False),
+    'Ir-178': _iso('Ir-178', 'iridium-178', 77, 178, 177.961082, False),
+    'Ir-179': _iso('Ir-179', 'iridium-179', 77, 179, 178.959120, False),
+    'Ir-180': _iso('Ir-180', 'iridium-180', 77, 180, 179.959229, False),
+    'Ir-181': _iso('Ir-181', 'iridium-181', 77, 181, 180.957625, False),
+    'Ir-182': _iso('Ir-182', 'iridium-182', 77, 182, 181.958076, False),
+    'Ir-183': _iso('Ir-183', 'iridium-183', 77, 183, 182.956840, False),
+    'Ir-184': _iso('Ir-184', 'iridium-184', 77, 184, 183.957476, False),
+    'Ir-185': _iso('Ir-185', 'iridium-185', 77, 185, 184.956698, False),
+    'Ir-186': _iso('Ir-186', 'iridium-186', 77, 186, 185.957944, False),
+    'Ir-187': _iso('Ir-187', 'iridium-187', 77, 187, 186.957542, False),
+    'Ir-188': _iso('Ir-188', 'iridium-188', 77, 188, 187.958828, False),
+    'Ir-189': _iso('Ir-189', 'iridium-189', 77, 189, 188.958715, False),
+    'Ir-190': _iso('Ir-190', 'iridium-190', 77, 190, 189.9605412, False),
+    'Ir-191': _iso('Ir-191', 'iridium-191', 77, 191, 190.9605893, True,
+                   isotopic_abundance=0.373),
+    'Ir-192': _iso('Ir-192', 'iridium-192', 77, 192, 191.9626002, False,
+                   half_life=6377184.0),
+    'Ir-193': _iso('Ir-193', 'iridium-193', 77, 193, 192.9629216, True,
+                   isotopic_abundance=0.627),
+    'Ir-194': _iso('Ir-194', 'iridium-194', 77, 194, 193.9650735, False),
+    'Ir-195': _iso('Ir-195', 'iridium-195', 77, 195, 194.9659747, False),
+    'Ir-196': _iso('Ir-196', 'iridium-196', 77, 196, 195.968397, False),
+    'Ir-197': _iso('Ir-197', 'iridium-197', 77, 197, 196.969655, False),
+    'Ir-198': _iso('Ir-198', 'iridium-198', 77, 198, 197.97228, False),
+    'Ir-199': _iso('Ir-199', 'iridium-199', 77, 199, 198.973805, False),
+    'Ir-200': _iso('Ir-200', 'iridium-200', 77, 200, 199.97680, False),
+    'Ir-201': _iso('Ir-201', 'iridium-201', 77, 201, 200.97864, False),
+    'Ir-202': _iso('Ir-202', 'iridium-202', 77, 202, 201.98199, False),
+    'Ir-203': _iso('Ir-203', 'iridium-203', 77, 203, 202.98423, False),
+    'Ir-204': _iso('Ir-204', 'iridium-204', 77, 204, 203.98960, False),
+    'Pt-166': _iso('Pt-166', 'platinum-166', 78, 166, 165.99486, False),
+    'Pt-167': _iso('Pt-167', 'platinum-167', 78, 167, 166.99269, False),
+    'Pt-168': _iso('Pt-168', 'platinum-168', 78, 168, 167.98813, False),
+    'Pt-169': _iso('Pt-169', 'platinum-169', 78, 169, 168.98657, False),
+    'Pt-170': _iso('Pt-170', 'platinum-170', 78, 170, 169.982496, False),
+    'Pt-171': _iso('Pt-171', 'platinum-171', 78, 171, 170.981245, False),
+    'Pt-172': _iso('Pt-172', 'platinum-172', 78, 172, 171.977351, False),
+    'Pt-173': _iso('Pt-173', 'platinum-173', 78, 173, 172.976443, False),
+    'Pt-174': _iso('Pt-174', 'platinum-174', 78, 174, 173.972820, False),
+    'Pt-175': _iso('Pt-175', 'platinum-175', 78, 175, 174.972410, False),
+    'Pt-176': _iso('Pt-176', 'platinum-176', 78, 176, 175.968938, False),
+    'Pt-177': _iso('Pt-177', 'platinum-177', 78, 177, 176.968470, False),
+    'Pt-178': _iso('Pt-178', 'platinum-178', 78, 178, 177.965650, False),
+    'Pt-179': _iso('Pt-179', 'platinum-179', 78, 179, 178.9653590, False),
+    'Pt-180': _iso('Pt-180', 'platinum-180', 78, 180, 179.963032, False),
+    'Pt-181': _iso('Pt-181', 'platinum-181', 78, 181, 180.963098, False),
+    'Pt-182': _iso('Pt-182', 'platinum-182', 78, 182, 181.961172, False),
+    'Pt-183': _iso('Pt-183', 'platinum-183', 78, 183, 182.961597, False),
+    'Pt-184': _iso('Pt-184', 'platinum-184', 78, 184, 183.959915, False),
+    'Pt-185': _iso('Pt-185', 'platinum-185', 78, 185, 184.960614, False),
+    'Pt-186': _iso('Pt-186', 'platinum-186', 78, 186, 185.959351, False),
+    'Pt-187': _iso('Pt-187', 'platinum-187', 78, 187, 186.960617, False),
+    'Pt-188': _iso('Pt-188', 'platinum-188', 78, 188, 187.9593889, False),
+    'Pt-189': _iso('Pt-189', 'platinum-189', 78, 189, 188.960831, False),
+    'Pt-190': _iso('Pt-190', 'platinum-190', 78, 190, 189.9599297, False,
+                   isotopic_abundance=0.00012),
+    'Pt-191': _iso('Pt-191', 'platinum-191', 78, 191, 190.9616729, False),
+    'Pt-192': _iso('Pt-192', 'platinum-192', 78, 192, 191.9610387, True,
+                   isotopic_abundance=0.00782),
+    'Pt-193': _iso('Pt-193', 'platinum-193', 78, 193, 192.9629824, False),
+    'Pt-194': _iso('Pt-194', 'platinum-194', 78, 194, 193.9626809, True,
+                   isotopic_abundance=0.3286),
+    'Pt-195': _iso('Pt-195', 'platinum-195', 78, 195, 194.9647917, True,
+                   isotopic_abundance=0.3378),
+    'Pt-196': _iso('Pt-196', 'platinum-196', 78, 196, 195.96495209, True,
+                   isotopic_abundance=0.2521),
+    'Pt-197': _iso('Pt-197', 'platinum-197', 78, 197, 196.96734069, False),
+    'Pt-198': _iso('Pt-198', 'platinum-198', 78, 198, 197.9678949, True,
+                   isotopic_abundance=0.07356),
+    'Pt-199': _iso('Pt-199', 'platinum-199', 78, 199, 198.9705952, False),
+    'Pt-200': _iso('Pt-200', 'platinum-200', 78, 200, 199.971443, False),
+    'Pt-201': _iso('Pt-201', 'platinum-201', 78, 201, 200.974513, False),
+    'Pt-202': _iso('Pt-202', 'platinum-202', 78, 202, 201.975639, False),
+    'Pt-203': _iso('Pt-203', 'platinum-203', 78, 203, 202.97893, False),
+    'Pt-204': _iso('Pt-204', 'platinum-204', 78, 204, 203.98076, False),
+    'Pt-205': _iso('Pt-205', 'platinum-205', 78, 205, 204.98608, False),
+    'Pt-206': _iso('Pt-206', 'platinum-206', 78, 206, 205.98966, False),
+    'Au-169': _iso('Au-169', 'gold-169', 79, 169, 168.99808, False),
+    'Au-170': _iso('Au-170', 'gold-170', 79, 170, 169.99597, False),
+    'Au-171': _iso('Au-171', 'gold-171', 79, 171, 170.991876, False),
+    'Au-172': _iso('Au-172', 'gold-172', 79, 172, 171.989942, False),
+    'Au-173': _iso('Au-173', 'gold-173', 79, 173, 172.986241, False),
+    'Au-174': _iso('Au-174', 'gold-174', 79, 174, 173.984717, False),
+    'Au-175': _iso('Au-175', 'gold-175', 79, 175, 174.981304, False),
+    'Au-176': _iso('Au-176', 'gold-176', 79, 176, 175.980250, False),
+    'Au-177': _iso('Au-177', 'gold-177', 79, 177, 176.976870, False),
+    'Au-178': _iso('Au-178', 'gold-178', 79, 178, 177.976032, False),
+    'Au-179': _iso('Au-179', 'gold-179', 79, 179, 178.973174, False),
+    'Au-180': _iso('Au-180', 'gold-180', 79, 180, 179.972523, False),
+    'Au-181': _iso('Au-181', 'gold-181', 79, 181, 180.970079, False),
+    'Au-182': _iso('Au-182', 'gold-182', 79, 182, 181.969618, False),
+    'Au-183': _iso('Au-183', 'gold-183', 79, 183, 182.967591, False),
+    'Au-184': _iso('Au-184', 'gold-184', 79, 184, 183.967452, False),
+    'Au-185': _iso('Au-185', 'gold-185', 79, 185, 184.965790, False),
+    'Au-186': _iso('Au-186', 'gold-186', 79, 186, 185.965953, False),
+    'Au-187': _iso('Au-187', 'gold-187', 79, 187, 186.964543, False),
+    'Au-188': _iso('Au-188', 'gold-188', 79, 188, 187.965349, False),
+    'Au-189': _iso('Au-189', 'gold-189', 79, 189, 188.963948, False),
+    'Au-190': _iso('Au-190', 'gold-190', 79, 190, 189.964698, False),
+    'Au-191': _iso('Au-191', 'gold-191', 79, 191, 190.963702, False),
+    'Au-192': _iso('Au-192', 'gold-192', 79, 192, 191.964814, False),
+    'Au-193': _iso('Au-193', 'gold-193', 79, 193, 192.9641373, False),
+    'Au-194': _iso('Au-194', 'gold-194', 79, 194, 193.9654178, False),
+    'Au-195': _iso('Au-195', 'gold-195', 79, 195, 194.9650352, False,
+                   half_life=16078867.200000001),
+    'Au-196': _iso('Au-196', 'gold-196', 79, 196, 195.9665699, False),
+    'Au-197': _iso('Au-197', 'gold-197', 79, 197, 196.96656879, True,
+                   isotopic_abundance=1),
+    'Au-198': _iso('Au-198', 'gold-198', 79, 198, 197.96824242, False,
+                   half_life=232862.688),
+    'Au-199': _iso('Au-199', 'gold-199', 79, 199, 198.96876528, False),
+    'Au-200': _iso('Au-200', 'gold-200', 79, 200, 199.970756, False),
+    'Au-201': _iso('Au-201', 'gold-201', 79, 201, 200.9716575, False),
+    'Au-202': _iso('Au-202', 'gold-202', 79, 202, 201.973856, False),
+    'Au-203': _iso('Au-203', 'gold-203', 79, 203, 202.9751544, False),
+    'Au-204': _iso('Au-204', 'gold-204', 79, 204, 203.97783, False),
+    'Au-205': _iso('Au-205', 'gold-205', 79, 205, 204.97985, False),
+    'Au-206': _iso('Au-206', 'gold-206', 79, 206, 205.98474, False),
+    'Au-207': _iso('Au-207', 'gold-207', 79, 207, 206.98840, False),
+    'Au-208': _iso('Au-208', 'gold-208', 79, 208, 207.99345, False),
+    'Au-209': _iso('Au-209', 'gold-209', 79, 209, 208.99735, False),
+    'Au-210': _iso('Au-210', 'gold-210', 79, 210, 210.00250, False),
+    'Hg-171': _iso('Hg-171', 'mercury-171', 80, 171, 171.00353, False),
+    'Hg-172': _iso('Hg-172', 'mercury-172', 80, 172, 171.99881, False),
+    'Hg-173': _iso('Hg-173', 'mercury-173', 80, 173, 172.99709, False),
+    'Hg-174': _iso('Hg-174', 'mercury-174', 80, 174, 173.992865, False),
+    'Hg-175': _iso('Hg-175', 'mercury-175', 80, 175, 174.991441, False),
+    'Hg-176': _iso('Hg-176', 'mercury-176', 80, 176, 175.987361, False),
+    'Hg-177': _iso('Hg-177', 'mercury-177', 80, 177, 176.986277, False),
+    'Hg-178': _iso('Hg-178', 'mercury-178', 80, 178, 177.982484, False),
+    'Hg-179': _iso('Hg-179', 'mercury-179', 80, 179, 178.981831, False),
+    'Hg-180': _iso('Hg-180', 'mercury-180', 80, 180, 179.978260, False),
+    'Hg-181': _iso('Hg-181', 'mercury-181', 80, 181, 180.977819, False),
+    'Hg-182': _iso('Hg-182', 'mercury-182', 80, 182, 181.974689, False),
+    'Hg-183': _iso('Hg-183', 'mercury-183', 80, 183, 182.9744448, False),
+    'Hg-184': _iso('Hg-184', 'mercury-184', 80, 184, 183.971714, False),
+    'Hg-185': _iso('Hg-185', 'mercury-185', 80, 185, 184.971899, False),
+    'Hg-186': _iso('Hg-186', 'mercury-186', 80, 186, 185.969362, False),
+    'Hg-187': _iso('Hg-187', 'mercury-187', 80, 187, 186.969814, False),
+    'Hg-188': _iso('Hg-188', 'mercury-188', 80, 188, 187.967567, False),
+    'Hg-189': _iso('Hg-189', 'mercury-189', 80, 189, 188.968195, False),
+    'Hg-190': _iso('Hg-190', 'mercury-190', 80, 190, 189.966323, False),
+    'Hg-191': _iso('Hg-191', 'mercury-191', 80, 191, 190.967157, False),
+    'Hg-192': _iso('Hg-192', 'mercury-192', 80, 192, 191.965635, False),
+    'Hg-193': _iso('Hg-193', 'mercury-193', 80, 193, 192.966653, False),
+    'Hg-194': _iso('Hg-194', 'mercury-194', 80, 194, 193.9654491, False),
+    'Hg-195': _iso('Hg-195', 'mercury-195', 80, 195, 194.966721, False),
+    'Hg-196': _iso('Hg-196', 'mercury-196', 80, 196, 195.9658326, True,
+                   isotopic_abundance=0.0015),
+    'Hg-197': _iso('Hg-197', 'mercury-197', 80, 197, 196.9672128, False),
+    'Hg-198': _iso('Hg-198', 'mercury-198', 80, 198, 197.96676860, True,
+                   isotopic_abundance=0.0997),
+    'Hg-199': _iso('Hg-199', 'mercury-199', 80, 199, 198.96828064, True,
+                   isotopic_abundance=0.1687),
+    'Hg-200': _iso('Hg-200', 'mercury-200', 80, 200, 199.96832659, True,
+                   isotopic_abundance=0.2310),
+    'Hg-201': _iso('Hg-201', 'mercury-201', 80, 201, 200.97030284, True,
+                   isotopic_abundance=0.1318),
+    'Hg-202': _iso('Hg-202', 'mercury-202', 80, 202, 201.97064340, True,
+                   isotopic_abundance=0.2986),
+    'Hg-203': _iso('Hg-203', 'mercury-203', 80, 203, 202.9728728, False,
+                   half_life=4027881.6),
+    'Hg-204': _iso('Hg-204', 'mercury-204', 80, 204, 203.97349398, True,
+                   isotopic_abundance=0.0687),
+    'Hg-205': _iso('Hg-205', 'mercury-205', 80, 205, 204.9760734, False),
+    'Hg-206': _iso('Hg-206', 'mercury-206', 80, 206, 205.977514, False),
+    'Hg-207': _iso('Hg-207', 'mercury-207', 80, 207, 206.982300, False),
+    'Hg-208': _iso('Hg-208', 'mercury-208', 80, 208, 207.985759, False),
+    'Hg-209': _iso('Hg-209', 'mercury-209', 80, 209, 208.99072, False),
+    'Hg-210': _iso('Hg-210', 'mercury-210', 80, 210, 209.99424, False),
+    'Hg-211': _iso('Hg-211', 'mercury-211', 80, 211, 210.99933, False),
+    'Hg-212': _iso('Hg-212', 'mercury-212', 80, 212, 212.00296, False),
+    'Hg-213': _iso('Hg-213', 'mercury-213', 80, 213, 213.00823, False),
+    'Hg-214': _iso('Hg-214', 'mercury-214', 80, 214, 214.01200, False),
+    'Hg-215': _iso('Hg-215', 'mercury-215', 80, 215, 215.01740, False),
+    'Hg-216': _iso('Hg-216', 'mercury-216', 80, 216, 216.02132, False),
+    'Tl-176': _iso('Tl-176', 'thallium-176', 81, 176, 176.000624, False),
+    'Tl-177': _iso('Tl-177', 'thallium-177', 81, 177, 176.996431, False),
+    'Tl-178': _iso('Tl-178', 'thallium-178', 81, 178, 177.99485, False),
+    'Tl-179': _iso('Tl-179', 'thallium-179', 81, 179, 178.991111, False),
+    'Tl-180': _iso('Tl-180', 'thallium-180', 81, 180, 179.990057, False),
+    'Tl-181': _iso('Tl-181', 'thallium-181', 81, 181, 180.9862600, False),
+    'Tl-182': _iso('Tl-182', 'thallium-182', 81, 182, 181.985713, False),
+    'Tl-183': _iso('Tl-183', 'thallium-183', 81, 183, 182.982193, False),
+    'Tl-184': _iso('Tl-184', 'thallium-184', 81, 184, 183.981886, False),
+    'Tl-185': _iso('Tl-185', 'thallium-185', 81, 185, 184.978789, False),
+    'Tl-186': _iso('Tl-186', 'thallium-186', 81, 186, 185.978651, False),
+    'Tl-187': _iso('Tl-187', 'thallium-187', 81, 187, 186.9759063, False),
+    'Tl-188': _iso('Tl-188', 'thallium-188', 81, 188, 187.976021, False),
+    'Tl-189': _iso('Tl-189', 'thallium-189', 81, 189, 188.973588, False),
+    'Tl-190': _iso('Tl-190', 'thallium-190', 81, 190, 189.973828, False),
+    'Tl-191': _iso('Tl-191', 'thallium-191', 81, 191, 190.9717842, False),
+    'Tl-192': _iso('Tl-192', 'thallium-192', 81, 192, 191.972225, False),
+    'Tl-193': _iso('Tl-193', 'thallium-193', 81, 193, 192.9705020, False),
+    'Tl-194': _iso('Tl-194', 'thallium-194', 81, 194, 193.971081, False),
+    'Tl-195': _iso('Tl-195', 'thallium-195', 81, 195, 194.969774, False),
+    'Tl-196': _iso('Tl-196', 'thallium-196', 81, 196, 195.970481, False),
+    'Tl-197': _iso('Tl-197', 'thallium-197', 81, 197, 196.969576, False),
+    'Tl-198': _iso('Tl-198', 'thallium-198', 81, 198, 197.970483, False),
+    'Tl-199': _iso('Tl-199', 'thallium-199', 81, 199, 198.969877, False),
+    'Tl-200': _iso('Tl-200', 'thallium-200', 81, 200, 199.9709633, False),
+    'Tl-201': _iso('Tl-201', 'thallium-201', 81, 201, 200.970822, False,
+                   half_life=263139.83999999997),
+    'Tl-202': _iso('Tl-202', 'thallium-202', 81, 202, 201.972102, False,
+                   half_life=1077062.4),
+    'Tl-203': _iso('Tl-203', 'thallium-203', 81, 203, 202.9723446, True,
+                   isotopic_abundance=0.2952),
+    'Tl-204': _iso('Tl-204', 'thallium-204', 81, 204, 203.9738639, False),
+    'Tl-205': _iso('Tl-205', 'thallium-205', 81, 205, 204.9744278, True,
+                   isotopic_abundance=0.7048),
+    'Tl-206': _iso('Tl-206', 'thallium-206', 81, 206, 205.9761106, False),
+    'Tl-207': _iso('Tl-207', 'thallium-207', 81, 207, 206.9774197, False),
+    'Tl-208': _iso('Tl-208', 'thallium-208', 81, 208, 207.9820190, False),
+    'Tl-209': _iso('Tl-209', 'thallium-209', 81, 209, 208.9853594, False),
+    'Tl-210': _iso('Tl-210', 'thallium-210', 81, 210, 209.990074, False),
+    'Tl-211': _iso('Tl-211', 'thallium-211', 81, 211, 210.993475, False),
+    'Tl-212': _iso('Tl-212', 'thallium-212', 81, 212, 211.99834, False),
+    'Tl-213': _iso('Tl-213', 'thallium-213', 81, 213, 213.001915, False),
+    'Tl-214': _iso('Tl-214', 'thallium-214', 81, 214, 214.00694, False),
+    'Tl-215': _iso('Tl-215', 'thallium-215', 81, 215, 215.01064, False),
+    'Tl-216': _iso('Tl-216', 'thallium-216', 81, 216, 216.01580, False),
+    'Tl-217': _iso('Tl-217', 'thallium-217', 81, 217, 217.01966, False),
+    'Tl-218': _iso('Tl-218', 'thallium-218', 81, 218, 218.02479, False),
+    'Pb-178': _iso('Pb-178', 'lead-178', 82, 178, 178.003831, False),
+    'Pb-179': _iso('Pb-179', 'lead-179', 82, 179, 179.002201, False),
+    'Pb-180': _iso('Pb-180', 'lead-180', 82, 180, 179.997928, False),
+    'Pb-181': _iso('Pb-181', 'lead-181', 82, 181, 180.996653, False),
+    'Pb-182': _iso('Pb-182', 'lead-182', 82, 182, 181.992672, False),
+    'Pb-183': _iso('Pb-183', 'lead-183', 82, 183, 182.991872, False),
+    'Pb-184': _iso('Pb-184', 'lead-184', 82, 184, 183.988136, False),
+    'Pb-185': _iso('Pb-185', 'lead-185', 82, 185, 184.987610, False),
+    'Pb-186': _iso('Pb-186', 'lead-186', 82, 186, 185.984238, False),
+    'Pb-187': _iso('Pb-187', 'lead-187', 82, 187, 186.9839109, False),
+    'Pb-188': _iso('Pb-188', 'lead-188', 82, 188, 187.980875, False),
+    'Pb-189': _iso('Pb-189', 'lead-189', 82, 189, 188.980807, False),
+    'Pb-190': _iso('Pb-190', 'lead-190', 82, 190, 189.978082, False),
+    'Pb-191': _iso('Pb-191', 'lead-191', 82, 191, 190.978276, False),
+    'Pb-192': _iso('Pb-192', 'lead-192', 82, 192, 191.975775, False),
+    'Pb-193': _iso('Pb-193', 'lead-193', 82, 193, 192.976173, False),
+    'Pb-194': _iso('Pb-194', 'lead-194', 82, 194, 193.974012, False),
+    'Pb-195': _iso('Pb-195', 'lead-195', 82, 195, 194.974543, False),
+    'Pb-196': _iso('Pb-196', 'lead-196', 82, 196, 195.972774, False),
+    'Pb-197': _iso('Pb-197', 'lead-197', 82, 197, 196.9734312, False),
+    'Pb-198': _iso('Pb-198', 'lead-198', 82, 198, 197.972034, False),
+    'Pb-199': _iso('Pb-199', 'lead-199', 82, 199, 198.972913, False),
+    'Pb-200': _iso('Pb-200', 'lead-200', 82, 200, 199.971819, False),
+    'Pb-201': _iso('Pb-201', 'lead-201', 82, 201, 200.972883, False),
+    'Pb-202': _iso('Pb-202', 'lead-202', 82, 202, 201.9721520, False),
+    'Pb-203': _iso('Pb-203', 'lead-203', 82, 203, 202.9733911, False,
+                   half_life=186922.80000000002),
+    'Pb-204': _iso('Pb-204', 'lead-204', 82, 204, 203.9730440, True,
+                   isotopic_abundance=0.014),
+    'Pb-205': _iso('Pb-205', 'lead-205', 82, 205, 204.9744822, False),
+    'Pb-206': _iso('Pb-206', 'lead-206', 82, 206, 205.9744657, True,
+                   isotopic_abundance=0.241),
+    'Pb-207': _iso('Pb-207', 'lead-207', 82, 207, 206.9758973, True,
+                   isotopic_abundance=0.221),
+    'Pb-208': _iso('Pb-208', 'lead-208', 82, 208, 207.9766525, True,
+                   isotopic_abundance=0.524),
+    'Pb-209': _iso('Pb-209', 'lead-209', 82, 209, 208.9810905, False),
+    'Pb-210': _iso('Pb-210', 'lead-210', 82, 210, 209.9841889, False),
+    'Pb-211': _iso('Pb-211', 'lead-211', 82, 211, 210.9887371, False),
+    'Pb-212': _iso('Pb-212', 'lead-212', 82, 212, 211.9918977, False),
+    'Pb-213': _iso('Pb-213', 'lead-213', 82, 213, 212.9965629, False),
+    'Pb-214': _iso('Pb-214', 'lead-214', 82, 214, 213.9998059, False),
+    'Pb-215': _iso('Pb-215', 'lead-215', 82, 215, 215.00474, False),
+    'Pb-216': _iso('Pb-216', 'lead-216', 82, 216, 216.00803, False),
+    'Pb-217': _iso('Pb-217', 'lead-217', 82, 217, 217.01314, False),
+    'Pb-218': _iso('Pb-218', 'lead-218', 82, 218, 218.01659, False),
+    'Pb-219': _iso('Pb-219', 'lead-219', 82, 219, 219.02177, False),
+    'Pb-220': _iso('Pb-220', 'lead-220', 82, 220, 220.02541, False),
+    'Bi-184': _iso('Bi-184', 'bismuth-184', 83, 184, 184.001275, False),
+    'Bi-185': _iso('Bi-185', 'bismuth-185', 83, 185, 184.997600, False),
+    'Bi-186': _iso('Bi-186', 'bismuth-186', 83, 186, 185.996644, False),
+    'Bi-187': _iso('Bi-187', 'bismuth-187', 83, 187, 186.993147, False),
+    'Bi-188': _iso('Bi-188', 'bismuth-188', 83, 188, 187.992287, False),
+    'Bi-189': _iso('Bi-189', 'bismuth-189', 83, 189, 188.989195, False),
+    'Bi-190': _iso('Bi-190', 'bismuth-190', 83, 190, 189.988622, False),
+    'Bi-191': _iso('Bi-191', 'bismuth-191', 83, 191, 190.9857866, False),
+    'Bi-192': _iso('Bi-192', 'bismuth-192', 83, 192, 191.985469, False),
+    'Bi-193': _iso('Bi-193', 'bismuth-193', 83, 193, 192.982960, False),
+    'Bi-194': _iso('Bi-194', 'bismuth-194', 83, 194, 193.982785, False),
+    'Bi-195': _iso('Bi-195', 'bismuth-195', 83, 195, 194.9806488, False),
+    'Bi-196': _iso('Bi-196', 'bismuth-196', 83, 196, 195.980667, False),
+    'Bi-197': _iso('Bi-197', 'bismuth-197', 83, 197, 196.9788651, False),
+    'Bi-198': _iso('Bi-198', 'bismuth-198', 83, 198, 197.979206, False),
+    'Bi-199': _iso('Bi-199', 'bismuth-199', 83, 199, 198.977673, False),
+    'Bi-200': _iso('Bi-200', 'bismuth-200', 83, 200, 199.978131, False),
+    'Bi-201': _iso('Bi-201', 'bismuth-201', 83, 201, 200.977010, False),
+    'Bi-202': _iso('Bi-202', 'bismuth-202', 83, 202, 201.977734, False),
+    'Bi-203': _iso('Bi-203', 'bismuth-203', 83, 203, 202.976893, False),
+    'Bi-204': _iso('Bi-204', 'bismuth-204', 83, 204, 203.9778361, False),
+    'Bi-205': _iso('Bi-205', 'bismuth-205', 83, 205, 204.9773867, False),
+    'Bi-206': _iso('Bi-206', 'bismuth-206', 83, 206, 205.9784993, False),
+    'Bi-207': _iso('Bi-207', 'bismuth-207', 83, 207, 206.9784710, False,
+                   half_life=995587200.0),
+    'Bi-208': _iso('Bi-208', 'bismuth-208', 83, 208, 207.9797425, False),
+    'Bi-209': _iso('Bi-209', 'bismuth-209', 83, 209, 208.9803991, False,
+                   isotopic_abundance=1),
+    'Bi-210': _iso('Bi-210', 'bismuth-210', 83, 210, 209.9841207, False),
+    'Bi-211': _iso('Bi-211', 'bismuth-211', 83, 211, 210.9872697, False),
+    'Bi-212': _iso('Bi-212', 'bismuth-212', 83, 212, 211.9912860, False),
+    'Bi-213': _iso('Bi-213', 'bismuth-213', 83, 213, 212.9943851, False),
+    'Bi-214': _iso('Bi-214', 'bismuth-214', 83, 214, 213.998712, False),
+    'Bi-215': _iso('Bi-215', 'bismuth-215', 83, 215, 215.001770, False),
+    'Bi-216': _iso('Bi-216', 'bismuth-216', 83, 216, 216.006306, False),
+    'Bi-217': _iso('Bi-217', 'bismuth-217', 83, 217, 217.009372, False),
+    'Bi-218': _iso('Bi-218', 'bismuth-218', 83, 218, 218.014188, False),
+    'Bi-219': _iso('Bi-219', 'bismuth-219', 83, 219, 219.01748, False),
+    'Bi-220': _iso('Bi-220', 'bismuth-220', 83, 220, 220.02235, False),
+    'Bi-221': _iso('Bi-221', 'bismuth-221', 83, 221, 221.02587, False),
+    'Bi-222': _iso('Bi-222', 'bismuth-222', 83, 222, 222.03078, False),
+    'Bi-223': _iso('Bi-223', 'bismuth-223', 83, 223, 223.03450, False),
+    'Bi-224': _iso('Bi-224', 'bismuth-224', 83, 224, 224.03947, False),
+    'Po-186': _iso('Po-186', 'polonium-186', 84, 186, 186.004393, False),
+    'Po-187': _iso('Po-187', 'polonium-187', 84, 187, 187.003041, False),
+    'Po-188': _iso('Po-188', 'polonium-188', 84, 188, 187.999416, False),
+    'Po-189': _iso('Po-189', 'polonium-189', 84, 189, 188.998473, False),
+    'Po-190': _iso('Po-190', 'polonium-190', 84, 190, 189.995101, False),
+    'Po-191': _iso('Po-191', 'polonium-191', 84, 191, 190.9945585, False),
+    'Po-192': _iso('Po-192', 'polonium-192', 84, 192, 191.991336, False),
+    'Po-193': _iso('Po-193', 'polonium-193', 84, 193, 192.991026, False),
+    'Po-194': _iso('Po-194', 'polonium-194', 84, 194, 193.988186, False),
+    'Po-195': _iso('Po-195', 'polonium-195', 84, 195, 194.988126, False),
+    'Po-196': _iso('Po-196', 'polonium-196', 84, 196, 195.985526, False),
+    'Po-197': _iso('Po-197', 'polonium-197', 84, 197, 196.985660, False),
+    'Po-198': _iso('Po-198', 'polonium-198', 84, 198, 197.983389, False),
+    'Po-199': _iso('Po-199', 'polonium-199', 84, 199, 198.983667, False),
+    'Po-200': _iso('Po-200', 'polonium-200', 84, 200, 199.981799, False),
+    'Po-201': _iso('Po-201', 'polonium-201', 84, 201, 200.9822598, False),
+    'Po-202': _iso('Po-202', 'polonium-202', 84, 202, 201.980758, False),
+    'Po-203': _iso('Po-203', 'polonium-203', 84, 203, 202.9814161, False),
+    'Po-204': _iso('Po-204', 'polonium-204', 84, 204, 203.980310, False),
+    'Po-205': _iso('Po-205', 'polonium-205', 84, 205, 204.981203, False),
+    'Po-206': _iso('Po-206', 'polonium-206', 84, 206, 205.9804740, False),
+    'Po-207': _iso('Po-207', 'polonium-207', 84, 207, 206.9815938, False),
+    'Po-208': _iso('Po-208', 'polonium-208', 84, 208, 207.9812461, False),
+    'Po-209': _iso('Po-209', 'polonium-209', 84, 209, 208.9824308, False),
+    'Po-210': _iso('Po-210', 'polonium-210', 84, 210, 209.9828741, False),
+    'Po-211': _iso('Po-211', 'polonium-211', 84, 211, 210.9866536, False),
+    'Po-212': _iso('Po-212', 'polonium-212', 84, 212, 211.9888684, False),
+    'Po-213': _iso('Po-213', 'polonium-213', 84, 213, 212.9928576, False),
+    'Po-214': _iso('Po-214', 'polonium-214', 84, 214, 213.9952017, False),
+    'Po-215': _iso('Po-215', 'polonium-215', 84, 215, 214.9994201, False),
+    'Po-216': _iso('Po-216', 'polonium-216', 84, 216, 216.0019152, False),
+    'Po-217': _iso('Po-217', 'polonium-217', 84, 217, 217.0063182, False),
+    'Po-218': _iso('Po-218', 'polonium-218', 84, 218, 218.0089735, False),
+    'Po-219': _iso('Po-219', 'polonium-219', 84, 219, 219.013614, False),
+    'Po-220': _iso('Po-220', 'polonium-220', 84, 220, 220.016386, False),
+    'Po-221': _iso('Po-221', 'polonium-221', 84, 221, 221.021228, False),
+    'Po-222': _iso('Po-222', 'polonium-222', 84, 222, 222.024140, False),
+    'Po-223': _iso('Po-223', 'polonium-223', 84, 223, 223.02907, False),
+    'Po-224': _iso('Po-224', 'polonium-224', 84, 224, 224.03211, False),
+    'Po-225': _iso('Po-225', 'polonium-225', 84, 225, 225.03707, False),
+    'Po-226': _iso('Po-226', 'polonium-226', 84, 226, 226.04031, False),
+    'Po-227': _iso('Po-227', 'polonium-227', 84, 227, 227.04539, False),
+    'At-191': _iso('At-191', 'astatine-191', 85, 191, 191.004148, False),
+    'At-192': _iso('At-192', 'astatine-192', 85, 192, 192.003152, False),
+    'At-193': _iso('At-193', 'astatine-193', 85, 193, 192.999927, False),
+    'At-194': _iso('At-194', 'astatine-194', 85, 194, 193.999236, False),
+    'At-195': _iso('At-195', 'astatine-195', 85, 195, 194.9962685, False),
+    'At-196': _iso('At-196', 'astatine-196', 85, 196, 195.995800, False),
+    'At-197': _iso('At-197', 'astatine-197', 85, 197, 196.993189, False),
+    'At-198': _iso('At-198', 'astatine-198', 85, 198, 197.992784, False),
+    'At-199': _iso('At-199', 'astatine-199', 85, 199, 198.9905277, False),
+    'At-200': _iso('At-200', 'astatine-200', 85, 200, 199.990351, False),
+    'At-201': _iso('At-201', 'astatine-201', 85, 201, 200.9884171, False),
+    'At-202': _iso('At-202', 'astatine-202', 85, 202, 201.988630, False),
+    'At-203': _iso('At-203', 'astatine-203', 85, 203, 202.986943, False),
+    'At-204': _iso('At-204', 'astatine-204', 85, 204, 203.987251, False),
+    'At-205': _iso('At-205', 'astatine-205', 85, 205, 204.986076, False),
+    'At-206': _iso('At-206', 'astatine-206', 85, 206, 205.986657, False),
+    'At-207': _iso('At-207', 'astatine-207', 85, 207, 206.985800, False),
+    'At-208': _iso('At-208', 'astatine-208', 85, 208, 207.9866133, False),
+    'At-209': _iso('At-209', 'astatine-209', 85, 209, 208.9861702, False),
+    'At-210': _iso('At-210', 'astatine-210', 85, 210, 209.9871479, False),
+    'At-211': _iso('At-211', 'astatine-211', 85, 211, 210.9874966, False),
+    'At-212': _iso('At-212', 'astatine-212', 85, 212, 211.9907377, False),
+    'At-213': _iso('At-213', 'astatine-213', 85, 213, 212.9929370, False),
+    'At-214': _iso('At-214', 'astatine-214', 85, 214, 213.9963721, False),
+    'At-215': _iso('At-215', 'astatine-215', 85, 215, 214.9986528, False),
+    'At-216': _iso('At-216', 'astatine-216', 85, 216, 216.0024236, False),
+    'At-217': _iso('At-217', 'astatine-217', 85, 217, 217.0047192, False),
+    'At-218': _iso('At-218', 'astatine-218', 85, 218, 218.008695, False),
+    'At-219': _iso('At-219', 'astatine-219', 85, 219, 219.0111618, False),
+    'At-220': _iso('At-220', 'astatine-220', 85, 220, 220.015433, False),
+    'At-221': _iso('At-221', 'astatine-221', 85, 221, 221.018017, False),
+    'At-222': _iso('At-222', 'astatine-222', 85, 222, 222.022494, False),
+    'At-223': _iso('At-223', 'astatine-223', 85, 223, 223.025151, False),
+    'At-224': _iso('At-224', 'astatine-224', 85, 224, 224.029749, False),
+    'At-225': _iso('At-225', 'astatine-225', 85, 225, 225.03263, False),
+    'At-226': _iso('At-226', 'astatine-226', 85, 226, 226.03716, False),
+    'At-227': _iso('At-227', 'astatine-227', 85, 227, 227.04024, False),
+    'At-228': _iso('At-228', 'astatine-228', 85, 228, 228.04475, False),
+    'At-229': _iso('At-229', 'astatine-229', 85, 229, 229.04812, False),
+    'Rn-193': _iso('Rn-193', 'radon-193', 86, 193, 193.009708, False),
+    'Rn-194': _iso('Rn-194', 'radon-194', 86, 194, 194.006144, False),
+    'Rn-195': _iso('Rn-195', 'radon-195', 86, 195, 195.005422, False),
+    'Rn-196': _iso('Rn-196', 'radon-196', 86, 196, 196.002116, False),
+    'Rn-197': _iso('Rn-197', 'radon-197', 86, 197, 197.001585, False),
+    'Rn-198': _iso('Rn-198', 'radon-198', 86, 198, 197.998679, False),
+    'Rn-199': _iso('Rn-199', 'radon-199', 86, 199, 198.998390, False),
+    'Rn-200': _iso('Rn-200', 'radon-200', 86, 200, 199.995690, False),
+    'Rn-201': _iso('Rn-201', 'radon-201', 86, 201, 200.995628, False),
+    'Rn-202': _iso('Rn-202', 'radon-202', 86, 202, 201.993264, False),
+    'Rn-203': _iso('Rn-203', 'radon-203', 86, 203, 202.993388, False),
+    'Rn-204': _iso('Rn-204', 'radon-204', 86, 204, 203.991430, False),
+    'Rn-205': _iso('Rn-205', 'radon-205', 86, 205, 204.991719, False),
+    'Rn-206': _iso('Rn-206', 'radon-206', 86, 206, 205.990214, False),
+    'Rn-207': _iso('Rn-207', 'radon-207', 86, 207, 206.9907303, False),
+    'Rn-208': _iso('Rn-208', 'radon-208', 86, 208, 207.989635, False),
+    'Rn-209': _iso('Rn-209', 'radon-209', 86, 209, 208.990415, False),
+    'Rn-210': _iso('Rn-210', 'radon-210', 86, 210, 209.9896891, False),
+    'Rn-211': _iso('Rn-211', 'radon-211', 86, 211, 210.9906011, False),
+    'Rn-212': _iso('Rn-212', 'radon-212', 86, 212, 211.9907039, False),
+    'Rn-213': _iso('Rn-213', 'radon-213', 86, 213, 212.9938831, False),
+    'Rn-214': _iso('Rn-214', 'radon-214', 86, 214, 213.9953630, False),
+    'Rn-215': _iso('Rn-215', 'radon-215', 86, 215, 214.9987459, False),
+    'Rn-216': _iso('Rn-216', 'radon-216', 86, 216, 216.0002719, False),
+    'Rn-217': _iso('Rn-217', 'radon-217', 86, 217, 217.0039280, False),
+    'Rn-218': _iso('Rn-218', 'radon-218', 86, 218, 218.0056016, False),
+    'Rn-219': _iso('Rn-219', 'radon-219', 86, 219, 219.0094804, False),
+    'Rn-220': _iso('Rn-220', 'radon-220', 86, 220, 220.0113941, False),
+    'Rn-221': _iso('Rn-221', 'radon-221', 86, 221, 221.0155371, False),
+    'Rn-222': _iso('Rn-222', 'radon-222', 86, 222, 222.0175782, False),
+    'Rn-223': _iso('Rn-223', 'radon-223', 86, 223, 223.0218893, False),
+    'Rn-224': _iso('Rn-224', 'radon-224', 86, 224, 224.024096, False),
+    'Rn-225': _iso('Rn-225', 'radon-225', 86, 225, 225.028486, False),
+    'Rn-226': _iso('Rn-226', 'radon-226', 86, 226, 226.030861, False),
+    'Rn-227': _iso('Rn-227', 'radon-227', 86, 227, 227.035304, False),
+    'Rn-228': _iso('Rn-228', 'radon-228', 86, 228, 228.037835, False),
+    'Rn-229': _iso('Rn-229', 'radon-229', 86, 229, 229.042257, False),
+    'Rn-230': _iso('Rn-230', 'radon-230', 86, 230, 230.04514, False),
+    'Rn-231': _iso('Rn-231', 'radon-231', 86, 231, 231.04987, False),
+    'Fr-199': _iso('Fr-199', 'francium-199', 87, 199, 199.007259, False),
+    'Fr-200': _iso('Fr-200', 'francium-200', 87, 200, 200.006586, False),
+    'Fr-201': _iso('Fr-201', 'francium-201', 87, 201, 201.003867, False),
+    'Fr-202': _iso('Fr-202', 'francium-202', 87, 202, 202.003320, False),
+    'Fr-203': _iso('Fr-203', 'francium-203', 87, 203, 203.0009407, False),
+    'Fr-204': _iso('Fr-204', 'francium-204', 87, 204, 204.000652, False),
+    'Fr-205': _iso('Fr-205', 'francium-205', 87, 205, 204.9985939, False),
+    'Fr-206': _iso('Fr-206', 'francium-206', 87, 206, 205.998666, False),
+    'Fr-207': _iso('Fr-207', 'francium-207', 87, 207, 206.996946, False),
+    'Fr-208': _iso('Fr-208', 'francium-208', 87, 208, 207.997138, False),
+    'Fr-209': _iso('Fr-209', 'francium-209', 87, 209, 208.995955, False),
+    'Fr-210': _iso('Fr-210', 'francium-210', 87, 210, 209.996422, False),
+    'Fr-211': _iso('Fr-211', 'francium-211', 87, 211, 210.995556, False),
+    'Fr-212': _iso('Fr-212', 'francium-212', 87, 212, 211.9962257, False),
+    'Fr-213': _iso('Fr-213', 'francium-213', 87, 213, 212.9961860, False),
+    'Fr-214': _iso('Fr-214', 'francium-214', 87, 214, 213.9989713, False),
+    'Fr-215': _iso('Fr-215', 'francium-215', 87, 215, 215.0003418, False),
+    'Fr-216': _iso('Fr-216', 'francium-216', 87, 216, 216.0031899, False),
+    'Fr-217': _iso('Fr-217', 'francium-217', 87, 217, 217.0046323, False),
+    'Fr-218': _iso('Fr-218', 'francium-218', 87, 218, 218.0075787, False),
+    'Fr-219': _iso('Fr-219', 'francium-219', 87, 219, 219.0092524, False),
+    'Fr-220': _iso('Fr-220', 'francium-220', 87, 220, 220.0123277, False),
+    'Fr-221': _iso('Fr-221', 'francium-221', 87, 221, 221.0142552, False),
+    'Fr-222': _iso('Fr-222', 'francium-222', 87, 222, 222.017552, False),
+    'Fr-223': _iso('Fr-223', 'francium-223', 87, 223, 223.0197360, False),
+    'Fr-224': _iso('Fr-224', 'francium-224', 87, 224, 224.023398, False),
+    'Fr-225': _iso('Fr-225', 'francium-225', 87, 225, 225.025573, False),
+    'Fr-226': _iso('Fr-226', 'francium-226', 87, 226, 226.029566, False),
+    'Fr-227': _iso('Fr-227', 'francium-227', 87, 227, 227.031869, False),
+    'Fr-228': _iso('Fr-228', 'francium-228', 87, 228, 228.035823, False),
+    'Fr-229': _iso('Fr-229', 'francium-229', 87, 229, 229.038298, False),
+    'Fr-230': _iso('Fr-230', 'francium-230', 87, 230, 230.042416, False),
+    'Fr-231': _iso('Fr-231', 'francium-231', 87, 231, 231.045158, False),
+    'Fr-232': _iso('Fr-232', 'francium-232', 87, 232, 232.04937, False),
+    'Fr-233': _iso('Fr-233', 'francium-233', 87, 233, 233.05264, False),
+    'Ra-201': _iso('Ra-201', 'radium-201', 88, 201, 201.01271, False),
+    'Ra-202': _iso('Ra-202', 'radium-202', 88, 202, 202.009760, False),
+    'Ra-203': _iso('Ra-203', 'radium-203', 88, 203, 203.009304, False),
+    'Ra-204': _iso('Ra-204', 'radium-204', 88, 204, 204.006492, False),
+    'Ra-205': _iso('Ra-205', 'radium-205', 88, 205, 205.006268, False),
+    'Ra-206': _iso('Ra-206', 'radium-206', 88, 206, 206.003828, False),
+    'Ra-207': _iso('Ra-207', 'radium-207', 88, 207, 207.003799, False),
+    'Ra-208': _iso('Ra-208', 'radium-208', 88, 208, 208.001841, False),
+    'Ra-209': _iso('Ra-209', 'radium-209', 88, 209, 209.001990, False),
+    'Ra-210': _iso('Ra-210', 'radium-210', 88, 210, 210.000494, False),
+    'Ra-211': _iso('Ra-211', 'radium-211', 88, 211, 211.0008932, False),
+    'Ra-212': _iso('Ra-212', 'radium-212', 88, 212, 211.999787, False),
+    'Ra-213': _iso('Ra-213', 'radium-213', 88, 213, 213.000384, False),
+    'Ra-214': _iso('Ra-214', 'radium-214', 88, 214, 214.0000997, False),
+    'Ra-215': _iso('Ra-215', 'radium-215', 88, 215, 215.0027204, False),
+    'Ra-216': _iso('Ra-216', 'radium-216', 88, 216, 216.0035334, False),
+    'Ra-217': _iso('Ra-217', 'radium-217', 88, 217, 217.0063207, False),
+    'Ra-218': _iso('Ra-218', 'radium-218', 88, 218, 218.007141, False),
+    'Ra-219': _iso('Ra-219', 'radium-219', 88, 219, 219.0100855, False),
+    'Ra-220': _iso('Ra-220', 'radium-220', 88, 220, 220.0110259, False),
+    'Ra-221': _iso('Ra-221', 'radium-221', 88, 221, 221.0139177, False),
+    'Ra-222': _iso('Ra-222', 'radium-222', 88, 222, 222.0153748, False),
+    'Ra-223': _iso('Ra-223', 'radium-223', 88, 223, 223.0185023, False),
+    'Ra-224': _iso('Ra-224', 'radium-224', 88, 224, 224.0202120, False),
+    'Ra-225': _iso('Ra-225', 'radium-225', 88, 225, 225.0236119, False),
+    'Ra-226': _iso('Ra-226', 'radium-226', 88, 226, 226.0254103, False),
+    'Ra-227': _iso('Ra-227', 'radium-227', 88, 227, 227.0291783, False),
+    'Ra-228': _iso('Ra-228', 'radium-228', 88, 228, 228.0310707, False),
+    'Ra-229': _iso('Ra-229', 'radium-229', 88, 229, 229.034942, False),
+    'Ra-230': _iso('Ra-230', 'radium-230', 88, 230, 230.037055, False),
+    'Ra-231': _iso('Ra-231', 'radium-231', 88, 231, 231.041027, False),
+    'Ra-232': _iso('Ra-232', 'radium-232', 88, 232, 232.0434753, False),
+    'Ra-233': _iso('Ra-233', 'radium-233', 88, 233, 233.047582, False),
+    'Ra-234': _iso('Ra-234', 'radium-234', 88, 234, 234.050342, False),
+    'Ra-235': _iso('Ra-235', 'radium-235', 88, 235, 235.05497, False),
+    'Ac-206': _iso('Ac-206', 'actinium-206', 89, 206, 206.014452, False),
+    'Ac-207': _iso('Ac-207', 'actinium-207', 89, 207, 207.011966, False),
+    'Ac-208': _iso('Ac-208', 'actinium-208', 89, 208, 208.011550, False),
+    'Ac-209': _iso('Ac-209', 'actinium-209', 89, 209, 209.009495, False),
+    'Ac-210': _iso('Ac-210', 'actinium-210', 89, 210, 210.009436, False),
+    'Ac-211': _iso('Ac-211', 'actinium-211', 89, 211, 211.007732, False),
+    'Ac-212': _iso('Ac-212', 'actinium-212', 89, 212, 212.007813, False),
+    'Ac-213': _iso('Ac-213', 'actinium-213', 89, 213, 213.006609, False),
+    'Ac-214': _iso('Ac-214', 'actinium-214', 89, 214, 214.006918, False),
+    'Ac-215': _iso('Ac-215', 'actinium-215', 89, 215, 215.006475, False),
+    'Ac-216': _iso('Ac-216', 'actinium-216', 89, 216, 216.008743, False),
+    'Ac-217': _iso('Ac-217', 'actinium-217', 89, 217, 217.009344, False),
+    'Ac-218': _iso('Ac-218', 'actinium-218', 89, 218, 218.011642, False),
+    'Ac-219': _iso('Ac-219', 'actinium-219', 89, 219, 219.012421, False),
+    'Ac-220': _iso('Ac-220', 'actinium-220', 89, 220, 220.0147549, False),
+    'Ac-221': _iso('Ac-221', 'actinium-221', 89, 221, 221.015592, False),
+    'Ac-222': _iso('Ac-222', 'actinium-222', 89, 222, 222.0178442, False),
+    'Ac-223': _iso('Ac-223', 'actinium-223', 89, 223, 223.0191377, False),
+    'Ac-224': _iso('Ac-224', 'actinium-224', 89, 224, 224.0217232, False),
+    'Ac-225': _iso('Ac-225', 'actinium-225', 89, 225, 225.0232300, False),
+    'Ac-226': _iso('Ac-226', 'actinium-226', 89, 226, 226.0260984, False),
+    'Ac-227': _iso('Ac-227', 'actinium-227', 89, 227, 227.0277523, False),
+    'Ac-228': _iso('Ac-228', 'actinium-228', 89, 228, 228.0310215, False),
+    'Ac-229': _iso('Ac-229', 'actinium-229', 89, 229, 229.032956, False),
+    'Ac-230': _iso('Ac-230', 'actinium-230', 89, 230, 230.036327, False),
+    'Ac-231': _iso('Ac-231', 'actinium-231', 89, 231, 231.038393, False),
+    'Ac-232': _iso('Ac-232', 'actinium-232', 89, 232, 232.042034, False),
+    'Ac-233': _iso('Ac-233', 'actinium-233', 89, 233, 233.044346, False),
+    'Ac-234': _iso('Ac-234', 'actinium-234', 89, 234, 234.048139, False),
+    'Ac-235': _iso('Ac-235', 'actinium-235', 89, 235, 235.050840, False),
+    'Ac-236': _iso('Ac-236', 'actinium-236', 89, 236, 236.054988, False),
+    'Ac-237': _iso('Ac-237', 'actinium-237', 89, 237, 237.05827, False),
+    'Th-208': _iso('Th-208', 'thorium-208', 90, 208, 208.017900, False),
+    'Th-209': _iso('Th-209', 'thorium-209', 90, 209, 209.017753, False),
+    'Th-210': _iso('Th-210', 'thorium-210', 90, 210, 210.015094, False),
+    'Th-211': _iso('Th-211', 'thorium-211', 90, 211, 211.014929, False),
+    'Th-212': _iso('Th-212', 'thorium-212', 90, 212, 212.012988, False),
+    'Th-213': _iso('Th-213', 'thorium-213', 90, 213, 213.013009, False),
+    'Th-214': _iso('Th-214', 'thorium-214', 90, 214, 214.011500, False),
+    'Th-215': _iso('Th-215', 'thorium-215', 90, 215, 215.0117248, False),
+    'Th-216': _iso('Th-216', 'thorium-216', 90, 216, 216.011056, False),
+    'Th-217': _iso('Th-217', 'thorium-217', 90, 217, 217.013117, False),
+    'Th-218': _iso('Th-218', 'thorium-218', 90, 218, 218.013276, False),
+    'Th-219': _iso('Th-219', 'thorium-219', 90, 219, 219.015537, False),
+    'Th-220': _iso('Th-220', 'thorium-220', 90, 220, 220.015748, False),
+    'Th-221': _iso('Th-221', 'thorium-221', 90, 221, 221.018184, False),
+    'Th-222': _iso('Th-222', 'thorium-222', 90, 222, 222.018469, False),
+    'Th-223': _iso('Th-223', 'thorium-223', 90, 223, 223.0208119, False),
+    'Th-224': _iso('Th-224', 'thorium-224', 90, 224, 224.021464, False),
+    'Th-225': _iso('Th-225', 'thorium-225', 90, 225, 225.0239514, False),
+    'Th-226': _iso('Th-226', 'thorium-226', 90, 226, 226.0249034, False),
+    'Th-227': _iso('Th-227', 'thorium-227', 90, 227, 227.0277042, False),
+    'Th-228': _iso('Th-228', 'thorium-228', 90, 228, 228.0287413, False,
+                   half_life=60359040.0),
+    'Th-229': _iso('Th-229', 'thorium-229', 90, 229, 229.0317627, False),
+    'Th-230': _iso('Th-230', 'thorium-230', 90, 230, 230.0331341, False),
+    'Th-231': _iso('Th-231', 'thorium-231', 90, 231, 231.0363046, False),
+    'Th-232': _iso('Th-232', 'thorium-232', 90, 232, 232.0380558, False,
+                   isotopic_abundance=1),
+    'Th-233': _iso('Th-233', 'thorium-233', 90, 233, 233.0415823, False),
+    'Th-234': _iso('Th-234', 'thorium-234', 90, 234, 234.0436014, False),
+    'Th-235': _iso('Th-235', 'thorium-235', 90, 235, 235.047255, False),
+    'Th-236': _iso('Th-236', 'thorium-236', 90, 236, 236.049657, False),
+    'Th-237': _iso('Th-237', 'thorium-237', 90, 237, 237.053629, False),
+    'Th-238': _iso('Th-238', 'thorium-238', 90, 238, 238.05650, False),
+    'Th-239': _iso('Th-239', 'thorium-239', 90, 239, 239.06077, False),
+    'Pa-212': _iso('Pa-212', 'protactinium-212', 91, 212, 212.023203, False),
+    'Pa-213': _iso('Pa-213', 'protactinium-213', 91, 213, 213.021109, False),
+    'Pa-214': _iso('Pa-214', 'protactinium-214', 91, 214, 214.020918, False),
+    'Pa-215': _iso('Pa-215', 'protactinium-215', 91, 215, 215.019183, False),
+    'Pa-216': _iso('Pa-216', 'protactinium-216', 91, 216, 216.019109, False),
+    'Pa-217': _iso('Pa-217', 'protactinium-217', 91, 217, 217.018325, False),
+    'Pa-218': _iso('Pa-218', 'protactinium-218', 91, 218, 218.020059, False),
+    'Pa-219': _iso('Pa-219', 'protactinium-219', 91, 219, 219.019904, False),
+    'Pa-220': _iso('Pa-220', 'protactinium-220', 91, 220, 220.021705, False),
+    'Pa-221': _iso('Pa-221', 'protactinium-221', 91, 221, 221.021875, False),
+    'Pa-222': _iso('Pa-222', 'protactinium-222', 91, 222, 222.023784, False),
+    'Pa-223': _iso('Pa-223', 'protactinium-223', 91, 223, 223.023963, False),
+    'Pa-224': _iso('Pa-224', 'protactinium-224', 91, 224, 224.0256176, False),
+    'Pa-225': _iso('Pa-225', 'protactinium-225', 91, 225, 225.026131, False),
+    'Pa-226': _iso('Pa-226', 'protactinium-226', 91, 226, 226.027948, False),
+    'Pa-227': _iso('Pa-227', 'protactinium-227', 91, 227, 227.0288054, False),
+    'Pa-228': _iso('Pa-228', 'protactinium-228', 91, 228, 228.0310517, False),
+    'Pa-229': _iso('Pa-229', 'protactinium-229', 91, 229, 229.0320972, False),
+    'Pa-230': _iso('Pa-230', 'protactinium-230', 91, 230, 230.0345410, False),
+    'Pa-231': _iso('Pa-231', 'protactinium-231', 91, 231, 231.0358842, False,
+                   isotopic_abundance=1),
+    'Pa-232': _iso('Pa-232', 'protactinium-232', 91, 232, 232.0385917, False),
+    'Pa-233': _iso('Pa-233', 'protactinium-233', 91, 233, 233.0402472, False),
+    'Pa-234': _iso('Pa-234', 'protactinium-234', 91, 234, 234.0433072, False),
+    'Pa-235': _iso('Pa-235', 'protactinium-235', 91, 235, 235.045399, False),
+    'Pa-236': _iso('Pa-236', 'protactinium-236', 91, 236, 236.048668, False),
+    'Pa-237': _iso('Pa-237', 'protactinium-237', 91, 237, 237.051023, False),
+    'Pa-238': _iso('Pa-238', 'protactinium-238', 91, 238, 238.054637, False),
+    'Pa-239': _iso('Pa-239', 'protactinium-239', 91, 239, 239.05726, False),
+    'Pa-240': _iso('Pa-240', 'protactinium-240', 91, 240, 240.06098, False),
+    'Pa-241': _iso('Pa-241', 'protactinium-241', 91, 241, 241.06408, False),
+    'U-217': _iso('U-217', 'uranium-217', 92, 217, 217.02466, False),
+    'U-218': _iso('U-218', 'uranium-218', 92, 218, 218.023523, False),
+    'U-219': _iso('U-219', 'uranium-219', 92, 219, 219.024999, False),
+    'U-220': _iso('U-220', 'uranium-220', 92, 220, 220.02462, False),
+    'U-221': _iso('U-221', 'uranium-221', 92, 221, 221.02628, False),
+    'U-222': _iso('U-222', 'uranium-222', 92, 222, 222.02600, False),
+    'U-223': _iso('U-223', 'uranium-223', 92, 223, 223.027739, False),
+    'U-224': _iso('U-224', 'uranium-224', 92, 224, 224.027605, False),
+    'U-225': _iso('U-225', 'uranium-225', 92, 225, 225.029391, False),
+    'U-226': _iso('U-226', 'uranium-226', 92, 226, 226.029339, False),
+    'U-227': _iso('U-227', 'uranium-227', 92, 227, 227.031157, False),
+    'U-228': _iso('U-228', 'uranium-228', 92, 228, 228.031371, False),
+    'U-229': _iso('U-229', 'uranium-229', 92, 229, 229.0335063, False),
+    'U-230': _iso('U-230', 'uranium-230', 92, 230, 230.0339401, False),
+    'U-231': _iso('U-231', 'uranium-231', 92, 231, 231.0362939, False),
+    'U-232': _iso('U-232', 'uranium-232', 92, 232, 232.0371563, False),
+    'U-233': _iso('U-233', 'uranium-233', 92, 233, 233.0396355, False),
+    'U-234': _iso('U-234', 'uranium-234', 92, 234, 234.0409523, False,
+                  isotopic_abundance=0.000054),
+    'U-235': _iso('U-235', 'uranium-235', 92, 235, 235.0439301, False,
+                  isotopic_abundance=0.007204),
+    'U-236': _iso('U-236', 'uranium-236', 92, 236, 236.0455682, False),
+    'U-237': _iso('U-237', 'uranium-237', 92, 237, 237.0487304, False),
+    'U-238': _iso('U-238', 'uranium-238', 92, 238, 238.0507884, False,
+                  isotopic_abundance=0.992742),
+    'U-239': _iso('U-239', 'uranium-239', 92, 239, 239.0542935, False),
+    'U-240': _iso('U-240', 'uranium-240', 92, 240, 240.0565934, False),
+    'U-241': _iso('U-241', 'uranium-241', 92, 241, 241.06033, False),
+    'U-242': _iso('U-242', 'uranium-242', 92, 242, 242.06293, False),
+    'U-243': _iso('U-243', 'uranium-243', 92, 243, 243.06699, False),
+    'Np-219': _iso('Np-219', 'neptunium-219', 93, 219, 219.03143, False),
+    'Np-220': _iso('Np-220', 'neptunium-220', 93, 220, 220.03254, False),
+    'Np-221': _iso('Np-221', 'neptunium-221', 93, 221, 221.03204, False),
+    'Np-222': _iso('Np-222', 'neptunium-222', 93, 222, 222.03330, False),
+    'Np-223': _iso('Np-223', 'neptunium-223', 93, 223, 223.03285, False),
+    'Np-224': _iso('Np-224', 'neptunium-224', 93, 224, 224.03422, False),
+    'Np-225': _iso('Np-225', 'neptunium-225', 93, 225, 225.033911, False),
+    'Np-226': _iso('Np-226', 'neptunium-226', 93, 226, 226.035188, False),
+    'Np-227': _iso('Np-227', 'neptunium-227', 93, 227, 227.034957, False),
+    'Np-228': _iso('Np-228', 'neptunium-228', 93, 228, 228.036067, False),
+    'Np-229': _iso('Np-229', 'neptunium-229', 93, 229, 229.036264, False),
+    'Np-230': _iso('Np-230', 'neptunium-230', 93, 230, 230.037828, False),
+    'Np-231': _iso('Np-231', 'neptunium-231', 93, 231, 231.038245, False),
+    'Np-232': _iso('Np-232', 'neptunium-232', 93, 232, 232.04011, False),
+    'Np-233': _iso('Np-233', 'neptunium-233', 93, 233, 233.040741, False),
+    'Np-234': _iso('Np-234', 'neptunium-234', 93, 234, 234.0428953, False),
+    'Np-235': _iso('Np-235', 'neptunium-235', 93, 235, 235.0440635, False),
+    'Np-236': _iso('Np-236', 'neptunium-236', 93, 236, 236.046570, False),
+    'Np-237': _iso('Np-237', 'neptunium-237', 93, 237, 237.0481736, False),
+    'Np-238': _iso('Np-238', 'neptunium-238', 93, 238, 238.0509466, False),
+    'Np-239': _iso('Np-239', 'neptunium-239', 93, 239, 239.0529392, False),
+    'Np-240': _iso('Np-240', 'neptunium-240', 93, 240, 240.056165, False),
+    'Np-241': _iso('Np-241', 'neptunium-241', 93, 241, 241.058253, False),
+    'Np-242': _iso('Np-242', 'neptunium-242', 93, 242, 242.06164, False),
+    'Np-243': _iso('Np-243', 'neptunium-243', 93, 243, 243.064280, False),
+    'Np-244': _iso('Np-244', 'neptunium-244', 93, 244, 244.06785, False),
+    'Np-245': _iso('Np-245', 'neptunium-245', 93, 245, 245.07080, False),
+    'Pu-228': _iso('Pu-228', 'plutonium-228', 94, 228, 228.038732, False),
+    'Pu-229': _iso('Pu-229', 'plutonium-229', 94, 229, 229.040144, False),
+    'Pu-230': _iso('Pu-230', 'plutonium-230', 94, 230, 230.039650, False),
+    'Pu-231': _iso('Pu-231', 'plutonium-231', 94, 231, 231.041102, False),
+    'Pu-232': _iso('Pu-232', 'plutonium-232', 94, 232, 232.041185, False),
+    'Pu-233': _iso('Pu-233', 'plutonium-233', 94, 233, 233.042998, False),
+    'Pu-234': _iso('Pu-234', 'plutonium-234', 94, 234, 234.0433174, False),
+    'Pu-235': _iso('Pu-235', 'plutonium-235', 94, 235, 235.045286, False),
+    'Pu-236': _iso('Pu-236', 'plutonium-236', 94, 236, 236.0460581, False),
+    'Pu-237': _iso('Pu-237', 'plutonium-237', 94, 237, 237.0484098, False),
+    'Pu-238': _iso('Pu-238', 'plutonium-238', 94, 238, 238.0495601, False),
+    'Pu-239': _iso('Pu-239', 'plutonium-239', 94, 239, 239.0521636, False),
+    'Pu-240': _iso('Pu-240', 'plutonium-240', 94, 240, 240.0538138, False),
+    'Pu-241': _iso('Pu-241', 'plutonium-241', 94, 241, 241.0568517, False),
+    'Pu-242': _iso('Pu-242', 'plutonium-242', 94, 242, 242.0587428, False),
+    'Pu-243': _iso('Pu-243', 'plutonium-243', 94, 243, 243.0620036, False),
+    'Pu-244': _iso('Pu-244', 'plutonium-244', 94, 244, 244.0642053, False),
+    'Pu-245': _iso('Pu-245', 'plutonium-245', 94, 245, 245.067826, False),
+    'Pu-246': _iso('Pu-246', 'plutonium-246', 94, 246, 246.070205, False),
+    'Pu-247': _iso('Pu-247', 'plutonium-247', 94, 247, 247.07419, False),
+    'Am-230': _iso('Am-230', 'americium-230', 95, 230, 230.04609, False),
+    'Am-231': _iso('Am-231', 'americium-231', 95, 231, 231.04556, False),
+    'Am-232': _iso('Am-232', 'americium-232', 95, 232, 232.04645, False),
+    'Am-233': _iso('Am-233', 'americium-233', 95, 233, 233.04644, False),
+    'Am-234': _iso('Am-234', 'americium-234', 95, 234, 234.04773, False),
+    'Am-235': _iso('Am-235', 'americium-235', 95, 235, 235.047908, False),
+    'Am-236': _iso('Am-236', 'americium-236', 95, 236, 236.04943, False),
+    'Am-237': _iso('Am-237', 'americium-237', 95, 237, 237.049996, False),
+    'Am-238': _iso('Am-238', 'americium-238', 95, 238, 238.051985, False),
+    'Am-239': _iso('Am-239', 'americium-239', 95, 239, 239.0530247, False),
+    'Am-240': _iso('Am-240', 'americium-240', 95, 240, 240.055300, False),
+    'Am-241': _iso('Am-241', 'americium-241', 95, 241, 241.0568293, False),
+    'Am-242': _iso('Am-242', 'americium-242', 95, 242, 242.0595494, False),
+    'Am-243': _iso('Am-243', 'americium-243', 95, 243, 243.0613813, False),
+    'Am-244': _iso('Am-244', 'americium-244', 95, 244, 244.0642851, False),
+    'Am-245': _iso('Am-245', 'americium-245', 95, 245, 245.0664548, False),
+    'Am-246': _iso('Am-246', 'americium-246', 95, 246, 246.069775, False),
+    'Am-247': _iso('Am-247', 'americium-247', 95, 247, 247.07209, False),
+    'Am-248': _iso('Am-248', 'americium-248', 95, 248, 248.07575, False),
+    'Am-249': _iso('Am-249', 'americium-249', 95, 249, 249.07848, False),
+    'Cm-232': _iso('Cm-232', 'curium-232', 96, 232, 232.04982, False),
+    'Cm-233': _iso('Cm-233', 'curium-233', 96, 233, 233.050770, False),
+    'Cm-234': _iso('Cm-234', 'curium-234', 96, 234, 234.050160, False),
+    'Cm-235': _iso('Cm-235', 'curium-235', 96, 235, 235.05154, False),
+    'Cm-236': _iso('Cm-236', 'curium-236', 96, 236, 236.051374, False),
+    'Cm-237': _iso('Cm-237', 'curium-237', 96, 237, 237.052869, False),
+    'Cm-238': _iso('Cm-238', 'curium-238', 96, 238, 238.053081, False),
+    'Cm-239': _iso('Cm-239', 'curium-239', 96, 239, 239.054910, False),
+    'Cm-240': _iso('Cm-240', 'curium-240', 96, 240, 240.0555297, False),
+    'Cm-241': _iso('Cm-241', 'curium-241', 96, 241, 241.0576532, False),
+    'Cm-242': _iso('Cm-242', 'curium-242', 96, 242, 242.0588360, False),
+    'Cm-243': _iso('Cm-243', 'curium-243', 96, 243, 243.0613893, False),
+    'Cm-244': _iso('Cm-244', 'curium-244', 96, 244, 244.0627528, False),
+    'Cm-245': _iso('Cm-245', 'curium-245', 96, 245, 245.0654915, False),
+    'Cm-246': _iso('Cm-246', 'curium-246', 96, 246, 246.0672238, False),
+    'Cm-247': _iso('Cm-247', 'curium-247', 96, 247, 247.0703541, False),
+    'Cm-248': _iso('Cm-248', 'curium-248', 96, 248, 248.0723499, False),
+    'Cm-249': _iso('Cm-249', 'curium-249', 96, 249, 249.0759548, False),
+    'Cm-250': _iso('Cm-250', 'curium-250', 96, 250, 250.078358, False),
+    'Cm-251': _iso('Cm-251', 'curium-251', 96, 251, 251.082286, False),
+    'Cm-252': _iso('Cm-252', 'curium-252', 96, 252, 252.08487, False),
+    'Bk-234': _iso('Bk-234', 'berkelium-234', 97, 234, 234.05727, False),
+    'Bk-235': _iso('Bk-235', 'berkelium-235', 97, 235, 235.05658, False),
+    'Bk-236': _iso('Bk-236', 'berkelium-236', 97, 236, 236.05748, False),
+    'Bk-237': _iso('Bk-237', 'berkelium-237', 97, 237, 237.05710, False),
+    'Bk-238': _iso('Bk-238', 'berkelium-238', 97, 238, 238.05820, False),
+    'Bk-239': _iso('Bk-239', 'berkelium-239', 97, 239, 239.05824, False),
+    'Bk-240': _iso('Bk-240', 'berkelium-240', 97, 240, 240.05976, False),
+    'Bk-241': _iso('Bk-241', 'berkelium-241', 97, 241, 241.06016, False),
+    'Bk-242': _iso('Bk-242', 'berkelium-242', 97, 242, 242.06198, False),
+    'Bk-243': _iso('Bk-243', 'berkelium-243', 97, 243, 243.0630078, False),
+    'Bk-244': _iso('Bk-244', 'berkelium-244', 97, 244, 244.065181, False),
+    'Bk-245': _iso('Bk-245', 'berkelium-245', 97, 245, 245.0663618, False),
+    'Bk-246': _iso('Bk-246', 'berkelium-246', 97, 246, 246.068673, False),
+    'Bk-247': _iso('Bk-247', 'berkelium-247', 97, 247, 247.0703073, False),
+    'Bk-248': _iso('Bk-248', 'berkelium-248', 97, 248, 248.073088, False),
+    'Bk-249': _iso('Bk-249', 'berkelium-249', 97, 249, 249.0749877, False),
+    'Bk-250': _iso('Bk-250', 'berkelium-250', 97, 250, 250.0783167, False),
+    'Bk-251': _iso('Bk-251', 'berkelium-251', 97, 251, 251.080762, False),
+    'Bk-252': _iso('Bk-252', 'berkelium-252', 97, 252, 252.08431, False),
+    'Bk-253': _iso('Bk-253', 'berkelium-253', 97, 253, 253.08688, False),
+    'Bk-254': _iso('Bk-254', 'berkelium-254', 97, 254, 254.09060, False),
+    'Cf-237': _iso('Cf-237', 'californium-237', 98, 237, 237.062198, False),
+    'Cf-238': _iso('Cf-238', 'californium-238', 98, 238, 238.06149, False),
+    'Cf-239': _iso('Cf-239', 'californium-239', 98, 239, 239.06253, False),
+    'Cf-240': _iso('Cf-240', 'californium-240', 98, 240, 240.062256, False),
+    'Cf-241': _iso('Cf-241', 'californium-241', 98, 241, 241.06369, False),
+    'Cf-242': _iso('Cf-242', 'californium-242', 98, 242, 242.063754, False),
+    'Cf-243': _iso('Cf-243', 'californium-243', 98, 243, 243.06548, False),
+    'Cf-244': _iso('Cf-244', 'californium-244', 98, 244, 244.0660008, False),
+    'Cf-245': _iso('Cf-245', 'californium-245', 98, 245, 245.0680487, False),
+    'Cf-246': _iso('Cf-246', 'californium-246', 98, 246, 246.0688055, False),
+    'Cf-247': _iso('Cf-247', 'californium-247', 98, 247, 247.070965, False),
+    'Cf-248': _iso('Cf-248', 'californium-248', 98, 248, 248.0721851, False),
+    'Cf-249': _iso('Cf-249', 'californium-249', 98, 249, 249.0748539, False),
+    'Cf-250': _iso('Cf-250', 'californium-250', 98, 250, 250.0764062, False),
+    'Cf-251': _iso('Cf-251', 'californium-251', 98, 251, 251.0795886, False),
+    'Cf-252': _iso('Cf-252', 'californium-252', 98, 252, 252.0816272, False),
+    'Cf-253': _iso('Cf-253', 'californium-253', 98, 253, 253.0851345, False),
+    'Cf-254': _iso('Cf-254', 'californium-254', 98, 254, 254.087324, False),
+    'Cf-255': _iso('Cf-255', 'californium-255', 98, 255, 255.09105, False),
+    'Cf-256': _iso('Cf-256', 'californium-256', 98, 256, 256.09344, False),
+    'Es-239': _iso('Es-239', 'einsteinium-239', 99, 239, 239.06823, False),
+    'Es-240': _iso('Es-240', 'einsteinium-240', 99, 240, 240.06892, False),
+    'Es-241': _iso('Es-241', 'einsteinium-241', 99, 241, 241.06856, False),
+    'Es-242': _iso('Es-242', 'einsteinium-242', 99, 242, 242.06957, False),
+    'Es-243': _iso('Es-243', 'einsteinium-243', 99, 243, 243.06951, False),
+    'Es-244': _iso('Es-244', 'einsteinium-244', 99, 244, 244.07088, False),
+    'Es-245': _iso('Es-245', 'einsteinium-245', 99, 245, 245.07125, False),
+    'Es-246': _iso('Es-246', 'einsteinium-246', 99, 246, 246.07290, False),
+    'Es-247': _iso('Es-247', 'einsteinium-247', 99, 247, 247.073622, False),
+    'Es-248': _iso('Es-248', 'einsteinium-248', 99, 248, 248.075471, False),
+    'Es-249': _iso('Es-249', 'einsteinium-249', 99, 249, 249.076411, False),
+    'Es-250': _iso('Es-250', 'einsteinium-250', 99, 250, 250.07861, False),
+    'Es-251': _iso('Es-251', 'einsteinium-251', 99, 251, 251.0799936, False),
+    'Es-252': _iso('Es-252', 'einsteinium-252', 99, 252, 252.082980, False),
+    'Es-253': _iso('Es-253', 'einsteinium-253', 99, 253, 253.0848257, False),
+    'Es-254': _iso('Es-254', 'einsteinium-254', 99, 254, 254.0880222, False),
+    'Es-255': _iso('Es-255', 'einsteinium-255', 99, 255, 255.090275, False),
+    'Es-256': _iso('Es-256', 'einsteinium-256', 99, 256, 256.09360, False),
+    'Es-257': _iso('Es-257', 'einsteinium-257', 99, 257, 257.09598, False),
+    'Es-258': _iso('Es-258', 'einsteinium-258', 99, 258, 258.09952, False),
+    'Fm-241': _iso('Fm-241', 'fermium-241', 100, 241, 241.07421, False),
+    'Fm-242': _iso('Fm-242', 'fermium-242', 100, 242, 242.07343, False),
+    'Fm-243': _iso('Fm-243', 'fermium-243', 100, 243, 243.07446, False),
+    'Fm-244': _iso('Fm-244', 'fermium-244', 100, 244, 244.07404, False),
+    'Fm-245': _iso('Fm-245', 'fermium-245', 100, 245, 245.07535, False),
+    'Fm-246': _iso('Fm-246', 'fermium-246', 100, 246, 246.075350, False),
+    'Fm-247': _iso('Fm-247', 'fermium-247', 100, 247, 247.07694, False),
+    'Fm-248': _iso('Fm-248', 'fermium-248', 100, 248, 248.0771865, False),
+    'Fm-249': _iso('Fm-249', 'fermium-249', 100, 249, 249.0789275, False),
+    'Fm-250': _iso('Fm-250', 'fermium-250', 100, 250, 250.0795210, False),
+    'Fm-251': _iso('Fm-251', 'fermium-251', 100, 251, 251.081540, False),
+    'Fm-252': _iso('Fm-252', 'fermium-252', 100, 252, 252.0824671, False),
+    'Fm-253': _iso('Fm-253', 'fermium-253', 100, 253, 253.0851846, False),
+    'Fm-254': _iso('Fm-254', 'fermium-254', 100, 254, 254.0868544, False),
+    'Fm-255': _iso('Fm-255', 'fermium-255', 100, 255, 255.0899640, False),
+    'Fm-256': _iso('Fm-256', 'fermium-256', 100, 256, 256.0917745, False),
+    'Fm-257': _iso('Fm-257', 'fermium-257', 100, 257, 257.0951061, False),
+    'Fm-258': _iso('Fm-258', 'fermium-258', 100, 258, 258.09708, False),
+    'Fm-259': _iso('Fm-259', 'fermium-259', 100, 259, 259.10060, False),
+    'Fm-260': _iso('Fm-260', 'fermium-260', 100, 260, 260.10281, False),
+    'Md-245': _iso('Md-245', 'mendelevium-245', 101, 245, 245.08081, False),
+    'Md-246': _iso('Md-246', 'mendelevium-246', 101, 246, 246.08171, False),
+    'Md-247': _iso('Md-247', 'mendelevium-247', 101, 247, 247.08152, False),
+    'Md-248': _iso('Md-248', 'mendelevium-248', 101, 248, 248.08282, False),
+    'Md-249': _iso('Md-249', 'mendelevium-249', 101, 249, 249.08291, False),
+    'Md-250': _iso('Md-250', 'mendelevium-250', 101, 250, 250.08441, False),
+    'Md-251': _iso('Md-251', 'mendelevium-251', 101, 251, 251.084774, False),
+    'Md-252': _iso('Md-252', 'mendelevium-252', 101, 252, 252.08643, False),
+    'Md-253': _iso('Md-253', 'mendelevium-253', 101, 253, 253.087144, False),
+    'Md-254': _iso('Md-254', 'mendelevium-254', 101, 254, 254.08959, False),
+    'Md-255': _iso('Md-255', 'mendelevium-255', 101, 255, 255.0910841, False),
+    'Md-256': _iso('Md-256', 'mendelevium-256', 101, 256, 256.09389,
False), + 'Md-257': _iso('Md-257', 'mendelevium-257', 101, 257, 257.0955424, False), + 'Md-258': _iso('Md-258', 'mendelevium-258', 101, 258, 258.0984315, False), + 'Md-259': _iso('Md-259', 'mendelevium-259', 101, 259, 259.10051, False), + 'Md-260': _iso('Md-260', 'mendelevium-260', 101, 260, 260.10365, False), + 'Md-261': _iso('Md-261', 'mendelevium-261', 101, 261, 261.10583, False), + 'Md-262': _iso('Md-262', 'mendelevium-262', 101, 262, 262.10910, False), + 'No-248': _iso('No-248', 'nobelium-248', 102, 248, 248.08655, False), + 'No-249': _iso('No-249', 'nobelium-249', 102, 249, 249.08780, False), + 'No-250': _iso('No-250', 'nobelium-250', 102, 250, 250.08756, False), + 'No-251': _iso('No-251', 'nobelium-251', 102, 251, 251.08894, False), + 'No-252': _iso('No-252', 'nobelium-252', 102, 252, 252.088967, False), + 'No-253': _iso('No-253', 'nobelium-253', 102, 253, 253.0905641, False), + 'No-254': _iso('No-254', 'nobelium-254', 102, 254, 254.090956, False), + 'No-255': _iso('No-255', 'nobelium-255', 102, 255, 255.093191, False), + 'No-256': _iso('No-256', 'nobelium-256', 102, 256, 256.0942829, False), + 'No-257': _iso('No-257', 'nobelium-257', 102, 257, 257.0968878, False), + 'No-258': _iso('No-258', 'nobelium-258', 102, 258, 258.09821, False), + 'No-259': _iso('No-259', 'nobelium-259', 102, 259, 259.10103, False), + 'No-260': _iso('No-260', 'nobelium-260', 102, 260, 260.10264, False), + 'No-261': _iso('No-261', 'nobelium-261', 102, 261, 261.10570, False), + 'No-262': _iso('No-262', 'nobelium-262', 102, 262, 262.10746, False), + 'No-263': _iso('No-263', 'nobelium-263', 102, 263, 263.11071, False), + 'No-264': _iso('No-264', 'nobelium-264', 102, 264, 264.11273, False), + 'Lr-251': _iso('Lr-251', 'lawrencium-251', 103, 251, 251.09418, False), + 'Lr-252': _iso('Lr-252', 'lawrencium-252', 103, 252, 252.09526, False), + 'Lr-253': _iso('Lr-253', 'lawrencium-253', 103, 253, 253.09509, False), + 'Lr-254': _iso('Lr-254', 'lawrencium-254', 103, 254, 254.09648, False), + 'Lr-255': _iso('Lr-255', 'lawrencium-255', 103, 255, 255.096562, False), + 'Lr-256': _iso('Lr-256', 'lawrencium-256', 103, 256, 256.098494, False), + 'Lr-257': _iso('Lr-257', 'lawrencium-257', 103, 257, 257.099418, False), + 'Lr-258': _iso('Lr-258', 'lawrencium-258', 103, 258, 258.10176, False), + 'Lr-259': _iso('Lr-259', 'lawrencium-259', 103, 259, 259.102902, False), + 'Lr-260': _iso('Lr-260', 'lawrencium-260', 103, 260, 260.10550, False), + 'Lr-261': _iso('Lr-261', 'lawrencium-261', 103, 261, 261.10688, False), + 'Lr-262': _iso('Lr-262', 'lawrencium-262', 103, 262, 262.10961, False), + 'Lr-263': _iso('Lr-263', 'lawrencium-263', 103, 263, 263.11136, False), + 'Lr-264': _iso('Lr-264', 'lawrencium-264', 103, 264, 264.11420, False), + 'Lr-265': _iso('Lr-265', 'lawrencium-265', 103, 265, 265.11619, False), + 'Lr-266': _iso('Lr-266', 'lawrencium-266', 103, 266, 266.11983, False), + 'Rf-253': _iso('Rf-253', 'rutherfordium-253', 104, 253, 253.10044, False), + 'Rf-254': _iso('Rf-254', 'rutherfordium-254', 104, 254, 254.10005, False), + 'Rf-255': _iso('Rf-255', 'rutherfordium-255', 104, 255, 255.10127, False), + 'Rf-256': _iso('Rf-256', 'rutherfordium-256', 104, 256, 256.101152, False), + 'Rf-257': _iso('Rf-257', 'rutherfordium-257', 104, 257, 257.102918, False), + 'Rf-258': _iso('Rf-258', 'rutherfordium-258', 104, 258, 258.103428, False), + 'Rf-259': _iso('Rf-259', 'rutherfordium-259', 104, 259, 259.105596, False), + 'Rf-260': _iso('Rf-260', 'rutherfordium-260', 104, 260, 260.10644, False), + 'Rf-261': _iso('Rf-261', 'rutherfordium-261', 
104, 261, 261.108773, False), + 'Rf-262': _iso('Rf-262', 'rutherfordium-262', 104, 262, 262.10992, False), + 'Rf-263': _iso('Rf-263', 'rutherfordium-263', 104, 263, 263.11249, False), + 'Rf-264': _iso('Rf-264', 'rutherfordium-264', 104, 264, 264.11388, False), + 'Rf-265': _iso('Rf-265', 'rutherfordium-265', 104, 265, 265.11668, False), + 'Rf-266': _iso('Rf-266', 'rutherfordium-266', 104, 266, 266.11817, False), + 'Rf-267': _iso('Rf-267', 'rutherfordium-267', 104, 267, 267.12179, False), + 'Rf-268': _iso('Rf-268', 'rutherfordium-268', 104, 268, 268.12397, False), + 'Db-255': _iso('Db-255', 'dubnium-255', 105, 255, 255.10707, False), + 'Db-256': _iso('Db-256', 'dubnium-256', 105, 256, 256.10789, False), + 'Db-257': _iso('Db-257', 'dubnium-257', 105, 257, 257.10758, False), + 'Db-258': _iso('Db-258', 'dubnium-258', 105, 258, 258.10928, False), + 'Db-259': _iso('Db-259', 'dubnium-259', 105, 259, 259.109492, False), + 'Db-260': _iso('Db-260', 'dubnium-260', 105, 260, 260.11130, False), + 'Db-261': _iso('Db-261', 'dubnium-261', 105, 261, 261.11192, False), + 'Db-262': _iso('Db-262', 'dubnium-262', 105, 262, 262.11407, False), + 'Db-263': _iso('Db-263', 'dubnium-263', 105, 263, 263.11499, False), + 'Db-264': _iso('Db-264', 'dubnium-264', 105, 264, 264.11741, False), + 'Db-265': _iso('Db-265', 'dubnium-265', 105, 265, 265.11861, False), + 'Db-266': _iso('Db-266', 'dubnium-266', 105, 266, 266.12103, False), + 'Db-267': _iso('Db-267', 'dubnium-267', 105, 267, 267.12247, False), + 'Db-268': _iso('Db-268', 'dubnium-268', 105, 268, 268.12567, False), + 'Db-269': _iso('Db-269', 'dubnium-269', 105, 269, 269.12791, False), + 'Db-270': _iso('Db-270', 'dubnium-270', 105, 270, 270.13136, False), + 'Sg-258': _iso('Sg-258', 'seaborgium-258', 106, 258, 258.11298, False), + 'Sg-259': _iso('Sg-259', 'seaborgium-259', 106, 259, 259.11440, False), + 'Sg-260': _iso('Sg-260', 'seaborgium-260', 106, 260, 260.114384, False), + 'Sg-261': _iso('Sg-261', 'seaborgium-261', 106, 261, 261.115949, False), + 'Sg-262': _iso('Sg-262', 'seaborgium-262', 106, 262, 262.116337, False), + 'Sg-263': _iso('Sg-263', 'seaborgium-263', 106, 263, 263.11829, False), + 'Sg-264': _iso('Sg-264', 'seaborgium-264', 106, 264, 264.11893, False), + 'Sg-265': _iso('Sg-265', 'seaborgium-265', 106, 265, 265.12109, False), + 'Sg-266': _iso('Sg-266', 'seaborgium-266', 106, 266, 266.12198, False), + 'Sg-267': _iso('Sg-267', 'seaborgium-267', 106, 267, 267.12436, False), + 'Sg-268': _iso('Sg-268', 'seaborgium-268', 106, 268, 268.12539, False), + 'Sg-269': _iso('Sg-269', 'seaborgium-269', 106, 269, 269.12863, False), + 'Sg-270': _iso('Sg-270', 'seaborgium-270', 106, 270, 270.13043, False), + 'Sg-271': _iso('Sg-271', 'seaborgium-271', 106, 271, 271.13393, False), + 'Sg-272': _iso('Sg-272', 'seaborgium-272', 106, 272, 272.13589, False), + 'Sg-273': _iso('Sg-273', 'seaborgium-273', 106, 273, 273.13958, False), + 'Bh-260': _iso('Bh-260', 'bohrium-260', 107, 260, 260.12166, False), + 'Bh-261': _iso('Bh-261', 'bohrium-261', 107, 261, 261.12145, False), + 'Bh-262': _iso('Bh-262', 'bohrium-262', 107, 262, 262.12297, False), + 'Bh-263': _iso('Bh-263', 'bohrium-263', 107, 263, 263.12292, False), + 'Bh-264': _iso('Bh-264', 'bohrium-264', 107, 264, 264.12459, False), + 'Bh-265': _iso('Bh-265', 'bohrium-265', 107, 265, 265.12491, False), + 'Bh-266': _iso('Bh-266', 'bohrium-266', 107, 266, 266.12679, False), + 'Bh-267': _iso('Bh-267', 'bohrium-267', 107, 267, 267.12750, False), + 'Bh-268': _iso('Bh-268', 'bohrium-268', 107, 268, 268.12969, False), + 'Bh-269': 
_iso('Bh-269', 'bohrium-269', 107, 269, 269.13042, False), + 'Bh-270': _iso('Bh-270', 'bohrium-270', 107, 270, 270.13336, False), + 'Bh-271': _iso('Bh-271', 'bohrium-271', 107, 271, 271.13526, False), + 'Bh-272': _iso('Bh-272', 'bohrium-272', 107, 272, 272.13826, False), + 'Bh-273': _iso('Bh-273', 'bohrium-273', 107, 273, 273.14024, False), + 'Bh-274': _iso('Bh-274', 'bohrium-274', 107, 274, 274.14355, False), + 'Bh-275': _iso('Bh-275', 'bohrium-275', 107, 275, 275.14567, False), + 'Hs-263': _iso('Hs-263', 'hassium-263', 108, 263, 263.12852, False), + 'Hs-264': _iso('Hs-264', 'hassium-264', 108, 264, 264.128357, False), + 'Hs-265': _iso('Hs-265', 'hassium-265', 108, 265, 265.129793, False), + 'Hs-266': _iso('Hs-266', 'hassium-266', 108, 266, 266.130046, False), + 'Hs-267': _iso('Hs-267', 'hassium-267', 108, 267, 267.13167, False), + 'Hs-268': _iso('Hs-268', 'hassium-268', 108, 268, 268.13186, False), + 'Hs-269': _iso('Hs-269', 'hassium-269', 108, 269, 269.13375, False), + 'Hs-270': _iso('Hs-270', 'hassium-270', 108, 270, 270.13429, False), + 'Hs-271': _iso('Hs-271', 'hassium-271', 108, 271, 271.13717, False), + 'Hs-272': _iso('Hs-272', 'hassium-272', 108, 272, 272.13850, False), + 'Hs-273': _iso('Hs-273', 'hassium-273', 108, 273, 273.14168, False), + 'Hs-274': _iso('Hs-274', 'hassium-274', 108, 274, 274.14330, False), + 'Hs-275': _iso('Hs-275', 'hassium-275', 108, 275, 275.14667, False), + 'Hs-276': _iso('Hs-276', 'hassium-276', 108, 276, 276.14846, False), + 'Hs-277': _iso('Hs-277', 'hassium-277', 108, 277, 277.15190, False), + 'Mt-265': _iso('Mt-265', 'meitnerium-265', 109, 265, 265.13600, False), + 'Mt-266': _iso('Mt-266', 'meitnerium-266', 109, 266, 266.13737, False), + 'Mt-267': _iso('Mt-267', 'meitnerium-267', 109, 267, 267.13719, False), + 'Mt-268': _iso('Mt-268', 'meitnerium-268', 109, 268, 268.13865, False), + 'Mt-269': _iso('Mt-269', 'meitnerium-269', 109, 269, 269.13882, False), + 'Mt-270': _iso('Mt-270', 'meitnerium-270', 109, 270, 270.14033, False), + 'Mt-271': _iso('Mt-271', 'meitnerium-271', 109, 271, 271.14074, False), + 'Mt-272': _iso('Mt-272', 'meitnerium-272', 109, 272, 272.14341, False), + 'Mt-273': _iso('Mt-273', 'meitnerium-273', 109, 273, 273.14440, False), + 'Mt-274': _iso('Mt-274', 'meitnerium-274', 109, 274, 274.14724, False), + 'Mt-275': _iso('Mt-275', 'meitnerium-275', 109, 275, 275.14882, False), + 'Mt-276': _iso('Mt-276', 'meitnerium-276', 109, 276, 276.15159, False), + 'Mt-277': _iso('Mt-277', 'meitnerium-277', 109, 277, 277.15327, False), + 'Mt-278': _iso('Mt-278', 'meitnerium-278', 109, 278, 278.15631, False), + 'Mt-279': _iso('Mt-279', 'meitnerium-279', 109, 279, 279.15808, False), + 'Ds-267': _iso('Ds-267', 'darmstadtium-267', 110, 267, 267.14377, False), + 'Ds-268': _iso('Ds-268', 'darmstadtium-268', 110, 268, 268.14348, False), + 'Ds-269': _iso('Ds-269', 'darmstadtium-269', 110, 269, 269.144752, False), + 'Ds-270': _iso('Ds-270', 'darmstadtium-270', 110, 270, 270.144584, False), + 'Ds-271': _iso('Ds-271', 'darmstadtium-271', 110, 271, 271.14595, False), + 'Ds-272': _iso('Ds-272', 'darmstadtium-272', 110, 272, 272.14602, False), + 'Ds-273': _iso('Ds-273', 'darmstadtium-273', 110, 273, 273.14856, False), + 'Ds-274': _iso('Ds-274', 'darmstadtium-274', 110, 274, 274.14941, False), + 'Ds-275': _iso('Ds-275', 'darmstadtium-275', 110, 275, 275.15203, False), + 'Ds-276': _iso('Ds-276', 'darmstadtium-276', 110, 276, 276.15303, False), + 'Ds-277': _iso('Ds-277', 'darmstadtium-277', 110, 277, 277.15591, False), + 'Ds-278': _iso('Ds-278', 'darmstadtium-278', 110, 
278, 278.15704, False), + 'Ds-279': _iso('Ds-279', 'darmstadtium-279', 110, 279, 279.16010, False), + 'Ds-280': _iso('Ds-280', 'darmstadtium-280', 110, 280, 280.16131, False), + 'Ds-281': _iso('Ds-281', 'darmstadtium-281', 110, 281, 281.16451, False), + 'Rg-272': _iso('Rg-272', 'roentgenium-272', 111, 272, 272.15327, False), + 'Rg-273': _iso('Rg-273', 'roentgenium-273', 111, 273, 273.15313, False), + 'Rg-274': _iso('Rg-274', 'roentgenium-274', 111, 274, 274.15525, False), + 'Rg-275': _iso('Rg-275', 'roentgenium-275', 111, 275, 275.15594, False), + 'Rg-276': _iso('Rg-276', 'roentgenium-276', 111, 276, 276.15833, False), + 'Rg-277': _iso('Rg-277', 'roentgenium-277', 111, 277, 277.15907, False), + 'Rg-278': _iso('Rg-278', 'roentgenium-278', 111, 278, 278.16149, False), + 'Rg-279': _iso('Rg-279', 'roentgenium-279', 111, 279, 279.16272, False), + 'Rg-280': _iso('Rg-280', 'roentgenium-280', 111, 280, 280.16514, False), + 'Rg-281': _iso('Rg-281', 'roentgenium-281', 111, 281, 281.16636, False), + 'Rg-282': _iso('Rg-282', 'roentgenium-282', 111, 282, 282.16912, False), + 'Rg-283': _iso('Rg-283', 'roentgenium-283', 111, 283, 283.17054, False), + 'Cn-276': _iso('Cn-276', 'copernicium-276', 112, 276, 276.16141, False), + 'Cn-277': _iso('Cn-277', 'copernicium-277', 112, 277, 277.16364, False), + 'Cn-278': _iso('Cn-278', 'copernicium-278', 112, 278, 278.16416, False), + 'Cn-279': _iso('Cn-279', 'copernicium-279', 112, 279, 279.16654, False), + 'Cn-280': _iso('Cn-280', 'copernicium-280', 112, 280, 280.16715, False), + 'Cn-281': _iso('Cn-281', 'copernicium-281', 112, 281, 281.16975, False), + 'Cn-282': _iso('Cn-282', 'copernicium-282', 112, 282, 282.17050, False), + 'Cn-283': _iso('Cn-283', 'copernicium-283', 112, 283, 283.17327, False), + 'Cn-284': _iso('Cn-284', 'copernicium-284', 112, 284, 284.17416, False), + 'Cn-285': _iso('Cn-285', 'copernicium-285', 112, 285, 285.17712, False), + 'Nh-278': _iso('Nh-278', 'nihonium-278', 113, 278, 278.17058, False), + 'Nh-279': _iso('Nh-279', 'nihonium-279', 113, 279, 279.17095, False), + 'Nh-280': _iso('Nh-280', 'nihonium-280', 113, 280, 280.17293, False), + 'Nh-281': _iso('Nh-281', 'nihonium-281', 113, 281, 281.17348, False), + 'Nh-282': _iso('Nh-282', 'nihonium-282', 113, 282, 282.17567, False), + 'Nh-283': _iso('Nh-283', 'nihonium-283', 113, 283, 283.17657, False), + 'Nh-284': _iso('Nh-284', 'nihonium-284', 113, 284, 284.17873, False), + 'Nh-285': _iso('Nh-285', 'nihonium-285', 113, 285, 285.17973, False), + 'Nh-286': _iso('Nh-286', 'nihonium-286', 113, 286, 286.18221, False), + 'Nh-287': _iso('Nh-287', 'nihonium-287', 113, 287, 287.18339, False), + 'Fl-285': _iso('Fl-285', 'flerovium-285', 114, 285, 285.18364, False), + 'Fl-286': _iso('Fl-286', 'flerovium-286', 114, 286, 286.18423, False), + 'Fl-287': _iso('Fl-287', 'flerovium-287', 114, 287, 287.18678, False), + 'Fl-288': _iso('Fl-288', 'flerovium-288', 114, 288, 288.18757, False), + 'Fl-289': _iso('Fl-289', 'flerovium-289', 114, 289, 289.19042, False), + 'Mc-287': _iso('Mc-287', 'moscovium-287', 115, 287, 287.19070, False), + 'Mc-288': _iso('Mc-288', 'moscovium-288', 115, 288, 288.19274, False), + 'Mc-289': _iso('Mc-289', 'moscovium-289', 115, 289, 289.19363, False), + 'Mc-290': _iso('Mc-290', 'moscovium-290', 115, 290, 290.19598, False), + 'Mc-291': _iso('Mc-291', 'moscovium-291', 115, 291, 291.19707, False), + 'Lv-289': _iso('Lv-289', 'livermorium-289', 116, 289, 289.19816, False), + 'Lv-290': _iso('Lv-290', 'livermorium-290', 116, 290, 290.19864, False), + 'Lv-291': _iso('Lv-291', 'livermorium-291', 116, 
291, 291.20108, False), + 'Lv-292': _iso('Lv-292', 'livermorium-292', 116, 292, 292.20174, False), + 'Lv-293': _iso('Lv-293', 'livermorium-293', 116, 293, 293.20449, False), + 'Ts-291': _iso('Ts-291', 'tennessine-291', 117, 291, 291.20553, False), + 'Ts-292': _iso('Ts-292', 'tennessine-292', 117, 292, 292.20746, False), + 'Ts-293': _iso('Ts-293', 'tennessine-293', 117, 293, 293.20824, False), + 'Ts-294': _iso('Ts-294', 'tennessine-294', 117, 294, 294.21046, False), + 'Og-293': _iso('Og-293', 'oganesson-293', 118, 293, 293.21356, False), + 'Og-294': _iso('Og-294', 'oganesson-294', 118, 294, 294.21392, False), + 'Og-295': _iso('Og-295', 'oganesson-295', 118, 295, 295.21624, False), +} diff --git a/plasmapy/atomic/names.py b/plasmapy/atomic/names.py index 1fc89da0..7434c70a 100644 --- a/plasmapy/atomic/names.py +++ b/plasmapy/atomic/names.py @@ -5,33 +5,32 @@ import warnings from typing import (Union, Optional, Any, Tuple) -from .elements import (_atomic_symbols, _atomic_symbols_dict, _Elements) - +from .elements import _atomic_symbols, _atomic_symbols_dict, _Elements from .isotopes import _Isotopes +from .particles import ParticleZoo -from .particles import (_is_special_particle, _get_standard_symbol) +from .parsing import _dealias_particle_aliases +from .classes import Particle from ..utils import (AtomicWarning, InvalidElementError, InvalidIsotopeError, InvalidIonError, - AtomicError, - InvalidParticleError, - ChargeError) + InvalidParticleError) # TODO: Create an ion_symbol function # TODO: Create a particle_symbol function -def atomic_symbol(argument: Union[str, int]) -> str: +def atomic_symbol(element: Union[str, int]) -> str: r"""Returns the atomic symbol. Parameters ---------- - argument: string or integer + element: string or integer A string representing an element, isotope, or ion; or an - integer representing an atomic number. + integer or string representing an atomic number. 
Returns ------- @@ -95,70 +94,23 @@ def atomic_symbol(argument: Union[str, int]) -> str: """ - if _is_special_particle(argument): - raise InvalidElementError(f"{argument} is not a valid element.") - try: - argument, Z = _extract_integer_charge(argument) + particle = Particle(element) except InvalidParticleError: - raise InvalidParticleError("Invalid charge in atomic_symbol") - - if not isinstance(argument, (str, int)): - raise TypeError("The first argument in atomic_symbol must be either " - "a string representing an element or isotope, or an " - "integer representing the atomic number (or 0 for " - "neutrons).") - - if isinstance(argument, str) and argument.isdigit(): - argument = int(argument) - - if isinstance(argument, int): - - try: - element = _atomic_symbols[argument] - except KeyError: - raise InvalidParticleError(f"{argument} is an invalid atomic " - "number in atomic_symbol.") - - elif _is_hydrogen(argument): - element = 'H' - elif _is_alpha(argument): - element = 'He' - elif isinstance(argument, str): - - if argument.count('-') == 1: - dash_position = argument.find('-') - mass_numb = argument[dash_position+1:] - if not mass_numb.isdigit(): - raise InvalidParticleError("Invalid isotope format in " - "atomic_symbol") - argument = argument[:dash_position] - else: - mass_numb = '' - - if argument.lower() in _atomic_symbols_dict.keys(): - element = _atomic_symbols_dict[argument.lower()] - elif argument in _atomic_symbols.values(): - element = argument.capitalize() - else: - raise InvalidParticleError(f"{argument} is an invalid argument " - "for atomic_symbol") - - if mass_numb.isdigit(): - - isotope = element.capitalize() + '-' + mass_numb - - if isotope not in _Isotopes.keys(): - raise InvalidParticleError( - "The input in atomic_symbol corresponding " - f"to {isotope} is not a valid isotope.") - - if Z is not None and \ - Z > _Elements[element]['atomic_number']: - raise InvalidParticleError("Cannot have an ionization state greater " - "than the atomic number.") + raise InvalidParticleError( + f"The argument {repr(element)} to atomic_symbol does not " + f"represent a valid particle.") + except TypeError: + raise TypeError( + f"The argument {repr(element)} to atomic_symbol is not an " + f"integer or string.") - return element + if particle.element: + return particle._atomic_symbol + else: + raise InvalidElementError( + f"The argument {repr(element)} to atomic_symbol does not " + f"represent a valid element.") def isotope_symbol(argument: Union[str, int], mass_numb: int = None) -> str: @@ -230,11 +182,18 @@ def isotope_symbol(argument: Union[str, int], mass_numb: int = None) -> str: """ + try: + particle = Particle(argument, mass_numb=mass_numb) + except InvalidParticleError: + raise InvalidParticleError + except Exception: + pass + # TODO: Remove this functionality when particle_symbol comes online if _is_neutron(argument, mass_numb): return 'n' - if _is_special_particle(argument): + if argument in ParticleZoo.everything - {'p+'}: raise InvalidIsotopeError(f"The argument {argument} does not " "correspond to a valid isotope in " "isotope_symbol.") @@ -339,6 +298,23 @@ def isotope_symbol(argument: Union[str, int], mass_numb: int = None) -> str: return isotope +def ion_symbol(argument: Union[str, int], mass_numb: int = None, + Z: int = None) -> str: + r"""Returns the ion symbol.""" + particle = Particle(argument, mass_numb=mass_numb, Z=Z) + if particle.ion: + return particle.ion + else: + raise InvalidIonError + + +def particle_symbol(argument: Union[str, int], mass_numb: int = None, +
Z: int = None) -> str: + r"""Returns the symbol of a particle.""" + particle = Particle(argument, mass_numb=mass_numb, Z=Z) + return particle.particle + + def element_name(argument: Union[str, int]) -> str: r"""Returns the name of an element. @@ -454,10 +430,15 @@ def _extract_integer_charge(argument: str) -> Tuple[str, int]: """ + warnings.warn("_extract_integer_charge is deprecated.", DeprecationWarning) + if not isinstance(argument, str): return argument, None - argument = _get_standard_symbol(argument) + try: + argument = _dealias_particle_aliases(argument) + except Exception: + pass if argument in ['n', 'antineutron'] or 'nu_' in argument: return argument, 0 @@ -530,6 +511,8 @@ def _is_neutron(argument: Any, mass_numb: int = None) -> bool: r"""Returns True if the argument corresponds to a neutron, and False otherwise.""" + warnings.warn("_is_neutron is deprecated.", DeprecationWarning) + if argument == 0 and mass_numb == 1: return True elif isinstance(argument, str) and mass_numb is None: @@ -546,6 +529,8 @@ def _is_hydrogen(argument: Any, r"""Returns True if the argument corresponds to hydrogen, and False otherwise.""" + warnings.warn("_is_hydrogen is deprecated.", DeprecationWarning) + case_sensitive_aliases = ['p', 'p+', 'H', 'D', 'T'] case_insensitive_aliases = ['proton', 'protium', 'deuterium', @@ -579,6 +564,8 @@ def _is_electron(arg: Any) -> bool: r"""Returns True if the argument corresponds to an electron, and False otherwise.""" + warnings.warn("_is_electron is deprecated.", DeprecationWarning) + if not isinstance(arg, str): return False @@ -589,6 +576,8 @@ def _is_positron(arg: Any) -> bool: r"""Returns True if the argument corresponds to a positron, and False otherwise.""" + warnings.warn("_is_positron is deprecated.", DeprecationWarning) + if not isinstance(arg, str): return False @@ -599,6 +588,8 @@ def _is_antiproton(arg: Any) -> bool: r"""Returns True if the argument corresponds to an antiproton, and False otherwise.""" + warnings.warn("_is_antiproton is deprecated.", DeprecationWarning) + if not isinstance(arg, str): return False @@ -609,6 +600,8 @@ def _is_antineutron(arg: Any) -> bool: r"""Returns True if the argument corresponds to an antineutron, and False otherwise.""" + warnings.warn("_is_antineutron is deprecated.", DeprecationWarning) + if not isinstance(arg, str): return False @@ -620,6 +613,8 @@ def _is_proton(arg: Any, Z: int = None, mass_numb: int = None) -> bool: False otherwise. This function returns False for 'H-1' if no charge state is given.""" + warnings.warn("_is_proton is deprecated.", DeprecationWarning) + argument, Z_from_arg = _extract_integer_charge(arg) if (Z is None) == (Z_from_arg is None): @@ -640,6 +635,8 @@ def _is_alpha(arg: Any) -> bool: r"""Returns True if the argument corresponds to an alpha particle, and False otherwise.""" + warnings.warn("_is_alpha is deprecated.", DeprecationWarning) + if not isinstance(arg, str): return False diff --git a/plasmapy/atomic/nuclear.py b/plasmapy/atomic/nuclear.py index 313c24c4..46259862 100644 --- a/plasmapy/atomic/nuclear.py +++ b/plasmapy/atomic/nuclear.py @@ -17,7 +17,11 @@ _is_antiproton, _is_antineutron) -from ..utils import (InvalidElementError, InvalidIsotopeError) +from ..utils import (InvalidElementError, + InvalidParticleError, + InvalidIsotopeError) + +from .classes import Particle def nuclear_binding_energy(argument, mass_numb=None): @@ -41,6 +45,17 @@ def nuclear_binding_energy(argument, mass_numb=None): binding_energy: Quantity The binding energy of the nucleus in units of Joules. 
+ Raises + ------ + InvalidParticleError + If the inputs do not correspond to a valid particle. + + InvalidIsotopeError + If the inputs do not correspond to a valid isotope. + + TypeError + If the inputs are not of the correct types. + See also -------- @@ -61,31 +76,22 @@ def nuclear_binding_energy(argument, mass_numb=None): >>> before = nuclear_binding_energy("D") + nuclear_binding_energy("T") >>> after = nuclear_binding_energy("alpha") >>> (after - before).to(u.MeV) # released energy from D + T --> alpha + n - <Quantity 17.58932778 MeV> + <Quantity 17.58929687 MeV> """ - if _is_neutron(argument) and mass_numb is None or mass_numb == 1: - return 0.0 * units.J - - isotope = isotope_symbol(argument, mass_numb) - - number_of_protons = atomic_number(argument) - nuclide_mass = ion_mass(isotope, Z=number_of_protons) - - if mass_numb is None: - mass_numb = mass_number(argument) - number_of_neutrons = mass_numb - number_of_protons - - if number_of_protons == 1 and number_of_neutrons == 0: - binding_energy = 0.0 * units.J - else: - mass_of_nucleons = (number_of_protons * constants.m_p + - number_of_neutrons * constants.m_n) - mass_defect = mass_of_nucleons - nuclide_mass - binding_energy = mass_defect * constants.c**2 - - return binding_energy.to(units.J) + try: + return Particle(argument, mass_numb=mass_numb).binding_energy + except TypeError: + raise TypeError("Invalid inputs to nuclear_binding_energy") + except InvalidParticleError: + raise InvalidParticleError( + f"The inputs to nuclear_binding_energy do not correspond to a " + f"valid particle.") + except InvalidIsotopeError: + raise InvalidIsotopeError( + f"The inputs to nuclear_binding_energy do not correspond to a " + f"valid isotope.") def nuclear_reaction_energy(*args, **kwargs): diff --git a/plasmapy/atomic/parsing.py b/plasmapy/atomic/parsing.py new file mode 100644 index 00000000..7bd4da13 --- /dev/null +++ b/plasmapy/atomic/parsing.py @@ -0,0 +1,420 @@ +import numpy as np +import re +import warnings +from typing import (Union, Dict) + +from .elements import (_atomic_symbols, _atomic_symbols_dict, _Elements) +from .isotopes import _Isotopes +from .particles import _Particles, ParticleZoo + +from ..utils import (AtomicWarning, + InvalidElementError, + InvalidParticleError) + + +def _create_alias_dicts(Particles: dict) -> (Dict[str, str], Dict[str, str]): + """Create dictionaries for case sensitive aliases and case + insensitive aliases of special particles and antiparticles. 
+ + The keys of these dictionaries are the aliases, and the values + are the corresponding standardized symbol for the particle or + antiparticle.""" + + case_sensitive_aliases = {} + case_insensitive_aliases = {} + + for symbol in Particles.keys(): + name = Particles[symbol]['name'] + case_insensitive_aliases[name.lower()] = symbol + + case_sensitive_aliases_for_a_symbol = [ + (['beta-', 'e'], 'e-'), + (['beta+'], 'e+'), + (['p', 'H-1+', 'H-1 1+', 'H-1 +1'], 'p+'), + (['n-1'], 'n'), + (['H-2'], 'D'), + (['H-2+', 'H-2 1+', 'H-2 +1', 'D+'], 'D 1+'), + (['H-3+', 'H-3 1+', 'H-3 +1', 'T+'], 'T 1+'), + ] + + case_insensitive_aliases_for_a_symbol = [ + (['antielectron'], 'e+'), + (['muon-'], 'mu-'), + (['muon+'], 'mu+'), + (['tau particle'], 'tau-'), + (['protium'], 'H-1'), + (['protium+', 'protium 1+', 'protium +1', 'hydrogen-1+', + 'hydrogen-1 1+', 'hydrogen-1 +1'], 'p+'), + (['deuterium', 'hydrogen-2'], 'D'), + (['deuteron', 'deuterium+', 'deuterium 1+', 'deuterium +1'], + 'D 1+'), + (['tritium', 'hydrogen-3'], 'T'), + (['triton', 'tritium+', 'tritium 1+', 'tritium +1'], 'T 1+'), + (['alpha'], 'He-4 2+'), + ] + + for aliases, symbol in case_sensitive_aliases_for_a_symbol: + for alias in aliases: + case_sensitive_aliases[alias] = symbol + + for aliases, symbol in case_insensitive_aliases_for_a_symbol: + for alias in aliases: + case_insensitive_aliases[alias.lower()] = symbol + + alias_keys = list(case_insensitive_aliases.keys()) + + for alias in alias_keys: + if 'anti' in alias and 'anti-' not in alias: + symbol = case_insensitive_aliases[alias].lower() + new_alias = alias.replace('anti', 'anti-') + case_insensitive_aliases[new_alias] = symbol + + return case_sensitive_aliases, case_insensitive_aliases + + +_case_sensitive_aliases, _case_insensitive_aliases = \ + _create_alias_dicts(_Particles) + + +def _dealias_particle_aliases(alias: Union[str, int]) -> str: + """Returns the standard symbol for a particle or antiparticle + when the argument is a valid alias. If the argument is not a + valid alias, then this function returns the original argument + (which will usually be a string but may be an int representing + atomic number).""" + if not isinstance(alias, str): + symbol = alias + elif (alias in _case_sensitive_aliases.values() or + alias in _case_insensitive_aliases.values()): + symbol = alias + elif alias in _case_sensitive_aliases.keys(): + symbol = _case_sensitive_aliases[alias] + elif alias.lower() in _case_insensitive_aliases.keys(): + symbol = _case_insensitive_aliases[alias.lower()] + else: + symbol = alias + return symbol + + +def _invalid_particle_errmsg(argument, mass_numb=None, Z=None): + r"""Returns a string with an appropriate error message for an + InvalidParticleError.""" + errmsg = f"The argument {repr(argument)} " + if mass_numb is not None or Z is not None: + errmsg += "with " + if mass_numb is not None: + errmsg += f"mass_numb = {repr(mass_numb)} " + if mass_numb is not None and Z is not None: + errmsg += "and " + if Z is not None: + errmsg += f"integer charge Z = {repr(Z)} " + errmsg += "does not correspond to a valid particle." + return errmsg + + +def _parse_and_check_atomic_input( + argument: Union[str, int], + mass_numb: int = None, + Z: int = None): + r"""Parses information about a particle into a dictionary + containing standard symbols, while checking to make sure + that the particle is valid. 
+ + Parameters + ---------- + + argument : string or integer + String containing information for an element, isotope, or ion + in any of the allowed formats; or an integer representing an + atomic number. + + mass_numb : integer, optional + The mass number of an isotope. + + Z : integer, optional + The integer charge of an ion. + + Returns + ------- + + nomenclature_dict : dict + A dictionary containing information about the element, isotope, + or ion. The key 'symbol' corresponds to the particle symbol + containing the most information, 'element' corresponds to the + atomic symbol, 'isotope' corresponds to the isotope symbol, + 'ion' corresponds to the ion symbol, 'mass_numb' corresponds + to the mass number, and 'Z' corresponds to the integer charge. + The corresponding items will be given by None if the necessary + information is not provided. + + Raises + ------ + + InvalidParticleError + If the arguments do not correspond to a valid particle or + antiparticle. + + InvalidElementError + If the particle is valid but does not correspond to an element, + ion, or isotope. + + TypeError + If the argument or any of the keyword arguments is not of the + correct type. + + """ + + def _atomic_number_to_symbol(atomic_numb: int): + r"""Returns the atomic symbol associated with an integer + representing an atomic number, or raises an InvalidParticleError + if the atomic number does not represent a known element.""" + + if atomic_numb in _atomic_symbols.keys(): + element = _atomic_symbols[atomic_numb] + return element + else: + raise InvalidParticleError( + f"{atomic_numb} is not a valid atomic number.") + + def _extract_charge(arg: str): + r"""Receives a string representing an element, isotope, or ion. + Returns a tuple containing a string that should represent an + element or isotope, and either an integer representing the + charge or None if no charge information is provided. Raises + an InvalidParticleError if charge information is inputted + incorrectly.""" + + invalid_charge_errmsg = ( + f"Invalid charge information in the particle string '{arg}'.") + + if arg.count(' ') == 1: # Cases like 'H 1-' and 'Fe-56 1+' + isotope_info, charge_info = arg.split(' ') + + sign_indicator_only_on_one_end = ( + charge_info.endswith(('-', '+')) ^ + charge_info.startswith(('-', '+'))) + + just_one_sign_indicator = ( + (charge_info.count('-') == 1 and + charge_info.count('+') == 0) or + (charge_info.count('-') == 0 and + charge_info.count('+') == 1)) + + if not sign_indicator_only_on_one_end and just_one_sign_indicator: + raise InvalidParticleError(invalid_charge_errmsg) from None + + if '-' in charge_info: + sign = -1 + elif '+' in charge_info: + sign = 1 + + charge_str = charge_info.strip('+-') + Z_from_arg = sign * int(charge_str) + + elif arg.endswith(('-', '+')): # Cases like 'H-' and 'Pb-209+++' + char = arg[-1] + match = re.match(f"[{char}]*", arg[::-1]) + Z_from_arg = match.span()[1] + isotope_info = arg[0:len(arg) - match.span()[1]] + + if char == '-': + Z_from_arg = -Z_from_arg + if isotope_info.endswith(('-', '+')): + raise InvalidParticleError(invalid_charge_errmsg) from None + else: + isotope_info = arg + Z_from_arg = None + + return isotope_info, Z_from_arg + + def _extract_mass_number(isotope_info: str): + r"""Receives a string representing an element or isotope. + Returns a tuple containing a string that should represent + an element, and either an integer representing the mass + number or None if no mass number is available. 
Raises an + InvalidParticleError if the mass number information is + inputted incorrectly.""" + + if isotope_info == 'D': + element_info, mass_numb = 'H', 2 + elif isotope_info == 'T': + element_info = 'H' + mass_numb = 3 + elif isotope_info == 'p': + element_info = 'H' + mass_numb = 1 + elif '-' not in isotope_info: + element_info = isotope_info + mass_numb = None + elif isotope_info.count('-') == 1: + element_info, mass_numb_str = isotope_info.split('-') + try: + mass_numb = int(mass_numb_str) + except ValueError: + raise InvalidParticleError( + f"Invalid mass number in isotope string " + f"'{isotope_info}'.") from None + + return element_info, mass_numb + + def _get_element(element_info: str) -> str: + r"""Receives a string representing an element's symbol or + name, and returns a string representing the atomic symbol.""" + + if element_info.lower() in _atomic_symbols_dict.keys(): + element = _atomic_symbols_dict[element_info.lower()] + elif element_info in _atomic_symbols.values(): + element = element_info + else: + raise InvalidParticleError( + f"The string '{element_info}' does not correspond to " + f"a valid element.") + + return element + + def _reconstruct_isotope_symbol(element: str, mass_numb: int) -> str: + r"""Receives a string representing an atomic symbol and an + integer representing a mass number. Returns the isotope symbol + or None if no mass number information is available. Raises an + InvalidParticleError for isotopes that have not been discovered.""" + + if mass_numb is not None: + isotope = f"{element}-{mass_numb}" + + if isotope == 'H-2': + isotope = 'D' + elif isotope == 'H-3': + isotope = 'T' + + if isotope not in _Isotopes.keys(): + raise InvalidParticleError( + f"The string '{isotope}' does not correspond to " + f"a valid isotope.") + + else: + isotope = None + + return isotope + + def _reconstruct_ion_symbol( + element: str, isotope: int = None, Z: int = None): + r"""Receives a string representing an atomic symbol and/or a + string representing an isotope, and an integer representing the + integer charge. 
Returns a string representing the ion symbol, + or None if no charge information is available.""" + + if Z is not None: + if Z < 0: + sign = '-' + else: + sign = '+' + + if isotope is None: + base = element + else: + base = isotope + + ion = f"{base} {np.abs(Z)}{sign}" + else: + ion = None + + if ion == 'H-1 1+': + ion = 'p+' + + return ion + + if not isinstance(argument, (str, int)): # coveralls: ignore + raise TypeError(f"The argument {argument} is not an integer or " + "string.") + + arg = _dealias_particle_aliases(argument) + + if arg in ParticleZoo.everything - {'p+'}: + if (mass_numb is not None) or (Z is not None): + raise InvalidParticleError( + f"The keywords mass_numb and Z should not be specified " + f"for particle '{argument}', which is a special particle.") + else: + raise InvalidElementError(f"{argument} is not a valid element.") + + if isinstance(arg, str) and arg.isdigit(): + arg = int(arg) + + if isinstance(arg, int): + element = _atomic_number_to_symbol(arg) + Z_from_arg = None + mass_numb_from_arg = None + elif isinstance(arg, str): + isotope_info, Z_from_arg = _extract_charge(arg) + element_info, mass_numb_from_arg = \ + _extract_mass_number(isotope_info) + element = _get_element(element_info) + + if mass_numb is not None and mass_numb_from_arg is not None: + if mass_numb != mass_numb_from_arg: + raise InvalidParticleError( + "The mass number extracted from the particle string " + f"'{argument}' is inconsistent with the keyword mass_numb = " + f"{mass_numb}.") + else: + warnings.warn("Redundant mass number information for particle " + f"'{argument}' with mass_numb = {mass_numb}.", + AtomicWarning) + + if mass_numb_from_arg is not None: + mass_numb = mass_numb_from_arg + + if Z is not None and Z_from_arg is not None: + if Z != Z_from_arg: + raise InvalidParticleError( + "The integer charge extracted from the particle string " + f"'{argument}' is inconsistent with the keyword Z = {Z}.") + else: + warnings.warn("Redundant charge information for particle " + f"'{argument}' with Z = {Z}.", AtomicWarning) + + if Z_from_arg is not None: + Z = Z_from_arg + + if isinstance(Z, int): + if Z > _Elements[element]['atomic_number']: + raise InvalidParticleError( + f"The integer charge Z = {Z} cannot exceed the atomic number " + f"of {element}, which is " + f"{_Elements[element]['atomic_number']}.") + elif Z < -3: + warnings.warn(f"Particle '{argument}' has an integer charge " + f"of Z = {Z}, which is unlikely to occur in " + f"nature.", AtomicWarning) + + isotope = _reconstruct_isotope_symbol(element, mass_numb) + ion = _reconstruct_ion_symbol(element, isotope, Z) + + if ion: + symbol = ion + elif isotope: + symbol = isotope + else: + symbol = element + + nomenclature_dict = { + 'symbol': symbol, + 'element': element, + 'isotope': isotope, + 'ion': ion, + 'mass_numb': mass_numb, + 'Z': Z, + } + + return nomenclature_dict + + +def _call_string(arg: Union[str, int], kwargs: Dict = {}) -> str: + r"""Return a string that recreates the call to create a particular + particle from the input.""" + if kwargs != {}: + keyword_string = ", " \ + + str(kwargs).strip(r"}{'").replace("'", "").replace(":", " =") + else: + keyword_string = "" + return f"Particle({repr(arg)}{keyword_string})" diff --git a/plasmapy/atomic/particles.py b/plasmapy/atomic/particles.py index 7e845b99..4a1aed09 100644 --- a/plasmapy/atomic/particles.py +++ b/plasmapy/atomic/particles.py @@ -1,9 +1,129 @@ -import typing +from typing import Set, Dict, List, Optional, Union from astropy import units as u, constants as const 
import numpy as np -def _create_Particles_dict() -> typing.Dict[str, dict]: +class _ParticleZooClass(): + r"""Creates an object with taxonomy information for special particles. + + The _taxonomy_dict attribute contains the name of each classification + (e.g., 'lepton', 'baryon', 'matter', etc.) as the keys and a set of + particle symbol strings of the particles belonging to that classification. + + The attributes of this class provide sets of strings representing + particles in the corresponding category. + + Examples + -------- + >>> ParticleZoo = _ParticleZooClass() + >>> 'e-' in ParticleZoo.leptons + True + >>> 'nu_e' in ParticleZoo.antineutrinos + False + >>> 'mu+' in ParticleZoo.antiparticles + True + """ + + def __init__(self): + + leptons = {'e-', 'mu-', 'tau-', 'nu_e', 'nu_mu', 'nu_tau'} + + antileptons = {'e+', 'mu+', 'tau+', 'anti_nu_e', + 'anti_nu_mu', 'anti_nu_tau'} + + baryons = {'p+', 'n'} + + antibaryons = {'p-', 'antineutron'} + + particles = leptons | baryons + + antiparticles = antileptons | antibaryons + + fermions = leptons | antileptons | baryons | antibaryons + + bosons = set() + + neutrinos = {lepton for lepton in leptons if 'nu' in lepton} + + antineutrinos = {antilepton for antilepton in antileptons + if 'nu' in antilepton} + + self._taxonomy_dict = { + 'lepton': leptons, + 'antilepton': antileptons, + 'baryon': baryons, + 'antibaryon': antibaryons, + 'fermion': fermions, + 'boson': bosons, + 'neutrino': neutrinos, + 'antineutrinos': antineutrinos, + 'matter': particles, + 'antimatter': antiparticles, + } + + @property + def leptons(self) -> Set[str]: + r"""Returns a set of strings representing leptons.""" + return self._taxonomy_dict['lepton'] + + @property + def antileptons(self) -> Set[str]: + r"""Returns a set of strings representing antileptons.""" + return self._taxonomy_dict['antilepton'] + + @property + def baryons(self) -> Set[str]: + r"""Returns a set of strings representing baryons.""" + return self._taxonomy_dict['baryon'] + + @property + def antibaryons(self) -> Set[str]: + r"""Returns a set of strings representing antibaryons.""" + return self._taxonomy_dict['antibaryon'] + + @property + def fermions(self) -> Set[str]: + r"""Returns a set of strings representing fermions.""" + return self._taxonomy_dict['fermion'] + + @property + def bosons(self) -> Set[str]: + r"""Returns a set of strings representing bosons.""" + return self._taxonomy_dict['boson'] + + @property + def neutrinos(self) -> Set[str]: + r"""Returns a set of strings representing neutrinos.""" + return self._taxonomy_dict['neutrino'] + + @property + def antineutrinos(self) -> Set[str]: + r"""Returns a set of strings representing antineutrinos.""" + return self._taxonomy_dict['antineutrinos'] + + @property + def particles(self) -> Set[str]: + r"""Returns a set of strings representing particles (as + opposed to antiparticles).""" + return self._taxonomy_dict['matter'] + + @property + def antiparticles(self) -> Set[str]: + r"""Returns a set of strings representing antiparticles.""" + return self._taxonomy_dict['antimatter'] + + @property + def everything(self) -> Set[str]: + r"""Returns a set of strings representing all particles and + antiparticles""" + return \ + self._taxonomy_dict['matter'] | self._taxonomy_dict['antimatter'] + + +ParticleZoo = _ParticleZooClass() + + +def _create_Particles_dict() -> Dict[str, dict]: """Create a dictionary of dictionaries that contains physical information for particles and antiparticles that are not elements or ions. 
@@ -14,25 +134,6 @@ def _create_Particles_dict() -> typing.Dict[str, dict]: strings such as 'name', 'mass', and 'spin' as the keys and the corresponding atomic properties as symbols.""" - leptons = ['e-', 'mu-', 'tau-', 'nu_e', 'nu_mu', 'nu_tau'] - antileptons = ['e+', 'mu+', 'tau+', 'anti_nu_e', - 'anti_nu_mu', 'anti_nu_tau'] - - baryons = ['p', 'n'] - antibaryons = ['p-', 'antineutron'] - - everything = leptons + antileptons + baryons + antibaryons - - particles = leptons + baryons - antiparticles = antileptons + antibaryons - - fermions = leptons + antileptons + baryons + antibaryons - bosons = [] - - neutrinos = [lepton for lepton in leptons if 'nu' in lepton] - antineutrinos = [antilepton for antilepton in antileptons - if 'nu' in antilepton] - symbols_and_names = [ ('e-', 'electron'), ('e+', 'positron'), @@ -46,7 +147,7 @@ def _create_Particles_dict() -> typing.Dict[str, dict]: ('anti_nu_mu', 'muon antineutrino'), ('nu_tau', 'tau neutrino'), ('anti_nu_tau', 'tau antineutrino'), - ('p', 'proton'), + ('p+', 'proton'), ('p-', 'antiproton'), ('n', 'neutron'), ('antineutron', 'antineutron'), @@ -54,47 +155,47 @@ def _create_Particles_dict() -> typing.Dict[str, dict]: Particles = dict() - for thing in everything: + for thing in ParticleZoo.everything: Particles[thing] = dict() for symbol, name in symbols_and_names: Particles[symbol]['name'] = name - for fermion in fermions: + for fermion in ParticleZoo.fermions: Particles[fermion]['spin'] = 0.5 - for boson in bosons: + for boson in ParticleZoo.bosons: # coveralls: ignore Particles[boson]['spin'] = 0 - for lepton in leptons: + for lepton in ParticleZoo.leptons: Particles[lepton]['class'] = 'lepton' Particles[lepton]['lepton number'] = 1 Particles[lepton]['baryon number'] = 0 - if lepton not in neutrinos: + if lepton not in ParticleZoo.neutrinos: Particles[lepton]['charge'] = -1 else: Particles[lepton]['charge'] = 0 - for antilepton in antileptons: + for antilepton in ParticleZoo.antileptons: Particles[antilepton]['class'] = 'antilepton' Particles[antilepton]['lepton number'] = -1 Particles[antilepton]['baryon number'] = 0 - if antilepton not in antineutrinos: + if antilepton not in ParticleZoo.antineutrinos: Particles[antilepton]['charge'] = 1 else: Particles[antilepton]['charge'] = 0 - for baryon in baryons: + for baryon in ParticleZoo.baryons: Particles[baryon]['class'] = 'baryon' Particles[baryon]['lepton number'] = 0 Particles[baryon]['baryon number'] = 1 - for antibaryon in antibaryons: + for antibaryon in ParticleZoo.antibaryons: Particles[antibaryon]['class'] = 'antibaryon' Particles[antibaryon]['lepton number'] = 0 Particles[antibaryon]['baryon number'] = -1 - for thing in leptons + antileptons: + for thing in ParticleZoo.leptons | ParticleZoo.antileptons: if 'e' in thing: Particles[thing]['generation'] = 1 elif 'mu' in thing: @@ -102,7 +203,7 @@ def _create_Particles_dict() -> typing.Dict[str, dict]: elif 'tau' in thing: Particles[thing]['generation'] = 3 - for thing in leptons + antileptons: + for thing in ParticleZoo.leptons | ParticleZoo.antileptons: if 'nu' not in thing: if 'e' in thing: Particles[thing]['mass'] = const.m_e @@ -113,10 +214,17 @@ def _create_Particles_dict() -> typing.Dict[str, dict]: Particles[thing]['mass'] = 3.167_47e-27 * u.kg Particles[thing]['half-life'] = 2.906e-13 * u.s - for thing in ['p', 'p-']: + # Neutrinos are now known to have a tiny but non-zero mass, but + # it is not known what the masses of the neutrinos actually are. 
+ # Setting the neutrino mass to None here will signal that their masses + # are not yet known. + + for thing in ParticleZoo.neutrinos | ParticleZoo.antineutrinos: + Particles[thing]['mass'] = None + + for thing in ['p+', 'p-']: Particles[thing]['mass'] = const.m_p - Particles['p']['charge'] = 1 + Particles['p+']['charge'] = 1 Particles['p-']['charge'] = -1 for thing in ['n', 'antineutron']: @@ -124,126 +232,23 @@ def _create_Particles_dict() -> typing.Dict[str, dict]: Particles[thing]['half-life'] = 881.5 * u.s Particles[thing]['charge'] = 0 - for thing in everything: + for thing in ParticleZoo.everything: if 'half-life' not in Particles[thing].keys(): Particles[thing]['half-life'] = np.inf * u.s - for particle in particles: + for particle in ParticleZoo.particles: Particles[particle]['antimatter'] = False - for antiparticle in antiparticles: + for antiparticle in ParticleZoo.antiparticles: Particles[antiparticle]['antimatter'] = True return Particles -def _create_alias_dicts(Particles: dict) -> (typing.Dict[str, str], - typing.Dict[str, str]): - """Create dictionaries for case sensitive aliases and case - insensitive aliases of special particles and antiparticles. - - The keys of these dictionaries are the aliases, and the values - are the corresponding standardized symbol for the particle or - antiparticle.""" - - case_sensitive_aliases = {} - case_insensitive_aliases = {} - - for symbol in Particles.keys(): - name = Particles[symbol]['name'] - case_insensitive_aliases[name.lower()] = symbol - - case_sensitive_aliases_for_a_symbol = [ - (['beta-'], 'e-'), - (['beta+'], 'e+'), - (['p+'], 'p'), - (['n-1'], 'n'), - (['H-2'], 'D'), - (['H-2+', 'H-2 1+', 'H-2 +1', 'D+'], 'D 1+'), - (['H-3+', 'H-3 1+', 'H-3 +1', 'T+'], 'T 1+'), - ] - - case_insensitive_aliases_for_a_symbol = [ - (['antielectron'], 'e+'), - (['muon-'], 'mu-'), - (['muon+'], 'mu+'), - (['tau particle'], 'tau-'), - (['protium'], 'H-1'), - (['protium+', 'protium 1+', 'protium +1'], 'p'), - (['deuterium', 'hydrogen-2'], 'D'), - (['deuteron', 'deuterium+', 'deuterium 1+', 'deuterium +1'], - 'D 1+'), - (['tritium', 'hydrogen-3'], 'T'), - (['triton', 'tritium+', 'tritium 1+', 'tritium +1'], 'T 1+'), - (['alpha'], 'He-4 2+'), - ] - - for aliases, symbol in case_sensitive_aliases_for_a_symbol: - for alias in aliases: - case_sensitive_aliases[alias] = symbol - - for aliases, symbol in case_insensitive_aliases_for_a_symbol: - for alias in aliases: - case_insensitive_aliases[alias.lower()] = symbol - - alias_keys = list(case_insensitive_aliases.keys()) - - for alias in alias_keys: - if 'anti' in alias and 'anti-' not in alias: - symbol = case_insensitive_aliases[alias].lower() - new_alias = alias.replace('anti', 'anti-') - case_insensitive_aliases[new_alias] = symbol - - return case_sensitive_aliases, case_insensitive_aliases - - _Particles = _create_Particles_dict() -_case_sensitive_aliases, _case_insensitive_aliases = \ - _create_alias_dicts(_Particles) - - -def _get_standard_symbol(alias: typing.Union[str, int]) -> str: - """Returns the standard symbol for a particle or antiparticle - when the argument is a valid alias.
If the argument is not a - valid alias, then this function returns the original argument - (which will usually be a string but may be an int representing - atomic number).""" - - if not isinstance(alias, str): - return alias - - if alias in _case_sensitive_aliases.keys(): - return _case_sensitive_aliases[alias] - elif alias.lower() in _case_insensitive_aliases.keys(): - return _case_insensitive_aliases[alias.lower()] - else: - return alias - - -def _is_special_particle(alias: typing.Union[str, int]) -> bool: - r"""Returns true if a particle is a special particle, and False - otherwise.""" - - special_particles = [ - 'n', 'antineutron', 'p-', - 'e-', 'e+', 'nu_e', 'anti_nu_e', - 'mu-', 'mu+', 'nu_mu', 'anti_nu_mu', - 'tau-', 'tau+', 'nu_tau', 'anti_nu_tau', - ] - - symbol = _get_standard_symbol(alias) - - return symbol in special_particles - if __name__ == "__main__": # coveralls: ignore from pprint import pprint - print("Case insensitive aliases:") - pprint(_case_insensitive_aliases) - print(20*"=") - print("Case sensitive aliases:") - pprint(_case_sensitive_aliases) - print(20*"=") print("Particles:") pprint(_Particles)
Create an object-oriented Particle interface

This has been brewing in a bunch of places, including discussion on #171 and [this chat on Matrix](https://matrix.to/#/!hkWCiyhQyxiYJlUtKF:matrix.org/$15130263015090890CUoWY:matrix.org). Right now, we're calling all our particle-dependent functionality via strings. Strings are problematic for carrying the complete information about a particle (there are different elements, isotopes, ionization states, and @namurphy, I'm likely missing a bunch, so please correct me here) while keeping them readable. The idea here is that we could create a Particle class, keep that information (element type, charge state, isotope) as fields, and do all the calculations and operations on that object. This would let us avoid duplicating code in every single place a particle type is needed. We'd still like to be able to pass strings in (as before, `particle='e'`), so what @namurphy proposed is a decorator that inspects the particle argument and, if a string was passed, turns that string into a Particle before calling the function; a sketch follows below. This seems reasonable AND it involves using a decorator, so it's totally baller to me. The whole class setup still has to be figured out: do we want the same kind of object for atoms/nuclei and for electrons/other charged leptons (no isotopes for the latter)? What kind of inheritance scheme is best for those? I'm not sure yet, but this issue is part of figuring that out.
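For concreteness, here is a minimal sketch of how the proposed decorator could work. It is an illustration under stated assumptions, not a settled API: the names `particle_input` and `gyrofrequency` are hypothetical, and the sketch assumes a `Particle` constructor that accepts a bare symbol string.

```python
import functools


class Particle:
    """Stand-in for the proposed Particle class (illustrative only)."""

    def __init__(self, symbol: str):
        self.symbol = symbol


def particle_input(func):
    """Convert a string ``particle`` keyword argument into a Particle."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if isinstance(kwargs.get('particle'), str):
            kwargs['particle'] = Particle(kwargs['particle'])
        return func(*args, **kwargs)
    return wrapper


@particle_input
def gyrofrequency(B, particle=None):
    # The wrapped function can rely on receiving a Particle instance.
    return f"B = {B}, particle = {particle.symbol}"


print(gyrofrequency(1.0, particle='e-'))  # strings still work at call sites
```

One design choice worth noting: doing the conversion in a decorator keeps every physics function's body free of string parsing, so the validation logic lives in exactly one place.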
PlasmaPy/PlasmaPy
diff --git a/plasmapy/atomic/tests/test_atomic.py b/plasmapy/atomic/tests/test_atomic.py index 7ee9a731..bae6a504 100644 --- a/plasmapy/atomic/tests/test_atomic.py +++ b/plasmapy/atomic/tests/test_atomic.py @@ -89,8 +89,9 @@ def test_atomic_symbol(argument, expected): """Test that atomic_symbol returns the expected result.""" assert atomic_symbol(argument) == expected, \ - (f"atomic_symbol({argument}) is returning {atomic_symbol(argument)} " - f"which differs from the expected value of {expected}.") + (f"atomic_symbol({repr(argument)}) is returning " + f"{atomic_symbol(argument)} " + f"which differs from the expected value of {repr(expected)}.") # (argument, expected_error) @@ -122,7 +123,8 @@ def test_atomic_symbol(argument, expected): def test_atomic_symbol_error(argument, expected_error): """Test that atomic_symbol raises the expected exceptions.""" with pytest.raises(expected_error, message=( - f"atomic_symbol({argument}) is not raising {expected_error}.")): + f"atomic_symbol({repr(argument)}) is not raising " + f"{expected_error}.")): atomic_symbol(argument) @@ -154,12 +156,7 @@ def test_atomic_symbol_error(argument, expected_error): (('protium',), 'H-1'), (('N-13 2+',), 'N-13'), (('Hydrogen-3 +1',), 'T'), - (('neutron',), 'n'), - (('n',), 'n'), - ((0, 1), 'n'), - (('neutron',), 'n'), - (('Neutron',), 'n'), - (('n-1',), 'n')] +] @pytest.mark.parametrize( @@ -168,8 +165,8 @@ def test_isotope_symbol(arguments, expected): """Test that isotope_symbol returns the expected results.""" assert isotope_symbol(*arguments) == expected, \ (f"isotope_symbol is returning {isotope_symbol(*arguments)} " - f"for arguments of {arguments}, which differs from the " - f"expected value of {expected}.") + f"for arguments of {repr(arguments)}, which differs from the " + f"expected value of {repr(expected)}.") # (argument, kwargs, expected_error) @@ -193,6 +190,7 @@ def test_isotope_symbol(arguments, expected): ('D', {"mass_numb": 3}, InvalidParticleError), ('T', {"mass_numb": 2}, InvalidParticleError), ('Fe', {"mass_numb": None}, InvalidIsotopeError), + ('He', {"mass_numb": 99}, InvalidParticleError), ('d', {}, InvalidParticleError), ('h-3', {}, InvalidParticleError), ('h', {}, InvalidParticleError), @@ -205,7 +203,7 @@ def test_isotope_symbol(arguments, expected): def test_isotope_symbol_error(argument, kwargs, expected_error): """Test that isotope_symbol raises the expected exceptions.""" with pytest.raises(expected_error, message=( - f"isotope_symbol({argument}, **{kwargs}) is not raising a " + f"isotope_symbol({repr(argument)}, **{kwargs}) is not raising a " f"{expected_error}.")): isotope_symbol(argument, **kwargs) @@ -226,7 +224,7 @@ def test_isotope_symbol_error(argument, kwargs, expected_error): def test_isotope_symbol_warnings(argument, kwargs, expected_warning): """Test that isotope_symbol issues the expected warnings.""" with pytest.warns(expected_warning, message=( - f"isotope_symbol({argument}, **{kwargs}) is not issuing a " + f"isotope_symbol({repr(argument)}, **{kwargs}) is not issuing a " f"{expected_warning}.")): isotope_symbol(argument, **kwargs) @@ -257,7 +255,8 @@ def test_isotope_symbol_warnings(argument, kwargs, expected_warning): def test_atomic_number(argument, expected): """Test that atomic_number returns the expected results.""" assert atomic_number(argument) == expected, \ - (f"atomic_number({argument}) is expecting a result of {expected} but " + (f"atomic_number({repr(argument)}) is expecting a result of " + f"{repr(expected)} but " f"is getting a result of 
{atomic_number(argument)}.") @@ -280,7 +279,8 @@ def test_atomic_number(argument, expected): def test_atomic_number_error(argument, expected_error): """Test that atomic_number raises the expected exceptions.""" with pytest.raises(expected_error, warning=( - f"atomic_number({argument}) is not raising a {expected_error}")): + f"atomic_number({repr(argument)}) is not raising a " + f"{expected_error}")): atomic_number(argument) @@ -309,9 +309,9 @@ def test_atomic_number_error(argument, expected_error): def test_mass_number(isotope, expected): """Test that mass_number returns the expected results.""" assert mass_number(isotope) == expected, \ - (f"mass_number({isotope}) is returning a value of " + (f"mass_number({repr(isotope)}) is returning a value of " f"{mass_number(isotope)}, which differs from the expected " - f"value of {expected}.") + f"value of {repr(expected)}.") # (argument, expected_error) @@ -362,9 +362,9 @@ def test_mass_number_error(argument, expected_error): def test_element_name(argument, expected): """Test that element_name returns the expected results.""" assert element_name(argument) == expected, \ - (f"element_name({argument}) is returning a value of " + (f"element_name({repr(argument)}) is returning a value of " f"{element_name(argument)}, which differs from the expected " - f"value of {expected}.") + f"value of {repr(expected)}.") # (argument, expected_error) @@ -418,7 +418,7 @@ def test_standard_atomic_weight(argument, expected): """Test that standard_atomic_weight returns the expected values for hydrogen.""" assert standard_atomic_weight(argument).value == expected, \ - f"Incorrect standard_atomic_weight for {argument}." + f"Incorrect standard_atomic_weight for {repr(argument)}." # (argument, expected_error) @@ -443,7 +443,7 @@ def test_standard_atomic_weight(argument, expected): def test_standard_atomic_weight_error(argument, expected_error): """Test that standard_atomic_weight raises the expected exceptions.""" with pytest.raises(expected_error, message=( - f"standard_atomic_weight({argument}) is not raising a " + f"standard_atomic_weight({repr(argument)}) is not raising a " "{expected_error}.")): standard_atomic_weight(argument) @@ -506,7 +506,8 @@ def test_isotope_mass(arg1, arg2): def test_isotope_mass_error(argument, expected_error): """Test that isotope_mass raises the expected exceptions.""" with pytest.raises(expected_error, warning=( - f"isotope_mass({argument}) is not raising a {expected_error}")): + f"isotope_mass({repr(argument)}) is not raising a " + f"{expected_error}")): isotope_mass(argument) @@ -542,8 +543,8 @@ def test_ion_mass_unit(): def test_ion_mass_proton_mass(arg, kwargs): should_be_proton_mass = ion_mass(arg, **kwargs) assert should_be_proton_mass == const.m_p, \ - (f"ion_mass({arg}, **{kwargs}) should be returning the proton mass, " - f"but is instead returning {should_be_proton_mass}.") + (f"ion_mass({repr(arg)}, **{kwargs}) should be returning the proton " + f"mass, but is instead returning {repr(should_be_proton_mass)}.") def test_ion_mass_miscellaneous_cases(): @@ -585,15 +586,15 @@ def test_ion_mass_equivalent_args(arg1, kwargs1, arg2, kwargs2, expected): result2 = ion_mass(arg2, **kwargs2) assert result1 == result2, \ - (f"ion_mass({arg1}, **{kwargs1}) = {result1}, whereas " - f"ion_mass({arg2}, **{kwargs2}) = {result2}. " + (f"ion_mass({repr(arg1)}, **{kwargs1}) = {repr(result1)}, whereas " + f"ion_mass({repr(arg2)}, **{kwargs2}) = {repr(result2)}. 
" f"These results are not equivalent as expected.") if expected is not None: assert result1 == result2 == expected, \ - (f"ion_mass({arg1}, **{kwargs1}) = {result1} and " - f"ion_mass({arg2}, **{kwargs2}) = {result2}, but " - f"these results are not equivalent to {expected} as expected.") + (f"ion_mass({repr(arg1)}, **{kwargs1}) = {repr(result1)} and " + f"ion_mass({repr(arg2)}, **{kwargs2}) = {repr(result2)}, but " + f"these results are not equal to {repr(expected)} as expected.") # (argument, kwargs, expected_error) @@ -609,7 +610,6 @@ def test_ion_mass_equivalent_args(arg1, kwargs1, arg2, kwargs2, expected): ('n', {}, InvalidIonError), ('He 1+', {"mass_numb": 99}, InvalidParticleError), (1 * u.m, {}, u.UnitConversionError), - ('Og', {"Z": 1}, MissingAtomicDataError), ('fe-56 1+', {}, InvalidParticleError)] @@ -618,8 +618,8 @@ def test_ion_mass_equivalent_args(arg1, kwargs1, arg2, kwargs2, expected): def test_ion_mass_error(argument, kwargs, expected_error): """Test errors that should be raised by ion_mass.""" with pytest.raises(expected_error, message=( - f"ion_mass({argument}, **{kwargs}) is not raising a " - f"{expected_error}.")): + f"ion_mass({repr(argument)}, **{kwargs}) is not raising a " + f"{repr(expected_error)}.")): ion_mass(argument, **kwargs) @@ -634,7 +634,7 @@ def test_ion_mass_error(argument, kwargs, expected_error): def test_ion_mass_warnings(argument, kwargs, expected_warning): """Test that ion_mass issues the expected warnings.""" with pytest.warns(expected_warning, message=( - f"ion_mass({argument}, **{kwargs}) is not issuing a " + f"ion_mass({repr(argument)}, **{kwargs}) is not issuing a " f"{expected_warning}.")): ion_mass(argument, **kwargs) @@ -664,7 +664,7 @@ def test_ion_mass_warnings(argument, kwargs, expected_warning): def test_is_isotope_stable(argument): """Test that is_isotope_stable returns True for stable isotopes.""" assert is_isotope_stable(*argument), \ - f"is_isotope_stable is not returning True for {argument}" + f"is_isotope_stable is not returning True for {repr(argument)}" # (argument) @@ -672,7 +672,6 @@ def test_is_isotope_stable(argument): ('Be-8',), ('n',), ('n-1',), - (0, 1), ('U-235',), ('uranium-235',), ('T',), @@ -691,7 +690,7 @@ def test_is_isotope_stable(argument): def test_is_isotope_stable_false(argument): """Test that is_isotope_stable returns False for unstable isotopes.""" assert not is_isotope_stable(*argument), \ - f"is_isotope_stable is not returning False for {argument}" + f"is_isotope_stable is not returning False for {repr(argument)}" # (argument, expected_error) @@ -708,7 +707,7 @@ def test_is_isotope_stable_false(argument): def test_is_isotope_stable_error(argument, expected_error): """Test errors that should be raised by is_isotope_stable.""" with pytest.raises(expected_error, message=( - f"is_isotope_stable({argument}) is not raising a " + f"is_isotope_stable({repr(argument)}) is not raising a " f"{expected_error}")): is_isotope_stable(*argument) @@ -753,7 +752,7 @@ def test_half_life_unstable_isotopes(): if 'half_life' not in _Isotopes[isotope].keys() and \ not _Isotopes[isotope].keys(): with pytest.warns(AtomicWarning, message=( - f"No AtomicWarning issued for {isotope}")): + f"No AtomicWarning issued for {repr(isotope)}")): assert half_life(isotope) is None @@ -777,8 +776,8 @@ def test_half_life_u_220(): f"half-life data") assert half_life_isotope is None, \ - (f"half_life should return None for an isotope without half-life " - f"data, but is returning {half_life_isotope}") + (f"half_life should return {None} for an 
isotope without half-life" + f" data, but is returning {half_life_isotope}") atomic_TypeError_funcs_table = [ @@ -834,7 +833,7 @@ def test_atomic_TypeErrors(func, argument): 'grumblemuffins', 'H-0', 'Og-294b', - 'H-934361079326356530741942970523610389', + 'H-9343610', 'Fe 2+4', 'Fe+24', 'Fe +59', @@ -980,8 +979,9 @@ def test_isotopic_abundances_sum(element, isotopes): def test_integer_charge(argument, expected): """Test that integer_charge returns the expected results.""" assert integer_charge(argument) == expected, \ - (f"integer_charge({argument}) is returning {integer_charge(argument)}" - f" which differs from the expected result of {expected}.") + (f"integer_charge({repr(argument)}) is returning " + f"{integer_charge(argument)} which differs from the expected result " + f"of {expected}.") # (argument, expected_error) @@ -1001,7 +1001,8 @@ def test_integer_charge(argument, expected): def test_integer_charge_error(argument, expected_error): """Test that integer_charge raises the expected exceptions.""" with pytest.raises(expected_error, message=( - f"integer_charge({argument} is not raising a {expected_error}.")): + f"integer_charge({repr(argument)} is not raising a " + f"{expected_error}.")): integer_charge(argument) @@ -1017,7 +1018,8 @@ def test_integer_charge_error(argument, expected_error): def test_integer_charge_warnings(argument, expected_warning): """Test that integer_charge issues appropriate warnings.""" with pytest.warns(expected_warning, message=( - f"integer_charge({argument}) is not issuing {expected_warning}")): + f"integer_charge({repr(argument)}) is not issuing " + f"{expected_warning}")): integer_charge(argument) @@ -1043,7 +1045,7 @@ def test_electric_charge(): def test_electric_charge_error(argument, expected_error): """Test that electric_charge raises the expected exceptions.""" with pytest.raises(expected_error, message=( - f"electric_charge({argument}) is not raising a " + f"electric_charge({repr(argument)}) is not raising a " f"{expected_error}.")): electric_charge(argument) @@ -1059,7 +1061,7 @@ def test_electric_charge_error(argument, expected_error): def test_electric_charge_warning(argument, expected_warning): """Test that electric_charge issues the expected warnings.""" with pytest.warns(expected_warning, message=( - f"electric_charge({argument}) is not issuing a " + f"electric_charge({repr(argument)}) is not issuing a " f"{expected_warning}.")): electric_charge(argument) diff --git a/plasmapy/atomic/tests/test_classes.py b/plasmapy/atomic/tests/test_classes.py new file mode 100644 index 00000000..ff9bc105 --- /dev/null +++ b/plasmapy/atomic/tests/test_classes.py @@ -0,0 +1,398 @@ +import pytest +import numpy as np +from astropy import units as u +import inspect + +from ...constants import m_p, m_e, m_n, e + +from ...utils import ( + AtomicWarning, + AtomicError, + MissingAtomicDataError, + InvalidParticleError, + InvalidElementError, + InvalidIsotopeError, + InvalidIonError, + ChargeError, +) + +from ..classes import Particle +from ..parsing import _call_string + +# (arg, kwargs, results_dict +test_Particle_table = [ + + ('neutron', {}, + {'particle': 'n', + 'element': None, + 'isotope': None, + 'ion': None, + 'integer_charge': 0, + 'atomic_number': InvalidElementError, + 'mass_number': InvalidIsotopeError, + 'baryon_number': 1, + 'lepton_number': 0, + 'mass': m_n, + 'nuclide_mass': m_n, + 'binding_energy': 0 * u.J}), + + ('p+', {}, + {'particle': 'p+', + 'element': 'H', + 'element_name': 'hydrogen', + 'isotope': 'H-1', + 'ion': 'p+', + 'mass': m_p, + 
'nuclide_mass': m_p, + 'integer_charge': 1, + 'charge': e.si, + 'spin': 1 / 2, + 'half_life': np.inf * u.s, + 'atomic_number': 1, + 'mass_number': 1, + 'lepton_number': 0, + 'baryon_number': 1, + 'reduced_mass(Particle("p"))': m_p / 2, + 'reduced_mass(m_p)': m_p / 2, + '__str__()': 'p+', + '__repr__()': 'Particle("p+")', + 'is_category("fermion")': True, + 'is_category(["fermion"])': True, + 'is_category({"fermion"})': True, + 'is_category("boson", "fermion", any=True)': True, + 'is_category("boson", "fermion", any=False)': False, + 'is_category(("element", "isotope", "ion"))': True, + 'binding_energy': 0 * u.J, + }), + + ('p-', {}, + {'particle': 'p-', + 'element': None, + 'element_name': InvalidElementError, + 'isotope': None, + 'ion': None, + 'mass': m_p, + 'integer_charge': -1, + 'spin': 1 / 2, + 'half_life': np.inf * u.s, + 'atomic_number': InvalidElementError, + 'mass_number': InvalidIsotopeError, + 'lepton_number': 0, + 'baryon_number': -1, + '__str__()': 'p-', + '__repr__()': 'Particle("p-")'}), + + ('e-', {}, + {'particle': 'e-', + 'element': None, + 'element_name': InvalidElementError, + 'isotope': None, + 'ion': None, + 'mass': m_e, + 'integer_charge': -1, + 'spin': 1 / 2, + 'half_life': np.inf * u.s, + 'atomic_number': InvalidElementError, + 'lepton_number': 1, + 'baryon_number': 0, + 'reduced_mass(Particle("e+"))': m_e / 2, + 'reduced_mass("e-")': m_e / 2, + '__str__()': 'e-', + '__repr__()': 'Particle("e-")', + 'binding_energy': InvalidIsotopeError}), + + ('e+', {}, + {'particle': 'e+', + 'element': None, + 'isotope': None, + 'ion': None, + 'mass': m_e, + 'nuclide_mass': InvalidIsotopeError, + 'integer_charge': 1, + 'spin': 1 / 2, + 'half_life': np.inf * u.s, + 'atomic_number': InvalidElementError, + 'lepton_number': -1, + 'baryon_number': 0, + '__str__()': 'e+', + '__repr__()': 'Particle("e+")'}), + + ('H', {}, + {'particle': 'H', + 'element': 'H', + 'isotope': None, + 'ion': None, + 'charge': ChargeError, + 'integer_charge': ChargeError, + 'mass_number': InvalidIsotopeError, + 'baryon_number': AtomicError, + 'lepton_number': 0, + 'half_life': InvalidIsotopeError, + 'standard_atomic_weight': (1.008 * u.u).to(u.kg), + 'mass': (1.008 * u.u).to(u.kg), + 'nuclide_mass': InvalidIsotopeError}), + + ('D+', {}, + {'particle': 'D 1+', + 'element': 'H', + 'element_name': 'hydrogen', + 'isotope': 'D', + 'ion': 'D 1+', + 'integer_charge': 1, + 'atomic_number': 1, + 'mass_number': 2, + 'baryon_number': 2, + 'lepton_number': 0}), + + ('tritium', {'Z': 1}, + {'particle': 'T 1+', + 'element': 'H', + 'isotope': 'T', + 'ion': 'T 1+', + 'integer_charge': 1, + 'atomic_number': 1, + 'mass_number': 3, + 'baryon_number': 3, + 'lepton_number': 0}), + + ('Fe', {'Z': 17, 'mass_numb': 56}, + {'particle': 'Fe-56 17+', + 'element': 'Fe', + 'element_name': 'iron', + 'isotope': 'Fe-56', + 'ion': 'Fe-56 17+', + 'integer_charge': 17, + 'atomic_number': 26, + 'mass_number': 56, + 'baryon_number': 56, + '__str__()': 'Fe-56 17+', + '__repr__()': 'Particle("Fe-56 17+")'}), + + ('alpha', {}, + {'particle': 'He-4 2+', + 'element': 'He', + 'element_name': 'helium', + 'isotope': 'He-4', + 'ion': 'He-4 2+', + 'integer_charge': 2, + 'atomic_number': 2, + 'mass_number': 4, + 'baryon_number': 4, + 'lepton_number': 0, + 'half_life': np.inf * u.s}), + + ('Li', {'mass_numb': 7}, + {'particle': 'Li-7', + 'element': 'Li', + 'element_name': 'lithium', + 'isotope': 'Li-7', + 'ion': None, + 'integer_charge': ChargeError, + 'atomic_number': 3, + 'mass_number': 7, + 'baryon_number': 7, + 'half_life': np.inf * u.s, + 
'nuclide_mass': 1.1647614796180465e-26 * u.kg}), + + ('Cn-276', {"Z": 22}, + {'particle': 'Cn-276 22+', + 'element': 'Cn', + 'isotope': 'Cn-276', + 'ion': 'Cn-276 22+', + 'element_name': 'copernicium', + 'integer_charge': 22, + 'atomic_number': 112, + 'mass_number': 276, + 'baryon_number': 276, + 'lepton_number': 0, + 'half_life': MissingAtomicDataError}), + + ('muon', {}, + {'particle': 'mu-', + 'element': None, + 'isotope': None, + 'ion': None, + 'integer_charge': -1, + 'atomic_number': InvalidElementError, + 'mass_number': InvalidIsotopeError, + 'baryon_number': 0, + 'lepton_number': 1}), + + ('nu_tau', {}, + {'particle': 'nu_tau', + 'element': None, + 'isotope': None, + 'mass': MissingAtomicDataError, + 'integer_charge': 0, + 'mass_number': InvalidIsotopeError, + 'element_name': InvalidElementError, + 'baryon_number': 0, + 'lepton_number': 1, + 'half_life': np.inf * u.s, + 'is_category("fermion")': True, + 'is_category("neutrino")': True, + 'is_category("boson")': False, + 'is_category("matter", exclude={"antimatter"})': True, + 'is_category("matter", exclude=["antimatter"])': True, + 'is_category("matter", exclude="antimatter")': True, + 'is_category("matter", "boson", any=True)': True, + 'is_category(["fermion", "lepton"], exclude="matter")': False, + 'is_category("lepton", "invalid")': AtomicError, + 'is_category(["boson"], exclude=["lepton", "invalid"])': AtomicError, + 'is_category("boson", exclude="boson")': AtomicError, + 'reduced_mass("electron")': MissingAtomicDataError}), + +] + + [email protected]("arg, kwargs, expected_dict", test_Particle_table) +def test_Particle_class(arg, kwargs, expected_dict): + r"""Test that Particle objects for different subatomic particles, + elements, isotopes, and ions return the expected properties. Provide + a detailed error message that lists all of the inconsistencies with + the expected results.""" + + call = _call_string(arg, kwargs) + errmsg = "" + + try: + particle = Particle(arg, **kwargs) + except Exception as exc: + raise AtomicError(f"Problem creating {call}") from exc + + for key in expected_dict.keys(): + expected = expected_dict[key] + + if inspect.isclass(expected) and issubclass(expected, Exception): + + # Exceptions are expected to be raised when accessing certain + # attributes for some particles. For example, accessing a + # neutrino's mass should raise a MissingAtomicDataError since + # only upper limits of neutrino masses are presently available. + # If expected_dict[key] is an exception, then check to make + # sure that this exception is raised. + + try: + with pytest.raises(expected): + exec(f"particle.{key}") + except pytest.fail.Exception as exc_failed_fail: + errmsg += f"\n{call}[{key}] does not raise {expected}." + except Exception as exc_bad: + errmsg += (f"\n{call}[{key}] does not raise {expected} but " + f"raises a different exception.") + + else: + + try: + result = eval(f"particle.{key}") + assert result == expected + except AssertionError as exc_assert: + errmsg += f"\n{call}.{key} does not equal {expected}." + except Exception as exc_general: + errmsg += f"\n{call}.{key} raises an unexpected exception." 
+ + if len(errmsg) > 0: + raise Exception(f"Problems with {call}:{errmsg}") + + +equivalent_particles_table = [ + ['H', 'hydrogen', 'hYdRoGeN'], + ['p+', 'proton', 'H-1+', 'H-1 1+', 'H-1 +1'], + ['D', 'H-2', 'Hydrogen-2', 'deuterium'], + ['T', 'H-3', 'Hydrogen-3', 'tritium'], + ['alpha', 'He-4++', 'He-4 2+', 'He-4 +2'], + ['e-', 'electron', 'e'], + ['e+', 'positron'], + ['p-', 'antiproton'], + ['n', 'n-1', 'neutron', 'NEUTRON'], + ['muon', 'mu-', 'muon-'], + ['tau', 'tau-'], +] + + [email protected]("equivalent_particles", equivalent_particles_table) +def test_Particle_equivalent_cases(equivalent_particles): + r"""Test that all instances of a list of particles are equivalent, + except for the _original_* private variables which will differ.""" + + equivalent_Particle_classes = [] + + for particle in equivalent_particles: + equivalent_Particle_classes.append(Particle(particle)) + + for Q in equivalent_Particle_classes[1:]: + assert Q == equivalent_Particle_classes[0], \ + f"{equivalent_particles}" + + +# arg, kwargs, attribute, exception +test_Particle_error_table = [ + ('a', {}, "", InvalidParticleError), + ('d+', {'mass_numb': 9}, "", InvalidParticleError), + ('H', {'mass_numb': 99}, "", InvalidParticleError), + ('Au-818', {}, "", InvalidParticleError), + ('Au-12', {}, "", InvalidParticleError), + ('Au', {'mass_numb': 13}, "", InvalidParticleError), + ('Au', {'mass_numb': 921}, "", InvalidParticleError), + ('e-', {'Z': -1}, "", InvalidParticleError), + ('e-', {}, '.atomic_number', InvalidElementError), + ('alpha', {}, '.standard_atomic_weight', InvalidElementError), + ('Fe-56', {}, '.standard_atomic_weight', InvalidElementError), + ('tau-', {}, '.element_name', InvalidElementError), + ('tau+', {}, '.atomic_number', InvalidElementError), + ('neutron', {}, '.atomic_number', InvalidElementError), + ('H', {'Z': 0}, '.mass_number', InvalidIsotopeError), + ('neutron', {}, '.mass_number', InvalidIsotopeError), + ('He', {'mass_numb': 4}, '.charge', ChargeError), + ('He', {'mass_numb': 4}, '.integer_charge', ChargeError), + ('Fe', {}, '.spin', MissingAtomicDataError), + ('nu_e', {}, '.mass', MissingAtomicDataError), + ('Og', {}, '.standard_atomic_weight', MissingAtomicDataError), + ('Fe', {'Z': '1'}, "", TypeError), + ('Fe', {'mass_numb': '1'}, "", TypeError), + ([], {}, "", TypeError), +] + + [email protected]( + "arg, kwargs, attribute, exception", test_Particle_error_table) +def test_Particle_errors(arg, kwargs, attribute, exception): + r"""Test that the appropriate exceptions are raised during the creation + and use of a Particle object.""" + call = _call_string(arg, kwargs) + with pytest.raises(exception, message=( + f"The following command: " + f"\n\n >>> {_call_string(arg, kwargs)}{attribute}\n\n" + f"did not raise a {exception.__name__} as expected")): + exec(f'Particle(arg, **kwargs){attribute}') + + +# arg, kwargs, attribute, exception +test_Particle_warning_table = [ + ('H----', {}, "", AtomicWarning), + ('alpha', {'mass_numb': 4}, "", AtomicWarning), + ('alpha', {'Z': 2}, "", AtomicWarning) +] + + [email protected]( + "arg, kwargs, attribute, warning", test_Particle_warning_table) +def test_Particle_warnings(arg, kwargs, attribute, warning): + r"""Test that the appropriate warnings are issued during the creation + and use of a Particle object.""" + with pytest.warns(warning, message=( + f"The following command: " + f"\n\n >>> {_call_string(arg, kwargs)}{attribute}\n\n" + f"did not issue a {warning.__name__} as expected")): + exec(f'Particle(arg, **kwargs){attribute}') + + +def 
test_Particle_cmp(): + r"""Test __eq__ and __ne__ in the Particle class.""" + proton1 = Particle('p+') + proton2 = Particle('proton') + electron = Particle('e-') + + assert proton1 == proton2, "Particle('p+') == Particle('proton') is False." + assert proton1 != electron, "Particle('p+') == Particle('e-') is True." + assert not proton1 == 1, "Particle('p+') == 1 is True." diff --git a/plasmapy/atomic/tests/test_nuclear.py b/plasmapy/atomic/tests/test_nuclear.py index b1cb9c9f..b1d5b80f 100644 --- a/plasmapy/atomic/tests/test_nuclear.py +++ b/plasmapy/atomic/tests/test_nuclear.py @@ -2,7 +2,7 @@ from astropy import units as u, constants as const import numpy as np from ..nuclear import (nuclear_binding_energy, nuclear_reaction_energy) -from ...utils import (InvalidElementError, InvalidIsotopeError) +from ...utils import (InvalidParticleError, InvalidIsotopeError) import pytest @@ -20,17 +20,19 @@ def test_nuclear_binding_energy_D_T(): assert np.isclose(E_in_MeV, 17.58, rtol=0.01) -# (argument, expected_error) +# (argument, kwargs, expected_error) nuclear_binding_energy_table = [ - ("H", InvalidIsotopeError), - (1.1, TypeError)] + ("H", {}, InvalidIsotopeError), + ('He-99', {}, InvalidParticleError), + ("He", {"mass_numb": 99}, InvalidParticleError), + (1.1, {}, TypeError)] [email protected]("argument, expected_error", [email protected]("argument, kwargs, expected_error", nuclear_binding_energy_table) -def test_nuclear_binding_energy_error(argument, expected_error): +def test_nuclear_binding_energy_error(argument, kwargs, expected_error): with pytest.raises(expected_error): - nuclear_binding_energy(argument) + nuclear_binding_energy(argument, **kwargs) def test_nuclear_reaction_energy(): diff --git a/plasmapy/atomic/tests/test_parsing.py b/plasmapy/atomic/tests/test_parsing.py new file mode 100644 index 00000000..7780504f --- /dev/null +++ b/plasmapy/atomic/tests/test_parsing.py @@ -0,0 +1,291 @@ +import pytest + +from ...utils import ( + InvalidParticleError, + InvalidElementError, + AtomicWarning, +) + +from ..particles import ParticleZoo + +from ..parsing import ( + _dealias_particle_aliases, + _case_insensitive_aliases, + _case_sensitive_aliases, + _parse_and_check_atomic_input, + _call_string, +) + +aliases_and_symbols = [ + ('electron', 'e-'), + ('beta-', 'e-'), + ('beta+', 'e+'), + ('positron', 'e+'), + ('proton', 'p+'), + ('', ''), + (5, 5), + ('deuterium+', 'D 1+'), + ('deuterium 1+', 'D 1+'), + ('tritium +1', 'T 1+'), + ('alpha', 'He-4 2+'), + ('D+', 'D 1+'), + ('Deuterium', 'D'), + ('deuteron', 'D 1+'), + ('triton', 'T 1+'), + ('muon', 'mu-'), + ('antimuon', 'mu+'), + ('tau particle', 'tau-'), + ('antitau', 'tau+'), + ('p', 'p+'), + ('H-1 1+', 'p+'), + ('H-1+', 'p+'), + ('H-1 +1', 'p+'), + ('hydrogen-1+', 'p+'), +] + + [email protected]("alias, symbol", aliases_and_symbols) +def test_dealias_particle_aliases(alias, symbol): + """Test that _dealias_particle_aliases correctly takes in aliases and + returns the corresponding symbols, and returns the original argument + if the argument does not correspond to an alias.""" + result = _dealias_particle_aliases(alias) + assert result == symbol, \ + (f"_dealias_particle_aliases({alias}) returns '{result}', which " + f"differs from the expected symbol of '{symbol}'.\n\n" + f"_case_insensitive_aliases:\n{_case_insensitive_aliases}\n\n" + f"_case_sensitive_aliases:\n{_case_sensitive_aliases}") + + +alias_dictionaries = [_case_sensitive_aliases, _case_insensitive_aliases] + + [email protected]("alias_dict", alias_dictionaries) +def 
test_alias_dict_properties(alias_dict): + """Test properties of the alias dictionaries.""" + + for key in alias_dict.keys(): + assert isinstance(key, str), \ + (f"The following key should be a string, but isn't: {key}\n\n" + f"The entire dictionary is:\n\n{alias_dict}") + + for value in alias_dict.values(): + assert isinstance(value, str), \ + (f"The following value should be a string, but isn't: {value}\n\n" + f"The entire dictionary is:\n\n{alias_dict}") + + +# (arg, kwargs, expected) +parse_check_table = [ + + ('He', {'Z': 1, 'mass_numb': 4}, + {'symbol': 'He-4 1+', + 'element': 'He', + 'isotope': 'He-4', + 'ion': 'He-4 1+', + 'mass_numb': 4, + 'Z': 1}), + + ('alpha', {}, + {'symbol': 'He-4 2+', + 'element': 'He', + 'isotope': 'He-4', + 'ion': 'He-4 2+', + 'mass_numb': 4, + 'Z': 2}), + + (1, {}, + {'symbol': 'H', + 'element': 'H', + 'isotope': None, + 'ion': None, + 'Z': None, + 'mass_numb': None}), + + ('p', {}, + {'symbol': 'p+', + 'element': 'H', + 'isotope': 'H-1', + 'ion': 'p+', + 'Z': 1, + 'mass_numb': 1}), + + ('H', {'mass_numb': 2}, + {'symbol': 'D', + 'element': 'H', + 'isotope': 'D', + 'ion': None, + 'Z': None, + 'mass_numb': 2}), + + (2, {}, + {'symbol': 'He', + 'element': 'He', + 'isotope': None, + 'ion': None, + 'Z': None, + 'mass_numb': None}), + + ('T', {'Z': 0}, + {'symbol': 'T 0+', + 'element': 'H', + 'isotope': 'T', + 'ion': 'T 0+', + 'Z': 0, + 'mass_numb': 3}), + + ('Fe-56+++++++', {}, + {'symbol': 'Fe-56 7+', + 'element': 'Fe', + 'isotope': 'Fe-56', + 'ion': 'Fe-56 7+', + 'Z': 7, + 'mass_numb': 56}), + + ('H-', {}, + {'symbol': 'H 1-', + 'element': 'H', + 'isotope': None, + 'ion': 'H 1-', + 'Z': -1, + 'mass_numb': None}), + + ('D+', {}, + {'symbol': 'D 1+', + 'element': 'H', + 'isotope': 'D', + 'ion': 'D 1+', + 'Z': 1, + 'mass_numb': 2}), + + ('Au', {}, + {'symbol': 'Au', + 'element': 'Au', + 'isotope': None, + 'ion': None, + 'Z': None, + 'mass_numb': None}), + + ('Ar 2-', {}, + {'symbol': 'Ar 2-', + 'element': 'Ar', + 'isotope': None, + 'ion': 'Ar 2-', + 'Z': -2, + 'mass_numb': None}), + + ('Fe +24', {'mass_numb': 56}, + {'symbol': 'Fe-56 24+', + 'element': 'Fe', + 'isotope': 'Fe-56', + 'ion': 'Fe-56 24+', + 'Z': 24, + 'mass_numb': 56}), + + ('Be-8 +3', {}, + {'symbol': 'Be-8 3+', + 'element': 'Be', + 'isotope': 'Be-8', + 'ion': 'Be-8 3+', + 'Z': 3, + 'mass_numb': 8}), + + ('p+', {}, + {'symbol': 'p+', + 'element': 'H', + 'isotope': 'H-1', + 'ion': 'p+', + 'Z': 1, + 'mass_numb': 1}), + +] + + [email protected]('arg, kwargs, expected', parse_check_table) +def test_parse_and_check_atomic_input(arg, kwargs, expected): + result = _parse_and_check_atomic_input(arg, **kwargs) + assert result == expected, ( + "Error in _parse_and_check_atomic_input.\n" + "The resulting dictionary is:\n\n" + f"{result}\n\n" + "whereas the expected dictionary is:\n\n" + f"{expected}\n") + + +# (arg, kwargs) +invalid_particles_table = [ + ('H-0', {}), + ('Og-294b', {}), + ('H-934361', {}), + ('Fe 2+4', {}), + ('Fe+24', {}), + ('Fe +59', {}), + ('C++++++++++++++++', {}), + ('C-++++', {}), + ('h', {}), + ('H++', {}), + ('H 2+', {}), + ('T+++', {}), + ('D', {'Z': 2}), + ('d', {}), + ('he', {}), + ('au', {}), + (0, {}), + (119, {}), + (0, {'mass_numb': 1}), + ('p-', {'mass_numb': -1, 'Z': 1}), + ('e-', {'Z': -1}), + (0, {'mass_numb': 1}), + ('n', {'mass_numb': 1}), + ('He-4', {'mass_numb': 3}), + ('He 1+', {'mass_numb': 99}), + ('He-99', {}), + ('H-2+', {'Z': 0}), + ('H-', {'Z': 1}), +] + + [email protected]('arg, kwargs', invalid_particles_table) +def test_parse_InvalidParticleErrors(arg, 
kwargs): + r"""Tests that _parse_and_check_atomic_input raises an + InvalidParticleError when the input does not correspond + to a real particle.""" + with pytest.raises(InvalidParticleError, message=( + "An InvalidParticleError was expected to be raised by " + f"{_call_string(arg, kwargs)}, but no exception was raised.")): + _parse_and_check_atomic_input(arg, **kwargs) + + [email protected]('arg', ParticleZoo.everything - {'p+'}) +def test_parse_InvalidElementErrors(arg): + r"""Tests that _parse_and_check_atomic_input raises an + InvalidElementError when the input corresponds to a valid + particle but not a valid element, isotope, or ion.""" + with pytest.raises(InvalidElementError, message=( + "An InvalidElementError was expected to be raised by " + f"{_call_string(arg)}, but no exception was raised.")): + _parse_and_check_atomic_input(arg) + + +# (arg, kwargs, num_warnings) +atomic_warnings_table = [ + ('H-2 1+', {'Z': 1, 'mass_numb': 2}, 2), + ('H 1+', {'Z': 1}, 1), + ('H-3', {'mass_numb': 3}, 1), + ('Fe-56', {'Z': -4}, 1), + ('Og-294 43-', {'Z': -43, 'mass_numb': 294}, 3) +] + + [email protected]('arg, kwargs, num_warnings', atomic_warnings_table) +def test_parse_AtomicWarnings(arg, kwargs, num_warnings): + r"""Tests that _parse_and_check_atomic_input issues an AtomicWarning + under the required conditions. """ + with pytest.warns(AtomicWarning, message=( + f"No AtomicWarning was issued by {_call_string(arg, kwargs)} but " + f"the expected number of warnings was {num_warnings}")) as record: + _parse_and_check_atomic_input(arg, **kwargs) + assert len(record) == num_warnings, ( + f"The number of AtomicWarnings issued by {_call_string(arg, kwargs)} " + f"was {len(record)}, which differs from the expected number " + f"of {num_warnings} warnings.") diff --git a/plasmapy/atomic/tests/test_particles.py b/plasmapy/atomic/tests/test_particles.py index 2ef407da..ff454d91 100644 --- a/plasmapy/atomic/tests/test_particles.py +++ b/plasmapy/atomic/tests/test_particles.py @@ -1,17 +1,12 @@ import pytest -from ..particles import ( - _get_standard_symbol, - _Particles, - _case_sensitive_aliases, - _case_insensitive_aliases) - +from ..particles import (ParticleZoo, _Particles) particle_antiparticle_pairs = [ ('e-', 'e+'), ('mu-', 'mu+'), ('tau-', 'tau+'), - ('p', 'p-'), + ('p+', 'p-'), ('n', 'antineutron'), ('nu_e', 'anti_nu_e'), ('nu_mu', 'anti_nu_mu'), @@ -55,56 +50,32 @@ def test_particle_antiparticle_pairs(particle, antiparticle): "for 'anti'.") -aliases_and_symbols = [ - ('electron', 'e-'), - ('beta-', 'e-'), - ('beta+', 'e+'), - ('positron', 'e+'), - ('proton', 'p'), - ('', ''), - (5, 5), - ('deuterium+', 'D 1+'), - ('deuterium 1+', 'D 1+'), - ('tritium +1', 'T 1+'), - ('alpha', 'He-4 2+'), - ('D+', 'D 1+'), - ('Deuterium', 'D'), - ('deuteron', 'D 1+'), - ('triton', 'T 1+'), - ('muon', 'mu-'), - ('antimuon', 'mu+'), - ('tau particle', 'tau-'), - ('antitau', 'tau+'), - ('p+', 'p'), +required_keys = [ + 'name', + 'spin', + 'class', + 'lepton number', + 'baryon number', + 'charge', + 'half-life', + 'mass', + 'antimatter', ] [email protected]("alias,symbol", aliases_and_symbols) -def test_get_standard_symbol(alias, symbol): - """Test that _get_standard_symbol correctly takes in aliases and - returns the corresponding symbols, and returns the original argument - if the argument does not correspond to an alias.""" - result = _get_standard_symbol(alias) - assert result == symbol, \ - (f"_get_standard_symbol({alias}) returns {result}, which differs " - f"from the expected symbol of {symbol}.\n\n" - 
f"_case_insensitive_aliases:\n{_case_insensitive_aliases}\n\n" - f"_case_sensitive_aliases:\n{_case_sensitive_aliases}") - - -alias_dictionaries = [_case_sensitive_aliases, _case_insensitive_aliases] - [email protected]("particle", ParticleZoo.everything) +def test__Particles_required_keys(particle): + r"""Test that required keys are present for all particles.""" [email protected]("alias_dict", alias_dictionaries) -def test_alias_dict_properties(alias_dict): - """Test properties of the alias dictionaries.""" + missing_keys = [] - for key in alias_dict.keys(): - assert isinstance(key, str), \ - (f"The following key should be a string, but isn't: {key}\n\n" - f"The entire dictionary is:\n\n{alias_dict}") + for key in required_keys: + try: + _Particles[particle][key] + except KeyError: + missing_keys.append(key) - for value in alias_dict.values(): - assert isinstance(value, str), \ - (f"The following value should be a string, but isn't: {value}\n\n" - f"The entire dictionary is:\n\n{alias_dict}") + if missing_keys: + raise KeyError( + "The following keys are missing from " + f"_Particles['{particle}']:\n{missing_keys}")
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": -1, "issue_text_score": 0, "test_score": -1 }, "num_modified_files": 7 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements/automated-code-tests.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astropy==6.0.1 astropy-iers-data==0.2025.3.31.0.36.18 certifi==2025.1.31 charset-normalizer==3.4.1 coverage==7.8.0 coveralls==4.0.1 docopt==0.6.2 exceptiongroup==1.2.2 execnet==2.1.1 flake8==7.2.0 idna==3.10 iniconfig==2.1.0 mccabe==0.7.0 numpy==1.26.4 packaging==24.2 -e git+https://github.com/PlasmaPy/PlasmaPy.git@f747b371c5245b620d0463d6e315629fa61a7bf0#egg=plasmapy pluggy==1.5.0 pycodestyle==2.13.0 pyerfa==2.0.1.5 pyflakes==3.3.1 pytest==8.3.5 pytest-asyncio==0.26.0 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-xdist==3.6.1 PyYAML==6.0.2 requests==2.32.3 scipy==1.13.1 tomli==2.2.1 typing_extensions==4.13.0 urllib3==2.3.0
name: PlasmaPy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astropy==6.0.1 - astropy-iers-data==0.2025.3.31.0.36.18 - certifi==2025.1.31 - charset-normalizer==3.4.1 - coverage==7.8.0 - coveralls==4.0.1 - docopt==0.6.2 - exceptiongroup==1.2.2 - execnet==2.1.1 - flake8==7.2.0 - idna==3.10 - iniconfig==2.1.0 - mccabe==0.7.0 - numpy==1.26.4 - packaging==24.2 - pluggy==1.5.0 - pycodestyle==2.13.0 - pyerfa==2.0.1.5 - pyflakes==3.3.1 - pytest==8.3.5 - pytest-asyncio==0.26.0 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-xdist==3.6.1 - pyyaml==6.0.2 - requests==2.32.3 - scipy==1.13.1 - tomli==2.2.1 - typing-extensions==4.13.0 - urllib3==2.3.0 prefix: /opt/conda/envs/PlasmaPy
[ "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[1-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[H-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[p-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[T-H0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[deuterium-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[deuteron-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Tritium-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[triton-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[H-2-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[D-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[T-H1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[H-3-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Hydrogen-3-H]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[helium-He]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[2-He]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[alpha-He]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[gold-Au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Gold-Au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[79-Au0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[79-Au1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[P-P]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[118-Og]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[N-14-N]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[N-N]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[H", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[hydrogen", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[deuterium", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Fe+-Fe]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Fe++-Fe]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Fe--Fe]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol[Fe++++++++++++++-Fe]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments0-He-4]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments1-He-4]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments2-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments3-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments4-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments5-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments6-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments7-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments8-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments9-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments10-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments11-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments12-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments13-T]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments14-D]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments15-He-4]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments16-He-4]", 
"plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments17-Au-197]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments18-H-1]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments19-Be-8]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments20-N-13]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments21-H-1]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments22-H-1]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments23-H-1]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments24-N-13]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol[arguments25-T]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[H-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[D-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[deuterium-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[Deuterium-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[tritium-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[p-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[P-15]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[Alpha-2]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[C-12-6]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[Argon-18]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[protium-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[H-3-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[p+-1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[Be-8-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[N-7]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[N", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number[N+++-7]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[helium-3-3]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[Au-197-197]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[deuterium-2]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[D-2]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[H-2-2]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[tritium-3]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[T-3]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[p-1]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[n-1]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[neutron-1]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[n-1-1]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[Be-8-8]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[N-13-13]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[N-13", "plasmapy/atomic/tests/test_atomic.py::test_mass_number[N-13+++-13]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number_error[H-359-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number_error[C-12b-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number_error[-1.5-Exception]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number_error[N-13+-+--InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_mass_number_error[h-3-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[D-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[deuterium-hydrogen]", 
"plasmapy/atomic/tests/test_atomic.py::test_element_name[Au-gold]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[alpha-helium0]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[helium-4-helium]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[H-2-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[Deuterium-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[Hydrogen-3-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[hydrogen-3-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[H-3-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[tritium-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[Alpha-helium]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[alpha-helium1]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[1-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[26-iron]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[79-gold]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[p-hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[P-phosphorus]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[Be-8-beryllium]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[Li-7-lithium]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[N-nitrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[N+++-nitrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name[D--hydrogen]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[vegancupcakes-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[C-+--InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[1.24-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[n-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[neutron-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[0-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[H++-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[t-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[pb-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[d-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[h-3-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[Pb-9-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_element_name_error[H", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_value_between", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_unit", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight[H-1.008]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight[1-1.008]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight[Hydrogen-1.008]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_berkelium_249", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_n", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_si_30_units", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg10-arg20]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg11-arg21]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg12-arg22]", 
"plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg13-arg23]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg14-arg24]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg15-arg25]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg16-arg26]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass[arg17-arg27]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_for_hydrogen_with_no_mass_number", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_unit", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[proton-kwargs0]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[H-1+-kwargs1]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[H-1", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[H-1-kwargs4]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[hydrogen-1-kwargs5]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[p+-kwargs6]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[antiproton-kwargs7]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_proton_mass[p--kwargs8]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_miscellaneous_cases", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[e+-kwargs10-positron-kwargs20-expected0]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[alpha-kwargs11-He-4++-kwargs21-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[alpha-kwargs12-helium-4", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[deuteron-kwargs13-H-kwargs23-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[D+-kwargs14-H-2+-kwargs24-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[D+-kwargs15-D", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[Deuterium+-kwargs16-D-kwargs26-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[triton-kwargs17-H-kwargs27-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[T+-kwargs18-H-3+-kwargs28-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[T+-kwargs19-T", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[Tritium+-kwargs110-T-kwargs210-None]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[H-kwargs113-1-kwargs213-None]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument0]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument1]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument2]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument3]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument4]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument5]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument6]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument7]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument8]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument9]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument10]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument11]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument12]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument13]", 
"plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument14]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable[argument15]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument0]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument1]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument2]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument3]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument4]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument5]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument6]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument7]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument8]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument9]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument10]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument11]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument12]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument13]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_false[argument14]", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes", "plasmapy/atomic/tests/test_atomic.py::test_half_life", "plasmapy/atomic/tests/test_atomic.py::test_half_life_unstable_isotopes", "plasmapy/atomic/tests/test_atomic.py::test_half_life_u_220", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_symbol-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_symbol-argument1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_symbol-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_symbol-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_symbol-argument4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_symbol-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_number-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_number-argument7]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[atomic_number-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[is_isotope_stable-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[is_isotope_stable-argument10]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[is_isotope_stable-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[half_life-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[half_life-argument13]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[half_life-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[mass_number-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[mass_number-argument16]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[mass_number-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[element_name-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[element_name-argument19]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[element_name-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[standard_atomic_weight-1.1]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[standard_atomic_weight-argument22]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[standard_atomic_weight-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_mass-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_mass-argument25]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[isotope_mass-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[ion_mass-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[ion_mass-argument28]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[ion_mass-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_binding_energy-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_binding_energy-argument31]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_binding_energy-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_reaction_energy-1.1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_reaction_energy-argument34]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_TypeErrors[nuclear_reaction_energy-(1+1j)]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_symbol-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-C++++++++++++++++]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotope_symbol-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[atomic_number-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[is_isotope_stable-alpha-4]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[half_life-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[mass_number-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-Fe+24]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[element_name-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[standard_atomic_weight-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-alpha", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[ion_mass-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[known_isotopes-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[stable_isotopes-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-H-9343610]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[common_isotopes-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[isotopic_abundance-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-d]", 
"plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[integer_charge-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge--1]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-119]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-grumblemuffins]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-H-0]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-Og-294b]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-H-9343610]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-Fe+24]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-C++++++++++++++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-C-++++]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-h]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-d]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-he]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-au]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-alpha", "plasmapy/atomic/tests/test_atomic.py::test_atomic_ParticleErrors[electric_charge-alpha-4]", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes_cases", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes_len", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[1-isotopes0]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[3-isotopes1]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[5-isotopes2]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[7-isotopes3]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[9-isotopes4]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[11-isotopes5]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[13-isotopes6]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[15-isotopes7]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[17-isotopes8]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[19-isotopes9]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[21-isotopes10]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[23-isotopes11]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[25-isotopes12]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[27-isotopes13]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[29-isotopes14]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[31-isotopes15]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[33-isotopes16]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[35-isotopes17]", 
"plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[37-isotopes18]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[39-isotopes19]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[41-isotopes20]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[43-isotopes21]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[45-isotopes22]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[47-isotopes23]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[49-isotopes24]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[51-isotopes25]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[53-isotopes26]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[55-isotopes27]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[57-isotopes28]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[59-isotopes29]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[61-isotopes30]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[63-isotopes31]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[65-isotopes32]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[67-isotopes33]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[69-isotopes34]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[71-isotopes35]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[73-isotopes36]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[75-isotopes37]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[77-isotopes38]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[79-isotopes39]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[81-isotopes40]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[89-isotopes41]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundances_sum[91-isotopes42]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[H+-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[D", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[tritium", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[H---1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[Fe", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[N-----3]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[N++-2]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[alpha-2]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[proton-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[deuteron-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[triton-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[electron--1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[e---1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[e+-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[positron-1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[n-0]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[neutron-0]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[p---1]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge[antiproton--1]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[n-kwargs0-True]", 
"plasmapy/atomic/tests/test_atomic.py::test_is_neutron[n-1-kwargs1-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[N-kwargs2-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[N-1-kwargs3-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[N-7-kwargs4-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[neutron-kwargs5-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[James", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[0-kwargs7-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[0-kwargs8-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_neutron[n0-kwargs9-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[hydrogen-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[hydrogen+-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[hydrogen--False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[hydrogen---False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H+-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H--False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[proton-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[protium-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[deuterium-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[tritium-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[triton-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[deuteron-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[h-False-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[D-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[D+-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-2-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-2+-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-2", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-3", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[He-False-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-1-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[H-7-False-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[antiproton-False-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[1-True-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[1-False-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen[p--False-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen_errors[H", "plasmapy/atomic/tests/test_atomic.py::test_is_hydrogen_errors[D++-kwargs1-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[e--True]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[e+-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[Electron-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[electron-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[positron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[p-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[E-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[E--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[beta-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_electron[-1-False]", 
"plasmapy/atomic/tests/test_atomic.py::test_is_positron[e--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[e+-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[Electron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[electron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[positron-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[p-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[E-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[E--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[beta-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_positron[1-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[p-kwargs0-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[p+-kwargs1-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[hydrogen-1+-kwargs2-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-1", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-1-kwargs4-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-kwargs5-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[p--kwargs6-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[antiproton-kwargs7-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[Antiproton-kwargs8-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[proton-kwargs9-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[Proton-kwargs10-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[P-kwargs11-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[P+-kwargs12-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[1-kwargs13-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[1-kwargs14-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-kwargs15-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-1-kwargs16-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-kwargs17-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_proton[H-1-kwargs18-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[e--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[e+-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[Electron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[electron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[positron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[p-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[E-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[E--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[beta-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[p--True]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[Antiproton-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[antiproton-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[p+-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[p---False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[P--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_antiproton[57-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[e--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[e+-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[Electron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[electron-False]", 
"plasmapy/atomic/tests/test_atomic.py::test_is_alpha[positron-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[p-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[E-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[E--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[beta-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[p--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[Antiproton-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[antiproton-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[p+-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[p---False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[P--False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[57-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[alpha-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-4", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-4++-True]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-3", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-5", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[Helium-4", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-4-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[helium-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[He-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[Fe-56-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[Fe-False]", "plasmapy/atomic/tests/test_atomic.py::test_is_alpha[he-4", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[H-H-None]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[H+-H-1]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[D", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[alpha-He-4-2]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[Fe-Fe-None]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[Titanium-Titanium-None]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[N-7+++-N-7-3]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[H-1--H-1--1]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge[He-4--He-4--1]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_errors[H-1-+-+-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_errors[H", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_errors[Fe", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_warnings[H-1-----AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_warnings[Fe", "plasmapy/atomic/tests/test_atomic.py::test_extract_integer_charge_warnings[lead", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[neutron-kwargs0-expected_dict0]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[p+-kwargs1-expected_dict1]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[p--kwargs2-expected_dict2]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[e--kwargs3-expected_dict3]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[e+-kwargs4-expected_dict4]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[H-kwargs5-expected_dict5]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[D+-kwargs6-expected_dict6]", 
"plasmapy/atomic/tests/test_classes.py::test_Particle_class[tritium-kwargs7-expected_dict7]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[Fe-kwargs8-expected_dict8]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[alpha-kwargs9-expected_dict9]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[Cn-276-kwargs11-expected_dict11]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[muon-kwargs12-expected_dict12]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[nu_tau-kwargs13-expected_dict13]", "plasmapy/atomic/tests/test_classes.py::test_Particle_equivalent_cases[equivalent_particles0]", "plasmapy/atomic/tests/test_classes.py::test_Particle_equivalent_cases[equivalent_particles1]", "plasmapy/atomic/tests/test_classes.py::test_Particle_equivalent_cases[equivalent_particles2]", "plasmapy/atomic/tests/test_classes.py::test_Particle_equivalent_cases[equivalent_particles3]", "plasmapy/atomic/tests/test_classes.py::test_Particle_equivalent_cases[equivalent_particles4]", "plasmapy/atomic/tests/test_classes.py::test_Particle_equivalent_cases[equivalent_particles5]", "plasmapy/atomic/tests/test_classes.py::test_Particle_equivalent_cases[equivalent_particles6]", "plasmapy/atomic/tests/test_classes.py::test_Particle_equivalent_cases[equivalent_particles7]", "plasmapy/atomic/tests/test_classes.py::test_Particle_equivalent_cases[equivalent_particles8]", "plasmapy/atomic/tests/test_classes.py::test_Particle_equivalent_cases[equivalent_particles9]", "plasmapy/atomic/tests/test_classes.py::test_Particle_equivalent_cases[equivalent_particles10]", "plasmapy/atomic/tests/test_classes.py::test_Particle_cmp", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_binding_energy", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_binding_energy_D_T", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_binding_energy_error[H-kwargs0-InvalidIsotopeError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_binding_energy_error[He-99-kwargs1-InvalidParticleError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_binding_energy_error[He-kwargs2-InvalidParticleError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_binding_energy_error[1.1-kwargs3-TypeError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_triple_alpha", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_alpha_decay", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_triple_alpha_r", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_beta", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_error[H", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_error[1-kwargs1-TypeError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_error[H-1", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_error[invalid", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_error[p", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwargs[H-1-p-0.0-0.0]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwargs[reactants1-products1-2.8-0.06]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwargs[reactants2-products2-22.2-0.06]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwargs[reactants3-N-13-1.95-0.006]", 
"plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwargs[reactants4-products4-1.2-0.006]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwargs[reactants5-products5-7.54-0.006]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwargs[reactants6-products6-7.35-0.006]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwargs[reactants7-products7-1.73-0.006]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwargs[reactants8-products8-4.96-0.006]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwerrors[n-3-TypeError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwerrors[n-products1-ValueError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwerrors[reactants2-products2-ValueError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwerrors[reactants3-products3-ValueError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwerrors[reactants4-products4-ValueError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwerrors[reactants5-p-ValueError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwerrors[reactants6-products6-ValueError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwerrors[reactants7-H-3-ValueError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwerrors[reactants8-products8-ValueError]", "plasmapy/atomic/tests/test_nuclear.py::test_nuclear_reaction_energy_kwerrors[reactants9-products9-ValueError]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[electron-e-]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[beta--e-]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[beta+-e+]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[positron-e+]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[proton-p+]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[-]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[5-5]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[deuterium+-D", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[deuterium", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[tritium", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[alpha-He-4", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[D+-D", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[Deuterium-D]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[deuteron-D", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[triton-T", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[muon-mu-]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[antimuon-mu+]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[tau", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[antitau-tau+]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[p-p+]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[H-1", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[H-1+-p+]", "plasmapy/atomic/tests/test_parsing.py::test_dealias_particle_aliases[hydrogen-1+-p+]", 
"plasmapy/atomic/tests/test_parsing.py::test_alias_dict_properties[alias_dict0]", "plasmapy/atomic/tests/test_parsing.py::test_alias_dict_properties[alias_dict1]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[He-kwargs0-expected0]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[alpha-kwargs1-expected1]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[1-kwargs2-expected2]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[p-kwargs3-expected3]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[H-kwargs4-expected4]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[2-kwargs5-expected5]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[T-kwargs6-expected6]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[Fe-56+++++++-kwargs7-expected7]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[H--kwargs8-expected8]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[D+-kwargs9-expected9]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[Au-kwargs10-expected10]", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[Ar", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[Fe", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[Be-8", "plasmapy/atomic/tests/test_parsing.py::test_parse_and_check_atomic_input[p+-kwargs14-expected14]", "plasmapy/atomic/tests/test_particles.py::test_particle_antiparticle_pairs[e--e+]", "plasmapy/atomic/tests/test_particles.py::test_particle_antiparticle_pairs[mu--mu+]", "plasmapy/atomic/tests/test_particles.py::test_particle_antiparticle_pairs[tau--tau+]", "plasmapy/atomic/tests/test_particles.py::test_particle_antiparticle_pairs[p+-p-]", "plasmapy/atomic/tests/test_particles.py::test_particle_antiparticle_pairs[n-antineutron]", "plasmapy/atomic/tests/test_particles.py::test_particle_antiparticle_pairs[nu_e-anti_nu_e]", "plasmapy/atomic/tests/test_particles.py::test_particle_antiparticle_pairs[nu_mu-anti_nu_mu]", "plasmapy/atomic/tests/test_particles.py::test_particle_antiparticle_pairs[nu_tau-anti_nu_tau]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[tau-]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[nu_mu]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[mu-]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[mu+]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[tau+]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[anti_nu_e]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[anti_nu_tau]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[nu_e]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[anti_nu_mu]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[e-]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[e+]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[n]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[p+]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[antineutron]", "plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[nu_tau]", 
"plasmapy/atomic/tests/test_particles.py::test__Particles_required_keys[p-]" ]
[ "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_equivalent_args[Fe-56", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[H-0-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[3.14159-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[Og-294b-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[H-934361079326356530741942970523610389-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[Fe", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[Fe+24-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[C++++++++++++++++-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[C-++++-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[neutron-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[n-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[n-1-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[h-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[d-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[he-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[au-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[p--InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[0-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[119-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_symbol_error[antiproton-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[Md-260-kwargs0-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[protium-kwargs1-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[alpha-kwargs2-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[O-18-kwargs3-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[lead-209-kwargs4-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[He-1-kwargs5-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[24-kwargs6-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[H-kwargs7-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[H-1-kwargs8-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[P-kwargs9-InvalidIsotopeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[1-kwargs10-InvalidIsotopeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[4-kwargs11-InvalidIsotopeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[hydrogen-444444-kwargs12-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[Fe-kwargs13-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[He-kwargs14-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[He-3-kwargs15-InvalidParticleError]", 
"plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[D-kwargs16-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[T-kwargs17-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[Fe-kwargs18-InvalidIsotopeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[He-kwargs19-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[d-kwargs20-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[h-3-kwargs21-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[h-kwargs22-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_error[d+-kwargs23-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[H-1-kwargs0-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[H-2-kwargs1-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[T-kwargs2-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[Li-6-kwargs3-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[lithium-6-kwargs4-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[alpha-kwargs5-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_symbol_warnings[p-kwargs6-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[H-3934-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[C-12b-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[-1.5-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[n-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[n-1-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[neutron-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[Neutron-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[d-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[t-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_atomic_number_error[s-36-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[H-1-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[help", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[1.1-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[n-InvalidElementError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[p-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[alpha-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[deuteron-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[tritium-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[Au+-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[Fe", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[Og", "plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[h-InvalidParticleError]", 
"plasmapy/atomic/tests/test_atomic.py::test_standard_atomic_weight_error[fe-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[H-InvalidIsotopeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[1.1-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[alpha-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[He-4", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[he-4-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[Fe", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[deuteron-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[triton-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[H-1", "plasmapy/atomic/tests/test_atomic.py::test_isotope_mass_error[H-1+-AtomicError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[Og", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[Fe-56-kwargs1-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[n-kwargs2-InvalidIonError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[H-1", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[26-kwargs4-TypeError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[26-kwargs5-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[Og-kwargs6-MissingAtomicDataError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[Og-kwargs7-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[n-kwargs8-InvalidIonError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[He", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[argument10-kwargs10-UnitConversionError]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_error[fe-56", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_warnings[argument0-kwargs0-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_ion_mass_warnings[argument1-kwargs1-AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_error[argument0-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_error[argument1-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_error[argument2-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_error[argument3-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_is_isotope_stable_error[argument4-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes_error[common_isotopes]", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes_error[stable_isotopes]", "plasmapy/atomic/tests/test_atomic.py::test_known_common_stable_isotopes_error[known_isotopes]", "plasmapy/atomic/tests/test_atomic.py::test_isotopic_abundance", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[fads-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[H++-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[h+-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[fe", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[d+-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[Fe", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_error[H-1-ChargeError]", 
"plasmapy/atomic/tests/test_atomic.py::test_integer_charge_warnings[H----AtomicWarning]", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_warnings[Fe", "plasmapy/atomic/tests/test_atomic.py::test_integer_charge_warnings[Og", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge_error[badinput-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge_error[h+-InvalidParticleError]", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge_error[Au", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge_warning[Au", "plasmapy/atomic/tests/test_atomic.py::test_electric_charge_warning[H----AtomicWarning]", "plasmapy/atomic/tests/test_classes.py::test_Particle_class[Li-kwargs10-expected_dict10]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[a-kwargs0--InvalidParticleError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[d+-kwargs1--InvalidParticleError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[H-kwargs2--InvalidParticleError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[Au-818-kwargs3--InvalidParticleError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[Au-12-kwargs4--InvalidParticleError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[Au-kwargs5--InvalidParticleError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[Au-kwargs6--InvalidParticleError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[e--kwargs7--InvalidParticleError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[e--kwargs8-.atomic_number-InvalidElementError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[alpha-kwargs9-.standard_atomic_weight-InvalidElementError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[Fe-56-kwargs10-.standard_atomic_weight-InvalidElementError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[tau--kwargs11-.element_name-InvalidElementError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[tau+-kwargs12-.atomic_number-InvalidElementError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[neutron-kwargs13-.atomic_number-InvalidElementError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[H-kwargs14-.mass_number-InvalidIsotopeError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[neutron-kwargs15-.mass_number-InvalidIsotopeError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[He-kwargs16-.charge-ChargeError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[He-kwargs17-.integer_charge-ChargeError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[Fe-kwargs18-.spin-MissingAtomicDataError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[nu_e-kwargs19-.mass-MissingAtomicDataError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[Og-kwargs20-.standard_atomic_weight-MissingAtomicDataError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[Fe-kwargs21--TypeError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[Fe-kwargs22--TypeError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_errors[arg23-kwargs23--TypeError]", "plasmapy/atomic/tests/test_classes.py::test_Particle_warnings[H-----kwargs0--AtomicWarning]", "plasmapy/atomic/tests/test_classes.py::test_Particle_warnings[alpha-kwargs1--AtomicWarning]", 
"plasmapy/atomic/tests/test_classes.py::test_Particle_warnings[alpha-kwargs2--AtomicWarning]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[H-0-kwargs0]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[Og-294b-kwargs1]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[H-934361-kwargs2]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[Fe", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[Fe+24-kwargs4]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[C++++++++++++++++-kwargs6]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[C-++++-kwargs7]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[h-kwargs8]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[H++-kwargs9]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[H", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[T+++-kwargs11]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[D-kwargs12]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[d-kwargs13]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[he-kwargs14]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[au-kwargs15]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[0-kwargs16]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[119-kwargs17]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[0-kwargs18]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[p--kwargs19]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[e--kwargs20]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[0-kwargs21]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[n-kwargs22]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[He-4-kwargs23]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[He", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[He-99-kwargs25]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[H-2+-kwargs26]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidParticleErrors[H--kwargs27]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[nu_mu]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[tau+]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[anti_nu_e]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[anti_nu_tau]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[nu_e]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[anti_nu_mu]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[tau-]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[mu+]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[mu-]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[e+]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[p-]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[n]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[e-]", 
"plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[antineutron]", "plasmapy/atomic/tests/test_parsing.py::test_parse_InvalidElementErrors[nu_tau]", "plasmapy/atomic/tests/test_parsing.py::test_parse_AtomicWarnings[H-2", "plasmapy/atomic/tests/test_parsing.py::test_parse_AtomicWarnings[H", "plasmapy/atomic/tests/test_parsing.py::test_parse_AtomicWarnings[H-3-kwargs2-1]", "plasmapy/atomic/tests/test_parsing.py::test_parse_AtomicWarnings[Fe-56-kwargs3-1]", "plasmapy/atomic/tests/test_parsing.py::test_parse_AtomicWarnings[Og-294" ]
[]
[]
BSD 3-Clause "New" or "Revised" License
2,062
[ "plasmapy/atomic/__init__.py", "plasmapy/atomic/names.py", "plasmapy/atomic/parsing.py", "plasmapy/atomic/particles.py", ".circleci/config.yml", "plasmapy/atomic/isotopes.py", "plasmapy/atomic/atomic.py", "plasmapy/atomic/classes.py", "plasmapy/atomic/nuclear.py" ]
[ "plasmapy/atomic/__init__.py", "plasmapy/atomic/names.py", "plasmapy/atomic/parsing.py", "plasmapy/atomic/particles.py", ".circleci/config.yml", "plasmapy/atomic/isotopes.py", "plasmapy/atomic/atomic.py", "plasmapy/atomic/classes.py", "plasmapy/atomic/nuclear.py" ]
kids-first__kf-api-dataservice-56
bccf1d07c49020fea2f01549e9cac12d39809c7a
2018-01-19 20:33:02
bccf1d07c49020fea2f01549e9cac12d39809c7a
dankolbman: Need to update the README too
znatty22: Looks good 👍
diff --git a/README.md b/README.md
index ee05429..8780af7 100644
--- a/README.md
+++ b/README.md
@@ -27,11 +27,11 @@
 flask db upgrade
 flask run
 ```
 
-The API should now be available at `localhost:5000/v1`.
+The API should now be available at `localhost:5000/`.
 
 ## Documentation
 
-The swagger docs are located at the `/v1` endpoint.
+The swagger docs are located at the root `localhost:5000/`.
 
 ## Testing
diff --git a/dataservice/api/__init__.py b/dataservice/api/__init__.py
index 3e9a051..5d1beab 100644
--- a/dataservice/api/__init__.py
+++ b/dataservice/api/__init__.py
@@ -1,16 +1,19 @@
 from flask import Blueprint
 from flask_restplus import Api
 
 from dataservice.api.participant import participant_api
+from dataservice.api.status import status_api
+from dataservice.utils import _get_version
 
-api_v1 = Blueprint('api', __name__, url_prefix='/v1')
+api_v1 = Blueprint('api', __name__, url_prefix='')
 
 api = Api(api_v1,
           title='Kids First Data Service',
           description=open('dataservice/api/README.md').read(),
-          version='0.1',
+          version=_get_version(),
           default='',
           default_label='')
 
+api.add_namespace(status_api)
 api.add_namespace(participant_api)
diff --git a/dataservice/api/status/README.md b/dataservice/api/status/README.md
new file mode 100644
index 0000000..0a331cb
--- /dev/null
+++ b/dataservice/api/status/README.md
@@ -0,0 +1,1 @@
+Contains version and health information about the Dataservice API.
diff --git a/dataservice/api/status/__init__.py b/dataservice/api/status/__init__.py
new file mode 100644
index 0000000..92ca513
--- /dev/null
+++ b/dataservice/api/status/__init__.py
@@ -0,0 +1,1 @@
+from .resources import status_api
diff --git a/dataservice/api/status/resources.py b/dataservice/api/status/resources.py
new file mode 100644
index 0000000..d7b5982
--- /dev/null
+++ b/dataservice/api/status/resources.py
@@ -0,0 +1,49 @@
+import os
+import subprocess
+
+from flask_restplus import Namespace, Resource, fields
+
+from dataservice.utils import _get_version
+
+THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+README_FILE = os.path.join(THIS_DIR, 'README.md')
+
+status_api = Namespace(name='status', description=open(README_FILE).read())
+
+from dataservice.api.status.serializers import (  # noqa
+    version_response,
+    _version_fields
+)
+
+
+status_api.models['VersionFields'] = _version_fields
+status_api.models['VersionResponse'] = version_response
+
+
+@status_api.route("")
+class Status(Resource):
+    """
+    Service Status
+    """
+    @status_api.marshal_with(version_response)
+    def get(self):
+        """
+        Get the service status
+
+        Returns information about the current API's version and status
+        """
+        commit = (subprocess.check_output(
+            ["git", "rev-parse", "--short", "HEAD"])
+            .decode("utf-8").strip())
+
+        tags = (subprocess.check_output(
+            ["git", "tag", "-l", "--points-at", "HEAD"])
+            .decode("utf-8").split('\n'))
+        tags = [] if tags[0] == "" else tags
+        return {"_status": {
+            "message": "Welcome to the Kids First Dataservice API",
+            "code": 200,
+            "version": _get_version(),
+            "commit": commit,
+            "tags": tags
+        }}
diff --git a/dataservice/api/status/serializers.py b/dataservice/api/status/serializers.py
new file mode 100644
index 0000000..8325dd9
--- /dev/null
+++ b/dataservice/api/status/serializers.py
@@ -0,0 +1,23 @@
+from flask_restplus import fields
+
+from dataservice.api.common.serializers import (
+    _status_fields,
+    base_response
+)
+
+_version_fields = _status_fields.clone('VersionFields', {
+    'version': fields.String(
+        example='1.0',
+        description='Current version of the API'),
+    'commit': fields.String(
+        example='23cf525',
+        description='Commit currently deployed'),
+    'tags': fields.List(fields.String(
+        example='rc-0.8.4',
+        description='Any tags on the current commit'))
+})
+
+# Version response model
+version_response = base_response.clone('VersionResponse', {
+    '_status': fields.Nested(_version_fields)
+})
diff --git a/dataservice/utils.py b/dataservice/utils.py
new file mode 100644
index 0000000..0f97a39
--- /dev/null
+++ b/dataservice/utils.py
@@ -0,0 +1,5 @@
+import pkg_resources
+
+
+def _get_version():
+    return pkg_resources.get_distribution("kf-api-dataservice").version
Fix "/" path to return 200
kids-first/kf-api-dataservice
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..cc20c3f
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,13 @@
+import pytest
+
+from dataservice import create_app
+from dataservice.extensions import db
+
+
[email protected]
+def client():
+    app = create_app('testing')
+    app_context = app.app_context()
+    app_context.push()
+    db.create_all()
+    return app.test_client()
diff --git a/tests/test_api.py b/tests/test_api.py
new file mode 100644
index 0000000..2fb263c
--- /dev/null
+++ b/tests/test_api.py
@@ -0,0 +1,63 @@
+import json
+import pkg_resources
+import pytest
+
+import dataservice
+
+
+class TestAPI:
+    """
+    General API tests such as reponse code checks, envelope formatting checks,
+    and header checks
+    """
+
+    @pytest.mark.parametrize('endpoint,method,status_code', [
+        ('/', 'GET', 200),
+        ('', 'GET', 200),
+        ('/status', 'GET', 200),
+        ('/participants', 'GET', 200),
+        ('/persons', 'GET', 404),
+        ('/participants/123', 'GET', 404)
+    ])
+    def test_status_codes(self, client, endpoint, method, status_code):
+        """ Test endpoint response codes """
+        call_func = getattr(client, method.lower())
+        assert call_func(endpoint).status_code == status_code
+
+
+    @pytest.mark.parametrize('endpoint,method', [
+        ('/participants', 'GET')
+    ])
+    def test_status_format(self, client, endpoint, method):
+        """ Test that the _response field is consistent """
+        call_func = getattr(client, method.lower())
+        body = json.loads(call_func(endpoint).data.decode('utf-8'))
+        assert '_status' in body
+        assert 'message' in body['_status']
+        assert type(body['_status']['message']) is str
+        assert 'code' in body['_status']
+        assert type(body['_status']['code']) is int
+
+    def test_version(self, client):
+        """ Test response from /status returns correct fields """
+        status = json.loads(client.get('/status').data.decode('utf-8'))
+        status = status['_status']
+        assert 'commit' in status
+        assert len(status['commit']) == 7
+        assert 'version' in status
+        assert status['version'].count('.') == 2
+        assert 'tags' in status
+        assert type(status['tags']) is list
+        assert 'Dataservice' in status['message']
+
+    def test_versions(self, client):
+        """ Test that versions are aligned accross package, docs, and api """
+        package = pkg_resources.get_distribution("kf-api-dataservice").version
+        api_restplus = dataservice.api.api.version
+        api_version = json.loads(client.get('/status').data.decode('utf-8'))
+        api_version = api_version['_status']['version']
+        swagger = json.loads(client.get('/swagger.json').data.decode('utf-8'))
+
+        assert api_version == package
+        assert api_version == api_restplus
+        assert api_version == swagger['info']['version']
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_added_files", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 2 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cache", "pytest-cov", "pytest-pep8" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alembic==0.9.6
aniso8601==1.3.0
attrs==25.3.0
base32-crockford==0.3.0
click==6.6
coverage==7.8.0
exceptiongroup==1.2.2
execnet==2.1.1
Flask==0.12.2
Flask-Migrate==2.1.1
Flask-Profile==0.2
flask-restplus==0.10.1
Flask-SQLAlchemy==2.3.2
gunicorn==19.7.1
iniconfig==2.1.0
itsdangerous==0.24
Jinja2==2.8
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
-e git+https://github.com/kids-first/kf-api-dataservice.git@bccf1d07c49020fea2f01549e9cac12d39809c7a#egg=kf_api_dataservice
Mako==1.0.4
MarkupSafe==0.23
packaging==24.2
pep8==1.7.1
pluggy==1.5.0
pytest==8.3.5
pytest-cache==1.0
pytest-cov==6.0.0
pytest-pep8==1.0.6
python-dateutil==2.9.0.post0
python-editor==1.0.3
pytz==2025.2
referencing==0.36.2
rpds-py==0.24.0
six==1.11.0
SQLAlchemy==1.1.15
tomli==2.2.1
typing_extensions==4.13.0
Werkzeug==0.11.10
name: kf-api-dataservice
channels:
  - defaults
  - https://repo.anaconda.com/pkgs/main
  - https://repo.anaconda.com/pkgs/r
  - conda-forge
dependencies:
  - _libgcc_mutex=0.1=main
  - _openmp_mutex=5.1=1_gnu
  - ca-certificates=2025.2.25=h06a4308_0
  - ld_impl_linux-64=2.40=h12ee557_0
  - libffi=3.4.4=h6a678d5_1
  - libgcc-ng=11.2.0=h1234567_1
  - libgomp=11.2.0=h1234567_1
  - libstdcxx-ng=11.2.0=h1234567_1
  - ncurses=6.4=h6a678d5_0
  - openssl=3.0.16=h5eee18b_0
  - pip=25.0=py39h06a4308_0
  - python=3.9.21=he870216_1
  - readline=8.2=h5eee18b_0
  - setuptools=75.8.0=py39h06a4308_0
  - sqlite=3.45.3=h5eee18b_0
  - tk=8.6.14=h39e8969_0
  - tzdata=2025a=h04d1e81_0
  - wheel=0.45.1=py39h06a4308_0
  - xz=5.6.4=h5eee18b_1
  - zlib=1.2.13=h5eee18b_1
  - pip:
    - alembic==0.9.6
    - aniso8601==1.3.0
    - attrs==25.3.0
    - base32-crockford==0.3.0
    - click==6.6
    - coverage==7.8.0
    - exceptiongroup==1.2.2
    - execnet==2.1.1
    - flask==0.12.2
    - flask-migrate==2.1.1
    - flask-profile==0.2
    - flask-restplus==0.10.1
    - flask-sqlalchemy==2.3.2
    - gunicorn==19.7.1
    - iniconfig==2.1.0
    - itsdangerous==0.24
    - jinja2==2.8
    - jsonschema==4.23.0
    - jsonschema-specifications==2024.10.1
    - mako==1.0.4
    - markupsafe==0.23
    - packaging==24.2
    - pep8==1.7.1
    - pluggy==1.5.0
    - pytest==8.3.5
    - pytest-cache==1.0
    - pytest-cov==6.0.0
    - pytest-pep8==1.0.6
    - python-dateutil==2.9.0.post0
    - python-editor==1.0.3
    - pytz==2025.2
    - referencing==0.36.2
    - rpds-py==0.24.0
    - six==1.11.0
    - sqlalchemy==1.1.15
    - tomli==2.2.1
    - typing-extensions==4.13.0
    - werkzeug==0.11.10
prefix: /opt/conda/envs/kf-api-dataservice
[ "tests/test_api.py::TestAPI::test_status_codes[/-GET-200]", "tests/test_api.py::TestAPI::test_status_codes[-GET-200]", "tests/test_api.py::TestAPI::test_status_codes[/status-GET-200]", "tests/test_api.py::TestAPI::test_status_codes[/participants-GET-200]", "tests/test_api.py::TestAPI::test_status_format[/participants-GET]", "tests/test_api.py::TestAPI::test_version", "tests/test_api.py::TestAPI::test_versions" ]
[]
[ "tests/test_api.py::TestAPI::test_status_codes[/persons-GET-404]", "tests/test_api.py::TestAPI::test_status_codes[/participants/123-GET-404]" ]
[]
Apache License 2.0
2,064
[ "dataservice/api/status/README.md", "dataservice/api/status/serializers.py", "README.md", "dataservice/api/status/resources.py", "dataservice/utils.py", "dataservice/api/__init__.py", "dataservice/api/status/__init__.py" ]
[ "dataservice/api/status/README.md", "dataservice/api/status/serializers.py", "README.md", "dataservice/api/status/resources.py", "dataservice/utils.py", "dataservice/api/__init__.py", "dataservice/api/status/__init__.py" ]
nathandines__SPF2IP-3
7e3593a6f322c39a02c1c0f4a108b046ec6c1a20
2018-01-19 22:24:43
7e3593a6f322c39a02c1c0f4a108b046ec6c1a20
diff --git a/SPF2IP.py b/SPF2IP.py
index e6210f3..b95903e 100644
--- a/SPF2IP.py
+++ b/SPF2IP.py
@@ -29,14 +29,22 @@ def dns_request_unicode(hostname,record_type,*args,**kwargs):
       value = value.decode('utf-8')
       output.append(value)
     elif record_type == "MX":
-      value = entry.exchange
-      if type(value) is not unicode:
-        value = value.__str__().encode('utf-8').decode('utf-8')
+      try:
+        value = entry.exchange.decode('utf-8')
+      except AttributeError as err:
+        if err.args[0] == "'Name' object has no attribute 'decode'":
+          value = unicode(entry.exchange)
+        else:
+          raise
       output.append(value)
     elif record_type == "TXT":
-      value = ''.join([str(ent) for ent in entry.strings])
-      if type(value) is not unicode:
-        value = value.decode('utf-8')
+      value_array = []
+      for ent in entry.strings:
+        if type(ent) is not unicode:
+          value_array.append(ent.decode('utf-8'))
+        else:
+          value_array.append(ent)
+      value = ''.join(value_array)
       output.append(value)
 
   return output
diff --git a/setup.py b/setup.py
index 3b958d6..3561be0 100755
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@ with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
 
 setup(
     name='SPF2IP',
-    version='1.0.4',
+    version='1.0.5',
 
     description='Python module to get IP addresses from an SPF record',
     long_description=long_description,
Broken on 3.6

The current code always returns an empty answer on Python 3.6. This fixes it for me:

```
diff --git a/SPF2IP.py b/SPF2IP.py
index e6210f3..84683ff 100644
--- a/SPF2IP.py
+++ b/SPF2IP.py
@@ -34,7 +34,7 @@
         value = value.__str__().encode('utf-8').decode('utf-8')
       output.append(value)
     elif record_type == "TXT":
-      value = ''.join([str(ent) for ent in entry.strings])
+      value = ''.join([str(ent, encoding='ascii') for ent in entry.strings])
       if type(value) is not unicode:
         value = value.decode('utf-8')
       output.append(value)
```

I only know python3 so I can't give good advice on making code that works on both 2 and 3. But a friend of mine's package has this function that you might find useful:

```
def to_native_str(value, encoding='utf-8'):
    if isinstance(value, str):
        return value
    if six.PY3 and isinstance(value, six.binary_type):  #pragma: no cover
        return value.decode(encoding)
    elif six.PY2 and isinstance(value, six.text_type):  #pragma: no cover
        return value.encode(encoding)
```
nathandines/SPF2IP
diff --git a/test_SPF2IP.py b/test_SPF2IP.py
index 54caff5..6e88918 100644
--- a/test_SPF2IP.py
+++ b/test_SPF2IP.py
@@ -125,11 +125,13 @@ dns_records = {
 class fakedns:
   def __init__(self,value,record_type):
     if record_type == 'TXT':
-      self.strings = value
+      self.strings = []
+      for entry in value:
+        self.strings.append(entry.encode('utf-8'))
     elif record_type == 'A' or record_type == 'AAAA':
-      self.address = value
+      self.address = value.encode('utf-8')
     elif record_type == 'MX':
-      self.exchange = value
+      self.exchange = value.encode('utf-8')
 def fake_dns_resolver(hostname,record_type):
   try:
     dns_records[hostname]
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 2 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements-test.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
dnspython==2.7.0
exceptiongroup==1.2.2
iniconfig==2.1.0
ipaddress==1.0.23
mock==5.2.0
nose==1.3.7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
-e git+https://github.com/nathandines/SPF2IP.git@7e3593a6f322c39a02c1c0f4a108b046ec6c1a20#egg=SPF2IP
tomli==2.2.1
name: SPF2IP
channels:
  - defaults
  - https://repo.anaconda.com/pkgs/main
  - https://repo.anaconda.com/pkgs/r
  - conda-forge
dependencies:
  - _libgcc_mutex=0.1=main
  - _openmp_mutex=5.1=1_gnu
  - ca-certificates=2025.2.25=h06a4308_0
  - ld_impl_linux-64=2.40=h12ee557_0
  - libffi=3.4.4=h6a678d5_1
  - libgcc-ng=11.2.0=h1234567_1
  - libgomp=11.2.0=h1234567_1
  - libstdcxx-ng=11.2.0=h1234567_1
  - ncurses=6.4=h6a678d5_0
  - openssl=3.0.16=h5eee18b_0
  - pip=25.0=py39h06a4308_0
  - python=3.9.21=he870216_1
  - readline=8.2=h5eee18b_0
  - setuptools=75.8.0=py39h06a4308_0
  - sqlite=3.45.3=h5eee18b_0
  - tk=8.6.14=h39e8969_0
  - tzdata=2025a=h04d1e81_0
  - wheel=0.45.1=py39h06a4308_0
  - xz=5.6.4=h5eee18b_1
  - zlib=1.2.13=h5eee18b_1
  - pip:
    - dnspython==2.7.0
    - exceptiongroup==1.2.2
    - iniconfig==2.1.0
    - ipaddress==1.0.23
    - mock==5.2.0
    - nose==1.3.7
    - packaging==24.2
    - pluggy==1.5.0
    - pytest==8.3.5
    - tomli==2.2.1
prefix: /opt/conda/envs/SPF2IP
[ "test_SPF2IP.py::SPF2IPTestCases::test_included_list_is_string_list", "test_SPF2IP.py::SPF2IPTestCases::test_included_loop", "test_SPF2IP.py::SPF2IPTestCases::test_ip4_results", "test_SPF2IP.py::SPF2IPTestCases::test_ip6_results", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_ip4", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_ip6", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_a", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_a_external", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_a_external_slash", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_a_slash", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_aaaa", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_aaaa_external", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_aaaa_external_slash", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_aaaa_slash", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_mx_a", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_mx_a_external", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_mx_a_external_slash", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_mx_a_slash", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_mx_aaaa", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_mx_aaaa_external", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_mx_aaaa_external_longslash", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_mx_aaaa_slash", "test_SPF2IP.py::SPF2IPTestCases::test_spf_list_is_string_list_with_prefix", "test_SPF2IP.py::SPF2IPTestCases::test_spf_list_split_spf" ]
[]
[ "test_SPF2IP.py::SPF2IPTestCases::test_dns_query_method_output", "test_SPF2IP.py::SPF2IPTestCases::test_domain_without_spf_results", "test_SPF2IP.py::SPF2IPTestCases::test_included_invalid_spf", "test_SPF2IP.py::SPF2IPTestCases::test_included_without_includes", "test_SPF2IP.py::SPF2IPTestCases::test_included_without_spf", "test_SPF2IP.py::SPF2IPTestCases::test_nonexistent_domain_results", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_empty", "test_SPF2IP.py::SPF2IPTestCases::test_single_domain_with_mx_a_external_longslash", "test_SPF2IP.py::SPF2IPTestCases::test_spf_list_invalid_spf", "test_SPF2IP.py::SPF2IPTestCases::test_spf_list_without_spf" ]
[]
MIT License
2,065
[ "setup.py", "SPF2IP.py" ]
[ "setup.py", "SPF2IP.py" ]
PyCQA__pyflakes-325
8a1feac08dae2478e3f67ab4018af86ff4ec56f0
2018-01-19 22:28:40
8a1feac08dae2478e3f67ab4018af86ff4ec56f0
myint: I still don't see anything wrong with this. I'll merge it.
diff --git a/pyflakes/checker.py b/pyflakes/checker.py
index baef833..9b39e6c 100644
--- a/pyflakes/checker.py
+++ b/pyflakes/checker.py
@@ -710,10 +710,14 @@ class Checker(object):
 
         # try enclosing function scopes and global scope
        for scope in self.scopeStack[-1::-1]:
-            # only generators used in a class scope can access the names
-            # of the class. this is skipped during the first iteration
-            if in_generators is False and isinstance(scope, ClassScope):
-                continue
+            if isinstance(scope, ClassScope):
+                if not PY2 and name == '__class__':
+                    return
+                elif in_generators is False:
+                    # only generators used in a class scope can access the
+                    # names of the class. this is skipped during the first
+                    # iteration
+                    continue
             try:
                 scope[name].used = (self.scope, node)
On Python 3, __class__ can be used without self

*Original report by [icordasc](https://launchpad.net/~icordasc) (@sigmavirus24?) on [Launchpad](https://bugs.launchpad.net/bugs/1487725):*

------------------------------------

On Python 3, the following code raises a warning:

```python
class Test(object):
    def __init__(self):
        print(__class__.__name__)
        self.x = 1

t = Test()
```

This is actually valid Python 3 code, but PyFlakes doesn't quite understand that:

test.py:3: undefined name '__class__'
PyCQA/pyflakes
diff --git a/pyflakes/test/test_undefined_names.py b/pyflakes/test/test_undefined_names.py
index 3d19210..d53e529 100644
--- a/pyflakes/test/test_undefined_names.py
+++ b/pyflakes/test/test_undefined_names.py
@@ -799,6 +799,24 @@ class Test(TestCase):
         any(lambda: id(y) for x in range(10))
         ''', m.UndefinedName)
 
+    def test_dunderClass(self):
+        """
+        `__class__` is defined in class scope under Python 3, but is not
+        in Python 2.
+        """
+        code = '''
+        class Test(object):
+            def __init__(self):
+                print(__class__.__name__)
+                self.x = 1
+
+        t = Test()
+        '''
+        if version_info < (3,):
+            self.flakes(code, m.UndefinedName)
+        else:
+            self.flakes(code)
+
 
 class NameTests(TestCase):
     """
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
1.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "flake8", "pep8", "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
flake8==5.0.4
importlib-metadata==4.2.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mccabe==0.7.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pep8==1.7.1
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.9.1
-e git+https://github.com/PyCQA/pyflakes.git@8a1feac08dae2478e3f67ab4018af86ff4ec56f0#egg=pyflakes
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: pyflakes
channels:
  - defaults
  - https://repo.anaconda.com/pkgs/main
  - https://repo.anaconda.com/pkgs/r
  - conda-forge
dependencies:
  - _libgcc_mutex=0.1=main
  - _openmp_mutex=5.1=1_gnu
  - attrs=21.4.0=pyhd3eb1b0_0
  - ca-certificates=2025.2.25=h06a4308_0
  - certifi=2021.5.30=py36h06a4308_0
  - importlib_metadata=4.8.1=hd3eb1b0_0
  - iniconfig=1.1.1=pyhd3eb1b0_0
  - ld_impl_linux-64=2.40=h12ee557_0
  - libffi=3.3=he6710b0_2
  - libgcc-ng=11.2.0=h1234567_1
  - libgomp=11.2.0=h1234567_1
  - libstdcxx-ng=11.2.0=h1234567_1
  - more-itertools=8.12.0=pyhd3eb1b0_0
  - ncurses=6.4=h6a678d5_0
  - openssl=1.1.1w=h7f8727e_0
  - packaging=21.3=pyhd3eb1b0_0
  - pip=21.2.2=py36h06a4308_0
  - pluggy=0.13.1=py36h06a4308_0
  - py=1.11.0=pyhd3eb1b0_0
  - pyparsing=3.0.4=pyhd3eb1b0_0
  - pytest=6.2.4=py36h06a4308_2
  - python=3.6.13=h12debd9_1
  - readline=8.2=h5eee18b_0
  - setuptools=58.0.4=py36h06a4308_0
  - sqlite=3.45.3=h5eee18b_0
  - tk=8.6.14=h39e8969_0
  - toml=0.10.2=pyhd3eb1b0_0
  - typing_extensions=4.1.1=pyh06a4308_0
  - wheel=0.37.1=pyhd3eb1b0_0
  - xz=5.6.4=h5eee18b_1
  - zipp=3.6.0=pyhd3eb1b0_0
  - zlib=1.2.13=h5eee18b_1
  - pip:
    - flake8==5.0.4
    - importlib-metadata==4.2.0
    - mccabe==0.7.0
    - pep8==1.7.1
    - pycodestyle==2.9.1
prefix: /opt/conda/envs/pyflakes
[ "pyflakes/test/test_undefined_names.py::Test::test_dunderClass" ]
[]
[ "pyflakes/test/test_undefined_names.py::Test::test_annotationUndefined", "pyflakes/test/test_undefined_names.py::Test::test_badNestedClass", "pyflakes/test/test_undefined_names.py::Test::test_builtinWindowsError", "pyflakes/test/test_undefined_names.py::Test::test_builtins", "pyflakes/test/test_undefined_names.py::Test::test_definedAsStarArgs", "pyflakes/test/test_undefined_names.py::Test::test_definedAsStarUnpack", "pyflakes/test/test_undefined_names.py::Test::test_definedByGlobal", "pyflakes/test/test_undefined_names.py::Test::test_definedByGlobalMultipleNames", "pyflakes/test/test_undefined_names.py::Test::test_definedFromLambdaInDictionaryComprehension", "pyflakes/test/test_undefined_names.py::Test::test_definedFromLambdaInGenerator", "pyflakes/test/test_undefined_names.py::Test::test_definedInClass", "pyflakes/test/test_undefined_names.py::Test::test_definedInClassNested", "pyflakes/test/test_undefined_names.py::Test::test_definedInGenExp", "pyflakes/test/test_undefined_names.py::Test::test_definedInListComp", "pyflakes/test/test_undefined_names.py::Test::test_del", "pyflakes/test/test_undefined_names.py::Test::test_delConditional", "pyflakes/test/test_undefined_names.py::Test::test_delConditionalNested", "pyflakes/test/test_undefined_names.py::Test::test_delExceptionInExcept", "pyflakes/test/test_undefined_names.py::Test::test_delGlobal", "pyflakes/test/test_undefined_names.py::Test::test_delUndefined", "pyflakes/test/test_undefined_names.py::Test::test_delWhile", "pyflakes/test/test_undefined_names.py::Test::test_delWhileNested", "pyflakes/test/test_undefined_names.py::Test::test_delWhileTestUsage", "pyflakes/test/test_undefined_names.py::Test::test_doubleNestingReportsClosestName", "pyflakes/test/test_undefined_names.py::Test::test_functionsNeedGlobalScope", "pyflakes/test/test_undefined_names.py::Test::test_globalFromNestedScope", "pyflakes/test/test_undefined_names.py::Test::test_globalImportStar", "pyflakes/test/test_undefined_names.py::Test::test_globalInGlobalScope", "pyflakes/test/test_undefined_names.py::Test::test_global_reset_name_only", "pyflakes/test/test_undefined_names.py::Test::test_intermediateClassScopeIgnored", "pyflakes/test/test_undefined_names.py::Test::test_keywordOnlyArgs", "pyflakes/test/test_undefined_names.py::Test::test_keywordOnlyArgsUndefined", "pyflakes/test/test_undefined_names.py::Test::test_laterRedefinedGlobalFromNestedScope", "pyflakes/test/test_undefined_names.py::Test::test_laterRedefinedGlobalFromNestedScope2", "pyflakes/test/test_undefined_names.py::Test::test_laterRedefinedGlobalFromNestedScope3", "pyflakes/test/test_undefined_names.py::Test::test_magicGlobalsBuiltins", "pyflakes/test/test_undefined_names.py::Test::test_magicGlobalsFile", "pyflakes/test/test_undefined_names.py::Test::test_magicGlobalsName", "pyflakes/test/test_undefined_names.py::Test::test_magicGlobalsPath", "pyflakes/test/test_undefined_names.py::Test::test_metaClassUndefined", "pyflakes/test/test_undefined_names.py::Test::test_namesDeclaredInExceptBlocks", "pyflakes/test/test_undefined_names.py::Test::test_nestedClass", "pyflakes/test/test_undefined_names.py::Test::test_undefined", "pyflakes/test/test_undefined_names.py::Test::test_undefinedAugmentedAssignment", "pyflakes/test/test_undefined_names.py::Test::test_undefinedExceptionName", "pyflakes/test/test_undefined_names.py::Test::test_undefinedExceptionNameObscuringGlobalVariableFalsePositive1", "pyflakes/test/test_undefined_names.py::Test::test_undefinedExceptionNameObscuringGlobalVariableFalsePositive2", 
"pyflakes/test/test_undefined_names.py::Test::test_undefinedExceptionNameObscuringLocalVariable2", "pyflakes/test/test_undefined_names.py::Test::test_undefinedExceptionNameObscuringLocalVariableFalsePositive1", "pyflakes/test/test_undefined_names.py::Test::test_undefinedExceptionNameObscuringLocalVariableFalsePositive2", "pyflakes/test/test_undefined_names.py::Test::test_undefinedFromLambdaInComprehension", "pyflakes/test/test_undefined_names.py::Test::test_undefinedFromLambdaInDictionaryComprehension", "pyflakes/test/test_undefined_names.py::Test::test_undefinedInGenExpNested", "pyflakes/test/test_undefined_names.py::Test::test_undefinedInListComp", "pyflakes/test/test_undefined_names.py::Test::test_undefinedInLoop", "pyflakes/test/test_undefined_names.py::Test::test_undefinedWithErrorHandler", "pyflakes/test/test_undefined_names.py::Test::test_unusedAsStarUnpack", "pyflakes/test/test_undefined_names.py::Test::test_usedAsStarUnpack", "pyflakes/test/test_undefined_names.py::NameTests::test_impossibleContext" ]
[]
MIT License
2,066
[ "pyflakes/checker.py" ]
[ "pyflakes/checker.py" ]
python-cmd2__cmd2-257
7b564b4424accfbd7439de10a169d9b64bc599c5
2018-01-20 18:19:44
ddfd3d9a400ae81468e9abcc89fe690c30b7ec7f
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c017ff36..24c11dec 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,33 +1,40 @@
 ## 0.8.0 (TBD, 2018)
 * Bug Fixes
     * Fixed unit tests on Python 3.7 due to changes in how re.escape() behaves in Python 3.7
+    * Fixed a bug where unknown commands were getting saved in the history
 * Enhancements
     * Three new decorators for **do_*** commands to make argument parsing easier
         * **with_argument_list** decorator to change argument type from str to List[str]
             * **do_*** commands get a single argument which is a list of strings, as pre-parsed by shlex.split()
         * **with_argument_parser** decorator for strict argparse-based argument parsing of command arguments
             * **do_*** commands get a single argument which is the output of argparse.parse_args()
-        * **with_argparser_and_unknown_args** decorator for argparse-based argument parsing, but allowing unknown args
+        * **with_argparser_and_unknown_args** decorator for argparse-based argument parsing, but allows unknown args
             * **do_*** commands get two arguments, the output of argparse.parse_known_args()
-    * See the **Argument Processing** section of the documentation for more information on these decorators
-        * Alternatively, see the **argparse_example.py** and **arg_print.py** examples
+    * See the [Argument Processing](http://cmd2.readthedocs.io/en/latest/argument_processing.html) section of the documentation for more information on these decorators
+        * Alternatively, see the [argparse_example.py](https://github.com/python-cmd2/cmd2/blob/master/examples/argpasre_example.py)
+          and [arg_print.py](https://github.com/python-cmd2/cmd2/blob/master/examples/arg_print.py) examples
+    * Added support for Argpasre sub-commands when using the **with_argument_parser** or **with_argparser_and_unknown_args** decorators
+        * See [subcommands.py](https://github.com/python-cmd2/cmd2/blob/master/examples/subcommands.py) for an example of how to use subcommands
+        * Tab-completion of sub-command names is automatically supported
     * The **__relative_load** command is now hidden from the help menu by default
         * This command is not intended to be called from the command line, only from within scripts
     * The **set** command now has an additional **-a/--all** option to also display read-only settings
-    * The **history** command can now run, edit, and save prior commands, in addition to the prior behavior of displaying prior commands.
+    * The **history** command can now run, edit, and save prior commands, in addition to displaying prior commands.
+    * The **history** command can now automatically generate a transcript file for regression testing
+        * This feature works imperfectly at the moment, but it is still quite useful
 * Commands Removed
     * The **cmdenvironment** has been removed and its functionality incorporated into the **-a/--all** argument to **set**
     * The **show** command has been removed. Its functionality has always existing within **set** and continues to do so
-    * The **save** command has been removed. The capability to save prior commands is now part of the **history** command.
+    * The **save** command has been removed. The capability to save commands is now part of the **history** command.
     * The **run** command has been removed. The capability to run prior commands is now part of the **history** command.
 * Other changes
     * The **edit** command no longer allows you to edit prior commands. The capability to edit prior commands is now part of the **history** command. The **edit** command still allows you to edit arbitrary files.
     * the **autorun_on_edit** setting has been removed.
+    * For Python 3.4 and earlier, ``cmd2`` now has an additional dependency on the ``contextlib2`` module
 * Deprecations
     * The old **options** decorator for optparse-based argument parsing is now *deprecated*
-        * The old decorator is still present for now, but will eventually be removed in a future release
-    * ``cmd2`` no longer includes **optparse.make_option** so if your app needs it you need to import it directly from optparse
-
+        * The old decorator is still present for now, but will be removed in a future release
+    * ``cmd2`` no longer includes **optparse.make_option**, so if your app needs it import directly from optparse
 
 ## 0.7.9 (January 4, 2018)
diff --git a/README.md b/README.md
index 70fde5a8..f6fa6536 100755
--- a/README.md
+++ b/README.md
@@ -29,11 +29,11 @@ Main Features
 - Multi-line, case-insensitive, and abbreviated commands
 - Special-character command shortcuts (beyond cmd's `@` and `!`)
 - Settable environment parameters
-- Parsing commands with arguments using `argparse`
+- Parsing commands with arguments using `argparse`, including support for sub-commands
 - Unicode character support (*Python 3 only*)
-- Good tab-completion of commands, file system paths, and shell commands
+- Good tab-completion of commands, sub-commands, file system paths, and shell commands
 - Python 2.7 and 3.4+ support
-- Linux, macOS and Windows support
+- Windows, macOS, and Linux support
 - Trivial to provide built-in help for all commands
 - Built-in regression testing framework for your applications (transcript-based testing)
 
@@ -48,8 +48,9 @@ pip install -U cmd2
 ```
 
 cmd2 works with Python 2.7 and Python 3.4+ on Windows, macOS, and Linux. It is pure Python code with the only 3rd-party dependencies being on [six](https://pypi.python.org/pypi/six),
-[pyparsing](http://pyparsing.wikispaces.com), and [pyperclip](https://github.com/asweigart/pyperclip)
-(on Windows, [pyreadline](https://pypi.python.org/pypi/pyreadline) is an additional dependency).
+[pyparsing](http://pyparsing.wikispaces.com), and [pyperclip](https://github.com/asweigart/pyperclip).
+Windows has an additional dependency on [pyreadline](https://pypi.python.org/pypi/pyreadline) and Python
+3.4 and earlier have an additional dependency on [contextlib2](https://pypi.python.org/pypi/contextlib2).
 
 For information on other installation options, see
 [Installation Instructions](https://cmd2.readthedocs.io/en/latest/install.html) in the cmd2
diff --git a/cmd2.py b/cmd2.py
index 378ac097..e77f4557 100755
--- a/cmd2.py
+++ b/cmd2.py
@@ -73,6 +73,12 @@ try:
 except ImportError:
     import subprocess
 
+# Python 3.4 and earlier require contextlib2 for temporarily redirecting stderr and stdout
+if sys.version_info < (3, 5):
+    from contextlib2 import redirect_stdout, redirect_stderr
+else:
+    from contextlib import redirect_stdout, redirect_stderr
+
 # Detect whether IPython is installed to determine if the built-in "ipy" command should be included
 ipython_available = True
 try:
@@ -106,7 +112,7 @@ if six.PY2 and sys.platform.startswith('lin'):
     except ImportError:
         pass
 
-__version__ = '0.8.0a'
+__version__ = '0.8.0'
 
 # Pyparsing enablePackrat() can greatly speed up parsing, but problems have been seen in Python 3 in the past
 pyparsing.ParserElement.enablePackrat()
@@ -272,10 +278,13 @@ def with_argument_list(func):
     return cmd_wrapper
 
 
-def with_argparser_and_unknown_args(argparser):
-    """A decorator to alter a cmd2 method to populate its ``args``
-    argument by parsing arguments with the given instance of
-    argparse.ArgumentParser, but also returning unknown args as a list.
+def with_argparser_and_unknown_args(argparser, subcommand_names=None):
+    """A decorator to alter a cmd2 method to populate its ``args`` argument by parsing arguments with the given
+    instance of argparse.ArgumentParser, but also returning unknown args as a list.
+
+    :param argparser: argparse.ArgumentParser - given instance of ArgumentParser
+    :param subcommand_names: List[str] - list of subcommand names for this parser (used for tab-completion)
+    :return: function that gets passed parsed args and a list of unknown args
     """
     def arg_decorator(func):
        def cmd_wrapper(instance, cmdline):
@@ -292,14 +301,26 @@
             argparser.description = func.__doc__
 
         cmd_wrapper.__doc__ = argparser.format_help()
+
+        # Mark this function as having an argparse ArgumentParser (used by do_help)
+        cmd_wrapper.__dict__['has_parser'] = True
+
+        # If there are subcommands, store their names to support tab-completion of subcommand names
+        if subcommand_names is not None:
+            cmd_wrapper.__dict__['subcommand_names'] = subcommand_names
+
         return cmd_wrapper
+
     return arg_decorator
 
 
-def with_argument_parser(argparser):
-    """A decorator to alter a cmd2 method to populate its ``args``
-    argument by parsing arguments with the given instance of
-    argparse.ArgumentParser.
+def with_argparser(argparser, subcommand_names=None):
+    """A decorator to alter a cmd2 method to populate its ``args`` argument by parsing arguments
+    with the given instance of argparse.ArgumentParser.
+
+    :param argparser: argparse.ArgumentParser - given instance of ArgumentParser
+    :param subcommand_names: List[str] - list of subcommand names for this parser (used for tab-completion)
+    :return: function that gets passed parsed args
     """
     def arg_decorator(func):
         def cmd_wrapper(instance, cmdline):
@@ -316,7 +337,16 @@
             argparser.description = func.__doc__
 
         cmd_wrapper.__doc__ = argparser.format_help()
+
+        # Mark this function as having an argparse ArgumentParser (used by do_help)
+        cmd_wrapper.__dict__['has_parser'] = True
+
+        # If there are subcommands, store their names to support tab-completion of subcommand names
+        if subcommand_names is not None:
+            cmd_wrapper.__dict__['subcommand_names'] = subcommand_names
+
         return cmd_wrapper
+
     return arg_decorator
 
 
@@ -644,6 +674,9 @@
         # Used when piping command output to a shell command
         self.pipe_proc = None
 
+        # Used by complete() for readline tab completion
+        self.completion_matches = []
+
     # ----- Methods related to presenting output to the user -----
 
 
@@ -733,7 +766,7 @@
     # noinspection PyMethodOverriding
     def completenames(self, text, line, begidx, endidx):
-        """Override of cmd2 method which completes command names both for command completion and help."""
+        """Override of cmd method which completes command names both for command completion and help."""
         command = text
         if self.case_insensitive:
             command = text.lower()
 
@@ -747,6 +780,91 @@
         return cmd_completion
 
+    # noinspection PyUnusedLocal
+    def complete_subcommand(self, text, line, begidx, endidx):
+        """Readline tab-completion method for completing argparse sub-command names."""
+        command, args, foo = self.parseline(line)
+        arglist = args.split()
+
+        if len(arglist) <= 1 and command + ' ' + args == line:
+            funcname = self._func_named(command)
+            if funcname:
+                # Check to see if this function was decorated with an argparse ArgumentParser
+                func = getattr(self, funcname)
+                subcommand_names = func.__dict__.get('subcommand_names', None)
+
+                # If this command has subcommands
+                if subcommand_names is not None:
+                    arg = ''
+                    if arglist:
+                        arg = arglist[0]
+
+                    matches = [sc for sc in subcommand_names if sc.startswith(arg)]
+
+                    # If completing the sub-command name and get exactly 1 result and are at end of line, add a space
+                    if len(matches) == 1 and endidx == len(line):
+                        matches[0] += ' '
+                    return matches
+
+        return []
+
+    def complete(self, text, state):
+        """Override of command method which returns the next possible completion for 'text'.
+
+        If a command has not been entered, then complete against command list.
+        Otherwise try to call complete_<command> to get list of completions.
+
+        This method gets called directly by readline because it is set as the tab-completion function.
+
+        This completer function is called as complete(text, state), for state in 0, 1, 2, …, until it returns a
+        non-string value. It should return the next possible completion starting with text.
+
+        :param text: str - the current word that user is typing
+        :param state: int - non-negative integer
+        """
+        if state == 0:
+            import readline
+            origline = readline.get_line_buffer()
+            line = origline.lstrip()
+            stripped = len(origline) - len(line)
+            begidx = readline.get_begidx() - stripped
+            endidx = readline.get_endidx() - stripped
+            if begidx > 0:
+                command, args, foo = self.parseline(line)
+                if command == '':
+                    compfunc = self.completedefault
+                else:
+                    arglist = args.split()
+
+                    compfunc = None
+                    # If the user has entered no more than a single argument after the command name
+                    if len(arglist) <= 1 and command + ' ' + args == line:
+                        funcname = self._func_named(command)
+                        if funcname:
+                            # Check to see if this function was decorated with an argparse ArgumentParser
+                            func = getattr(self, funcname)
+                            subcommand_names = func.__dict__.get('subcommand_names', None)
+
+                            # If this command has subcommands
+                            if subcommand_names is not None:
+                                compfunc = self.complete_subcommand
+
+                    if compfunc is None:
+                        # This command either doesn't have sub-commands or the user is past the point of entering one
+                        try:
+                            compfunc = getattr(self, 'complete_' + command)
+                        except AttributeError:
+                            compfunc = self.completedefault
+            else:
+                compfunc = self.completenames
+
+            self.completion_matches = compfunc(text, line, begidx, endidx)
+
+        try:
+            return self.completion_matches[state]
+        except IndexError:
+            return None
+
     def precmd(self, statement):
         """Hook method executed just before the command is processed by
         ``onecmd()`` and after adding it to the history.
@@ -854,8 +972,7 @@
         (stop, statement) = self.postparsing_precmd(statement)
         if stop:
             return self.postparsing_postcmd(stop)
-        if statement.parsed.command not in self.excludeFromHistory:
-            self.history.append(statement.parsed.raw)
+
         try:
             if self.allow_redirection:
                 self._redirect_output(statement)
@@ -904,7 +1021,11 @@
         self.cmdqueue = list(cmds) + self.cmdqueue
         try:
             while self.cmdqueue and not stop:
-                stop = self.onecmd_plus_hooks(self.cmdqueue.pop(0))
+                line = self.cmdqueue.pop(0)
+                if self.echo and line != 'eos':
+                    self.poutput('{}{}'.format(self.prompt, line))
+
+                stop = self.onecmd_plus_hooks(line)
         finally:
             # Clear out the command queue and script directory stack, just in
             # case we hit an error and they were not completed.
@@ -1046,6 +1167,10 @@
         if not funcname:
             return self.default(statement)
 
+        # Since we have a valid command store it in the history
+        if statement.parsed.command not in self.excludeFromHistory:
+            self.history.append(statement.parsed.raw)
+
         try:
             func = getattr(self, funcname)
         except AttributeError:
@@ -1198,8 +1323,20 @@
             # Getting help for a specific command
             funcname = self._func_named(arglist[0])
             if funcname:
-                # No special behavior needed, delegate to cmd base class do_help()
-                cmd.Cmd.do_help(self, funcname[3:])
+                # Check to see if this function was decorated with an argparse ArgumentParser
+                func = getattr(self, funcname)
+                if func.__dict__.get('has_parser', False):
+                    # Function has an argparser, so get help based on all the arguments in case there are sub-commands
+                    new_arglist = arglist[1:]
+                    new_arglist.append('-h')
+
+                    # Temporarily redirect all argparse output to both sys.stdout and sys.stderr to self.stdout
+                    with redirect_stdout(self.stdout):
+                        with redirect_stderr(self.stdout):
+                            func(new_arglist)
+                else:
+                    # No special behavior needed, delegate to cmd base class do_help()
+                    cmd.Cmd.do_help(self, funcname[3:])
         else:
             # Show a menu of what commands help can be gotten for
             self._help_menu()
@@ -1340,7 +1477,7 @@
     set_parser.add_argument('-l', '--long', action='store_true', help='describe function of parameter')
     set_parser.add_argument('settable', nargs='*', help='[param_name] [value]')
 
-    @with_argument_parser(set_parser)
+    @with_argparser(set_parser)
     def do_set(self, args):
         """Sets a settable parameter or shows current settings of parameters.
 
@@ -1692,8 +1829,9 @@
     history_parser_group.add_argument('-r', '--run', action='store_true', help='run selected history items')
     history_parser_group.add_argument('-e', '--edit', action='store_true', help='edit and then run selected history items')
-    history_parser_group.add_argument('-o', '--output-file', metavar='FILE', help='output to file')
-    history_parser.add_argument('-s', '--script', action='store_true', help='script format; no separation lines')
+    history_parser_group.add_argument('-s', '--script', action='store_true', help='script format; no separation lines')
+    history_parser_group.add_argument('-o', '--output-file', metavar='FILE', help='output commands to a script file')
+    history_parser_group.add_argument('-t', '--transcript', help='output commands and results to a transcript file')
     _history_arg_help = """empty all history items
 a one history item by number
 a..b, a:b, a:, ..b items by indices (inclusive)
 anything else string search
 /regex/ items matching regular expression"""
     history_parser.add_argument('arg', nargs='?', help=_history_arg_help)
 
-    @with_argument_parser(history_parser)
+    @with_argparser(history_parser)
     def do_history(self, args):
         """View, run, edit, and save previously entered commands."""
         # If an argument was supplied, then retrieve partial contents of the history
@@ -1722,7 +1860,8 @@
         else:
             # If no arg given, then retrieve the entire history
             cowardly_refuse_to_run = True
-            history = self.history
+            # Get a copy of the history so it doesn't get mutated while we are using it
+            history = self.history[:]
 
         if args.run:
             if cowardly_refuse_to_run:
@@ -1755,6 +1894,28 @@
                 self.pfeedback('{} command{} saved to {}'.format(len(history), plural, args.output_file))
             except Exception as e:
                 self.perror('Saving {!r} - {}'.format(args.output_file, e), traceback_war=False)
+        elif args.transcript:
+            # Make sure echo is on so commands print to standard out
+            saved_echo = self.echo
+            self.echo = True
+
+            # Redirect stdout to the transcript file
+            saved_self_stdout = self.stdout
+            self.stdout = open(args.transcript, 'w')
+
+            # Run all of the commands in the history with output redirected to transcript and echo on
+            self.runcmds_plus_hooks(history)
+
+            # Restore stdout to its original state
+            self.stdout.close()
+            self.stdout = saved_self_stdout
+
+            # Set echo back to its original state
+            self.echo = saved_echo
+
+            plural = 's' if len(history) > 1 else ''
+            self.pfeedback('{} command{} and outputs saved to transcript file {!r}'.format(len(history), plural,
+                                                                                           args.transcript))
         else:
             # Display the history items retrieved
             for hi in history:
diff --git a/docs/argument_processing.rst b/docs/argument_processing.rst
index cc08e4e5..2a433bc7 100644
--- a/docs/argument_processing.rst
+++ b/docs/argument_processing.rst
@@ -13,7 +13,7 @@ Argument Processing
    4. Adds the usage message from the argument parser to your command.
    5. Checks if the ``-h/--help`` option is present, and if so, display the help message for the command
 
-These features are all provided by the ``@with_argument_parser`` decorator.
+These features are all provided by the ``@with_argparser`` decorator.
 
 Using the argument parser decorator
 ===================================
@@ -21,7 +21,7 @@ Using the argument parser decorator
 For each command in the ``cmd2`` subclass which requires argument parsing,
 create an instance of ``argparse.ArgumentParser()`` which can parse the
 input appropriately for the command. Then decorate the command method with
-the ``@with_argument_parser`` decorator, passing the argument parser as the
+the ``@with_argparser`` decorator, passing the argument parser as the
 first parameter to the decorator. This changes the second argumen to the
 command method, which will contain the results of ``ArgumentParser.parse_args()``.
@@ -33,7 +33,7 @@ Here's what it looks like::
 
     argparser = argparse.ArgumentParser()
     argparser.add_argument('-r', '--repeat', type=int, help='output [n] times')
     argparser.add_argument('word', nargs='?', help='word to say')
-    @with_argument_parser(argparser)
+    @with_argparser(argparser)
     def do_speak(self, opts)
         """Repeats what you tell me to."""
         arg = opts.word
@@ -47,7 +47,7 @@ Here's what it looks like::
 
 .. note::
 
-   The ``@with_argument_parser`` decorator sets the ``prog`` variable in
+   The ``@with_argparser`` decorator sets the ``prog`` variable in
    the argument parser based on the name of the method it is decorating.
    This will override anything you specify in ``prog`` variable when
    creating the argument parser.
@@ -57,14 +57,14 @@ Help Messages
 =============
 
 By default, cmd2 uses the docstring of the command method when a user asks
-for help on the command. When you use the ``@with_argument_parser``
+for help on the command. When you use the ``@with_argparser``
 decorator, the docstring for the ``do_*`` method is used to set the
 description for the ``argparse.ArgumentParser`` is
 
 With this code::
 
     argparser = argparse.ArgumentParser()
     argparser.add_argument('tag', help='tag')
     argparser.add_argument('content', nargs='+', help='content to surround with tag')
-    @with_argument_parser(argparser)
+    @with_argparser(argparser)
     def do_tag(self, args):
         """create a html tag"""
         self.stdout.write('<{0}>{1}</{0}>'.format(args.tag, ' '.join(args.content)))
@@ -92,7 +92,7 @@ docstring on your method empty::
 
     argparser = argparse.ArgumentParser(description='create an html tag')
     argparser.add_argument('tag', help='tag')
     argparser.add_argument('content', nargs='+', help='content to surround with tag')
-    @with_argument_parser(argparser)
+    @with_argparser(argparser)
     def do_tag(self, args):
         self.stdout.write('<{0}>{1}</{0}>'.format(args.tag, ' '.join(args.content)))
         self.stdout.write('\n')
@@ -121,7 +121,7 @@ To add additional text to the end of the generated help message, use the ``epilo
     )
     argparser.add_argument('tag', help='tag')
     argparser.add_argument('content', nargs='+', help='content to surround with tag')
-    @with_argument_parser(argparser)
+    @with_argparser(argparser)
     def do_tag(self, args):
         self.stdout.write('<{0}>{1}</{0}>'.format(args.tag, ' '.join(args.content)))
         self.stdout.write('\n')
@@ -166,14 +166,14 @@ The default behavior of ``cmd2`` is to pass the user input directly to your
 Using the argument parser decorator and also receiving a a list of unknown positional arguments
 ===============================================================================================
 If you want all unknown arguments to be passed to your command as a list of strings, then
-decorate the command method with the ``@with_argparser_and_list`` decorator.
+decorate the command method with the ``@with_argparser_and_unknown_args`` decorator.
 
 Here's what it looks like::
 
     dir_parser = argparse.ArgumentParser()
     dir_parser.add_argument('-l', '--long', action='store_true', help="display in long format with one item per line")
 
-    @with_argparser_and_list(dir_parser)
+    @with_argparser_and_unknown_args(dir_parser)
     def do_dir(self, args, unknown):
         """List contents of current directory."""
         # No arguments for this command
@@ -188,6 +188,15 @@ Here's what it looks like::
 
     ...
 
+Sub-commands
+============
+Sub-commands are supported for commands using either the ``@with_argparser`` or
+``@with_argparser_and_unknown_args`` decorator. The syntax for supporting them is based on argparse sub-parsers.
+
+See the subcommands_ example to learn more about how to use sub-commands in your ``cmd2`` application.
+
+.. _subcommands: https://github.com/python-cmd2/cmd2/blob/master/examples/subcommands.py
+
 Deprecated optparse support
 ===========================
diff --git a/docs/conf.py b/docs/conf.py
index fd3e9476..d4ef14bf 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -60,9 +60,9 @@ author = 'Catherine Devlin and Todd Leonhardt'
 # built documents.
 #
 # The short X.Y version.
-version = '0.7'
+version = '0.8'
 # The full version, including alpha/beta/rc tags.
-release = '0.7.9'
+release = '0.8.0'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
diff --git a/docs/install.rst b/docs/install.rst
index 19cbdd78..2c247a3e 100644
--- a/docs/install.rst
+++ b/docs/install.rst
@@ -128,6 +128,11 @@ If you wish to permanently uninstall ``cmd2``, this can also easily be done with
 
     pip uninstall cmd2
 
+Extra requirement for Python 3.4 and earlier
+--------------------------------------------
+``cmd2`` requires the ``contextlib2`` module for Python 3.4 and earlier. This is used to temporarily redirect
+stdout and stderr.
+
 Extra requirement for Python 2.7 only
 -------------------------------------
 If you want to be able to pipe the output of commands to a shell command on Python 2.7, then you will need one
diff --git a/examples/arg_print.py b/examples/arg_print.py
index 1b18cdf0..8b02bc51 100755
--- a/examples/arg_print.py
+++ b/examples/arg_print.py
@@ -14,7 +14,7 @@ import argparse
 import cmd2
 import pyparsing
 
-from cmd2 import with_argument_list, with_argument_parser, with_argparser_and_unknown_args
+from cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args
 
 
 class ArgumentAndOptionPrinter(cmd2.Cmd):
@@ -47,7 +47,7 @@ class ArgumentAndOptionPrinter(cmd2.Cmd):
     oprint_parser.add_argument('-r', '--repeat', type=int, help='output [n] times')
     oprint_parser.add_argument('words', nargs='+', help='words to print')
 
-    @with_argument_parser(oprint_parser)
+    @with_argparser(oprint_parser)
     def do_oprint(self, args):
         """Print the options and argument list this options command was called with."""
         print('oprint was called with the following\n\toptions: {!r}'.format(args))
diff --git a/examples/argparse_example.py b/examples/argparse_example.py
index 9f6548de..fbb2b1dc 100755
--- a/examples/argparse_example.py
+++ b/examples/argparse_example.py
@@ -14,7 +14,7 @@ verifying that the output produced matches the transcript.
 import argparse
 import sys
 
-from cmd2 import Cmd, options, with_argument_parser, with_argument_list
+from cmd2 import Cmd, options, with_argparser, with_argument_list
 from optparse import make_option
 
 
@@ -47,7 +47,7 @@ class CmdLineApp(Cmd):
     speak_parser.add_argument('-r', '--repeat', type=int, help='output [n] times')
     speak_parser.add_argument('words', nargs='+', help='words to say')
 
-    @with_argument_parser(speak_parser)
+    @with_argparser(speak_parser)
     def do_speak(self, args):
         """Repeats what you tell me to."""
         words = []
@@ -68,7 +68,7 @@ class CmdLineApp(Cmd):
     tag_parser.add_argument('tag', help='tag')
     tag_parser.add_argument('content', nargs='+', help='content to surround with tag')
 
-    @with_argument_parser(tag_parser)
+    @with_argparser(tag_parser)
     def do_tag(self, args):
         """create a html tag"""
         self.poutput('<{0}>{1}</{0}>'.format(args.tag, ' '.join(args.content)))
diff --git a/examples/example.py b/examples/example.py
index 4ba0d29a..c66f0e60 100755
--- a/examples/example.py
+++ b/examples/example.py
@@ -14,7 +14,7 @@ the transcript.
 import random
 import argparse
 
-from cmd2 import Cmd, with_argument_parser
+from cmd2 import Cmd, with_argparser
 
 
 class CmdLineApp(Cmd):
@@ -44,7 +44,7 @@ class CmdLineApp(Cmd):
     speak_parser.add_argument('-r', '--repeat', type=int, help='output [n] times')
     speak_parser.add_argument('words', nargs='+', help='words to say')
 
-    @with_argument_parser(speak_parser)
+    @with_argparser(speak_parser)
     def do_speak(self, args):
         """Repeats what you tell me to."""
         words = []
@@ -66,7 +66,7 @@ class CmdLineApp(Cmd):
     mumble_parser.add_argument('-r', '--repeat', type=int, help='how many times to repeat')
     mumble_parser.add_argument('words', nargs='+', help='words to say')
 
-    @with_argument_parser(mumble_parser)
+    @with_argparser(mumble_parser)
     def do_mumble(self, args):
         """Mumbles what you tell me to."""
         repetitions = args.repeat or 1
diff --git a/examples/pirate.py b/examples/pirate.py
index dd9fd98c..cfe545d6 100755
--- a/examples/pirate.py
+++ b/examples/pirate.py
@@ -7,7 +7,7 @@ presented as part of her PyCon 2010 talk.
 It demonstrates many features of cmd2.
 """
 import argparse
-from cmd2 import Cmd, with_argument_parser
+from cmd2 import Cmd, with_argparser
 
 
 class Pirate(Cmd):
@@ -25,13 +25,13 @@ class Pirate(Cmd):
         """Initialize the base class as well as this one"""
         Cmd.__init__(self)
         # prompts and defaults
-        self.gold = 3
+        self.gold = 0
         self.initial_gold = self.gold
         self.prompt = 'arrr> '
 
     def default(self, line):
         """This handles unknown commands."""
-        print('What mean ye by "{0}"?'.format(line))
+        self.poutput('What mean ye by "{0}"?'.format(line))
 
     def precmd(self, line):
         """Runs just before a command line is parsed, but after the prompt is presented."""
@@ -41,10 +41,10 @@ class Pirate(Cmd):
     def postcmd(self, stop, line):
         """Runs right before a command is about to return."""
         if self.gold != self.initial_gold:
-            print('Now we gots {0} doubloons'
+            self.poutput('Now we gots {0} doubloons'
                   .format(self.gold))
         if self.gold < 0:
-            print("Off to debtorrr's prison.")
+            self.poutput("Off to debtorrr's prison.")
             stop = True
         return stop
 
@@ -61,30 +61,30 @@ class Pirate(Cmd):
             self.gold -= int(arg)
         except ValueError:
             if arg:
-                print('''What's "{0}"? I'll take rrrum.'''.format(arg))
+                self.poutput('''What's "{0}"? I'll take rrrum.'''.format(arg))
             self.gold -= 1
 
     def do_quit(self, arg):
         """Quit the application gracefully."""
-        print("Quiterrr!")
+        self.poutput("Quiterrr!")
         return True
 
     def do_sing(self, arg):
         """Sing a colorful song."""
-        print(self.colorize(arg, self.songcolor))
+        self.poutput(self.colorize(arg, self.songcolor))
 
     yo_parser = argparse.ArgumentParser()
     yo_parser.add_argument('--ho', type=int, default=2, help="How often to chant 'ho'")
     yo_parser.add_argument('-c', '--commas', action='store_true', help='Intersperse commas')
     yo_parser.add_argument('beverage', help='beverage to drink with the chant')
 
-    @with_argument_parser(yo_parser)
+    @with_argparser(yo_parser)
    def do_yo(self, args):
         """Compose a yo-ho-ho type chant with flexible options."""
         chant = ['yo'] + ['ho'] * args.ho
         separator = ', ' if args.commas else ' '
         chant = separator.join(chant)
-        print('{0} and a bottle of {1}'.format(chant, args.beverage))
+        self.poutput('{0} and a bottle of {1}'.format(chant, args.beverage))
 
 
 if __name__ == '__main__':
diff --git a/examples/subcommands.py b/examples/subcommands.py
new file mode 100755
index 00000000..e77abc61
--- /dev/null
+++ b/examples/subcommands.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# coding=utf-8
+"""A simple example demonstrating how to use Argparse to support sub-commands.
+
+
+This example shows an easy way for a single command to have many subcommands, each of which takes different arguments
+and provides separate contextual help.
+"""
+import argparse
+
+import cmd2
+from cmd2 import with_argparser
+
+
+class SubcommandsExample(cmd2.Cmd):
+    """ Example cmd2 application where we a base command which has a couple subcommands."""
+
+    def __init__(self):
+        cmd2.Cmd.__init__(self)
+
+    # sub-command functions for the base command
+    def base_foo(self, args):
+        """foo subcommand of base command"""
+        self.poutput(args.x * args.y)
+
+    def base_bar(self, args):
+        """bar sucommand of base command"""
+        self.poutput('((%s))' % args.z)
+
+    # create the top-level parser for the base command
+    base_parser = argparse.ArgumentParser(prog='base')
+    base_subparsers = base_parser.add_subparsers(title='subcommands', help='subcommand help')
+
+    # create the parser for the "foo" sub-command
+    parser_foo = base_subparsers.add_parser('foo', help='foo help')
+    parser_foo.add_argument('-x', type=int, default=1, help='integer')
+    parser_foo.add_argument('y', type=float, help='float')
+    parser_foo.set_defaults(func=base_foo)
+
+    # create the parser for the "bar" sub-command
+    parser_bar = base_subparsers.add_parser('bar', help='bar help')
+    parser_bar.add_argument('z', help='string')
+    parser_bar.set_defaults(func=base_bar)
+
+    # Create a list of subcommand names, which is used to enable tab-completion of sub-commands
+    subcommands = ['foo', 'bar']
+
+    @with_argparser(base_parser, subcommands)
+    def do_base(self, args):
+        """Base command help"""
+        try:
+            # Call whatever sub-command function was selected
+            args.func(self, args)
+        except AttributeError:
+            # No sub-command was provided, so as called
+            self.do_help('base')
+
+
+if __name__ == '__main__':
+    app = SubcommandsExample()
+    app.cmdloop()
diff --git a/examples/exampleSession.txt b/examples/transcripts/exampleSession.txt
similarity index 100%
rename from examples/exampleSession.txt
rename to examples/transcripts/exampleSession.txt
diff --git a/examples/transcripts/pirate.transcript b/examples/transcripts/pirate.transcript
new file mode 100644
index 00000000..570f0cd7
--- /dev/null
+++ b/examples/transcripts/pirate.transcript
@@ -0,0 +1,10 @@
+arrr> loot
+Now we gots 1 doubloons
+arrr> loot
+Now we gots 2 doubloons
+arrr> loot
+Now we gots 3 doubloons
+arrr> drink 3
+Now we gots 0 doubloons
+arrr> yo --ho 3 rum
+yo ho ho ho and a bottle of rum
diff --git a/examples/transcript_regex.txt b/examples/transcripts/transcript_regex.txt
similarity index 100%
rename from examples/transcript_regex.txt
rename to examples/transcripts/transcript_regex.txt
diff --git a/setup.py b/setup.py
index 8d5b7619..58f8e4cd 100755
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,7 @@ Setuptools setup file, used to install or test 'cmd2'
 import sys
 from setuptools import setup
 
-VERSION = '0.8.0a'
+VERSION = '0.8.0'
 DESCRIPTION = "cmd2 - a tool for building interactive command line applications in Python"
 LONG_DESCRIPTION = """cmd2 is a tool for building interactive command line applications in Python. Its goal is to make it
 quick and easy for developers to build feature-rich and user-friendly interactive command line applications. It
@@ -62,9 +62,19 @@ Topic :: Software Development :: Libraries :: Python Modules
 """.splitlines())))
 
 INSTALL_REQUIRES = ['pyparsing >= 2.0.1', 'pyperclip', 'six']
+
+# Windows also requires pyreadline to ensure tab completion works
 if sys.platform.startswith('win'):
     INSTALL_REQUIRES += ['pyreadline']
 
+# Python 3.4 and earlier require contextlib2 for temporarily redirecting stderr and stdout
+if sys.version_info < (3, 5):
+    INSTALL_REQUIRES += ['contextlib2']
+
+# Python 2.7 also requires subprocess32
+if sys.version_info < (3, 0):
+    INSTALL_REQUIRES += ['subprocess32']
+
 # unittest.mock was added in Python 3.3. mock is a backport of unittest.mock to all versions of Python
 TESTS_REQUIRE = ['mock', 'pytest']
 DOCS_REQUIRE = ['sphinx', 'sphinx_rtd_theme', 'pyparsing', 'pyperclip', 'six']
Consider adding support for nested commands or subcommands

Many REPL frameworks support multi-level commands, where a top-level command has sub-commands. While this can be done with cmd2, there is currently no built-in support to make it easy. Consider adding built-in support so that nested commands or subcommands are easy to define.
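For reference, a minimal sketch of the pattern the patch adopts: an `argparse` parser with sub-parsers, each using `set_defaults(func=...)` to dispatch back through a single `do_base` command. The class and subcommand names here are illustrative, and the two-argument `@with_argparser(parser, subcommand_names)` form follows the 0.8.0 API used in the patch.

```python
import argparse

import cmd2
from cmd2 import with_argparser


class App(cmd2.Cmd):
    def base_foo(self, args):
        """foo subcommand of the base command"""
        self.poutput(args.x * args.y)

    # Top-level parser for the base command, with one sub-parser per subcommand
    base_parser = argparse.ArgumentParser(prog='base')
    subparsers = base_parser.add_subparsers(title='subcommands')
    parser_foo = subparsers.add_parser('foo', help='foo help')
    parser_foo.add_argument('-x', type=int, default=1)
    parser_foo.add_argument('y', type=float)
    parser_foo.set_defaults(func=base_foo)

    # The list of subcommand names enables tab-completion of subcommands
    @with_argparser(base_parser, ['foo'])
    def do_base(self, args):
        """Base command help"""
        try:
            args.func(self, args)  # dispatch to the selected subcommand
        except AttributeError:
            self.do_help('base')  # no subcommand was provided


if __name__ == '__main__':
    App().cmdloop()
```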
python-cmd2/cmd2
diff --git a/tests/conftest.py b/tests/conftest.py index 021af193..319e54fe 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -19,7 +19,7 @@ edit help history load py pyscript quit set shell shortcuts """ # Help text for the history command -HELP_HISTORY = """usage: history [-h] [-r | -e | -o FILE] [-s] [arg] +HELP_HISTORY = """usage: history [-h] [-r | -e | -s | -o FILE | -t TRANSCRIPT] [arg] View, run, edit, and save previously entered commands. @@ -34,10 +34,11 @@ optional arguments: -h, --help show this help message and exit -r, --run run selected history items -e, --edit edit and then run selected history items - -o FILE, --output-file FILE - output to file -s, --script script format; no separation lines - + -o FILE, --output-file FILE + output commands to a script file + -t TRANSCRIPT, --transcript TRANSCRIPT + output commands and results to a transcript file """ # Output from the shortcuts command with default built-in shortcuts diff --git a/tests/test_argparse.py b/tests/test_argparse.py index 21e81603..d3646046 100644 --- a/tests/test_argparse.py +++ b/tests/test_argparse.py @@ -8,6 +8,7 @@ import pytest import cmd2 from conftest import run_cmd, StdOut + class ArgparseApp(cmd2.Cmd): def __init__(self): self.maxrepeats = 3 @@ -19,7 +20,7 @@ class ArgparseApp(cmd2.Cmd): say_parser.add_argument('-r', '--repeat', type=int, help='output [n] times') say_parser.add_argument('words', nargs='+', help='words to say') - @cmd2.with_argument_parser(say_parser) + @cmd2.with_argparser(say_parser) def do_say(self, args): """Repeat what you tell me to.""" words = [] @@ -40,7 +41,7 @@ class ArgparseApp(cmd2.Cmd): tag_parser.add_argument('tag', help='tag') tag_parser.add_argument('content', nargs='+', help='content to surround with tag') - @cmd2.with_argument_parser(tag_parser) + @cmd2.with_argparser(tag_parser) def do_tag(self, args): self.stdout.write('<{0}>{1}</{0}>'.format(args.tag, ' '.join(args.content))) self.stdout.write('\n') @@ -162,3 +163,88 @@ def test_arglist(argparse_app): def test_arglist_decorator_twice(argparse_app): out = run_cmd(argparse_app, 'arglisttwice "we should" get these') assert out[0] == 'we should get these' + + +class SubcommandApp(cmd2.Cmd): + """ Example cmd2 application where we a base command which has a couple subcommands.""" + + def __init__(self): + cmd2.Cmd.__init__(self) + + # sub-command functions for the base command + def base_foo(self, args): + """foo subcommand of base command""" + self.poutput(args.x * args.y) + + def base_bar(self, args): + """bar sucommand of base command""" + self.poutput('((%s))' % args.z) + + # create the top-level parser for the base command + base_parser = argparse.ArgumentParser(prog='base') + base_subparsers = base_parser.add_subparsers(title='subcommands', help='subcommand help') + + # create the parser for the "foo" sub-command + parser_foo = base_subparsers.add_parser('foo', help='foo help') + parser_foo.add_argument('-x', type=int, default=1, help='integer') + parser_foo.add_argument('y', type=float, help='float') + parser_foo.set_defaults(func=base_foo) + + # create the parser for the "bar" sub-command + parser_bar = base_subparsers.add_parser('bar', help='bar help') + parser_bar.add_argument('z', help='string') + parser_bar.set_defaults(func=base_bar) + + # Create a list of subcommand names, which is used to enable tab-completion of sub-commands + subcommands = ['foo', 'bar'] + + @cmd2.with_argparser_and_unknown_args(base_parser, subcommands) + def do_base(self, args, arglist): + """Base command help""" + try: + # 
Call whatever sub-command function was selected + args.func(self, args) + except AttributeError: + # No sub-command was provided, so as called + self.do_help('base') + [email protected] +def subcommand_app(): + app = SubcommandApp() + app.stdout = StdOut() + return app + + +def test_subcommand_foo(subcommand_app): + out = run_cmd(subcommand_app, 'base foo -x2 5.0') + assert out == ['10.0'] + + +def test_subcommand_bar(subcommand_app): + out = run_cmd(subcommand_app, 'base bar baz') + assert out == ['((baz))'] + +def test_subcommand_invalid(subcommand_app, capsys): + run_cmd(subcommand_app, 'base baz') + out, err = capsys.readouterr() + err = err.splitlines() + assert err[0].startswith('usage: base') + assert err[1].startswith("base: error: invalid choice: 'baz'") + +def test_subcommand_base_help(subcommand_app): + out = run_cmd(subcommand_app, 'help base') + assert out[0].startswith('usage: base') + assert out[1] == '' + assert out[2] == 'Base command help' + +def test_subcommand_help(subcommand_app): + out = run_cmd(subcommand_app, 'help base foo') + assert out[0].startswith('usage: base foo') + assert out[1] == '' + assert out[2] == 'positional arguments:' + + +def test_subcommand_invalid_help(subcommand_app): + out = run_cmd(subcommand_app, 'help base baz') + assert out[0].startswith('usage: base') + assert out[1].startswith("base: error: invalid choice: 'baz'") diff --git a/tests/test_cmd2.py b/tests/test_cmd2.py index 30308dd7..186def65 100644 --- a/tests/test_cmd2.py +++ b/tests/test_cmd2.py @@ -25,7 +25,7 @@ from conftest import run_cmd, normalize, BASE_HELP, HELP_HISTORY, SHORTCUTS_TXT, def test_ver(): - assert cmd2.__version__ == '0.8.0a' + assert cmd2.__version__ == '0.8.0' def test_empty_statement(base_app): @@ -41,19 +41,24 @@ def test_base_help(base_app): def test_base_help_history(base_app): out = run_cmd(base_app, 'help history') - expected = normalize(HELP_HISTORY) - assert out == expected + assert out == normalize(HELP_HISTORY) def test_base_argparse_help(base_app, capsys): + # Verify that "set -h" gives the same output as "help set" and that it starts in a way that makes sense run_cmd(base_app, 'set -h') out, err = capsys.readouterr() - expected = run_cmd(base_app, 'help set') - assert normalize(base_app.do_set.__doc__ + str(err)) == expected + out1 = out.splitlines() + + out2 = run_cmd(base_app, 'help set') + + assert out1 == out2 + assert out1[0].startswith('usage: set') + assert out1[1] == '' + assert out1[2].startswith('Sets a settable parameter') def test_base_invalid_option(base_app, capsys): run_cmd(base_app, 'set -z') out, err = capsys.readouterr() - run_cmd(base_app, 'help set') expected = ['usage: set [-h] [-a] [-l] [settable [settable ...]]', 'set: error: unrecognized arguments: -z'] assert normalize(str(err)) == expected @@ -605,8 +610,7 @@ def test_input_redirection(base_app, request): # Verify that redirecting input ffom a file works out = run_cmd(base_app, 'help < {}'.format(filename)) - expected = normalize(HELP_HISTORY) - assert out == expected + assert out == normalize(HELP_HISTORY) def test_pipe_to_shell(base_app, capsys): diff --git a/tests/test_completion.py b/tests/test_completion.py index efc32986..70f77d0a 100644 --- a/tests/test_completion.py +++ b/tests/test_completion.py @@ -8,10 +8,13 @@ file system paths, and shell commands. 
Copyright 2017 Todd Leonhardt <[email protected]> Released under MIT license, see LICENSE file """ +import argparse import os +import readline import sys import cmd2 +import mock import pytest @@ -35,6 +38,100 @@ def test_cmd2_command_completion_single_end(cmd2_app): # It is at end of line, so extra space is present assert cmd2_app.completenames(text, line, begidx, endidx) == ['help '] +def test_complete_command_single_end(cmd2_app): + text = 'he' + line = 'he' + state = 0 + endidx = len(line) + begidx = endidx - len(text) + + def get_line(): + return line + + def get_begidx(): + return begidx + + def get_endidx(): + return endidx + + with mock.patch.object(readline, 'get_line_buffer', get_line): + with mock.patch.object(readline, 'get_begidx', get_begidx): + with mock.patch.object(readline, 'get_endidx', get_endidx): + # Run the readline tab-completion function with readline mocks in place + completion = cmd2_app.complete(text, state) + assert completion == 'help ' + +def test_complete_command_invalid_state(cmd2_app): + text = 'he' + line = 'he' + state = 1 + endidx = len(line) + begidx = endidx - len(text) + + def get_line(): + return line + + def get_begidx(): + return begidx + + def get_endidx(): + return endidx + + with mock.patch.object(readline, 'get_line_buffer', get_line): + with mock.patch.object(readline, 'get_begidx', get_begidx): + with mock.patch.object(readline, 'get_endidx', get_endidx): + # Run the readline tab-completion function with readline mocks in place get None + completion = cmd2_app.complete(text, state) + assert completion is None + +def test_complete_empty_arg(cmd2_app): + text = '' + line = 'help ' + state = 0 + endidx = len(line) + begidx = endidx - len(text) + + def get_line(): + return line + + def get_begidx(): + return begidx + + def get_endidx(): + return endidx + + with mock.patch.object(readline, 'get_line_buffer', get_line): + with mock.patch.object(readline, 'get_begidx', get_begidx): + with mock.patch.object(readline, 'get_endidx', get_endidx): + # Run the readline tab-completion function with readline mocks in place + completion = cmd2_app.complete(text, state) + + assert completion == cmd2_app.complete_help(text, line, begidx, endidx)[0] + +def test_complete_bogus_command(cmd2_app): + text = '' + line = 'fizbuzz ' + state = 0 + endidx = len(line) + begidx = endidx - len(text) + + def get_line(): + return line + + def get_begidx(): + return begidx + + def get_endidx(): + return endidx + + with mock.patch.object(readline, 'get_line_buffer', get_line): + with mock.patch.object(readline, 'get_begidx', get_begidx): + with mock.patch.object(readline, 'get_endidx', get_endidx): + # Run the readline tab-completion function with readline mocks in place + completion = cmd2_app.complete(text, state) + + assert completion is None + def test_cmd2_command_completion_is_case_insensitive_by_default(cmd2_app): text = 'HE' line = 'HE' @@ -323,3 +420,123 @@ def test_parseline_expands_shortcuts(cmd2_app): assert command == 'shell' assert args == 'cat foobar.txt' assert line.replace('!', 'shell ') == out_line + + +class SubcommandsExample(cmd2.Cmd): + """ Example cmd2 application where we a base command which has a couple subcommands.""" + + def __init__(self): + cmd2.Cmd.__init__(self) + + # sub-command functions for the base command + def base_foo(self, args): + """foo subcommand of base command""" + self.poutput(args.x * args.y) + + def base_bar(self, args): + """bar sucommand of base command""" + self.poutput('((%s))' % args.z) + + # create the top-level parser for 
the base command + base_parser = argparse.ArgumentParser(prog='base') + base_subparsers = base_parser.add_subparsers(title='subcommands', help='subcommand help') + + # create the parser for the "foo" sub-command + parser_foo = base_subparsers.add_parser('foo', help='foo help') + parser_foo.add_argument('-x', type=int, default=1, help='integer') + parser_foo.add_argument('y', type=float, help='float') + parser_foo.set_defaults(func=base_foo) + + # create the parser for the "bar" sub-command + parser_bar = base_subparsers.add_parser('bar', help='bar help') + parser_bar.add_argument('z', help='string') + parser_bar.set_defaults(func=base_bar) + + # Create a list of subcommand names, which is used to enable tab-completion of sub-commands + subcommands = ['foo', 'bar'] + + @cmd2.with_argparser(base_parser, subcommands) + def do_base(self, args): + """Base command help""" + try: + # Call whatever sub-command function was selected + args.func(self, args) + except AttributeError: + # No sub-command was provided, so as called + self.do_help('base') + + [email protected] +def sc_app(): + app = SubcommandsExample() + return app + + +def test_cmd2_subcommand_completion_single_end(sc_app): + text = 'f' + line = 'base f' + endidx = len(line) + begidx = endidx - len(text) + + # It is at end of line, so extra space is present + assert sc_app.complete_subcommand(text, line, begidx, endidx) == ['foo '] + +def test_cmd2_subcommand_completion_single_mid(sc_app): + text = 'f' + line = 'base f' + endidx = len(line) - 1 + begidx = endidx - len(text) + + # It is at end of line, so extra space is present + assert sc_app.complete_subcommand(text, line, begidx, endidx) == ['foo'] + +def test_cmd2_subcommand_completion_multiple(sc_app): + text = '' + line = 'base ' + endidx = len(line) + begidx = endidx - len(text) + + # It is at end of line, so extra space is present + assert sc_app.complete_subcommand(text, line, begidx, endidx) == ['foo', 'bar'] + +def test_cmd2_subcommand_completion_nomatch(sc_app): + text = 'z' + line = 'base z' + endidx = len(line) + begidx = endidx - len(text) + + # It is at end of line, so extra space is present + assert sc_app.complete_subcommand(text, line, begidx, endidx) == [] + +def test_cmd2_subcommand_completion_after_subcommand(sc_app): + text = 'f' + line = 'base foo f' + endidx = len(line) + begidx = endidx - len(text) + + # It is at end of line, so extra space is present + assert sc_app.complete_subcommand(text, line, begidx, endidx) == [] + + +def test_complete_subcommand_single_end(sc_app): + text = 'f' + line = 'base f' + endidx = len(line) + begidx = endidx - len(text) + state = 0 + + def get_line(): + return line + + def get_begidx(): + return begidx + + def get_endidx(): + return endidx + + with mock.patch.object(readline, 'get_line_buffer', get_line): + with mock.patch.object(readline, 'get_begidx', get_begidx): + with mock.patch.object(readline, 'get_endidx', get_endidx): + # Run the readline tab-completion function with readline mocks in place + completion = sc_app.complete(text, state) + assert completion == 'foo '
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 11 }
0.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "mock", "pytest-xdist", "pytest-cov" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 -e git+https://github.com/python-cmd2/cmd2.git@7b564b4424accfbd7439de10a169d9b64bc599c5#egg=cmd2 coverage==6.2 execnet==1.9.0 importlib-metadata==4.8.3 iniconfig==1.1.1 mock==5.2.0 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pyperclip==1.9.0 pytest==7.0.1 pytest-cov==4.0.0 pytest-xdist==3.0.2 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: cmd2 channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - coverage==6.2 - execnet==1.9.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - mock==5.2.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pyperclip==1.9.0 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-xdist==3.0.2 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/cmd2
[ "tests/test_argparse.py::test_argparse_basic_command", "tests/test_argparse.py::test_argparse_quoted_arguments", "tests/test_argparse.py::test_argparse_with_list", "tests/test_argparse.py::test_argparse_with_list_and_empty_doc", "tests/test_argparse.py::test_argparse_quoted_arguments_multiple", "tests/test_argparse.py::test_argparse_quoted_arguments_posix", "tests/test_argparse.py::test_argparse_quoted_arguments_posix_multiple", "tests/test_argparse.py::test_argparse_help_docstring", "tests/test_argparse.py::test_argparse_help_description", "tests/test_argparse.py::test_argparse_prog", "tests/test_argparse.py::test_arglist", "tests/test_argparse.py::test_arglist_decorator_twice", "tests/test_argparse.py::test_subcommand_foo", "tests/test_argparse.py::test_subcommand_bar", "tests/test_argparse.py::test_subcommand_invalid", "tests/test_argparse.py::test_subcommand_base_help", "tests/test_argparse.py::test_subcommand_help", "tests/test_argparse.py::test_subcommand_invalid_help", "tests/test_cmd2.py::test_ver", "tests/test_cmd2.py::test_empty_statement", "tests/test_cmd2.py::test_base_help", "tests/test_cmd2.py::test_base_help_history", "tests/test_cmd2.py::test_base_argparse_help", "tests/test_cmd2.py::test_base_invalid_option", "tests/test_cmd2.py::test_base_shortcuts", "tests/test_cmd2.py::test_base_show", "tests/test_cmd2.py::test_base_show_long", "tests/test_cmd2.py::test_base_show_readonly", "tests/test_cmd2.py::test_base_set", "tests/test_cmd2.py::test_set_not_supported", "tests/test_cmd2.py::test_set_quiet", "tests/test_cmd2.py::test_base_shell", "tests/test_cmd2.py::test_base_py", "tests/test_cmd2.py::test_base_run_python_script", "tests/test_cmd2.py::test_base_run_pyscript", "tests/test_cmd2.py::test_recursive_pyscript_not_allowed", "tests/test_cmd2.py::test_pyscript_with_nonexist_file", "tests/test_cmd2.py::test_pyscript_with_exception", "tests/test_cmd2.py::test_pyscript_requires_an_argument", "tests/test_cmd2.py::test_base_error", "tests/test_cmd2.py::test_base_history", "tests/test_cmd2.py::test_history_script_format", "tests/test_cmd2.py::test_history_with_string_argument", "tests/test_cmd2.py::test_history_with_integer_argument", "tests/test_cmd2.py::test_history_with_integer_span", "tests/test_cmd2.py::test_history_with_span_start", "tests/test_cmd2.py::test_history_with_span_end", "tests/test_cmd2.py::test_history_with_span_index_error", "tests/test_cmd2.py::test_history_output_file", "tests/test_cmd2.py::test_history_edit", "tests/test_cmd2.py::test_history_run_all_commands", "tests/test_cmd2.py::test_history_run_one_command", "tests/test_cmd2.py::test_base_load", "tests/test_cmd2.py::test_load_with_empty_args", "tests/test_cmd2.py::test_load_with_nonexistent_file", "tests/test_cmd2.py::test_load_with_empty_file", "tests/test_cmd2.py::test_load_with_binary_file", "tests/test_cmd2.py::test_load_with_utf8_file", "tests/test_cmd2.py::test_load_nested_loads", "tests/test_cmd2.py::test_base_runcmds_plus_hooks", "tests/test_cmd2.py::test_base_relative_load", "tests/test_cmd2.py::test_relative_load_requires_an_argument", "tests/test_cmd2.py::test_feedback_to_output_true", "tests/test_cmd2.py::test_feedback_to_output_false", "tests/test_cmd2.py::test_allow_redirection", "tests/test_cmd2.py::test_input_redirection", "tests/test_cmd2.py::test_pipe_to_shell", "tests/test_cmd2.py::test_pipe_to_shell_error", "tests/test_cmd2.py::test_base_timing", "tests/test_cmd2.py::test_base_debug", "tests/test_cmd2.py::test_base_colorize", "tests/test_cmd2.py::test_edit_no_editor", 
"tests/test_cmd2.py::test_edit_file", "tests/test_cmd2.py::test_edit_file_with_spaces", "tests/test_cmd2.py::test_edit_blank", "tests/test_cmd2.py::test_base_py_interactive", "tests/test_cmd2.py::test_base_cmdloop_with_queue", "tests/test_cmd2.py::test_base_cmdloop_without_queue", "tests/test_cmd2.py::test_cmdloop_without_rawinput", "tests/test_cmd2.py::test_precmd_hook_success", "tests/test_cmd2.py::test_precmd_hook_failure", "tests/test_cmd2.py::test_default_to_shell_unknown", "tests/test_cmd2.py::test_default_to_shell_good", "tests/test_cmd2.py::test_default_to_shell_failure", "tests/test_cmd2.py::test_ansi_prompt_not_esacped", "tests/test_cmd2.py::test_ansi_prompt_escaped", "tests/test_cmd2.py::test_custom_command_help", "tests/test_cmd2.py::test_custom_help_menu", "tests/test_cmd2.py::test_help_undocumented", "tests/test_cmd2.py::test_help_overridden_method", "tests/test_cmd2.py::test_select_options", "tests/test_cmd2.py::test_select_invalid_option", "tests/test_cmd2.py::test_select_list_of_strings", "tests/test_cmd2.py::test_select_list_of_tuples", "tests/test_cmd2.py::test_select_uneven_list_of_tuples", "tests/test_cmd2.py::test_pyscript_with_noarglist", "tests/test_cmd2.py::test_option_help_with_no_docstring", "tests/test_cmd2.py::test_which_editor_bad", "tests/test_cmd2.py::test_multiline_complete_empty_statement_raises_exception", "tests/test_cmd2.py::test_multiline_complete_statement_without_terminator", "tests/test_cmd2.py::test_clipboard_failure", "tests/test_cmd2.py::test_cmdresult", "tests/test_cmd2.py::test_exclude_from_history", "tests/test_cmd2.py::test_is_text_file_bad_input", "tests/test_cmd2.py::test_eof", "tests/test_cmd2.py::test_eos", "tests/test_cmd2.py::test_echo", "tests/test_cmd2.py::test_pseudo_raw_input_tty_rawinput_true", "tests/test_cmd2.py::test_pseudo_raw_input_tty_rawinput_false", "tests/test_cmd2.py::test_pseudo_raw_input_piped_rawinput_true_echo_true", "tests/test_cmd2.py::test_pseudo_raw_input_piped_rawinput_true_echo_false", "tests/test_cmd2.py::test_pseudo_raw_input_piped_rawinput_false_echo_true", "tests/test_cmd2.py::test_pseudo_raw_input_piped_rawinput_false_echo_false", "tests/test_cmd2.py::test_raw_input", "tests/test_cmd2.py::test_stdin_input", "tests/test_cmd2.py::test_empty_stdin_input", "tests/test_cmd2.py::test_poutput_string", "tests/test_cmd2.py::test_poutput_zero", "tests/test_cmd2.py::test_poutput_empty_string", "tests/test_cmd2.py::test_poutput_none", "tests/test_completion.py::test_cmd2_command_completion_single_end", "tests/test_completion.py::test_complete_command_single_end", "tests/test_completion.py::test_complete_command_invalid_state", "tests/test_completion.py::test_complete_empty_arg", "tests/test_completion.py::test_complete_bogus_command", "tests/test_completion.py::test_cmd2_command_completion_is_case_insensitive_by_default", "tests/test_completion.py::test_cmd2_case_sensitive_command_completion", "tests/test_completion.py::test_cmd2_command_completion_single_mid", "tests/test_completion.py::test_cmd2_command_completion_multiple", "tests/test_completion.py::test_cmd2_command_completion_nomatch", "tests/test_completion.py::test_cmd2_help_completion_single_end", "tests/test_completion.py::test_cmd2_help_completion_single_mid", "tests/test_completion.py::test_cmd2_help_completion_multiple", "tests/test_completion.py::test_cmd2_help_completion_nomatch", "tests/test_completion.py::test_shell_command_completion", "tests/test_completion.py::test_shell_command_completion_doesnt_match_wildcards", 
"tests/test_completion.py::test_shell_command_completion_multiple", "tests/test_completion.py::test_shell_command_completion_nomatch", "tests/test_completion.py::test_shell_command_completion_doesnt_complete_when_just_shell", "tests/test_completion.py::test_shell_command_completion_does_path_completion_when_after_command", "tests/test_completion.py::test_path_completion_single_end", "tests/test_completion.py::test_path_completion_single_mid", "tests/test_completion.py::test_path_completion_multiple", "tests/test_completion.py::test_path_completion_nomatch", "tests/test_completion.py::test_path_completion_cwd", "tests/test_completion.py::test_path_completion_doesnt_match_wildcards", "tests/test_completion.py::test_path_completion_directories_only", "tests/test_completion.py::test_parseline_command_and_args", "tests/test_completion.py::test_parseline_emptyline", "tests/test_completion.py::test_parseline_strips_line", "tests/test_completion.py::test_parseline_expands_shortcuts", "tests/test_completion.py::test_cmd2_subcommand_completion_single_end", "tests/test_completion.py::test_cmd2_subcommand_completion_single_mid", "tests/test_completion.py::test_cmd2_subcommand_completion_multiple", "tests/test_completion.py::test_cmd2_subcommand_completion_nomatch", "tests/test_completion.py::test_cmd2_subcommand_completion_after_subcommand", "tests/test_completion.py::test_complete_subcommand_single_end" ]
[ "tests/test_cmd2.py::test_output_redirection", "tests/test_cmd2.py::test_which_editor_good", "tests/test_completion.py::test_path_completion_user_expansion" ]
[]
[]
MIT License
2,069
[ "docs/conf.py", "examples/transcripts/pirate.transcript", "docs/argument_processing.rst", "cmd2.py", "examples/example.py", "examples/exampleSession.txt", "setup.py", "examples/argparse_example.py", "examples/transcript_regex.txt", "CHANGELOG.md", "README.md", "docs/install.rst", "examples/arg_print.py", "examples/pirate.py", "examples/subcommands.py" ]
[ "docs/conf.py", "examples/transcripts/pirate.transcript", "docs/argument_processing.rst", "cmd2.py", "examples/example.py", "setup.py", "examples/argparse_example.py", "CHANGELOG.md", "examples/transcripts/transcript_regex.txt", "README.md", "docs/install.rst", "examples/arg_print.py", "examples/pirate.py", "examples/subcommands.py", "examples/transcripts/exampleSession.txt" ]
nickpegg__posty-30
0187ca11ba573f0bb03b0fbcec666b30c3568927
2018-01-20 21:50:23
0187ca11ba573f0bb03b0fbcec666b30c3568927
diff --git a/posty/cli.py b/posty/cli.py index 5285708..a8e0ee0 100644 --- a/posty/cli.py +++ b/posty/cli.py @@ -57,6 +57,42 @@ def build(output, config): shutil.copytree('media', os.path.join(output, 'media')) [email protected](name='new') +def _new(): + """ + Create a new post or page + """ + pass + + +@_new.command() [email protected]( + '--name', + help='Name of the new page', + default='New Page', +) +def page(name): + """ + Create a new page from the template + """ + site = Site() + site.new_page(name=name) + + +@_new.command() [email protected]( + '--name', + help='Name of the new post', + default='New Post', +) +def post(name): + """ + Create a new page from the template + """ + site = Site() + site.new_post(name=name) + + @cli.group(name='import') def _import(): """ diff --git a/posty/page.py b/posty/page.py index 873f356..c5874af 100644 --- a/posty/page.py +++ b/posty/page.py @@ -26,6 +26,20 @@ class Page(Model): return cls(payload, config=config) + def to_yaml(self): + """ + Returns a string of the YAML and text representation of this Post. + This is the reverse of from_yaml + """ + metadata = {'title': self['title']} + if self['parent']: + metadata['parent'] = self['parent'] + output = yaml.dump(metadata, default_flow_style=False) + output += "---\n" + output += self['body'] + + return output + def validate(self): required_fields = ('title', 'body') for field in required_fields: diff --git a/posty/post.py b/posty/post.py index 060c2c1..fbd2b0d 100644 --- a/posty/post.py +++ b/posty/post.py @@ -39,6 +39,32 @@ class Post(Model): return cls(post, config=config) + def to_yaml(self): + """ + Returns the YAML and text representation of this Post. This is the + reverse of ``from_yaml()`` + """ + metadata = { + 'title': self['title'], + 'date': self['date'], + 'tags': self['tags'], + } + body = self['body'] + + output = yaml.dump(metadata, default_flow_style=False) + + if self['blurb'] != self['body']: + output += "---\n" + output += self['blurb'].strip() + output += "\n" + + body = body.replace(self['blurb'], '') + + output += "---\n" + output += body.strip() + + return output + def validate(self): required_fields = ('title', 'date', 'blurb', 'body') for field in required_fields: diff --git a/posty/site.py b/posty/site.py index 04bebff..0aeb50f 100644 --- a/posty/site.py +++ b/posty/site.py @@ -1,4 +1,5 @@ from collections import Counter +import datetime import os.path import shutil @@ -175,3 +176,44 @@ class Site(object): ) return copyright + + def new_post(self, name="New Post"): + """ + Create a new post in the site directory from the skeleton post + """ + post_dir = os.path.join(self.site_path, 'posts') + if not os.path.exists(post_dir): + raise PostyError('You must initialize the site first') + + date = datetime.date.today() + filename = '{}_{}.yaml'.format(date, slugify(name)) + post_path = os.path.join(post_dir, filename) + + skel_path = os.path.join(os.path.dirname(__file__), + 'skel/posts/1970-01-01_new-post.yaml') + post = Post.from_yaml(open(skel_path).read(), config=self.config) + post['title'] = name + post['date'] = date + + with open(post_path, 'w') as output_file: + output_file.write(post.to_yaml()) + + def new_page(self, name="New Page"): + """ + Create a new page in the site directory from the skeleton page + """ + page_dir = os.path.join(self.site_path, 'pages') + if not os.path.exists(page_dir): + raise PostyError('You must initialize the site first') + + filename = '{}.yaml'.format(slugify(name)) + page_path = os.path.join(page_dir, filename) + + skel_path = 
os.path.join(os.path.dirname(__file__), + 'skel/pages/new-page.yaml') + + page = Page.from_yaml(open(skel_path).read(), config=self.config) + page['title'] = name + + with open(page_path, 'w') as output_file: + output_file.write(page.to_yaml()) diff --git a/posty/skel/pages/new-page.yaml b/posty/skel/pages/new-page.yaml new file mode 100644 index 0000000..73cc0f6 --- /dev/null +++ b/posty/skel/pages/new-page.yaml @@ -0,0 +1,4 @@ +title: New Page +parent: None +--- +This is your new page. Write what you want here! diff --git a/posty/skel/posts/1970-01-01_new-post.yaml b/posty/skel/posts/1970-01-01_new-post.yaml new file mode 100644 index 0000000..9057105 --- /dev/null +++ b/posty/skel/posts/1970-01-01_new-post.yaml @@ -0,0 +1,11 @@ +title: New Post +date: 1970-01-01 +tags: + - tag1 + - tag2 +--- +This the the summary/first paragraph of your new post +--- +This is the rest of the post. + +Write what you want here!
`posty new` commands

Add a set of commands to create new posts or pages, seeding each new file from an example file in the skeleton data.

Example CLI:

`posty new page "About me"` -> `pages/{{slug}}.yaml`
`posty new post "My new post"` -> `posts/{{date}}_{{slug}}.yaml`
`posty new post` -> `posts/{{date}}_new-post.yaml`
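A rough sketch of the filename logic these mappings imply, with posts date-prefixed and pages slug-only; the `slugify` below is an illustrative stand-in for whatever slug helper posty actually uses:

```python
import datetime
import re


def slugify(name):
    # illustrative stand-in for posty's real slug helper
    return re.sub(r'[^a-z0-9]+', '-', name.lower()).strip('-')


def new_post_path(name='New Post'):
    return 'posts/{}_{}.yaml'.format(datetime.date.today(), slugify(name))


def new_page_path(name='New Page'):
    return 'pages/{}.yaml'.format(slugify(name))


print(new_post_path('My new post'))  # e.g. posts/2018-01-20_my-new-post.yaml
print(new_page_path('About me'))     # pages/about-me.yaml
```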
nickpegg/posty
diff --git a/tests/fixtures/site/posts/multi-paragraph.yaml b/tests/fixtures/site/posts/multi-paragraph.yaml index 1c55996..633e099 100644 --- a/tests/fixtures/site/posts/multi-paragraph.yaml +++ b/tests/fixtures/site/posts/multi-paragraph.yaml @@ -1,8 +1,8 @@ -title: Multi-paragraph Post date: 2017-01-14 tags: - - blah - - test +- blah +- test +title: Multi-paragraph Post --- This is a post that has multiple paragraphs, where the first paragraph should get converted into a blurb. --- diff --git a/tests/test_page.py b/tests/test_page.py index d06c567..62feb33 100644 --- a/tests/test_page.py +++ b/tests/test_page.py @@ -7,15 +7,19 @@ from posty.page import Page from .fixtures import config # noqa [email protected] +def page_contents(): + path = os.path.join(os.path.dirname(__file__), 'fixtures', 'site', 'pages', + 'test.yaml') + return open(path).read() + + @pytest.fixture # noqa -def page(config): +def page(config, page_contents): """ Basic top-level page (has no parent) """ - path = os.path.join(os.path.dirname(__file__), 'fixtures', 'site', 'pages', - 'test.yaml') - contents = open(path).read() - return Page.from_yaml(contents, config=config) + return Page.from_yaml(page_contents, config=config) class TestValidation(object): @@ -44,3 +48,7 @@ def test_url(page): expected_url = 'http://example.org/test/{}/'.format(page['slug']) assert page.url() == expected_url + + +def test_to_yaml(page, page_contents): + assert page_contents.strip() == page.to_yaml() diff --git a/tests/test_post.py b/tests/test_post.py index d27217d..478f834 100644 --- a/tests/test_post.py +++ b/tests/test_post.py @@ -8,15 +8,19 @@ from posty.post import Post from .fixtures import config # noqa [email protected] +def post_contents(): + path = os.path.join(os.path.dirname(__file__), 'fixtures', 'site', 'posts', + 'multi-paragraph.yaml') + return open(path).read() + + @pytest.fixture # noqa -def post(config): +def post(config, post_contents): """ Basic post """ - path = os.path.join(os.path.dirname(__file__), 'fixtures', 'site', 'posts', - 'multi-paragraph.yaml') - contents = open(path).read() - return Post.from_yaml(contents, config=config) + return Post.from_yaml(post_contents, config=config) class TestValidation(object): @@ -46,3 +50,7 @@ def test_url(post): post['slug']) assert post.url() == expected_url + + +def test_to_yaml(post, post_contents): + assert post_contents.strip() == post.to_yaml() diff --git a/tests/test_site.py b/tests/test_site.py index ecee2af..1468563 100644 --- a/tests/test_site.py +++ b/tests/test_site.py @@ -1,3 +1,6 @@ +import datetime +import os + from .fixtures import site # noqa @@ -32,3 +35,27 @@ def test_post_sorting(site): # noqa def test_copyright(site): # noqa site.load() assert site.copyright == 'Copyright 2010 - 2017, Jimbo Jawn' + + +def test_new_page(site): # noqa + site.new_page() + new_page_path = os.path.join(site.site_path, 'pages', 'new-page.yaml') + assert os.path.exists(new_page_path) + + site.new_page('Neato page') + new_page_path = os.path.join(site.site_path, 'pages', 'neato-page.yaml') + assert os.path.exists(new_page_path) + + +def test_new_post(site): # noqa + date = datetime.date.today() + + site.new_post() + filename = '{}_new-post.yaml'.format(date) + expected_path = os.path.join(site.site_path, 'posts', filename) + assert os.path.exists(expected_path) + + site.new_post('Neato Post') + filename = '{}_neato-post.yaml'.format(date) + expected_path = os.path.join(site.site_path, 'posts', filename) + assert os.path.exists(expected_path)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 1, "test_score": 1 }, "num_modified_files": 4 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 awesome-slugify==1.6.5 certifi==2021.5.30 click==6.7 coverage==6.2 execnet==1.9.0 feedgen==0.6.1 future==0.16.0 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==2.11.3 lxml==5.3.1 Markdown==2.6.11 MarkupSafe==2.0.1 packaging==21.3 pluggy==1.0.0 -e git+https://github.com/nickpegg/posty.git@0187ca11ba573f0bb03b0fbcec666b30c3568927#egg=Posty py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 pytest-asyncio==0.16.0 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-xdist==3.0.2 python-dateutil==2.9.0.post0 pytz==2025.2 PyYAML==3.13 regex==2023.8.8 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 Unidecode==0.4.21 zipp==3.6.0
name: posty channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - awesome-slugify==1.6.5 - click==6.7 - coverage==6.2 - execnet==1.9.0 - feedgen==0.6.1 - future==0.16.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==2.11.3 - lxml==5.3.1 - markdown==2.6.11 - markupsafe==2.0.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-asyncio==0.16.0 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-xdist==3.0.2 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyyaml==3.13 - regex==2023.8.8 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - unidecode==0.04.21 - zipp==3.6.0 prefix: /opt/conda/envs/posty
[ "tests/test_page.py::test_to_yaml", "tests/test_post.py::test_to_yaml", "tests/test_site.py::test_new_page", "tests/test_site.py::test_new_post" ]
[]
[ "tests/test_page.py::TestValidation::test_basic_case", "tests/test_page.py::TestValidation::test_no_title", "tests/test_page.py::TestValidation::test_no_parent", "tests/test_page.py::test_url", "tests/test_post.py::TestValidation::test_basic_case", "tests/test_post.py::TestValidation::test_no_title", "tests/test_post.py::TestValidation::test_no_tags", "tests/test_post.py::test_url", "tests/test_site.py::test_site_loads_config", "tests/test_site.py::test_page_sorting", "tests/test_site.py::test_post_sorting", "tests/test_site.py::test_copyright" ]
[]
MIT License
2,070
[ "posty/page.py", "posty/cli.py", "posty/skel/posts/1970-01-01_new-post.yaml", "posty/skel/pages/new-page.yaml", "posty/site.py", "posty/post.py" ]
[ "posty/page.py", "posty/cli.py", "posty/skel/posts/1970-01-01_new-post.yaml", "posty/skel/pages/new-page.yaml", "posty/site.py", "posty/post.py" ]
asottile__git-code-debt-87
6d3c4a0d3e5dd36482c010462e2b00e944e7a876
2018-01-21 02:15:35
9a8dc753514f2a38885101b14dc538aefaa8749f
diff --git a/git_code_debt/metrics/common.py b/git_code_debt/metrics/common.py index 8cf025f..d91f272 100644 --- a/git_code_debt/metrics/common.py +++ b/git_code_debt/metrics/common.py @@ -1,56 +1,12 @@ from __future__ import absolute_import from __future__ import unicode_literals - -PYTHON = 'Python' -YAML = 'Yaml' -TEMPLATE = 'Template' -CSS = 'Css' -MAKO_TEMPLATE = 'Mako_Template' -JAVASCRIPT = 'Javascript' -JAVA = 'Java' -ILLUSTRATOR = 'Illustrator' -HTML = 'Html' -CCPP = 'C_C++' -TEXT = 'Text' -SQL = 'SQL' - - -# Maps a set of file extensions to a nice name. -# Updating this will cause that file type to be tracked for LinesOfCode metric. -FILE_TYPE_MAP = { - b'.py': PYTHON, - - b'.yaml': YAML, - b'.yml': YAML, - - b'.css': CSS, - b'.scss': CSS, - - b'.tmpl': TEMPLATE, - - b'.mako': MAKO_TEMPLATE, - - b'.js': JAVASCRIPT, - - b'.java': JAVA, - - b'.ai': ILLUSTRATOR, - - b'.htm': HTML, - b'.html': HTML, - - b'.h': CCPP, - b'.c': CCPP, - b'.cpp': CCPP, - - b'.md': TEXT, - b'.rst': TEXT, - b'.csv': TEXT, - b'.log': TEXT, - b'.json': TEXT, - b'.xml': TEXT, - b'.txt': TEXT, - - b'.sql': SQL, -} +from identify import identify + +UNKNOWN = 'unknown' +IGNORED_TAGS = frozenset(( + identify.DIRECTORY, identify.SYMLINK, identify.FILE, + identify.EXECUTABLE, identify.NON_EXECUTABLE, + identify.TEXT, identify.BINARY, +)) +ALL_TAGS = frozenset((identify.ALL_TAGS - IGNORED_TAGS) | {UNKNOWN}) diff --git a/git_code_debt/metrics/curse.py b/git_code_debt/metrics/curse.py index 89a6fd0..a522169 100644 --- a/git_code_debt/metrics/curse.py +++ b/git_code_debt/metrics/curse.py @@ -3,9 +3,12 @@ from __future__ import unicode_literals import collections +from identify import identify + from git_code_debt.metric import Metric from git_code_debt.metrics.base import DiffParserBase -from git_code_debt.metrics.common import FILE_TYPE_MAP +from git_code_debt.metrics.common import ALL_TAGS +from git_code_debt.metrics.common import UNKNOWN from git_code_debt.metrics.curse_words import word_list @@ -34,20 +37,19 @@ class CurseWordsParser(DiffParserBase): total_curses = total_curses + curses_changed # Track by file extension -> type mapping - file_type = FILE_TYPE_MAP.get(file_diff_stat.extension, 'unknown') - curses_by_file_type[file_type] += curses_changed + filename = file_diff_stat.filename.decode('UTF-8') + tags = identify.tags_from_filename(filename) or {UNKNOWN} + + for tag in tags: + curses_by_file_type[tag] += curses_changed # Yield overall metric and one per type of expected mapping types yield Metric('TotalCurseWords', total_curses) - for file_type in set(FILE_TYPE_MAP.values()) | {'unknown'}: - curses_changed = curses_by_file_type.get(file_type, 0) - yield Metric( - 'TotalCurseWords_{}'.format(file_type), - curses_changed, - ) + for tag in ALL_TAGS: + curses_changed = curses_by_file_type[tag] + yield Metric('TotalCurseWords_{}'.format(tag), curses_changed) def get_possible_metric_ids(self): return ['TotalCurseWords'] + [ - 'TotalCurseWords_{}'.format(file_type) - for file_type in set(FILE_TYPE_MAP.values()) | {'unknown'} + 'TotalCurseWords_{}'.format(tag) for tag in ALL_TAGS ] diff --git a/git_code_debt/metrics/lines.py b/git_code_debt/metrics/lines.py index 5c082aa..5cb72ff 100644 --- a/git_code_debt/metrics/lines.py +++ b/git_code_debt/metrics/lines.py @@ -3,9 +3,12 @@ from __future__ import unicode_literals import collections +from identify import identify + from git_code_debt.metric import Metric from git_code_debt.metrics.base import DiffParserBase -from git_code_debt.metrics.common import FILE_TYPE_MAP 
+from git_code_debt.metrics.common import ALL_TAGS +from git_code_debt.metrics.common import UNKNOWN class LinesOfCodeParser(DiffParserBase): @@ -24,21 +27,19 @@ class LinesOfCodeParser(DiffParserBase): # Track total overall total_lines += lines_changed - # Track by file extension -> type mapping - file_type = FILE_TYPE_MAP.get(file_diff_stat.extension, 'unknown') - lines_by_file_type[file_type] += lines_changed + filename = file_diff_stat.filename.decode('UTF-8') + tags = identify.tags_from_filename(filename) or {UNKNOWN} + + for tag in tags: + lines_by_file_type[tag] += lines_changed # Yield overall metric and one per type of expected mapping types yield Metric('TotalLinesOfCode', total_lines) - for file_type in set(FILE_TYPE_MAP.values()) | {'unknown'}: - lines_changed = lines_by_file_type.get(file_type, 0) - yield Metric( - 'TotalLinesOfCode_{}'.format(file_type), - lines_changed, - ) + for tag in ALL_TAGS: + lines_changed = lines_by_file_type[tag] + yield Metric('TotalLinesOfCode_{}'.format(tag), lines_changed) def get_possible_metric_ids(self): return ['TotalLinesOfCode'] + [ - 'TotalLinesOfCode_{}'.format(file_type) - for file_type in set(FILE_TYPE_MAP.values()) | {'unknown'} + 'TotalLinesOfCode_{}'.format(tag) for tag in ALL_TAGS ] diff --git a/metric_config.yaml b/metric_config.yaml index fd1ff4e..7e3dbe3 100644 --- a/metric_config.yaml +++ b/metric_config.yaml @@ -14,13 +14,9 @@ # NOTE: metrics and metric_expressions may be omitted Groups: - - Cheetah: - metrics: ['TotalLinesOfCode_Template'] - metric_expressions: - - ^.*Cheetah.*$ - Python: metric_expressions: - - ^.*Python.*$ + - (?i)^.*Python.*$ - CurseWords: metric_expressions: - ^TotalCurseWords.*$ @@ -50,8 +46,8 @@ CommitLinks: # These denote the metrics to show in the widget. WidgetMetrics: TotalLinesOfCode: {} - TotalLinesOfCode_Css: {} - TotalLinesOfCode_Python: {} - TotalLinesOfCode_Javascript: {} - TotalLinesOfCode_Text: {} - TotalLinesOfCode_Yaml: {} + TotalLinesOfCode_css: {} + TotalLinesOfCode_python: {} + TotalLinesOfCode_javascript: {} + TotalLinesOfCode_plain-text: {} + TotalLinesOfCode_yaml: {} diff --git a/setup.py b/setup.py index be888de..6deb81b 100644 --- a/setup.py +++ b/setup.py @@ -35,6 +35,7 @@ setup( }, install_requires=[ 'flask', + 'identify', 'jsonschema', 'mako', 'pyyaml',
Use `identify` for code type identification Notably https://github.com/chriskuehl/identify/blob/39c020afff92d50d6ffb8b251f5bdc30d51ca96c/identify/identify.py#L69-L86
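For context, a small demonstration of the `identify` call the patch builds on; the filenames are arbitrary and the tag sets in the comments are indicative:

```python
from identify import identify

# tags_from_filename classifies by name/extension alone, without touching
# the filesystem, which is all a diff parser has available
print(identify.tags_from_filename('setup.py'))   # e.g. {'text', 'python'}
print(identify.tags_from_filename('conf.yaml'))  # e.g. {'text', 'yaml'}
print(identify.tags_from_filename('mystery'))    # set(), so fall back to 'unknown'
```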
asottile/git-code-debt
diff --git a/tests/metrics/curse_test.py b/tests/metrics/curse_test.py index 4b5cafc..024461a 100644 --- a/tests/metrics/curse_test.py +++ b/tests/metrics/curse_test.py @@ -11,8 +11,8 @@ def test_curse_words_parser(): parser = CurseWordsParser() input_stats = [ FileDiffStat( - b'templates/foo.tmpl', - [b'#man seriously, fuck cheetah'], + b'some/file.rb', + [b'#man seriously, fuck ruby'], [], None, ), @@ -24,5 +24,5 @@ def test_curse_words_parser(): ), ] metrics = list(parser.get_metrics_from_stat(Commit.blank, input_stats)) - assert Metric('TotalCurseWords_Template', 1) in metrics - assert Metric('TotalCurseWords_Python', 0) in metrics + assert Metric('TotalCurseWords_ruby', 1) in metrics + assert Metric('TotalCurseWords_python', 0) in metrics diff --git a/tests/metrics/lines_test.py b/tests/metrics/lines_test.py index 3728981..801f2a7 100644 --- a/tests/metrics/lines_test.py +++ b/tests/metrics/lines_test.py @@ -17,8 +17,8 @@ def test_lines_of_code_parser(): expected_value = { 'TotalLinesOfCode': 3, - 'TotalLinesOfCode_Python': 1, - 'TotalLinesOfCode_Yaml': 2, + 'TotalLinesOfCode_python': 1, + 'TotalLinesOfCode_yaml': 2, } for metric in metrics: assert metric.value == expected_value.get(metric.name, 0) diff --git a/tests/server/servlets/widget_test.py b/tests/server/servlets/widget_test.py index 1002017..aff0423 100644 --- a/tests/server/servlets/widget_test.py +++ b/tests/server/servlets/widget_test.py @@ -47,11 +47,11 @@ def test_widget_data(server): def test_widget_data_multiple_values(server): with metrics_enabled( - {'TotalLinesOfCode': {}, 'TotalLinesOfCode_Text': {}}, + {'TotalLinesOfCode': {}, 'TotalLinesOfCode_plain-text': {}}, ): response = server.client.post( flask.url_for('widget.data'), data={'diff': file_diff_stat_test.SAMPLE_OUTPUT}, ) response_pq = pyquery.PyQuery(response.json['metrics']) - assert 'TotalLinesOfCode_Text' in response_pq.text() + assert 'TotalLinesOfCode_plain-text' in response_pq.text()
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 5 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements-dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 cfgv==3.3.1 click==8.0.4 coverage==6.2 cssselect==1.1.0 dataclasses==0.8 distlib==0.3.9 filelock==3.4.1 Flask==2.0.3 -e git+https://github.com/asottile/git-code-debt.git@6d3c4a0d3e5dd36482c010462e2b00e944e7a876#egg=git_code_debt identify==2.4.4 importlib-metadata==4.8.3 importlib-resources==5.2.3 iniconfig==1.1.1 itsdangerous==2.0.1 Jinja2==3.0.3 jsonschema==3.2.0 lxml==5.3.1 Mako==1.1.6 MarkupSafe==2.0.1 mock==5.2.0 nodeenv==1.6.0 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 pre-commit==2.17.0 py==1.11.0 pyparsing==3.1.4 pyquery==1.4.3 pyrsistent==0.18.0 pytest==7.0.1 pytest-env==0.6.2 PyYAML==6.0.1 six==1.17.0 toml==0.10.2 tomli==1.2.3 typing_extensions==4.1.1 virtualenv==20.16.2 Werkzeug==2.0.3 zipp==3.6.0
name: git-code-debt channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - cfgv==3.3.1 - click==8.0.4 - coverage==6.2 - cssselect==1.1.0 - dataclasses==0.8 - distlib==0.3.9 - filelock==3.4.1 - flask==2.0.3 - identify==2.4.4 - importlib-metadata==4.8.3 - importlib-resources==5.2.3 - iniconfig==1.1.1 - itsdangerous==2.0.1 - jinja2==3.0.3 - jsonschema==3.2.0 - lxml==5.3.1 - mako==1.1.6 - markupsafe==2.0.1 - mock==5.2.0 - nodeenv==1.6.0 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - pre-commit==2.17.0 - py==1.11.0 - pyparsing==3.1.4 - pyquery==1.4.3 - pyrsistent==0.18.0 - pytest==7.0.1 - pytest-env==0.6.2 - pyyaml==6.0.1 - six==1.17.0 - toml==0.10.2 - tomli==1.2.3 - typing-extensions==4.1.1 - virtualenv==20.16.2 - werkzeug==2.0.3 - zipp==3.6.0 prefix: /opt/conda/envs/git-code-debt
[ "tests/metrics/curse_test.py::test_curse_words_parser", "tests/metrics/lines_test.py::test_lines_of_code_parser" ]
[ "tests/server/servlets/widget_test.py::test_widget_data", "tests/server/servlets/widget_test.py::test_widget_data_multiple_values" ]
[ "tests/server/servlets/widget_test.py::test_widget_frame_loads" ]
[]
MIT License
2,071
[ "git_code_debt/metrics/curse.py", "git_code_debt/metrics/common.py", "setup.py", "git_code_debt/metrics/lines.py", "metric_config.yaml" ]
[ "git_code_debt/metrics/curse.py", "git_code_debt/metrics/common.py", "setup.py", "git_code_debt/metrics/lines.py", "metric_config.yaml" ]
asottile__git-code-debt-91
44b090434a24fe945747a98fddde5a051f0c7b1d
2018-01-22 04:17:54
9a8dc753514f2a38885101b14dc538aefaa8749f
diff --git a/git_code_debt/generate.py b/git_code_debt/generate.py index 90d72da..283b26c 100644 --- a/git_code_debt/generate.py +++ b/git_code_debt/generate.py @@ -29,7 +29,7 @@ from git_code_debt.write_logic import insert_metric_values from git_code_debt.write_logic import update_has_data -def get_metrics(commit, diff, metric_parsers): +def get_metrics(commit, diff, metric_parsers, exclude): def get_all_metrics(file_diff_stats): for metric_parser_cls in metric_parsers: metric_parser = metric_parser_cls() @@ -39,6 +39,10 @@ def get_metrics(commit, diff, metric_parsers): yield metric file_diff_stats = get_file_diff_stats_from_output(diff) + file_diff_stats = tuple( + x for x in file_diff_stats + if not exclude.search(x.path) + ) return tuple(get_all_metrics(file_diff_stats)) @@ -47,13 +51,13 @@ def increment_metric_values(metric_values, metrics): metric_values[metric.name] += metric.value -def _get_metrics_inner(m_args): - compare_commit, commit, repo_parser, metric_parsers = m_args +def _get_metrics_inner(mp_args): + compare_commit, commit, repo_parser, metric_parsers, exclude = mp_args if compare_commit is None: diff = repo_parser.get_original_commit(commit.sha) else: diff = repo_parser.get_commit_diff(compare_commit.sha, commit.sha) - return get_metrics(commit, diff, metric_parsers) + return get_metrics(commit, diff, metric_parsers, exclude) def mapper(jobs): @@ -68,6 +72,7 @@ def load_data( repo, package_names, skip_defaults, + exclude, jobs, ): metric_parsers = get_metric_parsers_from_args(package_names, skip_defaults) @@ -102,6 +107,7 @@ def load_data( commits, itertools.repeat(repo_parser), itertools.repeat(metric_parsers), + itertools.repeat(exclude), ) do_map = mapper(jobs) for commit, metrics in six.moves.zip( @@ -176,6 +182,7 @@ def main(argv=None): args.repo, args.metric_package_names, args.skip_default_metrics, + args.exclude, parsed_args.jobs, ) diff --git a/git_code_debt/generate_config.py b/git_code_debt/generate_config.py index ad9fce6..302a8ca 100644 --- a/git_code_debt/generate_config.py +++ b/git_code_debt/generate_config.py @@ -2,6 +2,7 @@ from __future__ import absolute_import from __future__ import unicode_literals import collections +import re import jsonschema @@ -17,6 +18,7 @@ GENERATE_OPTIONS_SCHEMA = { 'metric_package_names': {'type': 'array', 'items': {'type': 'string'}}, 'repo': {'type': 'string'}, 'database': {'type': 'string'}, + 'exclude': {'type': 'string'}, }, } @@ -28,6 +30,7 @@ class GenerateOptions(collections.namedtuple( 'metric_package_names', 'repo', 'database', + 'exclude', ), )): @classmethod @@ -38,4 +41,5 @@ class GenerateOptions(collections.namedtuple( metric_package_names=yaml_dict.get('metric_package_names', []), repo=yaml_dict['repo'], database=yaml_dict['database'], + exclude=re.compile(yaml_dict.get('exclude', '^$').encode()), ) diff --git a/git_code_debt/server/servlets/widget.py b/git_code_debt/server/servlets/widget.py index a023861..6264a09 100644 --- a/git_code_debt/server/servlets/widget.py +++ b/git_code_debt/server/servlets/widget.py @@ -35,7 +35,7 @@ def data(): parsers = get_metric_parsers_from_args( metric_config.metric_package_names, skip_defaults=False, ) - metrics = get_metrics(Commit.blank, diff, parsers) + metrics = get_metrics(Commit.blank, diff, parsers, metric_config.exclude) metrics = [ metric for metric in metrics if metric.value and metric.name in metric_names
Add `exclude` pattern

Add the ability to exclude checked-in files that are not part of the codebase (for example, vendored dependencies) from metric generation, via a configurable pattern. For example:

```yaml
exclude: '^vendor/'
```
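A quick illustration of how the compiled pattern behaves in the patch's filter; note that the `'^$'` default can only match an empty path, so by default nothing is excluded. The paths below are made up:

```python
import re

exclude = re.compile(b'^vendor/')  # as compiled from the yaml config

paths = [b'vendor/thirdparty.py', b'git_code_debt/generate.py']
kept = [p for p in paths if not exclude.search(p)]
print(kept)  # [b'git_code_debt/generate.py']
```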
asottile/git-code-debt
diff --git a/tests/generate_config_test.py b/tests/generate_config_test.py index 889b074..3449297 100644 --- a/tests/generate_config_test.py +++ b/tests/generate_config_test.py @@ -1,6 +1,8 @@ from __future__ import absolute_import from __future__ import unicode_literals +import re + import jsonschema.exceptions import pytest @@ -18,12 +20,14 @@ def test_with_all_options_specified(): 'metric_package_names': ['my_package'], 'repo': '.', 'database': 'database.db', + 'exclude': '^vendor/', }) assert ret == GenerateOptions( skip_default_metrics=True, metric_package_names=['my_package'], repo='.', database='database.db', + exclude=re.compile(b'^vendor/'), ) @@ -34,17 +38,5 @@ def test_minimal_defaults(): metric_package_names=[], repo='./', database='database.db', - ) - - -def test_none_for_tempdir_allowed(): - ret = GenerateOptions.from_yaml({ - 'repo': 'repo', - 'database': 'database.db', - }) - assert ret == GenerateOptions( - skip_default_metrics=False, - metric_package_names=[], - repo='repo', - database='database.db', + exclude=re.compile(b'^$'), ) diff --git a/tests/generate_test.py b/tests/generate_test.py index 2f05015..62eb1c6 100644 --- a/tests/generate_test.py +++ b/tests/generate_test.py @@ -3,12 +3,11 @@ from __future__ import unicode_literals import collections import io -import os import os.path +import re import sqlite3 import pytest -import yaml from git_code_debt.discovery import get_metric_parsers_from_args from git_code_debt.generate import _get_metrics_inner @@ -43,7 +42,7 @@ def test_get_metrics_inner_first_commit(cloneable_with_commits): with repo_parser.repo_checked_out(): metrics = _get_metrics_inner(( None, cloneable_with_commits.commits[0], - repo_parser, [LinesOfCodeParser], + repo_parser, [LinesOfCodeParser], re.compile(b'^$'), )) assert Metric(name='TotalLinesOfCode', value=0) in metrics @@ -54,7 +53,7 @@ def test_get_metrics_inner_nth_commit(cloneable_with_commits): metrics = _get_metrics_inner(( cloneable_with_commits.commits[-2], cloneable_with_commits.commits[-1], - repo_parser, [LinesOfCodeParser], + repo_parser, [LinesOfCodeParser], re.compile(b'^$'), )) assert Metric(name='TotalLinesOfCode', value=2) in metrics @@ -73,18 +72,6 @@ def test_generate_integration(sandbox, cloneable): main(('-C', sandbox.gen_config(repo=cloneable))) -def test_generate_integration_config_file(sandbox, cloneable, tempdir_factory): - tmpdir = tempdir_factory.get() - config_filename = os.path.join(tmpdir, 'generate_config.yaml') - with io.open(config_filename, 'w') as config_file: - yaml.dump( - {'repo': cloneable, 'database': sandbox.db_path}, - stream=config_file, - ) - with cwd(tmpdir): - main([]) - - def test_main_database_does_not_exist(sandbox, cloneable): new_db_path = os.path.join(sandbox.directory, 'new.db') cfg = sandbox.gen_config(database=new_db_path, repo=cloneable) @@ -157,6 +144,25 @@ def test_moves_handled_properly(sandbox, cloneable): assert not main(('-C', sandbox.gen_config(repo=cloneable))) +def test_exclude_pattern(sandbox, cloneable_with_commits): + cfg = sandbox.gen_config( + repo=cloneable_with_commits.path, exclude='\.tmpl$', + ) + assert not main(('-C', cfg)) + with sandbox.db() as db: + query = ( + 'SELECT running_value\n' + 'FROM metric_data\n' + 'INNER JOIN metric_names ON\n' + ' metric_data.metric_id == metric_names.id\n' + 'WHERE sha = ? 
AND name = "TotalLinesOfCode"\n' + ) + sha = cloneable_with_commits.commits[-1].sha + val, = db.execute(query, (sha,)).fetchone() + # 2 lines of code from test.py, 0 lines from foo.tmpl (2 lines) + assert val == 2 + + def test_get_options_from_config_no_config_file(): with pytest.raises(SystemExit): get_options_from_config('i-dont-exist')
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 3 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-env" ], "pre_install": null, "python": "3.9", "reqs_path": [ "requirements-dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 blinker==1.9.0 cfgv==3.4.0 click==8.1.8 coverage==7.8.0 cssselect==1.3.0 distlib==0.3.9 exceptiongroup==1.2.2 filelock==3.18.0 Flask==3.1.0 -e git+https://github.com/asottile/git-code-debt.git@44b090434a24fe945747a98fddde5a051f0c7b1d#egg=git_code_debt identify==2.6.9 importlib_metadata==8.6.1 iniconfig==2.1.0 itsdangerous==2.2.0 Jinja2==3.1.6 jsonschema==4.23.0 jsonschema-specifications==2024.10.1 lxml==5.3.1 Mako==1.3.9 MarkupSafe==3.0.2 mock==5.2.0 nodeenv==1.9.1 packaging==24.2 platformdirs==4.3.7 pluggy==1.5.0 pre_commit==4.2.0 pyquery==2.0.1 pytest==8.3.5 pytest-env==1.1.5 PyYAML==6.0.2 referencing==0.36.2 rpds-py==0.24.0 six==1.17.0 tomli==2.2.1 typing_extensions==4.13.0 virtualenv==20.30.0 Werkzeug==3.1.3 zipp==3.21.0
name: git-code-debt channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==25.3.0 - blinker==1.9.0 - cfgv==3.4.0 - click==8.1.8 - coverage==7.8.0 - cssselect==1.3.0 - distlib==0.3.9 - exceptiongroup==1.2.2 - filelock==3.18.0 - flask==3.1.0 - identify==2.6.9 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - itsdangerous==2.2.0 - jinja2==3.1.6 - jsonschema==4.23.0 - jsonschema-specifications==2024.10.1 - lxml==5.3.1 - mako==1.3.9 - markupsafe==3.0.2 - mock==5.2.0 - nodeenv==1.9.1 - packaging==24.2 - platformdirs==4.3.7 - pluggy==1.5.0 - pre-commit==4.2.0 - pyquery==2.0.1 - pytest==8.3.5 - pytest-env==1.1.5 - pyyaml==6.0.2 - referencing==0.36.2 - rpds-py==0.24.0 - six==1.17.0 - tomli==2.2.1 - typing-extensions==4.13.0 - virtualenv==20.30.0 - werkzeug==3.1.3 - zipp==3.21.0 prefix: /opt/conda/envs/git-code-debt
[ "tests/generate_config_test.py::test_with_all_options_specified", "tests/generate_config_test.py::test_minimal_defaults", "tests/generate_test.py::test_get_metrics_inner_first_commit", "tests/generate_test.py::test_get_metrics_inner_nth_commit" ]
[ "tests/generate_test.py::test_generate_integration", "tests/generate_test.py::test_main_database_does_not_exist", "tests/generate_test.py::test_generate_integration_previous_data", "tests/generate_test.py::test_generate_new_data_created", "tests/generate_test.py::test_regression_for_issue_10", "tests/generate_test.py::test_moves_handled_properly", "tests/generate_test.py::test_exclude_pattern" ]
[ "tests/generate_config_test.py::test_empty_config_invalid", "tests/generate_test.py::test_increment_metrics_first_time", "tests/generate_test.py::test_increment_metrics_already_there", "tests/generate_test.py::test_mapper[1]", "tests/generate_test.py::test_mapper[4]", "tests/generate_test.py::test_get_options_from_config_no_config_file", "tests/generate_test.py::test_create_schema", "tests/generate_test.py::test_populate_metric_ids" ]
[]
MIT License
2,074
[ "git_code_debt/generate_config.py", "git_code_debt/generate.py", "git_code_debt/server/servlets/widget.py" ]
[ "git_code_debt/generate_config.py", "git_code_debt/generate.py", "git_code_debt/server/servlets/widget.py" ]
EdinburghGenomics__EGCG-Core-73
56a36841e0fedb399d9e5aab2c80c0468e93f2ed
2018-01-22 16:40:18
4dba72a40487a2341a4be255af8366644300f53b
diff --git a/egcg_core/util.py b/egcg_core/util.py index d1719af..0bfb634 100644 --- a/egcg_core/util.py +++ b/egcg_core/util.py @@ -100,3 +100,22 @@ def move_dir(src_dir, dest_dir): dest_file = os.path.join(dest_dir, os.path.basename(src_file)) shutil.move(fp, dest_file) return 0 + + +def query_dict(data, query_string): + """ + Drill down into a dict using dot notation, e.g. query_dict({'this': {'that': 'other'}}, 'this.that'}). + :param dict data: + :param str query_string: + """ + _data = data.copy() + + for q in query_string.split('.'): + d = _data.get(q) + if d is None: + return None + + else: + _data = d + + return _data
Dot-notated dict querying We implement this quite a lot in multiple places, so we can add this as a common function: ```python >>> d = {'this': {'that': 'other'}} >>> query_dict(d, 'this.that') 'other' ```
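A self-contained sketch of the requested helper — close in spirit to the merged patch, with an extra `isinstance` guard so drilling past a leaf value returns `None` instead of raising:

```python
def query_dict(data, query_string):
    """Drill into nested dicts with dot notation; return None if any key is missing."""
    node = data
    for key in query_string.split('.'):
        if not isinstance(node, dict):
            return None
        node = node.get(key)
        if node is None:
            return None
    return node


d = {'this': {'that': 'other'}}
assert query_dict(d, 'this') == {'that': 'other'}
assert query_dict(d, 'this.that') == 'other'
assert query_dict(d, 'nonexistent') is None
assert query_dict(d, 'this.that.deeper') is None  # leaf reached before query ends
```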
EdinburghGenomics/EGCG-Core
diff --git a/tests/test_util.py b/tests/test_util.py index 7881fc3..fe703e8 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -74,7 +74,6 @@ class TestMoveDir(TestEGCG): def setUp(self): self.test_dir = join(self.assets_path, 'move_dir') - makedirs(join(self.test_dir, 'from'), exist_ok=True) makedirs(join(self.test_dir, 'from', 'subdir'), exist_ok=True) self._create_test_file(join(self.test_dir, 'from', 'ftest.txt')) self._create_test_file(join(self.test_dir, 'from', 'subdir', 'ftest.txt')) @@ -83,7 +82,6 @@ class TestMoveDir(TestEGCG): self._create_test_file(join(self.test_dir, 'external', 'external.txt'), 'External file') symlink(join(self.test_dir, 'external', 'external.txt'), join(self.test_dir, 'from', 'external_renamed.txt')) - makedirs(join(self.test_dir, 'exists'), exist_ok=True) makedirs(join(self.test_dir, 'exists', 'subdir'), exist_ok=True) self._create_test_file(join(self.test_dir, 'exists', 'subdir', 'ftest.txt'), 'another file') self._create_test_file(join(self.test_dir, 'exists', 'ftest.txt'), 'another file') @@ -127,3 +125,10 @@ class TestMoveDir(TestEGCG): assert util.find_file(to, 'ftest.txt') assert md5_from1 == self._md5(join(to, 'ftest.txt')) assert md5_from2 == self._md5(join(to, 'subdir', 'ftest.txt')) + + +def test_query_dict(): + data = {'this': {'that': 'other'}} + assert util.query_dict(data, 'this') == {'that': 'other'} + assert util.query_dict(data, 'this.that') == 'other' + assert util.query_dict(data, 'nonexistent') is None
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
0.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
asana==0.6.5 attrs==22.2.0 cached-property==1.5.2 certifi==2021.5.30 coverage==6.2 -e git+https://github.com/EdinburghGenomics/EGCG-Core.git@56a36841e0fedb399d9e5aab2c80c0468e93f2ed#egg=EGCG_Core importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==2.8 MarkupSafe==2.0.1 oauthlib==3.2.2 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyclarity-lims==0.4.8 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 PyYAML==6.0.1 requests==2.14.2 requests-oauthlib==0.6.2 six==1.10.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: EGCG-Core channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - asana==0.6.5 - attrs==22.2.0 - cached-property==1.5.2 - coverage==6.2 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==2.8 - markupsafe==2.0.1 - oauthlib==3.2.2 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyclarity-lims==0.4.8 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pyyaml==6.0.1 - requests==2.14.2 - requests-oauthlib==0.6.2 - six==1.10.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/EGCG-Core
[ "tests/test_util.py::test_query_dict" ]
[]
[ "tests/test_util.py::test_find_files", "tests/test_util.py::test_find_file", "tests/test_util.py::test_str_join", "tests/test_util.py::test_find_fastqs", "tests/test_util.py::test_find_fastqs_with_lane", "tests/test_util.py::test_find_all_fastqs", "tests/test_util.py::test_find_all_fastq_pairs", "tests/test_util.py::test_same_fs", "tests/test_util.py::TestMoveDir::test_move_dir" ]
[]
MIT License
2,076
[ "egcg_core/util.py" ]
[ "egcg_core/util.py" ]
jupyterhub__kubespawner-122
5c9bf38731e8f5b04b5001d78987b7408ed1ae00
2018-01-22 18:45:58
5c9bf38731e8f5b04b5001d78987b7408ed1ae00
diff --git a/kubespawner/objects.py b/kubespawner/objects.py index ae0d42e..2c8201c 100644 --- a/kubespawner/objects.py +++ b/kubespawner/objects.py @@ -250,7 +250,8 @@ def make_pvc( storage_class, access_modes, storage, - labels + labels, + annotations={} ): """ Make a k8s pvc specification for running a user notebook. @@ -272,9 +273,7 @@ def make_pvc( pvc.api_version = "v1" pvc.metadata = V1ObjectMeta() pvc.metadata.name = name - pvc.metadata.annotations = {} - if storage_class: - pvc.metadata.annotations.update({"volume.beta.kubernetes.io/storage-class": storage_class}) + pvc.metadata.annotations = annotations pvc.metadata.labels = {} pvc.metadata.labels.update(labels) pvc.spec = V1PersistentVolumeClaimSpec() @@ -282,6 +281,10 @@ def make_pvc( pvc.spec.resources = V1ResourceRequirements() pvc.spec.resources.requests = {"storage": storage} + if storage_class: + pvc.metadata.annotations.update({"volume.beta.kubernetes.io/storage-class": storage_class}) + pvc.spec.storage_class_name = storage_class + return pvc def make_ingress( diff --git a/kubespawner/spawner.py b/kubespawner/spawner.py index e3da4d9..163303a 100644 --- a/kubespawner/spawner.py +++ b/kubespawner/spawner.py @@ -783,12 +783,8 @@ class KubeSpawner(Spawner): labels = { 'heritage': 'jupyterhub', 'app': 'jupyterhub', - 'hub.jupyter.org/username': escapism.escape(self.user.name) } - if self.name: - # FIXME: Make sure this is dns safe? - labels['hub.jupyter.org/servername'] = self.name labels.update(extra_labels) return labels @@ -801,6 +797,17 @@ class KubeSpawner(Spawner): labels.update(self.pod_reflector.labels) return self._build_common_labels(labels) + def _build_common_annotations(self, extra_annotations): + # Annotations don't need to be escaped + annotations = { + 'hub.jupyter.org/username': self.user.name + } + if self.name: + annotations['hub.jupyter.org/servername'] = self.name + + annotations.update(extra_annotations) + return annotations + @gen.coroutine def get_pod_manifest(self): """ @@ -822,7 +829,7 @@ class KubeSpawner(Spawner): real_cmd = None labels = self._build_pod_labels(self._expand_all(self.singleuser_extra_labels)) - annotations = self._expand_all(self.singleuser_extra_annotations) + annotations = self._build_common_annotations(self._expand_all(self.singleuser_extra_annotations)) return make_pod( name=self.pod_name, @@ -861,12 +868,15 @@ class KubeSpawner(Spawner): """ labels = self._build_common_labels(self._expand_all(self.user_storage_extra_labels)) + annotations = self._build_common_annotations({}) + return make_pvc( name=self.pvc_name, storage_class=self.user_storage_class, access_modes=self.user_storage_access_modes, storage=self.user_storage_capacity, - labels=labels + labels=labels, + annotations=annotations ) def is_pod_running(self, pod):
Long user names used in pod and pvc labels are not allowed by k8s The `hub.jupyter.org/username` label in pod or pvc manifests (e.g. https://github.com/jupyterhub/kubespawner/blob/master/kubespawner/spawner.py#L663) makes pod creation fail when the user name has more than 63 characters, with an error like this: error: invalid label value: "hub.jupyter.org/username=notebook-a8e986f5974c1ee896fa7717cad98511be0388b3d667535d9af7fe18feab7e3d": must be no more than 63 characters. The username I'm using comes from an external OAuth2 IdP and has a fixed length of 64 characters, so any pod creation will always fail. As a workaround I have changed the label to `hub.jupyter.org/userid` and use the id, which should be small enough. However I'm not sure that's the right approach.
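For context, Kubernetes caps label values at 63 characters while annotation values have no such per-value cap, which is why the patch above moves the username into an annotation. If a label is still wanted, a common alternative is hash-truncation — a sketch of that idea, not kubespawner's actual implementation:

```python
import hashlib


def safe_label_value(value, max_len=63):
    """Fit a value into Kubernetes' 63-character label limit, appending a
    short digest so distinct long names remain distinct after truncation."""
    if len(value) <= max_len:
        return value
    digest = hashlib.sha256(value.encode()).hexdigest()[:8]
    return value[:max_len - len(digest) - 1] + '-' + digest


name = 'notebook-' + 'a' * 64  # e.g. a fixed-length OAuth2 IdP username
label = safe_label_value(name)
print(len(label), label)  # 63 chars, ending in '-<digest>'
```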
jupyterhub/kubespawner
diff --git a/tests/test_objects.py b/tests/test_objects.py index a9f0311..d69b8d3 100644 --- a/tests/test_objects.py +++ b/tests/test_objects.py @@ -787,6 +787,7 @@ def test_make_resources_all(): } }, 'spec': { + 'storageClassName': 'gce-standard-storage', 'accessModes': ['ReadWriteOnce'], 'resources': { 'requests': {
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 2 }
0.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "configurable-http-proxy" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alembic==1.15.2 annotated-types==0.7.0 arrow==1.3.0 async-generator==1.10 attrs==25.3.0 cachetools==5.5.2 certifi==2025.1.31 certipy==0.2.2 cffi==1.17.1 charset-normalizer==3.4.1 click==8.1.8 configurable-http-proxy==0.3.0 cryptography==44.0.2 escapism==1.0.1 exceptiongroup==1.2.2 fqdn==1.5.1 google-auth==2.38.0 greenlet==3.1.1 idna==3.10 importlib_metadata==8.6.1 iniconfig==2.1.0 ipaddress==1.0.23 isoduration==20.11.0 Jinja2==3.1.6 jsonpointer==3.0.0 jsonschema==4.23.0 jsonschema-specifications==2024.10.1 jupyter-events==0.12.0 jupyterhub==5.2.1 -e git+https://github.com/jupyterhub/kubespawner.git@5c9bf38731e8f5b04b5001d78987b7408ed1ae00#egg=jupyterhub_kubespawner kubernetes==3.0.0 Mako==1.3.9 MarkupSafe==3.0.2 oauthlib==3.2.2 packaging==24.2 pamela==1.2.0 pluggy==1.5.0 prometheus_client==0.21.1 pyasn1==0.6.1 pyasn1_modules==0.4.2 pycparser==2.22 pydantic==2.11.1 pydantic_core==2.33.0 pytest==8.3.5 python-dateutil==2.9.0.post0 python-json-logger==3.3.0 PyYAML==6.0.2 referencing==0.36.2 requests==2.32.3 rfc3339-validator==0.1.4 rfc3986-validator==0.1.1 rpds-py==0.24.0 rsa==4.9 six==1.17.0 SQLAlchemy==2.0.40 tomli==2.2.1 tornado==6.4.2 traitlets==5.14.3 types-python-dateutil==2.9.0.20241206 typing-inspection==0.4.0 typing_extensions==4.13.0 uri-template==1.3.0 urllib3==2.3.0 webcolors==24.11.1 websocket_client==0.40.0 zipp==3.21.0
name: kubespawner channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alembic==1.15.2 - annotated-types==0.7.0 - arrow==1.3.0 - async-generator==1.10 - attrs==25.3.0 - cachetools==5.5.2 - certifi==2025.1.31 - certipy==0.2.2 - cffi==1.17.1 - charset-normalizer==3.4.1 - click==8.1.8 - configurable-http-proxy==0.3.0 - cryptography==44.0.2 - escapism==1.0.1 - exceptiongroup==1.2.2 - fqdn==1.5.1 - google-auth==2.38.0 - greenlet==3.1.1 - idna==3.10 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - ipaddress==1.0.23 - isoduration==20.11.0 - jinja2==3.1.6 - jsonpointer==3.0.0 - jsonschema==4.23.0 - jsonschema-specifications==2024.10.1 - jupyter-events==0.12.0 - jupyterhub==5.2.1 - kubernetes==3.0.0 - mako==1.3.9 - markupsafe==3.0.2 - oauthlib==3.2.2 - packaging==24.2 - pamela==1.2.0 - pluggy==1.5.0 - prometheus-client==0.21.1 - pyasn1==0.6.1 - pyasn1-modules==0.4.2 - pycparser==2.22 - pydantic==2.11.1 - pydantic-core==2.33.0 - pytest==8.3.5 - python-dateutil==2.9.0.post0 - python-json-logger==3.3.0 - pyyaml==6.0.2 - referencing==0.36.2 - requests==2.32.3 - rfc3339-validator==0.1.4 - rfc3986-validator==0.1.1 - rpds-py==0.24.0 - rsa==4.9 - six==1.17.0 - sqlalchemy==2.0.40 - tomli==2.2.1 - tornado==6.4.2 - traitlets==5.14.3 - types-python-dateutil==2.9.0.20241206 - typing-extensions==4.13.0 - typing-inspection==0.4.0 - uri-template==1.3.0 - urllib3==2.3.0 - webcolors==24.11.1 - websocket-client==0.40.0 - zipp==3.21.0 prefix: /opt/conda/envs/kubespawner
[ "tests/test_objects.py::test_make_resources_all" ]
[]
[ "tests/test_objects.py::test_make_simplest_pod", "tests/test_objects.py::test_make_labeled_pod", "tests/test_objects.py::test_make_annotated_pod", "tests/test_objects.py::test_make_pod_with_image_pull_secrets", "tests/test_objects.py::test_set_pod_uid_fs_gid", "tests/test_objects.py::test_run_privileged_container", "tests/test_objects.py::test_make_pod_resources_all", "tests/test_objects.py::test_make_pod_with_env", "tests/test_objects.py::test_make_pod_with_lifecycle", "tests/test_objects.py::test_make_pod_with_init_containers", "tests/test_objects.py::test_make_pod_with_extra_container_config", "tests/test_objects.py::test_make_pod_with_extra_pod_config", "tests/test_objects.py::test_make_pod_with_extra_containers", "tests/test_objects.py::test_make_pod_with_extra_resources", "tests/test_objects.py::test_make_pvc_simple", "tests/test_objects.py::test_make_pod_with_service_account" ]
[]
BSD 3-Clause "New" or "Revised" License
2,077
[ "kubespawner/objects.py", "kubespawner/spawner.py" ]
[ "kubespawner/objects.py", "kubespawner/spawner.py" ]
missionpinball__mpf-1074
ca7dadb34fac8c7bc0cb2a036e61af9ec0b1369b
2018-01-22 19:18:54
2c1bb3aa1e25674916bc4e0d17ccb6c3c87bd01b
diff --git a/mpf/core/placeholder_manager.py b/mpf/core/placeholder_manager.py index 90badc6af..0295c4f80 100644 --- a/mpf/core/placeholder_manager.py +++ b/mpf/core/placeholder_manager.py @@ -226,7 +226,7 @@ class MpfFormatter(string.Formatter): class TextTemplate: - """Legacy text placeholder.""" + """Text placeholder.""" var_finder = re.compile("(?<=\\()[a-zA-Z_0-9|]+(?=\\))") string_finder = re.compile("(?<=\\$)[a-zA-Z_0-9]+") @@ -251,93 +251,12 @@ class TextTemplate: if not subscriptions: future = asyncio.Future(loop=self.machine.clock.loop) elif len(subscriptions) == 1: - future = subscriptions + future = subscriptions[0] else: future = Util.any(subscriptions, loop=self.machine.clock.loop) future = Util.ensure_future(future, loop=self.machine.clock.loop) return value, future - def monitor_changes(self, callback): - """Monitor variables for changes and call callback on changes.""" - self._change_callback = callback - self._setup_variable_monitors() - - def stop_monitor(self): - """Stop monitoring for changes.""" - self._change_callback = None - self.machine.events.remove_handler(self._var_changes) - - def _add_player_var_handler(self, name: str) -> None: - self.machine.events.add_handler('player_{}'.format(name), self._var_changes) - - def _add_current_player_handler(self) -> None: - self.machine.events.add_handler('player_turn_started', self._var_changes) - - def _add_machine_var_handler(self, name: str) -> None: - self.machine.events.add_handler('machine_var_{}'.format(name), self._var_changes) - - def _var_changes(self, **kwargs) -> None: - del kwargs - if self._change_callback: - self._change_callback() - - def _setup_variable_monitors(self) -> None: - for var_string in self.vars: - if '|' not in var_string: - self._add_player_var_handler(name=var_string) - self._add_current_player_handler() - else: - source, variable_name = var_string.split('|') - if source.lower().startswith('player'): - - if source.lstrip('player'): # we have player num - self._add_player_var_handler(name=variable_name) - else: # no player num - self._add_player_var_handler(name=var_string) - self._add_current_player_handler() - elif source.lower() == 'machine': - self._add_machine_var_handler(name=variable_name) - - def evaluate_text(self) -> str: - """Evaluate placeholder to string.""" - text = self.text - for var_string in self.vars: - if var_string.startswith('machine|'): - _, var_name = var_string.split('|') - if self.machine.is_machine_var(var_name): - replacement = str(self.machine.get_machine_var(var_name)) - else: - replacement = '' - - text = text.replace('(' + var_string + ')', replacement) - - elif self.machine.game and self.machine.game.player: - if var_string.startswith('player|'): - text = text.replace('(' + var_string + ')', str(self.machine.game.player[var_string.split('|')[1]])) - elif var_string.startswith('player') and '|' in var_string: - player_num, var_name = var_string.lstrip('player').split('|') - try: - value = self.machine.game.player_list[int(player_num) - 1][var_name] - - if value is not None: - text = text.replace('(' + var_string + ')', str(value)) - else: - text = text.replace('(' + var_string + ')', '') - except IndexError: - text = text.replace('(' + var_string + ')', '') - elif self.machine.game.player.is_player_var(var_string): - value = self.machine.game.player[var_string] - if value is not None: - text = text.replace('(' + var_string + ')', str(value)) - else: - text = text.replace('(' + var_string + ')', '') - else: - # set var to empty otherwise - if 
var_string.startswith('player') or var_string.startswith('player') and '|' in var_string: - text = text.replace('(' + var_string + ')', '') - - return text - class BasePlaceholder(object): @@ -442,10 +361,10 @@ class PlayerPlaceholder(BasePlaceholder): """Wraps the player.""" - def __init__(self, player, machine): + def __init__(self, machine, number=None): """Initialise placeholder.""" - self._player = player # type: Player self._machine = machine # type: MachineController + self._number = number def subscribe(self): """Subscribe to player changes.""" @@ -457,11 +376,52 @@ class PlayerPlaceholder(BasePlaceholder): def __getitem__(self, item): """Array access.""" - return self._player[item] + if self._machine.game and self._machine.game.player: + if self._number is not None: + if len(self._machine.game.player_list) <= self._number: + raise ValueError("Player not in game") + return self._machine.game.player_list[self._number][item] + else: + return self._machine.game.player[item] + else: + raise ValueError("Not in a game") + + def __getattr__(self, item): + """Attribute access.""" + if self._machine.game and self._machine.game.player: + if self._number is not None: + if len(self._machine.game.player_list) <= self._number: + raise ValueError("Player not in game") + return getattr(self._machine.game.player_list[self._number], item) + else: + return getattr(self._machine.game.player, item) + else: + raise ValueError("Not in a game") + + +class PlayersPlaceholder(BasePlaceholder): + + """Wraps the player list.""" + + def __init__(self, machine): + """Initialise placeholder.""" + self._machine = machine # type: MachineController + + def subscribe(self): + """Subscribe to player list changes.""" + return self._machine.events.wait_for_any_event(["player_added", "game_ended"]) + + def subscribe_attribute(self, item): + """Subscribe player variable changes.""" + return self._machine.events.wait_for_event('player_{}'.format(item)) + + def __getitem__(self, item): + """Array access.""" + return PlayerPlaceholder(self._machine, item) def __getattr__(self, item): """Attribute access.""" - return getattr(self._player, item) + return PlayerPlaceholder(self._machine, item) class MachinePlaceholder(BasePlaceholder): @@ -612,7 +572,13 @@ class BasePlaceholderManager(MpfController): if isinstance(slice_value, dict) and node.attr in slice_value: ret_value = slice_value[node.attr] else: - ret_value = getattr(slice_value, node.attr) + try: + ret_value = getattr(slice_value, node.attr) + except ValueError: + if subscribe: + raise TemplateEvalError(subscription + [slice_value.subscribe_attribute(node.attr)]) + else: + raise if subscribe: return ret_value, subscription + [slice_value.subscribe_attribute(node.attr)] else: @@ -622,7 +588,10 @@ class BasePlaceholderManager(MpfController): value, subscription = self._eval(node.value, variables, subscribe) if isinstance(node.slice, ast.Index): slice_value, slice_subscript = self._eval(node.slice.value, variables, subscribe) - return value[slice_value], subscription + slice_subscript + try: + return value[slice_value], subscription + slice_subscript + except ValueError: + raise TemplateEvalError(subscription + slice_subscript) elif isinstance(node.slice, ast.Slice): lower, lower_subscription = self._eval(node.slice.lower, variables, subscribe) upper, upper_subscription = self._eval(node.slice.upper, variables, subscribe) @@ -697,7 +666,7 @@ class BasePlaceholderManager(MpfController): if not subscriptions: future = asyncio.Future(loop=self.machine.clock.loop) elif 
len(subscriptions) == 1: - future = subscriptions + future = subscriptions[0] else: future = Util.any(subscriptions, loop=self.machine.clock.loop) future = Util.ensure_future(future, loop=self.machine.clock.loop) @@ -749,11 +718,12 @@ class PlaceholderManager(BasePlaceholderManager): return DevicesPlaceholder(self.machine) elif name == "mode": return ModePlaceholder(self.machine) + elif name == "current_player": + return PlayerPlaceholder(self.machine) + elif name == "players": + return PlayersPlaceholder(self.machine) elif self.machine.game: - if name == "current_player": - return PlayerPlaceholder(self.machine.game.player, self.machine) - elif name == "players": - return self.machine.game.player_list - elif name == "game": + if name == "game": return self.machine.game + return False diff --git a/mpf/devices/segment_display.py b/mpf/devices/segment_display.py index 2153d5627..0beceea62 100644 --- a/mpf/devices/segment_display.py +++ b/mpf/devices/segment_display.py @@ -70,7 +70,6 @@ class SegmentDisplay(SystemWideDevice): self.hw_display.set_text("", flashing=False) if self._current_placeholder: self.text = "" - self._current_placeholder.stop_monitor() self._current_placeholder = None return @@ -79,19 +78,18 @@ class SegmentDisplay(SystemWideDevice): # get top entry top_entry = self._text_stack[0] - if self._current_placeholder: - self._current_placeholder.stop_monitor() - self._current_placeholder = TextTemplate(self.machine, top_entry.text) - self._current_placeholder.monitor_changes(self._update_display) self._update_display() - def _update_display(self) -> None: + def _update_display(self, *args, **kwargs) -> None: """Update display to current text.""" + del args + del kwargs if not self._current_placeholder: new_text = "" else: - new_text = self._current_placeholder.evaluate_text() + new_text, future = self._current_placeholder.evaluate_and_subscribe({}) + future.add_done_callback(self._update_display) # set text to display if it changed if new_text != self.text:
Text templates ng Current text templates look like this: ``` Player (player|number) Score: (player|score) Color: (machine|score) ``` Those work mostly fine. But they have a few limitations: * No internationalization (i18n) * No math/conditionals * No formatting * Pipes are confusing because we use dots for other placeholders * Not all placeholders are supported Inline example (everything is converted to string): ``` Player {current_player.number} ``` Minimal example: ``` text: "Player {player_number:n}" placeholders: - player_number: type: int value: current_player.number ``` Plural example (points is padded to 10 chars and centered): ``` text: "{points:^10d} point" text_plural: "{points:^10d} points" plural: current_player.score placeholders: - points: type: int value: current_player.score default: 0 # in case there is no current player ``` Float formatting example (fill with zero and max two decimals): ``` text: "{credits:0.2f} credits" placeholders: - credits: type: int value: machine.credit_units ``` Multiple vars: ``` text: "Player {player_number:n} Score: {score:n} Color: {color}" placeholders: - player_number: type: int value: current_player.number - score: type: float value: current_player.score - color: type: string value: "red" if current_player.score > 100 else "blue" ``` All texts can be translated using gettext. We need to generate .po files and that is all. This also supports other plural forms which exist in some languages (e.g. for 21, 31, 41 and so on). For number grouping, {score:n} will work depending on the locale (e.g. in en_US or en_GB but not in de_DE). If you want to enforce it, use {score:,} and it will happen in all locales.
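The proposed placeholders lean on Python's standard format-spec mini-language, so the specs used in the examples above can be checked directly in plain Python — a quick illustration, independent of MPF itself:

```python
# Each line exercises one spec from the issue's examples.
print("Player {num:n}".format(num=2))                 # locale-aware integer
print("{points:^10d} points".format(points=42))       # padded to 10 chars, centered
print("{credits:06.2f} credits".format(credits=9.5))  # zero-filled to width 6, two decimals
print("{score:,}".format(score=1234567))              # grouping regardless of locale
print("{color}".format(color="red" if 150 > 100 else "blue"))  # conditional value
```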
missionpinball/mpf
diff --git a/mpf/tests/machine_files/segment_display/config/config.yaml b/mpf/tests/machine_files/segment_display/config/config.yaml index f87d5f055..2c79237a1 100644 --- a/mpf/tests/machine_files/segment_display/config/config.yaml +++ b/mpf/tests/machine_files/segment_display/config/config.yaml @@ -30,15 +30,15 @@ segment_display_player: test_score: display1: - text: "1: (player1|score)" + text: "1: {players[0].score:d}" display2: - text: "2: (machine|test)" + text: "2: {machine.test:d}" test_score_two_player: display1: - text: "(player1|score)" + text: "{players[0].score:d}" display2: - text: "(player2|score)" + text: "{players[1].score:d}" test_flash: display1: diff --git a/mpf/tests/machine_files/segment_display/config/game.yaml b/mpf/tests/machine_files/segment_display/config/game.yaml index 671f389ca..98289207a 100644 --- a/mpf/tests/machine_files/segment_display/config/game.yaml +++ b/mpf/tests/machine_files/segment_display/config/game.yaml @@ -24,9 +24,21 @@ segment_display_player: display4: text: "" display5: - text: "(player|ball)" + text: "{current_player.ball:d}" # clear only display5 after game + game_ended{machine.player1_score > 0}: + display1: + text: "{machine.player1_score:d}" + game_ended{machine.player2_score > 0}: + display2: + text: "{machine.player2_score:d}" + game_ended{machine.player3_score > 0}: + display3: + text: "{machine.player3_score:d}" + game_ended{machine.player4_score > 0}: + display4: + text: "{machine.player4_score:d}" game_ended: display5: text: "" @@ -60,14 +72,14 @@ segment_display_player: # show score when adding players player_added.1{num==1}: display1: - text: "(player1|score)" + text: "{players[0].score:d}" player_added.2{num==2}: display2: - text: "(player2|score)" + text: "{players[1].score:d}" player_added.3{num==3}: display3: - text: "(player3|score)" + text: "{players[2].score:d}" player_added.4{num==4}: display4: - text: "(player4|score)" + text: "{players[3].score:d}" diff --git a/mpf/tests/test_SegmentDisplay.py b/mpf/tests/test_SegmentDisplay.py index 360de311b..cb9c32cf8 100644 --- a/mpf/tests/test_SegmentDisplay.py +++ b/mpf/tests/test_SegmentDisplay.py @@ -1,7 +1,7 @@ from mpf.tests.MpfFakeGameTestCase import MpfFakeGameTestCase -class TestShots(MpfFakeGameTestCase): +class TestSegmentDisplay(MpfFakeGameTestCase): def getConfigFile(self): if self._testMethodName == "test_game": @@ -164,13 +164,13 @@ class TestShots(MpfFakeGameTestCase): self.post_event("test_score") self.advance_time_and_run() - self.assertEqual("1: ", display1.hw_display.text) - self.assertEqual("2: ", display2.hw_display.text) + self.assertEqual("1: 0", display1.hw_display.text) + self.assertEqual("2: 0", display2.hw_display.text) self.machine.set_machine_var("test", 42) self.advance_time_and_run() - self.assertEqual("1: ", display1.hw_display.text) + self.assertEqual("1: 0", display1.hw_display.text) self.assertEqual("2: 42", display2.hw_display.text) self.start_game() @@ -225,13 +225,13 @@ class TestShots(MpfFakeGameTestCase): # first display shows score. second empty self.assertEqual("0", display1.hw_display.text) - self.assertEqual("", display2.hw_display.text) + self.assertEqual("0", display2.hw_display.text) # player scores self.machine.game.player.score += 42 self.advance_time_and_run(.01) self.assertEqual("42", display1.hw_display.text) - self.assertEqual("", display2.hw_display.text) + self.assertEqual("0", display2.hw_display.text) # add player self.add_player()
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 2 }
0.33
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
asciimatics==1.14.0 attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 future==1.0.0 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work -e git+https://github.com/missionpinball/mpf.git@ca7dadb34fac8c7bc0cb2a036e61af9ec0b1369b#egg=mpf packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work Pillow==8.4.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==7.0.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work pyfiglet==0.8.post1 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pyserial==3.5 pyserial-asyncio==0.6 pytest==6.2.4 ruamel.base==1.0.0 ruamel.yaml==0.10.23 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing==3.7.4.3 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work wcwidth==0.2.13 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: mpf channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - asciimatics==1.14.0 - future==1.0.0 - pillow==8.4.0 - psutil==7.0.0 - pyfiglet==0.8.post1 - pyserial==3.5 - pyserial-asyncio==0.6 - ruamel-base==1.0.0 - ruamel-yaml==0.10.23 - typing==3.7.4.3 - wcwidth==0.2.13 prefix: /opt/conda/envs/mpf
[ "mpf/tests/test_SegmentDisplay.py::TestSegmentDisplay::test_game", "mpf/tests/test_SegmentDisplay.py::TestSegmentDisplay::test_player", "mpf/tests/test_SegmentDisplay.py::TestSegmentDisplay::test_scoring" ]
[]
[]
[]
MIT License
2,078
[ "mpf/devices/segment_display.py", "mpf/core/placeholder_manager.py" ]
[ "mpf/devices/segment_display.py", "mpf/core/placeholder_manager.py" ]
EdinburghGenomics__pyclarity-lims-24
66586f02da5660a87a4347e246dd76eac0bb29c4
2018-01-22 21:45:22
a03be6eda34f0d8adaf776d2286198a34e40ecf5
diff --git a/pyclarity_lims/descriptors.py b/pyclarity_lims/descriptors.py index c2ce1ad..1c4eea6 100644 --- a/pyclarity_lims/descriptors.py +++ b/pyclarity_lims/descriptors.py @@ -78,7 +78,8 @@ class XmlDictionary(XmlMutable, dict): def clear(self): dict.clear(self) - self.rootnode(self.instance).clear() + for elem in self._elems: + self.rootnode(self.instance).remove(elem) self._update_elems() def _update_elems(self): @@ -337,18 +338,21 @@ class PlacementDictionary(XmlDictionary): break -class SubTagDictionary(XmlDictionary): +class SubTagDictionary(XmlDictionary, Nestable): """Dictionary of xml sub element where the key is the tag and the value is the text of the sub element. """ def __init__(self, instance, tag, **kwargs): - self.tag = tag + # In case extra nesting is provided + nesting = kwargs.get('nesting', []) + nesting.append(tag) + Nestable.__init__(self, nesting=nesting) XmlDictionary.__init__(self, instance) def _update_elems(self): - tag_node = self.rootnode(self.instance).find(self.tag) - if tag_node: - self._elems = tag_node.getchildren() + root_node = self.rootnode(self.instance) + if root_node: + self._elems = root_node.getchildren() else: self._elems = [] @@ -358,20 +362,18 @@ class SubTagDictionary(XmlDictionary): def _setitem(self, key, value): if not isinstance(key, str): raise ValueError() - tag_node = self.rootnode(self.instance).find(self.tag) - if tag_node is None: - tag_node = ElementTree.SubElement(self.rootnode(self.instance), self.tag) + root_node = self.rootnode(self.instance) - elem = tag_node.find(key) + elem = root_node.find(key) if elem is None: - elem = ElementTree.SubElement(tag_node, key) + elem = ElementTree.SubElement(root_node, key) elem.text = value def _delitem(self, key): - tag_node = self.rootnode(self.instance).find(self.tag) + root_node = self.rootnode(self.instance) for node in self._elems: if node.tag == key: - tag_node.remove(node) + root_node.remove(node) break @@ -437,7 +439,8 @@ class XmlList(XmlMutable, list): def clear(self): # python 2.7 does not have a clear function for list del self[:] - self.rootnode(self.instance).clear() + for elem in self._elems: + self.rootnode(self.instance).remove(elem) self._update_elems() def __add__(self, other_list):
UdfConfig presets clear method clears the entire UdfConfig object Calling the UdfConfig.preset.clear() method seems to clear values from the entire UdfConfig object. Even if I set the preset list manually to a new empty list, the object is also cleared... Is this expected?
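The root cause addressed in the patch above is ElementTree's `Element.clear()`, which wipes all children (plus text and attributes) of a node, not just the elements a given container tracks. A minimal reproduction of the difference, independent of pyclarity-lims:

```python
import xml.etree.ElementTree as ElementTree

root = ElementTree.fromstring(
    '<udfconfig><preset>a</preset><preset>b</preset><name>keep me</name></udfconfig>'
)

# Buggy behaviour: root.clear() would drop <name> along with the presets.
# Fixed behaviour (as in the patch): remove only the tracked elements.
for elem in root.findall('preset'):
    root.remove(elem)

print(ElementTree.tostring(root).decode())
# <udfconfig><name>keep me</name></udfconfig>
```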
EdinburghGenomics/pyclarity-lims
diff --git a/tests/test_descriptors.py b/tests/test_descriptors.py index e4cee66..bd8a42f 100644 --- a/tests/test_descriptors.py +++ b/tests/test_descriptors.py @@ -191,6 +191,8 @@ class TestStringAttributeDescriptor(TestDescriptor): assert instance_new.root.attrib['name'] == "test name2" + + class TestStringListDescriptor(TestDescriptor): def setUp(self): et = ElementTree.fromstring("""<?xml version="1.0" encoding="UTF-8" standalone="yes"?> @@ -368,7 +370,7 @@ class TestUdfDictionary(TestCase): assert (k, self.dict1[k]) in expected_content -class TestPlacementDictionary(TestCase): +class TestPlacementDictionary(TestDescriptor): def setUp(self): et = ElementTree.fromstring("""<?xml version="1.0" encoding="UTF-8" standalone="yes"?> @@ -376,6 +378,7 @@ class TestPlacementDictionary(TestCase): <placement uri="http://testgenologics.com:4040/api/v2/artifacts/a1" limsid="a1"> <value>A:1</value> </placement> +<other>thing</other> </test-entry>""") self.lims = Lims('http://testgenologics.com:4040', username='test', password='password') self.instance1 = Mock(root=et, lims=self.lims) @@ -410,6 +413,15 @@ class TestPlacementDictionary(TestCase): del self.dict1['A:1'] assert len(self.dict1.rootnode(self.dict1.instance).findall('placement')) == 0 + def test_clear(self): + el = EntityList(self.instance1, 'artifact', Artifact) + sd = self._make_desc(StringDescriptor, 'other') + assert sd.__get__(self.instance1, None) == "thing" + assert len(self.dict1.rootnode(self.dict1.instance).findall('placement')) == 1 + self.dict1.clear() + assert len(self.dict1.rootnode(self.dict1.instance).findall('placement')) == 0 + assert sd.__get__(self.instance1, None) == "thing" + class TestSubTagDictionary(TestCase): @@ -424,20 +436,32 @@ class TestSubTagDictionary(TestCase): self.instance1 = Mock(root=et, lims=self.lims) self.dict1 = SubTagDictionary(self.instance1, tag='test-tag') + et = ElementTree.fromstring("""<?xml version="1.0" encoding="UTF-8" standalone="yes"?> + <test-entry xmlns:udf="http://genologics.com/ri/userdefined"> + </test-entry>""") + self.instance2 = Mock(root=et, lims=self.lims) + self.dict2 = SubTagDictionary(self.instance2, tag='test-tag') + def test___getitem__(self): assert self.dict1['key1'] == 'value1' def test___setitem__(self): - assert len(self.dict1.rootnode(self.dict1.instance).find('test-tag')) == 1 - assert self.dict1.rootnode(self.dict1.instance).find('test-tag').find('key1').text == 'value1' + assert len(self.dict1.rootnode(self.dict1.instance)) == 1 + assert self.dict1.rootnode(self.dict1.instance).find('key1').text == 'value1' self.dict1['key1'] = 'value11' - assert len(self.dict1.rootnode(self.dict1.instance).find('test-tag')) == 1 - assert self.dict1.rootnode(self.dict1.instance).find('test-tag').find('key1').text == 'value11' + assert len(self.dict1.rootnode(self.dict1.instance)) == 1 + assert self.dict1.rootnode(self.dict1.instance).find('key1').text == 'value11' self.dict1['key2'] = 'value2' - assert len(self.dict1.rootnode(self.dict1.instance).find('test-tag')) == 2 - assert self.dict1.rootnode(self.dict1.instance).find('test-tag').find('key2').text == 'value2' + assert len(self.dict1.rootnode(self.dict1.instance)) == 2 + assert self.dict1.rootnode(self.dict1.instance).find('key2').text == 'value2' assert self.dict1['key2'] == 'value2' + def test___setitem__from_empty(self): + assert len(self.dict2.rootnode(self.dict2.instance)) == 0 + self.dict2['key1'] = 'value1' + assert self.dict2.rootnode(self.dict2.instance).find('key1').text == 'value1' + assert 
len(self.dict2.rootnode(self.dict2.instance)) == 1 + class TestXmlElementAttributeDict(TestCase): def setUp(self): @@ -502,13 +526,14 @@ class TestXmlPooledInputDict(TestCase): assert len(self.dict1.rootnode(self.dict1.instance)) == 3 -class TestEntityList(TestCase): +class TestEntityList(TestDescriptor): def setUp(self): et = ElementTree.fromstring("""<?xml version="1.0" encoding="UTF-8" standalone="yes"?> <test-entry> <artifact uri="http://testgenologics.com:4040/api/v2/artifacts/a1"></artifact> <artifact uri="http://testgenologics.com:4040/api/v2/artifacts/a2"></artifact> + <other>thing</other> </test-entry> """) self.lims = Lims('http://testgenologics.com:4040', username='test', password='password') @@ -579,6 +604,15 @@ class TestEntityList(TestCase): assert el[0] == a3 assert el[1] == a4 + def test_clear(self): + el = EntityList(self.instance1, 'artifact', Artifact) + sd = self._make_desc(StringDescriptor, 'other') + assert sd.__get__(self.instance1, None) == "thing" + assert len(el) == 2 + el.clear() + assert len(el) == 0 + assert sd.__get__(self.instance1, None) == "thing" + class TestInputOutputMapList(TestCase): def setUp(self): @@ -751,7 +785,7 @@ class TestQueuedArtifactList(TestCase): </location> </artifact> <artifact uri="{url}/artifacts/a3"> - <queue-time>2011-12-25T01:10:10.050-00:00</queue-time> + <queue-time>2011-12-25T01:10:10.050-01:00</queue-time> <location> <container uri="{url}/containers/c1"/> <value>A:3</value> @@ -783,7 +817,7 @@ class TestQueuedArtifactList(TestCase): assert queued_artifacts[0] == qart qart = self.get_queue_art('a2', 'A:2', 200000, datetime.timedelta(0, 3600)) assert queued_artifacts[1] == qart - qart = self.get_queue_art('a3', 'A:3', 50000, datetime.timedelta(0, 0)) + qart = self.get_queue_art('a3', 'A:3', 50000, datetime.timedelta(0, -3600)) assert queued_artifacts[2] == qart def test_set(self):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
0.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 charset-normalizer==2.0.12 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 packaging==21.3 pluggy==1.0.0 py==1.11.0 -e git+https://github.com/EdinburghGenomics/pyclarity-lims.git@66586f02da5660a87a4347e246dd76eac0bb29c4#egg=pyclarity_lims pyparsing==3.1.4 pytest==7.0.1 requests==2.27.1 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: pyclarity-lims channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - charset-normalizer==2.0.12 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - requests==2.27.1 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/pyclarity-lims
[ "tests/test_descriptors.py::TestPlacementDictionary::test_clear", "tests/test_descriptors.py::TestSubTagDictionary::test___setitem__", "tests/test_descriptors.py::TestSubTagDictionary::test___setitem__from_empty", "tests/test_descriptors.py::TestEntityList::test_clear" ]
[]
[ "tests/test_descriptors.py::TestStringDescriptor::test__get__", "tests/test_descriptors.py::TestStringDescriptor::test__set__", "tests/test_descriptors.py::TestStringDescriptor::test_create", "tests/test_descriptors.py::TestIntegerDescriptor::test__get__", "tests/test_descriptors.py::TestIntegerDescriptor::test__set__", "tests/test_descriptors.py::TestIntegerDescriptor::test_create", "tests/test_descriptors.py::TestBooleanDescriptor::test__get__", "tests/test_descriptors.py::TestBooleanDescriptor::test__set__", "tests/test_descriptors.py::TestBooleanDescriptor::test_create", "tests/test_descriptors.py::TestEntityDescriptor::test__get__", "tests/test_descriptors.py::TestEntityDescriptor::test__set__", "tests/test_descriptors.py::TestEntityDescriptor::test_create", "tests/test_descriptors.py::TestEntityListDescriptor::test__get__", "tests/test_descriptors.py::TestStringAttributeDescriptor::test__get__", "tests/test_descriptors.py::TestStringAttributeDescriptor::test__set__", "tests/test_descriptors.py::TestStringAttributeDescriptor::test_create", "tests/test_descriptors.py::TestStringListDescriptor::test__get__", "tests/test_descriptors.py::TestStringListDescriptor::test__set__", "tests/test_descriptors.py::TestStringDictionaryDescriptor::test__get__", "tests/test_descriptors.py::TestStringDictionaryDescriptor::test__set__", "tests/test_descriptors.py::TestUdfDictionary::test___delitem__", "tests/test_descriptors.py::TestUdfDictionary::test___getitem__", "tests/test_descriptors.py::TestUdfDictionary::test___iter__", "tests/test_descriptors.py::TestUdfDictionary::test___setitem__", "tests/test_descriptors.py::TestUdfDictionary::test___setitem__new", "tests/test_descriptors.py::TestUdfDictionary::test___setitem__unicode", "tests/test_descriptors.py::TestUdfDictionary::test_clear", "tests/test_descriptors.py::TestUdfDictionary::test_create", "tests/test_descriptors.py::TestUdfDictionary::test_create_with_nesting", "tests/test_descriptors.py::TestUdfDictionary::test_items", "tests/test_descriptors.py::TestPlacementDictionary::test___delitem__", "tests/test_descriptors.py::TestPlacementDictionary::test___getitem__", "tests/test_descriptors.py::TestPlacementDictionary::test___setitem__", "tests/test_descriptors.py::TestPlacementDictionary::test___setitem__2", "tests/test_descriptors.py::TestSubTagDictionary::test___getitem__", "tests/test_descriptors.py::TestXmlElementAttributeDict::test___getitem__", "tests/test_descriptors.py::TestXmlElementAttributeDict::test___setitem__", "tests/test_descriptors.py::TestXmlElementAttributeDict::test__len__", "tests/test_descriptors.py::TestXmlPooledInputDict::test___getitem__", "tests/test_descriptors.py::TestXmlPooledInputDict::test___setitem__", "tests/test_descriptors.py::TestEntityList::test__get__", "tests/test_descriptors.py::TestEntityList::test_append", "tests/test_descriptors.py::TestEntityList::test_insert", "tests/test_descriptors.py::TestEntityList::test_set", "tests/test_descriptors.py::TestEntityList::test_set_list", "tests/test_descriptors.py::TestInputOutputMapList::test___get__", "tests/test_descriptors.py::TestExternalidList::test_append", "tests/test_descriptors.py::TestExternalidList::test_get", "tests/test_descriptors.py::TestXmlAttributeList::test_append", "tests/test_descriptors.py::TestXmlAttributeList::test_get", "tests/test_descriptors.py::TestXmlAttributeList::test_insert", "tests/test_descriptors.py::TestXmlReagentLabelList::test_append", "tests/test_descriptors.py::TestXmlReagentLabelList::test_get", 
"tests/test_descriptors.py::TestXmlAction::test_parse", "tests/test_descriptors.py::TestXmlAction::test_set", "tests/test_descriptors.py::TestQueuedArtifactList::test_parse", "tests/test_descriptors.py::TestQueuedArtifactList::test_set" ]
[]
MIT License
2,079
[ "pyclarity_lims/descriptors.py" ]
[ "pyclarity_lims/descriptors.py" ]
smarkets__marge-bot-77
d5e588ce69791c75cb201d80ffafa875046c053c
2018-01-23 13:40:23
9986daf294673ad58a06c7ca19125bc20c144c96
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b9ef871..2081ca5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,5 @@
+* 0.5.1:
+  - Sleep even less between polling for MRs #75.
 * 0.5.0:
   - Added "default -> config file -> env var -> args" way to configure marge-bot #71
 * 0.4.1:
diff --git a/marge/app.py b/marge/app.py
index 32f8db5..6fdee3b 100644
--- a/marge/app.py
+++ b/marge/app.py
@@ -100,7 +100,16 @@ def _parse_config(args):
         metavar='INTERVAL[,..]',
         help='Time(s) during which no merging is to take place, e.g. "Friday 1pm - Monday 9am".\n',
     )
-    parser.add_argument(
+    merge_group = parser.add_mutually_exclusive_group(required=False)
+    merge_group.add_argument(
+        '--use-merge-strategy',
+        action='store_true',
+        help=(
+            'Use git merge instead of git rebase\n'
+            'Enable if you use a workflow based on merge-commits and not linear history\n'
+        ),
+    )
+    merge_group.add_argument(
         '--add-tested',
         action='store_true',
         help='Add "Tested: marge-bot <$MR_URL>" for the final commit on branch after it passed CI.\n',
@@ -214,6 +223,7 @@ def main(args=sys.argv[1:]):
             reapprove=options.impersonate_approvers,
             embargo=options.embargo,
             ci_timeout=options.ci_timeout,
+            use_merge_strategy=options.use_merge_strategy,
         )
     )
 
diff --git a/marge/bot.py b/marge/bot.py
index 7b32544..0c52e75 100644
--- a/marge/bot.py
+++ b/marge/bot.py
@@ -47,6 +47,8 @@ class Bot(object):
         return self._api
 
     def _run(self, repo_manager):
+        time_to_sleep_between_projects_in_secs = 1
+        min_time_to_sleep_after_iterating_all_projects_in_secs = 30
         while True:
             log.info('Finding out my current projects...')
             my_projects = Project.fetch_all_mine(self._api)
@@ -104,12 +106,15 @@ class Bot(object):
                     options=self._config.merge_opts,
                 )
                 merge_job.execute()
-                time_to_sleep_in_secs = 5
             else:
                 log.info('Nothing to merge at this point...')
-                time_to_sleep_in_secs = 30
-            log.info('Sleeping for %s seconds...', time_to_sleep_in_secs)
-            time.sleep(time_to_sleep_in_secs)
+                time.sleep(time_to_sleep_between_projects_in_secs)
+            big_sleep = max(0,
+                            min_time_to_sleep_after_iterating_all_projects_in_secs -
+                            time_to_sleep_between_projects_in_secs * len(filtered_projects))
+            log.info('Sleeping for %s seconds...', big_sleep)
+            time.sleep(big_sleep)
+
 
 
 
diff --git a/marge/git.py b/marge/git.py
index 3c3c453..fa7c68a 100644
--- a/marge/git.py
+++ b/marge/git.py
@@ -59,6 +59,17 @@ class Repo(namedtuple('Repo', 'remote_url local_path ssh_key_file timeout')):
             raise
         return self.get_commit_hash()
 
+    def merge(self, source_branch, target_branch, source_repo_url=None):
+        """Merge `target_branch` into `source_branch` and return the new HEAD commit id.
+
+        By default `source_branch` and `target_branch` are assumed to reside in the same
+        repo as `self`. However, if `source_repo_url` is passed and not `None`,
+        `source_branch` is taken from there.
+
+        Throws a `GitError` if the merge fails. Will also try to --abort it.
+        """
+        return self._fuse_branch('merge', source_branch, target_branch, source_repo_url)
+
     def rebase(self, branch, new_base, source_repo_url=None):
         """Rebase `new_base` into `branch` and return the new HEAD commit id.
 
@@ -68,7 +79,10 @@ class Repo(namedtuple('Repo', 'remote_url local_path ssh_key_file timeout')):
 
         Throws a `GitError` if the rebase fails. Will also try to --abort it.
         """
-        assert source_repo_url or branch != new_base, branch
+        return self._fuse_branch('rebase', branch, new_base, source_repo_url)
+
+    def _fuse_branch(self, strategy, branch, target_branch, source_repo_url=None):
+        assert source_repo_url or branch != target_branch, branch
         self.git('fetch', 'origin')
 
         if source_repo_url:
@@ -84,10 +98,10 @@ class Repo(namedtuple('Repo', 'remote_url local_path ssh_key_file timeout')):
         self.git('checkout', '-B', branch, 'origin/' + branch, '--')
 
         try:
-            self.git('rebase', 'origin/' + new_base)
+            self.git(strategy, 'origin/' + target_branch)
         except GitError:
-            log.warning('rebase failed, doing an --abort')
-            self.git('rebase', '--abort')
+            log.warning(strategy + ' failed, doing an --abort')
+            self.git(strategy, '--abort')
             raise
 
         return self.get_commit_hash()
diff --git a/marge/job.py b/marge/job.py
index 85840e2..82d80f2 100644
--- a/marge/job.py
+++ b/marge/job.py
@@ -52,7 +52,7 @@ class MergeJob(object):
                 return
 
             approvals = merge_request.fetch_approvals()
-            self.rebase_and_accept(approvals)
+            self.update_merge_request_and_accept(approvals)
             log.info('Successfully merged !%s.', merge_request.info['iid'])
         except CannotMerge as err:
             message = "I couldn't merge this branch: %s" % err.reason
@@ -69,12 +69,12 @@
             self.unassign_from_mr(merge_request)
             raise
 
-    def rebase_and_accept(self, approvals):
+    def update_merge_request_and_accept(self, approvals):
         api = self._api
         merge_request = self._merge_request
-        rebased_into_up_to_date_target_branch = False
+        updated_into_up_to_date_target_branch = False
 
-        while not rebased_into_up_to_date_target_branch:
+        while not updated_into_up_to_date_target_branch:
             if merge_request.work_in_progress:
                 raise CannotMerge("Sorry, I can't merge requests marked as Work-In-Progress!")
             if merge_request.squash and self.opts.requests_commit_tagging:
@@ -106,8 +106,8 @@
                 else None
             )
             source_repo_url = None if source_project is self._project else source_project.ssh_url_to_repo
-            # NB. this will be a no-op if there is nothing to rebase/rewrite
-            target_sha, _rebased_sha, actual_sha = push_rebased_and_rewritten_version(
+            # NB. this will be a no-op if there is nothing to update/rewrite
+            target_sha, _updated_sha, actual_sha = update_from_target_branch_and_push(
                 repo=self.repo,
                 source_branch=merge_request.source_branch,
                 target_branch=merge_request.target_branch,
@@ -115,6 +115,7 @@
                 reviewers=reviewers,
                 tested_by=tested_by,
                 part_of=part_of,
+                use_merge_strategy=self.opts.use_merge_strategy,
             )
             log.info('Commit id to merge %r (into: %r)', actual_sha, target_sha)
             time.sleep(5)
@@ -144,7 +145,7 @@
                 merge_request.accept(remove_branch=True, sha=actual_sha)
             except gitlab.NotAcceptable as err:
                 new_target_sha = Commit.last_on_branch(self._project.id, merge_request.target_branch, api).id
-                # target_branch has moved under us since we rebased, just try again
+                # target_branch has moved under us since we updated, just try again
                 if new_target_sha != target_sha:
                     log.info('Someone was naughty and by-passed marge')
                     merge_request.comment(
@@ -166,7 +167,7 @@
                     # someone must have hit "merge when build succeeds" and we lost the race,
                     # the branch is gone and we got a 404. Anyway, our job here is done.
                     # (see #33)
-                    rebased_into_up_to_date_target_branch = True
+                    updated_into_up_to_date_target_branch = True
                 else:
                     log.warning('For the record, merge request state is %r', merge_request.state)
                     raise
@@ -188,7 +189,7 @@
                     # We are not covering any observed behaviour here, but if at this
                     # point the request is merged, our job is done, so no need to complain
                     log.info('Merge request is already merged, someone was faster!')
-                    rebased_into_up_to_date_target_branch = True
+                    updated_into_up_to_date_target_branch = True
                 else:
                     raise CannotMerge("Gitlab refused to merge this request and I don't know why!")
             except gitlab.ApiError:
@@ -196,7 +197,7 @@
                 raise CannotMerge('had some issue with gitlab, check my logs...')
             else:
                 self.wait_for_branch_to_be_merged()
-                rebased_into_up_to_date_target_branch = True
+                updated_into_up_to_date_target_branch = True
 
     def wait_for_ci_to_pass(self, source_project_id, commit_sha):
         api = self._api
@@ -253,7 +254,8 @@
         now = datetime.utcnow()
         return self.opts.embargo.covers(now)
 
-def push_rebased_and_rewritten_version(
+
+def update_from_target_branch_and_push(
     *,
     repo,
     source_branch,
@@ -262,15 +264,17 @@
     source_repo_url=None,
     reviewers=None,
     tested_by=None,
    part_of=None,
+    use_merge_strategy=False,
 ):
-    """Rebase `target_branch` into `source_branch`, optionally add trailers and push.
+    """Updates `target_branch` with commits from `source_branch`, optionally add trailers and push.
+    The update strategy can either be rebase or merge. The default is rebase.
 
     Parameters
     ----------
     source_branch
-       The branch we want to rebase to.
+       The branch we want to update.
     target_branch
-       The branch we want to rebase from.
+       The branch we want to get updates from.
     source_repo_url
        The url of the repo we want to push the changes to (or `None` if it's the same repo for both `source_branch` and `target_branch`).
@@ -282,25 +286,28 @@
     reviewers
        A list like ``["User Name <user@email.xx>", ...]`` or `None`.
     tested_by
        A list like ``["User Name <user@email.xx>", ...]`` or `None`. ``None`` means existing
        Tested-by lines will be left alone, otherwise they will be replaced.
+       Ignored if using the merge strategy.
     part_of
        A string with likely a link to the merge request this commit is part-of, or ``None``.
-
+    use_merge_strategy
+       Updates `target_branch` using merge instead of rebase.
 
     Returns
    -------
-    (sha_of_target_branch, sha_after_rebase, sha_after_rewrite)
+    (sha_of_target_branch, sha_after_update, sha_after_rewrite)
     """
     assert source_repo_url != repo.remote_url
     if source_repo_url is None and source_branch == target_branch:
         raise CannotMerge('source and target branch seem to coincide!')
-    branch_rebased = branch_rewritten = changes_pushed = False
+    branch_updated = branch_rewritten = changes_pushed = False
     try:
-        rewritten_sha = rebased_sha = repo.rebase(
+        fuse = repo.merge if use_merge_strategy else repo.rebase
+        rewritten_sha = updated_sha = fuse(
             branch=source_branch, new_base=target_branch, source_repo_url=source_repo_url
         )
-        branch_rebased = True
+        branch_updated = True
         if reviewers is not None:
             rewritten_sha = repo.tag_with_trailer(
                 trailer_name='Reviewed-by',
@@ -308,7 +315,7 @@
                 branch=source_branch,
                 start_commit='origin/' + target_branch,
             )
-        if tested_by is not None:
+        if tested_by is not None and not use_merge_strategy:
             rewritten_sha = repo.tag_with_trailer(
                 trailer_name='Tested-by',
                 trailer_values=tested_by,
@@ -326,17 +333,20 @@
 
         repo.push_force(source_branch, source_repo_url)
         changes_pushed = True
     except git.GitError:
-        if not branch_rebased:
+        if not branch_updated:
             raise CannotMerge('got conflicts while rebasing, your problem now...')
         if not branch_rewritten:
             raise CannotMerge('failed on filter-branch; check my logs!')
         if not changes_pushed:
-            raise CannotMerge('failed to push rebased changes, check my logs!')
+            if use_merge_strategy:
+                raise CannotMerge('failed to push merged changes, check my logs!')
+            else:
+                raise CannotMerge('failed to push rebased changes, check my logs!')
         raise
     else:
         target_sha = repo.get_commit_hash('origin/' + target_branch)
-        return target_sha, rebased_sha, rewritten_sha
+        return target_sha, updated_sha, rewritten_sha
     finally:
         # A failure to clean up probably means something is fucked with the git repo
         # and likely explains any previous failure, so it will better to just
@@ -346,6 +356,7 @@
         else:
             assert source_repo_url is not None
 
+
 def _get_reviewer_names_and_emails(approvals, api):
     """Return a list ['A. Prover <a.prover@example.com', ...]` for `merge_request.`"""
 
@@ -360,6 +371,7 @@
     'reapprove',
     'embargo',
     'ci_timeout',
+    'use_merge_strategy',
 ]
 
 class MergeJobOptions(namedtuple('MergeJobOptions', _job_options)):
@@ -373,7 +385,7 @@
     def default(
             cls, *,
             add_tested=False, add_part_of=False, add_reviewers=False, reapprove=False,
-            embargo=None, ci_timeout=None,
+            embargo=None, ci_timeout=None, use_merge_strategy=False
     ):
         embargo = embargo or IntervalUnion.empty()
         ci_timeout = ci_timeout or timedelta(minutes=15)
@@ -384,6 +396,7 @@
             reapprove=reapprove,
             embargo=embargo,
             ci_timeout=ci_timeout,
+            use_merge_strategy=use_merge_strategy,
         )
 
diff --git a/version b/version
index 8f0916f..4b9fcbe 100644
--- a/version
+++ b/version
@@ -1,1 +1,1 @@
-0.5.0
+0.5.1
Merge instead of rebase

Would it be possible to add an option to use a git merge instead of a git rebase?
smarkets/marge-bot
diff --git a/tests/test_app.py b/tests/test_app.py
index d47b36e..d700fc7 100644
--- a/tests/test_app.py
+++ b/tests/test_app.py
@@ -101,12 +101,24 @@ def test_embargo():
             embargo=interval.IntervalUnion.from_human('Fri 1pm-Mon 7am'),
         )
 
+def test_use_merge_strategy():
+    with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'):
+        with main('--use-merge-strategy') as bot:
+            assert bot.config.merge_opts != job.MergeJobOptions.default()
+            assert bot.config.merge_opts == job.MergeJobOptions.default(use_merge_strategy=True)
+
 def test_add_tested():
     with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'):
         with main('--add-tested') as bot:
             assert bot.config.merge_opts != job.MergeJobOptions.default()
             assert bot.config.merge_opts == job.MergeJobOptions.default(add_tested=True)
 
+def test_use_merge_strategy_and_add_tested_are_mutualy_exclusive():
+    with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'):
+        with pytest.raises(SystemExit):
+            with main('--use-merge-strategy --add-tested') as bot:
+                pass
+
 def test_add_part_of():
     with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'):
         with main('--add-part-of') as bot:
diff --git a/tests/test_git.py b/tests/test_git.py
index d25a773..4569d03 100644
--- a/tests/test_git.py
+++ b/tests/test_git.py
@@ -44,6 +44,16 @@ class TestRepo(object):
             'git -C /tmp/local/path rev-parse HEAD'
         ]
 
+    def test_merge_success(self, mocked_run):
+        self.repo.merge('feature_branch', 'master_of_the_universe')
+
+        assert get_calls(mocked_run) == [
+            'git -C /tmp/local/path fetch origin',
+            'git -C /tmp/local/path checkout -B feature_branch origin/feature_branch --',
+            'git -C /tmp/local/path merge origin/master_of_the_universe',
+            'git -C /tmp/local/path rev-parse HEAD'
+        ]
+
     def test_reviewer_tagging_success(self, mocked_run):
         self.repo.tag_with_trailer(
             trailer_name='Reviewed-by',
@@ -89,6 +99,12 @@
 
         assert get_calls(mocked_run) == []
 
+    def test_merge_same_branch(self, mocked_run):
+        with pytest.raises(AssertionError):
+            self.repo.merge('branch', 'branch')
+
+        assert get_calls(mocked_run) == []
+
     def test_remove_branch(self, mocked_run):
         self.repo.remove_branch('some_branch')
         assert get_calls(mocked_run) == [
diff --git a/tests/test_job.py b/tests/test_job.py
index 1cb5868..3e7b790 100644
--- a/tests/test_job.py
+++ b/tests/test_job.py
@@ -134,10 +134,10 @@ class MockLab(object):
             "I'm broken on the inside, please somebody fix me... :cry:"
         )
 
-    def push_rebased(self, *args, **kwargs):
+    def push_updated(self, *args, **kwargs):
         self.api.state = 'pushed'
-        rebased_sha = 'deadbeef'
-        return self.initial_master_sha, rebased_sha, self.rewritten_sha
+        updated_sha = 'deadbeef'
+        return self.initial_master_sha, updated_sha, self.rewritten_sha
 
     @contextlib.contextmanager
     def expected_failure(self, message):
@@ -159,7 +159,7 @@
         assert error_note in self.api.notes
 
 @patch('time.sleep')
-class TestRebaseAndAccept(object):
+class TestUpdateAndAccept(object):
     def setup_method(self, _method):
         self.mocklab = MockLab()
         self.api = self.mocklab.api
@@ -184,7 +184,7 @@
 
     def test_succeeds_first_time(self, time_sleep):
         api, mocklab = self.api, self.mocklab
-        with patch('marge.job.push_rebased_and_rewritten_version', side_effect=mocklab.push_rebased):
+        with patch('marge.job.update_from_target_branch_and_push', side_effect=mocklab.push_updated):
             job = self.make_job(marge.job.MergeJobOptions.default(add_tested=True, add_reviewers=False))
             job.execute()
 
@@ -199,7 +199,7 @@
             Ok({'commit': _commit(id=new_branch_head_sha, status='success')}),
             from_state='pushed', to_state='pushed_but_head_changed'
         )
-        with patch('marge.job.push_rebased_and_rewritten_version', side_effect=mocklab.push_rebased):
+        with patch('marge.job.update_from_target_branch_and_push', side_effect=mocklab.push_updated):
             with mocklab.expected_failure("Someone pushed to branch while we were trying to merge"):
                 job = self.make_job(marge.job.MergeJobOptions.default(add_tested=True, add_reviewers=False))
                 job.execute()
@@ -244,7 +244,7 @@
             api.state = 'pushed'
             yield moved_master_sha, 'deadbeef', mocklab.rewritten_sha
 
-        with patch('marge.job.push_rebased_and_rewritten_version', side_effect=push_effects()):
+        with patch('marge.job.update_from_target_branch_and_push', side_effect=push_effects()):
             job = self.make_job(marge.job.MergeJobOptions.default(add_tested=True, add_reviewers=False))
             job.execute()
 
@@ -266,7 +266,7 @@
             dict(mocklab.merge_request_info, state='merged'),
             from_state='someone_else_merged',
         )
-        with patch('marge.job.push_rebased_and_rewritten_version', side_effect=mocklab.push_rebased):
+        with patch('marge.job.update_from_target_branch_and_push', side_effect=mocklab.push_updated):
             job = self.make_job()
             job.execute()
         assert api.state == 'someone_else_merged'
@@ -288,7 +288,7 @@
             from_state='now_is_wip',
         )
         message = 'The request was marked as WIP as I was processing it (maybe a WIP commit?)'
-        with patch('marge.job.push_rebased_and_rewritten_version', side_effect=mocklab.push_rebased):
+        with patch('marge.job.update_from_target_branch_and_push', side_effect=mocklab.push_updated):
             with mocklab.expected_failure(message):
                 job = self.make_job()
                 job.execute()
@@ -314,7 +314,7 @@
             'GitLab refused to merge this branch. I suspect that a Push Rule or a git-hook '
             'is rejecting my commits; maybe my email needs to be white-listed?'
         )
-        with patch('marge.job.push_rebased_and_rewritten_version', side_effect=mocklab.push_rebased):
+        with patch('marge.job.update_from_target_branch_and_push', side_effect=mocklab.push_updated):
             with mocklab.expected_failure(message):
                 job = self.make_job()
                 job.execute()
@@ -337,7 +337,7 @@
             from_state='oops_someone_closed_it',
         )
         message = 'Someone closed the merge request while I was attempting to merge it.'
-        with patch('marge.job.push_rebased_and_rewritten_version', side_effect=mocklab.push_rebased):
+        with patch('marge.job.update_from_target_branch_and_push', side_effect=mocklab.push_updated):
             with mocklab.expected_failure(message):
                 job = self.make_job()
                 job.execute()
@@ -356,7 +356,7 @@
             from_state='passed', to_state='rejected_for_misterious_reasons',
         )
         message = "Gitlab refused to merge this request and I don't know why!"
-        with patch('marge.job.push_rebased_and_rewritten_version', side_effect=mocklab.push_rebased):
+        with patch('marge.job.update_from_target_branch_and_push', side_effect=mocklab.push_updated):
             with mocklab.expected_failure(message):
                 job = self.make_job()
                 job.execute()
@@ -392,7 +392,7 @@
 
         assert api.state == 'initial'
 
-        with patch('marge.job.push_rebased_and_rewritten_version', side_effect=mocklab.push_rebased):
+        with patch('marge.job.update_from_target_branch_and_push', side_effect=mocklab.push_updated):
             job = self.make_job()
             job.execute()
         assert api.state == 'merged'
@@ -407,6 +407,7 @@
             reapprove=False,
             embargo=marge.interval.IntervalUnion.empty(),
             ci_timeout=timedelta(minutes=15),
+            use_merge_strategy=False,
         )
 
     def test_default_ci_time(self):
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 6 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==2.11.7
attrs==22.2.0
backports.zoneinfo==0.2.1
certifi==2021.5.30
charset-normalizer==2.0.12
ConfigArgParse==1.7
coverage==6.2
dateparser==1.1.3
dill==0.3.4
humanize==3.14.0
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
isort==5.10.1
lazy-object-proxy==1.7.1
-e git+https://github.com/smarkets/marge-bot.git@d5e588ce69791c75cb201d80ffafa875046c053c#egg=marge
maya==0.6.1
mccabe==0.7.0
packaging==21.3
pendulum==2.1.2
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pylint==2.13.9
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytest-pylint==0.18.0
pytest-runner==5.3.2
python-dateutil==2.9.0.post0
pytz==2025.2
pytz-deprecation-shim==0.1.0.post0
pytzdata==2020.1
PyYAML==6.0.1
regex==2022.3.2
requests==2.27.1
six==1.17.0
snaptime==0.2.4
toml==0.10.2
tomli==1.2.3
typed-ast==1.5.5
typing_extensions==4.1.1
tzdata==2025.2
tzlocal==4.2
urllib3==1.26.20
wrapt==1.16.0
zipp==3.6.0
name: marge-bot
channels:
  - defaults
  - https://repo.anaconda.com/pkgs/main
  - https://repo.anaconda.com/pkgs/r
  - conda-forge
dependencies:
  - _libgcc_mutex=0.1=main
  - _openmp_mutex=5.1=1_gnu
  - ca-certificates=2025.2.25=h06a4308_0
  - certifi=2021.5.30=py36h06a4308_0
  - ld_impl_linux-64=2.40=h12ee557_0
  - libffi=3.3=he6710b0_2
  - libgcc-ng=11.2.0=h1234567_1
  - libgomp=11.2.0=h1234567_1
  - libstdcxx-ng=11.2.0=h1234567_1
  - ncurses=6.4=h6a678d5_0
  - openssl=1.1.1w=h7f8727e_0
  - pip=21.2.2=py36h06a4308_0
  - python=3.6.13=h12debd9_1
  - readline=8.2=h5eee18b_0
  - setuptools=58.0.4=py36h06a4308_0
  - sqlite=3.45.3=h5eee18b_0
  - tk=8.6.14=h39e8969_0
  - wheel=0.37.1=pyhd3eb1b0_0
  - xz=5.6.4=h5eee18b_1
  - zlib=1.2.13=h5eee18b_1
  - pip:
    - astroid==2.11.7
    - attrs==22.2.0
    - backports-zoneinfo==0.2.1
    - charset-normalizer==2.0.12
    - configargparse==1.7
    - coverage==6.2
    - dateparser==1.1.3
    - dill==0.3.4
    - humanize==3.14.0
    - idna==3.10
    - importlib-metadata==4.8.3
    - importlib-resources==5.4.0
    - iniconfig==1.1.1
    - isort==5.10.1
    - lazy-object-proxy==1.7.1
    - maya==0.6.1
    - mccabe==0.7.0
    - packaging==21.3
    - pendulum==2.1.2
    - platformdirs==2.4.0
    - pluggy==1.0.0
    - py==1.11.0
    - pylint==2.13.9
    - pyparsing==3.1.4
    - pytest==7.0.1
    - pytest-cov==4.0.0
    - pytest-pylint==0.18.0
    - pytest-runner==5.3.2
    - python-dateutil==2.9.0.post0
    - pytz==2025.2
    - pytz-deprecation-shim==0.1.0.post0
    - pytzdata==2020.1
    - pyyaml==6.0.1
    - regex==2022.3.2
    - requests==2.27.1
    - six==1.17.0
    - snaptime==0.2.4
    - toml==0.10.2
    - tomli==1.2.3
    - typed-ast==1.5.5
    - typing-extensions==4.1.1
    - tzdata==2025.2
    - tzlocal==4.2
    - urllib3==1.26.20
    - wrapt==1.16.0
    - zipp==3.6.0
prefix: /opt/conda/envs/marge-bot
[ "tests/test_app.py::PYLINT", "tests/test_app.py::test_use_merge_strategy", "tests/test_git.py::PYLINT", "tests/test_git.py::TestRepo::test_merge_success", "tests/test_git.py::TestRepo::test_merge_same_branch", "tests/test_job.py::TestUpdateAndAccept::test_succeeds_first_time", "tests/test_job.py::TestUpdateAndAccept::test_fails_on_not_acceptable_if_master_did_not_move", "tests/test_job.py::TestUpdateAndAccept::test_succeeds_second_time_if_master_moved", "tests/test_job.py::TestUpdateAndAccept::test_handles_races_for_merging", "tests/test_job.py::TestUpdateAndAccept::test_handles_request_becoming_wip_after_push", "tests/test_job.py::TestUpdateAndAccept::test_guesses_git_hook_error_on_merge_refusal", "tests/test_job.py::TestUpdateAndAccept::test_discovers_if_someone_closed_the_merge_request", "tests/test_job.py::TestUpdateAndAccept::test_tells_explicitly_that_gitlab_refused_to_merge", "tests/test_job.py::TestUpdateAndAccept::test_wont_merge_branches_with_autosquash_if_rewriting", "tests/test_job.py::TestMergeJobOptions::test_default" ]
[ "tests/test_app.py::test_disabled_auth_token_cli_arg", "tests/test_app.py::test_disabled_ssh_key_cli_arg", "tests/test_app.py::test_config_file" ]
[ "tests/test_app.py::test_default_values", "tests/test_app.py::test_embargo", "tests/test_app.py::test_add_tested", "tests/test_app.py::test_use_merge_strategy_and_add_tested_are_mutualy_exclusive", "tests/test_app.py::test_add_part_of", "tests/test_app.py::test_add_reviewers", "tests/test_app.py::test_impersonate_approvers", "tests/test_app.py::test_project_regexp", "tests/test_app.py::test_ci_timeout", "tests/test_app.py::test_deprecated_max_ci_time_in_minutes", "tests/test_app.py::test_git_timeout", "tests/test_app.py::test_branch_regexp", "tests/test_app.py::test_time_interval", "tests/test_app.py::test_config_overwrites", "tests/test_git.py::TestRepo::test_clone", "tests/test_git.py::TestRepo::test_config_user_info", "tests/test_git.py::TestRepo::test_rebase_success", "tests/test_git.py::TestRepo::test_reviewer_tagging_success", "tests/test_git.py::TestRepo::test_reviewer_tagging_failure", "tests/test_git.py::TestRepo::test_rebase_same_branch", "tests/test_git.py::TestRepo::test_remove_branch", "tests/test_git.py::TestRepo::test_remove_master_branch_fails", "tests/test_git.py::TestRepo::test_push_force", "tests/test_git.py::TestRepo::test_push_force_fails_on_dirty", "tests/test_git.py::TestRepo::test_push_force_fails_on_untracked", "tests/test_git.py::TestRepo::test_get_commit_hash", "tests/test_git.py::TestRepo::test_passes_ssh_key", "tests/test_git.py::test_filter", "tests/test_git.py::test_filter_fails_on_empty_commit_messages", "tests/test_git.py::test_filter_fails_on_commit_messages_that_are_empty_apart_from_trailers", "tests/test_git.py::test_filter_treats_the_first_commit_line_not_as_a_trailer_unless_it_matches_the_trailer_name_passed_in", "tests/test_job.py::PYLINT", "tests/test_job.py::TestUpdateAndAccept::test_wont_merge_wip_stuff", "tests/test_job.py::TestMergeJobOptions::test_default_ci_time" ]
[]
BSD 3-Clause "New" or "Revised" License
2,080
[ "version", "marge/job.py", "CHANGELOG.md", "marge/bot.py", "marge/app.py", "marge/git.py" ]
[ "version", "marge/job.py", "CHANGELOG.md", "marge/bot.py", "marge/app.py", "marge/git.py" ]
grappa-py__grappa-39
c81d2eeaf63788dd130ba712645c82bb1af9b752
2018-01-23 15:13:23
e62e9ee7e7f10e970de84d4782caafb4a20039a4
diff --git a/grappa/operator.py b/grappa/operator.py
index 2976373..46c67d0 100644
--- a/grappa/operator.py
+++ b/grappa/operator.py
@@ -208,7 +208,7 @@ class Operator(object):
         return self.run_matcher(*args, **kw)
 
     def __enter__(self):
-        raise NotImplementedError('operator cannot be used as "with" statement')  # noqa
+        raise NotImplementedError('operator cannot be used like "with" statement')  # noqa
 
     def __exit__(self, etype, value, traceback):
         self.__enter__()
diff --git a/grappa/operators/raises.py b/grappa/operators/raises.py
index 48dcea9..763f2a5 100644
--- a/grappa/operators/raises.py
+++ b/grappa/operators/raises.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-import inspect
 
 from ..operator import Operator
 
@@ -57,7 +56,7 @@ class RaisesOperator(Operator):
         )
 
     def match(self, fn, *errors):
-        if not any([inspect.isfunction(fn) or inspect.ismethod(fn)]):
+        if not callable(fn):
             return False, ['subject must be a function or method']
 
         try:
Assertion Errors

Love the library. It is super useful! I am trying to figure out how to see if a function raises an exception when called with a certain set of parameters. Here is an example (a bit contrived as I am trying to raise my own Exception based on valid inputs).

```python
t = [0,1,2]
t.index(3) | should.raise_error(ValueError)
```

Right now the should.raise_error expects a function to be passed into it. Eg.

```python
t.index | should.raise_error(ValueError)
```

Right now I have to compose the test this way.

```python
err = ""
try:
    self.index(3)
except Exception as err:
    error = err
error | should.be.an.instance.of(ValueError)
```

Am I missing Something?
grappa-py/grappa
diff --git a/tests/operators/raises_test.py b/tests/operators/raises_test.py
index 33f4277..a3acdfa 100644
--- a/tests/operators/raises_test.py
+++ b/tests/operators/raises_test.py
@@ -1,13 +1,21 @@
 import pytest
+from functools import partial
 
 
 def test_raises(should):
     def error():
         raise AssertionError('foo')
 
+    def error_with_params(foo_param):
+        raise AssertionError(foo_param)
+
     error | should.raise_error(AssertionError)
     error | should.do_not.raise_error(NotImplementedError)
 
+    partial(error_with_params, "Foobar") | should.raise_error(AssertionError)
+    partial(error_with_params, "Foobar") | should.to_not\
+        .raise_error(NotImplementedError)
+
     with pytest.raises(AssertionError):
         error | should.raise_error(NotImplementedError)
 
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0
certifi==2021.5.30
colorama==0.3.9
-e git+https://github.com/grappa-py/grappa.git@c81d2eeaf63788dd130ba712645c82bb1af9b752#egg=grappa
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
name: grappa
channels:
  - defaults
  - https://repo.anaconda.com/pkgs/main
  - https://repo.anaconda.com/pkgs/r
  - conda-forge
dependencies:
  - _libgcc_mutex=0.1=main
  - _openmp_mutex=5.1=1_gnu
  - ca-certificates=2025.2.25=h06a4308_0
  - certifi=2021.5.30=py36h06a4308_0
  - ld_impl_linux-64=2.40=h12ee557_0
  - libffi=3.3=he6710b0_2
  - libgcc-ng=11.2.0=h1234567_1
  - libgomp=11.2.0=h1234567_1
  - libstdcxx-ng=11.2.0=h1234567_1
  - ncurses=6.4=h6a678d5_0
  - openssl=1.1.1w=h7f8727e_0
  - pip=21.2.2=py36h06a4308_0
  - python=3.6.13=h12debd9_1
  - readline=8.2=h5eee18b_0
  - setuptools=58.0.4=py36h06a4308_0
  - sqlite=3.45.3=h5eee18b_0
  - tk=8.6.14=h39e8969_0
  - wheel=0.37.1=pyhd3eb1b0_0
  - xz=5.6.4=h5eee18b_1
  - zlib=1.2.13=h5eee18b_1
  - pip:
    - attrs==22.2.0
    - colorama==0.3.9
    - importlib-metadata==4.8.3
    - iniconfig==1.1.1
    - packaging==21.3
    - pluggy==1.0.0
    - py==1.11.0
    - pyparsing==3.1.4
    - pytest==7.0.1
    - tomli==1.2.3
    - typing-extensions==4.1.1
    - zipp==3.6.0
prefix: /opt/conda/envs/grappa
[ "tests/operators/raises_test.py::test_raises" ]
[]
[]
[]
MIT License
2,081
[ "grappa/operators/raises.py", "grappa/operator.py" ]
[ "grappa/operators/raises.py", "grappa/operator.py" ]
ska-sa__fakenewsredis-8
0472a6e928d9502f7549b6c829dd86570ec00c0e
2018-01-24 14:07:05
0472a6e928d9502f7549b6c829dd86570ec00c0e
diff --git a/fakenewsredis.py b/fakenewsredis.py
index ce1e1ed..a104150 100644
--- a/fakenewsredis.py
+++ b/fakenewsredis.py
@@ -510,26 +510,25 @@ class FakeStrictRedis(object):
     def set(self, name, value, ex=None, px=None, nx=False, xx=False):
         if (not nx and not xx) or (nx and self._db.get(name, None) is None) \
                 or (xx and not self._db.get(name, None) is None):
-            self._db[name] = to_bytes(value)
             if ex is not None:
                 if isinstance(ex, timedelta):
                     ex = ex.seconds + ex.days * 24 * 3600
-                if ex < 0:
+                if ex <= 0:
                     raise ResponseError('invalid expire time in SETEX')
-                if ex > 0:
-                    self._db.expire(name, datetime.now() +
-                                    timedelta(seconds=ex))
+                self._db[name] = to_bytes(value)
+                self._db.expire(name, datetime.now() +
+                                timedelta(seconds=ex))
             elif px is not None:
                 if isinstance(px, timedelta):
                     ms = int(px.microseconds / 1000)
                     px = (px.seconds + px.days * 24 * 3600) * 1000 + ms
-                if px < 0:
+                if px <= 0:
                     raise ResponseError('invalid expire time in SETEX')
-                if px > 0:
-                    self._db.expire(name, datetime.now() +
-                                    timedelta(milliseconds=px))
+                self._db[name] = to_bytes(value)
+                self._db.expire(name, datetime.now() +
+                                timedelta(milliseconds=px))
             else:
-                self._db.persist(name)
+                self._db[name] = to_bytes(value)
             return True
         else:
             return None
diff --git a/requirements.txt b/requirements.txt
index df48512..12cc5c9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
 # Flake8 3.0.0 does not support Python 2.6.
 flake8<3.0.0
 nose==1.3.4
-redis==2.10.5
+redis==2.10.6
Update to match redis-py 2.10.6

There are a few unit tests that fail when run against the latest redis-py (2.10.6):

```
======================================================================
ERROR: test_set_ex_should_expire_value (test_fakenewsredis.TestRealRedis)
----------------------------------------------------------------------
Traceback (most recent call last):
  File "/home/bmerry/work/sdp/git/fakenewsredis/test_fakenewsredis.py", line 3145, in test_set_ex_should_expire_value
    self.redis.set('foo', 'bar', ex=0)
  File "/home/bmerry/work/sdp/env/local/lib/python2.7/site-packages/redis/client.py", line 1171, in set
    return self.execute_command('SET', *pieces)
  File "/home/bmerry/work/sdp/env/local/lib/python2.7/site-packages/redis/client.py", line 668, in execute_command
    return self.parse_response(connection, command_name, **options)
  File "/home/bmerry/work/sdp/env/local/lib/python2.7/site-packages/redis/client.py", line 680, in parse_response
    response = connection.read_response()
  File "/home/bmerry/work/sdp/env/local/lib/python2.7/site-packages/redis/connection.py", line 629, in read_response
    raise response
ResponseError: invalid expire time in set

======================================================================
ERROR: test_set_ex_should_expire_value (test_fakenewsredis.TestRealRedisDecodeResponses)
----------------------------------------------------------------------
Traceback (most recent call last):
  File "/home/bmerry/work/sdp/git/fakenewsredis/test_fakenewsredis.py", line 3145, in test_set_ex_should_expire_value
    self.redis.set('foo', 'bar', ex=0)
  File "/home/bmerry/work/sdp/env/local/lib/python2.7/site-packages/redis/client.py", line 1171, in set
    return self.execute_command('SET', *pieces)
  File "/home/bmerry/work/sdp/env/local/lib/python2.7/site-packages/redis/client.py", line 668, in execute_command
    return self.parse_response(connection, command_name, **options)
  File "/home/bmerry/work/sdp/env/local/lib/python2.7/site-packages/redis/client.py", line 680, in parse_response
    response = connection.read_response()
  File "/home/bmerry/work/sdp/env/local/lib/python2.7/site-packages/redis/connection.py", line 629, in read_response
    raise response
ResponseError: invalid expire time in set
```

This is due to a change in how a zero expiry time is interpreted.
ska-sa/fakenewsredis
diff --git a/test_fakenewsredis.py b/test_fakenewsredis.py
index 183591e..403d885 100644
--- a/test_fakenewsredis.py
+++ b/test_fakenewsredis.py
@@ -501,26 +501,44 @@ class TestFakeStrictRedis(unittest.TestCase):
     def test_set_raises_wrong_ex(self):
         with self.assertRaises(ResponseError):
             self.redis.set('foo', 'bar', ex=-100)
+        with self.assertRaises(ResponseError):
+            self.redis.set('foo', 'bar', ex=0)
+        self.assertFalse(self.redis.exists('foo'))
 
     def test_set_using_timedelta_raises_wrong_ex(self):
         with self.assertRaises(ResponseError):
             self.redis.set('foo', 'bar', ex=timedelta(seconds=-100))
+        with self.assertRaises(ResponseError):
+            self.redis.set('foo', 'bar', ex=timedelta(seconds=0))
+        self.assertFalse(self.redis.exists('foo'))
 
     def test_set_raises_wrong_px(self):
         with self.assertRaises(ResponseError):
             self.redis.set('foo', 'bar', px=-100)
+        with self.assertRaises(ResponseError):
+            self.redis.set('foo', 'bar', px=0)
+        self.assertFalse(self.redis.exists('foo'))
 
     def test_set_using_timedelta_raises_wrong_px(self):
         with self.assertRaises(ResponseError):
             self.redis.set('foo', 'bar', px=timedelta(milliseconds=-100))
+        with self.assertRaises(ResponseError):
+            self.redis.set('foo', 'bar', px=timedelta(milliseconds=0))
+        self.assertFalse(self.redis.exists('foo'))
 
     def test_setex_raises_wrong_ex(self):
         with self.assertRaises(ResponseError):
             self.redis.setex('foo', -100, 'bar')
+        with self.assertRaises(ResponseError):
+            self.redis.setex('foo', 0, 'bar')
+        self.assertFalse(self.redis.exists('foo'))
 
     def test_setex_using_timedelta_raises_wrong_ex(self):
         with self.assertRaises(ResponseError):
             self.redis.setex('foo', timedelta(seconds=-100), 'bar')
+        with self.assertRaises(ResponseError):
+            self.redis.setex('foo', timedelta(seconds=-100), 'bar')
+        self.assertFalse(self.redis.exists('foo'))
 
     def test_setnx(self):
         self.assertEqual(self.redis.setnx('foo', 'bar'), True)
@@ -3142,7 +3160,7 @@ class TestFakeRedis(unittest.TestCase):
 
     @attr('slow')
     def test_set_ex_should_expire_value(self):
-        self.redis.set('foo', 'bar', ex=0)
+        self.redis.set('foo', 'bar')
         self.assertEqual(self.redis.get('foo'), b'bar')
         self.redis.set('foo', 'bar', ex=1)
         sleep(2)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 2 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup==1.2.2
-e git+https://github.com/ska-sa/fakenewsredis.git@0472a6e928d9502f7549b6c829dd86570ec00c0e#egg=fakenewsredis
flake8==2.6.2
iniconfig==2.1.0
mccabe==0.5.3
nose==1.3.4
packaging==24.2
pluggy==1.5.0
pycodestyle==2.0.0
pyflakes==1.2.3
pytest==8.3.5
redis==2.10.5
tomli==2.2.1
name: fakenewsredis
channels:
  - defaults
  - https://repo.anaconda.com/pkgs/main
  - https://repo.anaconda.com/pkgs/r
  - conda-forge
dependencies:
  - _libgcc_mutex=0.1=main
  - _openmp_mutex=5.1=1_gnu
  - ca-certificates=2025.2.25=h06a4308_0
  - ld_impl_linux-64=2.40=h12ee557_0
  - libffi=3.4.4=h6a678d5_1
  - libgcc-ng=11.2.0=h1234567_1
  - libgomp=11.2.0=h1234567_1
  - libstdcxx-ng=11.2.0=h1234567_1
  - ncurses=6.4=h6a678d5_0
  - openssl=3.0.16=h5eee18b_0
  - pip=25.0=py39h06a4308_0
  - python=3.9.21=he870216_1
  - readline=8.2=h5eee18b_0
  - setuptools=75.8.0=py39h06a4308_0
  - sqlite=3.45.3=h5eee18b_0
  - tk=8.6.14=h39e8969_0
  - tzdata=2025a=h04d1e81_0
  - wheel=0.45.1=py39h06a4308_0
  - xz=5.6.4=h5eee18b_1
  - zlib=1.2.13=h5eee18b_1
  - pip:
    - exceptiongroup==1.2.2
    - flake8==2.6.2
    - iniconfig==2.1.0
    - mccabe==0.5.3
    - nose==1.3.4
    - packaging==24.2
    - pluggy==1.5.0
    - pycodestyle==2.0.0
    - pyflakes==1.2.3
    - pytest==8.3.5
    - redis==2.10.5
    - tomli==2.2.1
prefix: /opt/conda/envs/fakenewsredis
[ "test_fakenewsredis.py::TestFakeStrictRedis::test_set_raises_wrong_ex", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_raises_wrong_px", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_using_timedelta_raises_wrong_ex", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_using_timedelta_raises_wrong_px", "test_fakenewsredis.py::TestFakeStrictRedis::test_setex_raises_wrong_ex", "test_fakenewsredis.py::TestFakeStrictRedis::test_setex_using_timedelta_raises_wrong_ex", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_raises_wrong_ex", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_raises_wrong_px", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_using_timedelta_raises_wrong_ex", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_using_timedelta_raises_wrong_px", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setex_raises_wrong_ex", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setex_using_timedelta_raises_wrong_ex" ]
[]
[ "test_fakenewsredis.py::TestFakeStrictRedis::test_append", "test_fakenewsredis.py::TestFakeStrictRedis::test_append_with_no_preexisting_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_append_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_basic_sort", "test_fakenewsredis.py::TestFakeStrictRedis::test_bitcount", "test_fakenewsredis.py::TestFakeStrictRedis::test_bitcount_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_blocking_operations_when_empty", "test_fakenewsredis.py::TestFakeStrictRedis::test_blpop_allow_single_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_blpop_single_list", "test_fakenewsredis.py::TestFakeStrictRedis::test_blpop_test_multiple_lists", "test_fakenewsredis.py::TestFakeStrictRedis::test_blpop_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_brpop_single_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_brpop_test_multiple_lists", "test_fakenewsredis.py::TestFakeStrictRedis::test_brpop_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_brpoplpush_multi_keys", "test_fakenewsredis.py::TestFakeStrictRedis::test_brpoplpush_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_contains", "test_fakenewsredis.py::TestFakeStrictRedis::test_decr", "test_fakenewsredis.py::TestFakeStrictRedis::test_decr_badtype", "test_fakenewsredis.py::TestFakeStrictRedis::test_decr_expiry", "test_fakenewsredis.py::TestFakeStrictRedis::test_decr_newkey", "test_fakenewsredis.py::TestFakeStrictRedis::test_delete", "test_fakenewsredis.py::TestFakeStrictRedis::test_delete_expire", "test_fakenewsredis.py::TestFakeStrictRedis::test_delete_multiple", "test_fakenewsredis.py::TestFakeStrictRedis::test_delete_nonexistent_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_echo", "test_fakenewsredis.py::TestFakeStrictRedis::test_empty_hash", "test_fakenewsredis.py::TestFakeStrictRedis::test_empty_list", "test_fakenewsredis.py::TestFakeStrictRedis::test_empty_set", "test_fakenewsredis.py::TestFakeStrictRedis::test_empty_sort", "test_fakenewsredis.py::TestFakeStrictRedis::test_empty_zset", "test_fakenewsredis.py::TestFakeStrictRedis::test_exists", "test_fakenewsredis.py::TestFakeStrictRedis::test_flushdb", "test_fakenewsredis.py::TestFakeStrictRedis::test_foo", "test_fakenewsredis.py::TestFakeStrictRedis::test_get_does_not_exist", "test_fakenewsredis.py::TestFakeStrictRedis::test_get_invalid_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_get_with_non_str_keys", "test_fakenewsredis.py::TestFakeStrictRedis::test_getbit", "test_fakenewsredis.py::TestFakeStrictRedis::test_getbit_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_getset_exists", "test_fakenewsredis.py::TestFakeStrictRedis::test_getset_not_exist", "test_fakenewsredis.py::TestFakeStrictRedis::test_getset_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_hdel", "test_fakenewsredis.py::TestFakeStrictRedis::test_hdel_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_hexists", "test_fakenewsredis.py::TestFakeStrictRedis::test_hexists_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_hgetall", "test_fakenewsredis.py::TestFakeStrictRedis::test_hgetall_empty_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_hgetall_with_tuples", "test_fakenewsredis.py::TestFakeStrictRedis::test_hgetall_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_hincrby", "test_fakenewsredis.py::TestFakeStrictRedis::test_hincrby_with_no_starting_value", 
"test_fakenewsredis.py::TestFakeStrictRedis::test_hincrby_with_range_param", "test_fakenewsredis.py::TestFakeStrictRedis::test_hincrby_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_hincrbyfloat", "test_fakenewsredis.py::TestFakeStrictRedis::test_hincrbyfloat_on_non_float_value_raises_error", "test_fakenewsredis.py::TestFakeStrictRedis::test_hincrbyfloat_precision", "test_fakenewsredis.py::TestFakeStrictRedis::test_hincrbyfloat_with_no_starting_value", "test_fakenewsredis.py::TestFakeStrictRedis::test_hincrbyfloat_with_non_float_amount_raises_error", "test_fakenewsredis.py::TestFakeStrictRedis::test_hincrbyfloat_with_range_param", "test_fakenewsredis.py::TestFakeStrictRedis::test_hincrbyfloat_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_hkeys", "test_fakenewsredis.py::TestFakeStrictRedis::test_hkeys_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_hlen", "test_fakenewsredis.py::TestFakeStrictRedis::test_hlen_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_hmget", "test_fakenewsredis.py::TestFakeStrictRedis::test_hmget_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_hmset_convert_values", "test_fakenewsredis.py::TestFakeStrictRedis::test_hmset_does_not_mutate_input_params", "test_fakenewsredis.py::TestFakeStrictRedis::test_hmset_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_hmsetset", "test_fakenewsredis.py::TestFakeStrictRedis::test_hmsetset_empty_raises_error", "test_fakenewsredis.py::TestFakeStrictRedis::test_hscan", "test_fakenewsredis.py::TestFakeStrictRedis::test_hset_then_hget", "test_fakenewsredis.py::TestFakeStrictRedis::test_hset_update", "test_fakenewsredis.py::TestFakeStrictRedis::test_hset_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_hsetnx", "test_fakenewsredis.py::TestFakeStrictRedis::test_hvals", "test_fakenewsredis.py::TestFakeStrictRedis::test_hvals_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_incr_bad_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_incr_by", "test_fakenewsredis.py::TestFakeStrictRedis::test_incr_expiry", "test_fakenewsredis.py::TestFakeStrictRedis::test_incr_followed_by_mget", "test_fakenewsredis.py::TestFakeStrictRedis::test_incr_followed_by_mget_returns_strings", "test_fakenewsredis.py::TestFakeStrictRedis::test_incr_preexisting_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_incr_with_float", "test_fakenewsredis.py::TestFakeStrictRedis::test_incr_with_no_preexisting_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_incrbyfloat", "test_fakenewsredis.py::TestFakeStrictRedis::test_incrbyfloat_bad_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_incrbyfloat_expiry", "test_fakenewsredis.py::TestFakeStrictRedis::test_incrbyfloat_precision", "test_fakenewsredis.py::TestFakeStrictRedis::test_incrbyfloat_with_noexist", "test_fakenewsredis.py::TestFakeStrictRedis::test_key_patterns", "test_fakenewsredis.py::TestFakeStrictRedis::test_lindex", "test_fakenewsredis.py::TestFakeStrictRedis::test_lindex_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_linsert_after", "test_fakenewsredis.py::TestFakeStrictRedis::test_linsert_before", "test_fakenewsredis.py::TestFakeStrictRedis::test_linsert_no_pivot", "test_fakenewsredis.py::TestFakeStrictRedis::test_linsert_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_llen", "test_fakenewsredis.py::TestFakeStrictRedis::test_llen_no_exist", "test_fakenewsredis.py::TestFakeStrictRedis::test_llen_wrong_type", 
"test_fakenewsredis.py::TestFakeStrictRedis::test_lpop", "test_fakenewsredis.py::TestFakeStrictRedis::test_lpop_empty_list", "test_fakenewsredis.py::TestFakeStrictRedis::test_lpop_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_lpush_key_does_not_exist", "test_fakenewsredis.py::TestFakeStrictRedis::test_lpush_then_lrange_all", "test_fakenewsredis.py::TestFakeStrictRedis::test_lpush_then_lrange_portion", "test_fakenewsredis.py::TestFakeStrictRedis::test_lpush_with_nonstr_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_lpush_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_lpushx", "test_fakenewsredis.py::TestFakeStrictRedis::test_lpushx_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_lrem_default_value", "test_fakenewsredis.py::TestFakeStrictRedis::test_lrem_does_not_exist", "test_fakenewsredis.py::TestFakeStrictRedis::test_lrem_negative_count", "test_fakenewsredis.py::TestFakeStrictRedis::test_lrem_positive_count", "test_fakenewsredis.py::TestFakeStrictRedis::test_lrem_return_value", "test_fakenewsredis.py::TestFakeStrictRedis::test_lrem_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_lrem_zero_count", "test_fakenewsredis.py::TestFakeStrictRedis::test_lset", "test_fakenewsredis.py::TestFakeStrictRedis::test_lset_index_out_of_range", "test_fakenewsredis.py::TestFakeStrictRedis::test_lset_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_ltrim", "test_fakenewsredis.py::TestFakeStrictRedis::test_ltrim_expiry", "test_fakenewsredis.py::TestFakeStrictRedis::test_ltrim_with_non_existent_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_ltrim_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_mget", "test_fakenewsredis.py::TestFakeStrictRedis::test_mget_mixed_types", "test_fakenewsredis.py::TestFakeStrictRedis::test_mget_with_no_keys_raises_error", "test_fakenewsredis.py::TestFakeStrictRedis::test_move_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_mset", "test_fakenewsredis.py::TestFakeStrictRedis::test_mset_accepts_kwargs", "test_fakenewsredis.py::TestFakeStrictRedis::test_mset_with_no_keys_raises_error", "test_fakenewsredis.py::TestFakeStrictRedis::test_msetnx", "test_fakenewsredis.py::TestFakeStrictRedis::test_multidb", "test_fakenewsredis.py::TestFakeStrictRedis::test_multiple_bits_set", "test_fakenewsredis.py::TestFakeStrictRedis::test_multiple_successful_watch_calls", "test_fakenewsredis.py::TestFakeStrictRedis::test_persist", "test_fakenewsredis.py::TestFakeStrictRedis::test_pfadd", "test_fakenewsredis.py::TestFakeStrictRedis::test_pfcount", "test_fakenewsredis.py::TestFakeStrictRedis::test_pfmerge", "test_fakenewsredis.py::TestFakeStrictRedis::test_ping", "test_fakenewsredis.py::TestFakeStrictRedis::test_pipeline", "test_fakenewsredis.py::TestFakeStrictRedis::test_pipeline_as_context_manager", "test_fakenewsredis.py::TestFakeStrictRedis::test_pipeline_ignore_errors", "test_fakenewsredis.py::TestFakeStrictRedis::test_pipeline_non_transactional", "test_fakenewsredis.py::TestFakeStrictRedis::test_pipeline_proxies_to_redis_object", "test_fakenewsredis.py::TestFakeStrictRedis::test_pipeline_raises_when_watched_key_changed", "test_fakenewsredis.py::TestFakeStrictRedis::test_pipeline_succeeds_despite_unwatched_key_changed", "test_fakenewsredis.py::TestFakeStrictRedis::test_pipeline_succeeds_when_watching_nonexistent_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_pipeline_transaction_shortcut", 
"test_fakenewsredis.py::TestFakeStrictRedis::test_pipeline_transaction_value_from_callable", "test_fakenewsredis.py::TestFakeStrictRedis::test_pttl_should_return_minus_one_for_non_expiring_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_pttl_should_return_minus_two_for_non_existent_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_pubsub_binary_message", "test_fakenewsredis.py::TestFakeStrictRedis::test_pubsub_ignore_sub_messages_listen", "test_fakenewsredis.py::TestFakeStrictRedis::test_pubsub_listen", "test_fakenewsredis.py::TestFakeStrictRedis::test_pubsub_psubscribe", "test_fakenewsredis.py::TestFakeStrictRedis::test_pubsub_punsubscribe", "test_fakenewsredis.py::TestFakeStrictRedis::test_pubsub_subscribe", "test_fakenewsredis.py::TestFakeStrictRedis::test_pubsub_unsubscribe", "test_fakenewsredis.py::TestFakeStrictRedis::test_rename", "test_fakenewsredis.py::TestFakeStrictRedis::test_rename_does_exist", "test_fakenewsredis.py::TestFakeStrictRedis::test_rename_expiry", "test_fakenewsredis.py::TestFakeStrictRedis::test_rename_nonexistent_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_renamenx_doesnt_exist", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpop", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpop_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpoplpush", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpoplpush_expiry", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpoplpush_one_to_self", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpoplpush_to_nonexistent_destination", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpoplpush_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpush", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpush_then_lrange_with_nested_list1", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpush_then_lrange_with_nested_list2", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpush_then_lrange_with_nested_list3", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpush_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpushx", "test_fakenewsredis.py::TestFakeStrictRedis::test_rpushx_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_sadd", "test_fakenewsredis.py::TestFakeStrictRedis::test_sadd_as_str_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_sadd_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_saving_non_ascii_chars_as_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_saving_non_ascii_chars_as_value", "test_fakenewsredis.py::TestFakeStrictRedis::test_saving_unicode_type_as_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_saving_unicode_type_as_value", "test_fakenewsredis.py::TestFakeStrictRedis::test_scan", "test_fakenewsredis.py::TestFakeStrictRedis::test_scan_all_in_single_call", "test_fakenewsredis.py::TestFakeStrictRedis::test_scan_iter_multiple_pages", "test_fakenewsredis.py::TestFakeStrictRedis::test_scan_iter_multiple_pages_with_match", "test_fakenewsredis.py::TestFakeStrictRedis::test_scan_iter_single_page", "test_fakenewsredis.py::TestFakeStrictRedis::test_scan_multiple_pages_with_count_arg", "test_fakenewsredis.py::TestFakeStrictRedis::test_scan_single", "test_fakenewsredis.py::TestFakeStrictRedis::test_scard", "test_fakenewsredis.py::TestFakeStrictRedis::test_scard_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_sdiff", "test_fakenewsredis.py::TestFakeStrictRedis::test_sdiff_empty", "test_fakenewsredis.py::TestFakeStrictRedis::test_sdiff_one_key", 
"test_fakenewsredis.py::TestFakeStrictRedis::test_sdiff_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_sdiffstore", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_None_value", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_ex", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_ex_using_timedelta", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_existing_key_persists", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_float_value", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_non_str_keys", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_px", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_px_using_timedelta", "test_fakenewsredis.py::TestFakeStrictRedis::test_set_then_get", "test_fakenewsredis.py::TestFakeStrictRedis::test_setbit_expiry", "test_fakenewsredis.py::TestFakeStrictRedis::test_setbit_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_setbits_and_getkeys", "test_fakenewsredis.py::TestFakeStrictRedis::test_setex", "test_fakenewsredis.py::TestFakeStrictRedis::test_setex_using_float", "test_fakenewsredis.py::TestFakeStrictRedis::test_setex_using_timedelta", "test_fakenewsredis.py::TestFakeStrictRedis::test_setitem_getitem", "test_fakenewsredis.py::TestFakeStrictRedis::test_setnx", "test_fakenewsredis.py::TestFakeStrictRedis::test_setrange", "test_fakenewsredis.py::TestFakeStrictRedis::test_setrange_expiry", "test_fakenewsredis.py::TestFakeStrictRedis::test_sinter", "test_fakenewsredis.py::TestFakeStrictRedis::test_sinter_bytes_keys", "test_fakenewsredis.py::TestFakeStrictRedis::test_sinter_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_sinterstore", "test_fakenewsredis.py::TestFakeStrictRedis::test_sismember", "test_fakenewsredis.py::TestFakeStrictRedis::test_sismember_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_smembers", "test_fakenewsredis.py::TestFakeStrictRedis::test_smembers_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_smove", "test_fakenewsredis.py::TestFakeStrictRedis::test_smove_non_existent_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_sort_alpha", "test_fakenewsredis.py::TestFakeStrictRedis::test_sort_descending", "test_fakenewsredis.py::TestFakeStrictRedis::test_sort_range_offset_norange", "test_fakenewsredis.py::TestFakeStrictRedis::test_sort_range_offset_range", "test_fakenewsredis.py::TestFakeStrictRedis::test_sort_range_offset_range_and_desc", "test_fakenewsredis.py::TestFakeStrictRedis::test_sort_range_with_large_range", "test_fakenewsredis.py::TestFakeStrictRedis::test_sort_with_by_and_get_option", "test_fakenewsredis.py::TestFakeStrictRedis::test_sort_with_hash", "test_fakenewsredis.py::TestFakeStrictRedis::test_sort_with_set", "test_fakenewsredis.py::TestFakeStrictRedis::test_sort_with_store_option", "test_fakenewsredis.py::TestFakeStrictRedis::test_sort_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_spop", "test_fakenewsredis.py::TestFakeStrictRedis::test_spop_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_srandmember", "test_fakenewsredis.py::TestFakeStrictRedis::test_srandmember_number", "test_fakenewsredis.py::TestFakeStrictRedis::test_srandmember_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_srem", "test_fakenewsredis.py::TestFakeStrictRedis::test_srem_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_sscan", "test_fakenewsredis.py::TestFakeStrictRedis::test_strlen", "test_fakenewsredis.py::TestFakeStrictRedis::test_strlen_wrong_type", 
"test_fakenewsredis.py::TestFakeStrictRedis::test_substr", "test_fakenewsredis.py::TestFakeStrictRedis::test_substr_noexist_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_substr_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_sunion", "test_fakenewsredis.py::TestFakeStrictRedis::test_sunion_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_sunionstore", "test_fakenewsredis.py::TestFakeStrictRedis::test_ttl_should_return_minus_one_for_non_expiring_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_ttl_should_return_minus_two_for_non_existent_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_unset_bits", "test_fakenewsredis.py::TestFakeStrictRedis::test_watch_state_is_cleared_across_multiple_watches", "test_fakenewsredis.py::TestFakeStrictRedis::test_zadd", "test_fakenewsredis.py::TestFakeStrictRedis::test_zadd_errors", "test_fakenewsredis.py::TestFakeStrictRedis::test_zadd_multiple", "test_fakenewsredis.py::TestFakeStrictRedis::test_zadd_uses_str", "test_fakenewsredis.py::TestFakeStrictRedis::test_zadd_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zcard", "test_fakenewsredis.py::TestFakeStrictRedis::test_zcard_non_existent_key", "test_fakenewsredis.py::TestFakeStrictRedis::test_zcard_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zcount", "test_fakenewsredis.py::TestFakeStrictRedis::test_zcount_exclusive", "test_fakenewsredis.py::TestFakeStrictRedis::test_zcount_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zincrby", "test_fakenewsredis.py::TestFakeStrictRedis::test_zincrby_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zinterstore", "test_fakenewsredis.py::TestFakeStrictRedis::test_zinterstore_max", "test_fakenewsredis.py::TestFakeStrictRedis::test_zinterstore_mixed_set_types", "test_fakenewsredis.py::TestFakeStrictRedis::test_zinterstore_nokey", "test_fakenewsredis.py::TestFakeStrictRedis::test_zinterstore_onekey", "test_fakenewsredis.py::TestFakeStrictRedis::test_zinterstore_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zlexcount", "test_fakenewsredis.py::TestFakeStrictRedis::test_zlexcount_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrange_descending", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrange_descending_with_scores", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrange_same_score", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrange_with_positive_indices", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrange_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrangebylex", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrangebylex_raises_error", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrangebylex_with_limit", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrangebylex_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrangebyscore", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrangebyscore_raises_error", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrangebyscore_slice", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrangebyscore_withscores", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrangebyscore_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrangebysore_exclusive", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrank", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrank_non_existent_member", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrank_wrong_type", 
"test_fakenewsredis.py::TestFakeStrictRedis::test_zrem", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrem_non_existent_member", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrem_numeric_member", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrem_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebylex", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebylex_badkey", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebylex_error", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebylex_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebyrank", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebyrank_negative_indices", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebyrank_out_of_bounds", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebyrank_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebyscore", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebyscore_badkey", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebyscore_exclusive", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebyscore_raises_error", "test_fakenewsredis.py::TestFakeStrictRedis::test_zremrangebyscore_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrange", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrange_sorted_keys", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrange_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrangebylex", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrangebylex_raises_error", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrangebylex_with_limit", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrangebylex_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrangebyscore", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrangebyscore_exclusive", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrangebyscore_raises_error", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrangebyscore_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrank", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrank_non_existent_member", "test_fakenewsredis.py::TestFakeStrictRedis::test_zrevrank_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zscore", "test_fakenewsredis.py::TestFakeStrictRedis::test_zscore_non_existent_member", "test_fakenewsredis.py::TestFakeStrictRedis::test_zscore_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedis::test_zunionstore", "test_fakenewsredis.py::TestFakeStrictRedis::test_zunionstore_badkey", "test_fakenewsredis.py::TestFakeStrictRedis::test_zunionstore_max", "test_fakenewsredis.py::TestFakeStrictRedis::test_zunionstore_min", "test_fakenewsredis.py::TestFakeStrictRedis::test_zunionstore_mixed_set_types", "test_fakenewsredis.py::TestFakeStrictRedis::test_zunionstore_nokey", "test_fakenewsredis.py::TestFakeStrictRedis::test_zunionstore_sum", "test_fakenewsredis.py::TestFakeStrictRedis::test_zunionstore_weights", "test_fakenewsredis.py::TestFakeStrictRedis::test_zunionstore_wrong_type", "test_fakenewsredis.py::TestFakeRedis::test_expire_should_expire_immediately_with_millisecond_timedelta", "test_fakenewsredis.py::TestFakeRedis::test_expire_should_expire_key", "test_fakenewsredis.py::TestFakeRedis::test_expire_should_expire_key_using_timedelta", "test_fakenewsredis.py::TestFakeRedis::test_expire_should_not_handle_floating_point_values", 
"test_fakenewsredis.py::TestFakeRedis::test_expire_should_return_false_for_missing_key", "test_fakenewsredis.py::TestFakeRedis::test_expire_should_return_true_for_existing_key", "test_fakenewsredis.py::TestFakeRedis::test_expireat_should_expire_key_by_datetime", "test_fakenewsredis.py::TestFakeRedis::test_expireat_should_expire_key_by_timestamp", "test_fakenewsredis.py::TestFakeRedis::test_expireat_should_return_false_for_missing_key", "test_fakenewsredis.py::TestFakeRedis::test_expireat_should_return_true_for_existing_key", "test_fakenewsredis.py::TestFakeRedis::test_lock", "test_fakenewsredis.py::TestFakeRedis::test_lrem_default_value", "test_fakenewsredis.py::TestFakeRedis::test_lrem_does_not_exist", "test_fakenewsredis.py::TestFakeRedis::test_lrem_negative_count", "test_fakenewsredis.py::TestFakeRedis::test_lrem_postitive_count", "test_fakenewsredis.py::TestFakeRedis::test_lrem_return_value", "test_fakenewsredis.py::TestFakeRedis::test_lrem_zero_count", "test_fakenewsredis.py::TestFakeRedis::test_pexpire_should_expire_key", "test_fakenewsredis.py::TestFakeRedis::test_pexpire_should_expire_key_using_timedelta", "test_fakenewsredis.py::TestFakeRedis::test_pexpire_should_return_falsey_for_missing_key", "test_fakenewsredis.py::TestFakeRedis::test_pexpire_should_return_truthy_for_existing_key", "test_fakenewsredis.py::TestFakeRedis::test_pexpireat_should_expire_key_by_datetime", "test_fakenewsredis.py::TestFakeRedis::test_pexpireat_should_expire_key_by_timestamp", "test_fakenewsredis.py::TestFakeRedis::test_pexpireat_should_return_false_for_missing_key", "test_fakenewsredis.py::TestFakeRedis::test_pexpireat_should_return_true_for_existing_key", "test_fakenewsredis.py::TestFakeRedis::test_psetex_expire_value", "test_fakenewsredis.py::TestFakeRedis::test_psetex_expire_value_using_timedelta", "test_fakenewsredis.py::TestFakeRedis::test_pttl_should_return_none_for_non_expiring_key", "test_fakenewsredis.py::TestFakeRedis::test_pttl_should_return_value_for_expiring_key", "test_fakenewsredis.py::TestFakeRedis::test_set_ex_should_expire_value", "test_fakenewsredis.py::TestFakeRedis::test_set_nx_doesnt_set_value_twice", "test_fakenewsredis.py::TestFakeRedis::test_set_px_should_expire_value", "test_fakenewsredis.py::TestFakeRedis::test_set_xx_set_value_when_exists", "test_fakenewsredis.py::TestFakeRedis::test_setex", "test_fakenewsredis.py::TestFakeRedis::test_setex_using_timedelta", "test_fakenewsredis.py::TestFakeRedis::test_ttl_should_return_none_for_non_expiring_key", "test_fakenewsredis.py::TestFakeRedis::test_ttl_should_return_value_for_expiring_key", "test_fakenewsredis.py::TestFakeRedis::test_ttls_should_always_be_long", "test_fakenewsredis.py::TestFakeRedis::test_zadd_deprecated", "test_fakenewsredis.py::TestFakeRedis::test_zadd_missing_required_params", "test_fakenewsredis.py::TestFakeRedis::test_zadd_with_multiple_keypairs", "test_fakenewsredis.py::TestFakeRedis::test_zadd_with_name_is_non_string", "test_fakenewsredis.py::TestFakeRedis::test_zadd_with_single_keypair", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_append", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_append_with_no_preexisting_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_append_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_basic_sort", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_bitcount", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_bitcount_wrong_type", 
"test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_blocking_operations_when_empty", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_blpop_allow_single_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_blpop_single_list", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_blpop_test_multiple_lists", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_blpop_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_brpop_single_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_brpop_test_multiple_lists", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_brpop_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_brpoplpush_multi_keys", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_brpoplpush_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_contains", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_decr", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_decr_badtype", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_decr_expiry", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_decr_newkey", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_delete", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_delete_expire", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_delete_multiple", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_delete_nonexistent_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_echo", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_empty_hash", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_empty_list", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_empty_set", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_empty_sort", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_empty_zset", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_exists", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_flushdb", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_foo", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_get_does_not_exist", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_get_invalid_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_get_with_non_str_keys", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_getbit", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_getbit_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_getset_exists", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_getset_not_exist", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_getset_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hdel", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hdel_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hexists", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hexists_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hgetall", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hgetall_empty_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hgetall_with_tuples", 
"test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hgetall_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hincrby", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hincrby_with_no_starting_value", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hincrby_with_range_param", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hincrby_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hincrbyfloat", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hincrbyfloat_on_non_float_value_raises_error", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hincrbyfloat_precision", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hincrbyfloat_with_no_starting_value", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hincrbyfloat_with_non_float_amount_raises_error", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hincrbyfloat_with_range_param", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hincrbyfloat_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hkeys", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hkeys_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hlen", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hlen_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hmget", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hmget_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hmset_convert_values", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hmset_does_not_mutate_input_params", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hmset_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hmsetset", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hmsetset_empty_raises_error", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hscan", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hset_then_hget", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hset_update", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hset_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hsetnx", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hvals", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_hvals_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incr_bad_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incr_by", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incr_expiry", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incr_followed_by_mget", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incr_followed_by_mget_returns_strings", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incr_preexisting_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incr_with_float", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incr_with_no_preexisting_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incrbyfloat", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incrbyfloat_bad_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incrbyfloat_expiry", 
"test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incrbyfloat_precision", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_incrbyfloat_with_noexist", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_key_patterns", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lindex", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lindex_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_linsert_after", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_linsert_before", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_linsert_no_pivot", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_linsert_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_llen", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_llen_no_exist", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_llen_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lpop", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lpop_empty_list", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lpop_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lpush_key_does_not_exist", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lpush_then_lrange_all", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lpush_then_lrange_portion", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lpush_with_nonstr_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lpush_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lpushx", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lpushx_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lrem_default_value", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lrem_does_not_exist", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lrem_negative_count", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lrem_positive_count", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lrem_return_value", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lrem_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lrem_zero_count", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lset", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lset_index_out_of_range", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_lset_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_ltrim", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_ltrim_expiry", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_ltrim_with_non_existent_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_ltrim_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_mget", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_mget_mixed_types", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_mget_with_no_keys_raises_error", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_move_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_mset", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_mset_accepts_kwargs", 
"test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_mset_with_no_keys_raises_error", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_msetnx", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_multidb", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_multiple_bits_set", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_multiple_successful_watch_calls", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_persist", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pfadd", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pfcount", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pfmerge", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_ping", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pipeline", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pipeline_as_context_manager", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pipeline_ignore_errors", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pipeline_non_transactional", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pipeline_proxies_to_redis_object", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pipeline_raises_when_watched_key_changed", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pipeline_succeeds_despite_unwatched_key_changed", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pipeline_succeeds_when_watching_nonexistent_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pipeline_transaction_shortcut", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pipeline_transaction_value_from_callable", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pttl_should_return_minus_one_for_non_expiring_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pttl_should_return_minus_two_for_non_existent_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pubsub_binary_message", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pubsub_ignore_sub_messages_listen", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pubsub_listen", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pubsub_psubscribe", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pubsub_punsubscribe", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pubsub_subscribe", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_pubsub_unsubscribe", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rename", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rename_does_exist", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rename_expiry", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rename_nonexistent_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_renamenx_doesnt_exist", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpop", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpop_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpoplpush", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpoplpush_expiry", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpoplpush_one_to_self", 
"test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpoplpush_to_nonexistent_destination", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpoplpush_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpush", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpush_then_lrange_with_nested_list1", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpush_then_lrange_with_nested_list2", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpush_then_lrange_with_nested_list3", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpush_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpushx", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_rpushx_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sadd", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sadd_as_str_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sadd_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_saving_non_ascii_chars_as_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_saving_non_ascii_chars_as_value", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_saving_unicode_type_as_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_saving_unicode_type_as_value", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_scan", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_scan_all_in_single_call", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_scan_iter_multiple_pages", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_scan_iter_multiple_pages_with_match", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_scan_iter_single_page", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_scan_multiple_pages_with_count_arg", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_scan_single", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_scard", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_scard_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sdiff", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sdiff_empty", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sdiff_one_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sdiff_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sdiffstore", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_None_value", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_ex", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_ex_using_timedelta", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_existing_key_persists", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_float_value", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_non_str_keys", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_px", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_px_using_timedelta", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_set_then_get", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setbit_expiry", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setbit_wrong_type", 
"test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setbits_and_getkeys", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setex", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setex_using_float", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setex_using_timedelta", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setitem_getitem", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setnx", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setrange", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_setrange_expiry", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sinter", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sinter_bytes_keys", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sinter_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sinterstore", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sismember", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sismember_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_smembers", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_smembers_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_smove", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_smove_non_existent_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sort_alpha", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sort_descending", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sort_range_offset_norange", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sort_range_offset_range", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sort_range_offset_range_and_desc", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sort_range_with_large_range", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sort_with_by_and_get_option", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sort_with_hash", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sort_with_set", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sort_with_store_option", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sort_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_spop", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_spop_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_srandmember", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_srandmember_number", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_srandmember_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_srem", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_srem_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sscan", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_strlen", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_strlen_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_substr", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_substr_noexist_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_substr_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sunion", 
"test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sunion_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_sunionstore", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_ttl_should_return_minus_one_for_non_expiring_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_ttl_should_return_minus_two_for_non_existent_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_unset_bits", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_watch_state_is_cleared_across_multiple_watches", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zadd", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zadd_errors", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zadd_multiple", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zadd_uses_str", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zadd_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zcard", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zcard_non_existent_key", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zcard_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zcount", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zcount_exclusive", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zcount_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zincrby", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zincrby_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zinterstore", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zinterstore_max", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zinterstore_mixed_set_types", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zinterstore_nokey", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zinterstore_onekey", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zinterstore_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zlexcount", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zlexcount_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrange_descending", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrange_descending_with_scores", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrange_same_score", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrange_with_positive_indices", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrange_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrangebylex", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrangebylex_raises_error", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrangebylex_with_limit", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrangebylex_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrangebyscore", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrangebyscore_raises_error", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrangebyscore_slice", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrangebyscore_withscores", 
"test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrangebyscore_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrangebysore_exclusive", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrank", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrank_non_existent_member", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrank_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrem", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrem_non_existent_member", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrem_numeric_member", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrem_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebylex", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebylex_badkey", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebylex_error", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebylex_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebyrank", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebyrank_negative_indices", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebyrank_out_of_bounds", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebyrank_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebyscore", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebyscore_badkey", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebyscore_exclusive", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebyscore_raises_error", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zremrangebyscore_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrange", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrange_sorted_keys", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrange_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrangebylex", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrangebylex_raises_error", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrangebylex_with_limit", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrangebylex_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrangebyscore", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrangebyscore_exclusive", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrangebyscore_raises_error", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrangebyscore_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrank", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrank_non_existent_member", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zrevrank_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zscore", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zscore_non_existent_member", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zscore_wrong_type", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zunionstore", 
"test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zunionstore_badkey", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zunionstore_max", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zunionstore_min", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zunionstore_mixed_set_types", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zunionstore_nokey", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zunionstore_sum", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zunionstore_weights", "test_fakenewsredis.py::TestFakeStrictRedisDecodeResponses::test_zunionstore_wrong_type", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_expire_should_expire_immediately_with_millisecond_timedelta", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_expire_should_expire_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_expire_should_expire_key_using_timedelta", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_expire_should_not_handle_floating_point_values", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_expire_should_return_false_for_missing_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_expire_should_return_true_for_existing_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_expireat_should_expire_key_by_datetime", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_expireat_should_expire_key_by_timestamp", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_expireat_should_return_false_for_missing_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_expireat_should_return_true_for_existing_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_lock", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_lrem_default_value", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_lrem_does_not_exist", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_lrem_negative_count", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_lrem_postitive_count", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_lrem_return_value", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_lrem_zero_count", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_pexpire_should_expire_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_pexpire_should_expire_key_using_timedelta", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_pexpire_should_return_falsey_for_missing_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_pexpire_should_return_truthy_for_existing_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_pexpireat_should_expire_key_by_datetime", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_pexpireat_should_expire_key_by_timestamp", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_pexpireat_should_return_false_for_missing_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_pexpireat_should_return_true_for_existing_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_psetex_expire_value", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_psetex_expire_value_using_timedelta", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_pttl_should_return_none_for_non_expiring_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_pttl_should_return_value_for_expiring_key", 
"test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_set_ex_should_expire_value", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_set_nx_doesnt_set_value_twice", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_set_px_should_expire_value", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_set_xx_set_value_when_exists", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_setex", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_setex_using_timedelta", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_ttl_should_return_none_for_non_expiring_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_ttl_should_return_value_for_expiring_key", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_ttls_should_always_be_long", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_zadd_deprecated", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_zadd_missing_required_params", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_zadd_with_multiple_keypairs", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_zadd_with_name_is_non_string", "test_fakenewsredis.py::TestFakeRedisDecodeResponses::test_zadd_with_single_keypair", "test_fakenewsredis.py::TestInitArgs::test_can_accept_any_kwargs", "test_fakenewsredis.py::TestInitArgs::test_can_pass_through_extra_args", "test_fakenewsredis.py::TestInitArgs::test_from_url", "test_fakenewsredis.py::TestInitArgs::test_from_url_db_value_error", "test_fakenewsredis.py::TestInitArgs::test_from_url_with_db_arg", "test_fakenewsredis.py::TestImportation::test_searches_for_c_stdlib_and_raises_if_missing" ]
[]
BSD License
2082
[ "requirements.txt", "fakenewsredis.py" ]
[ "requirements.txt", "fakenewsredis.py" ]
EdinburghGenomics__clarity_scripts-41
37d6a75d81d94652c5c29228f86954b0ea96f8bb
2018-01-24 15:27:31
32c21fa719365176a9101a8a7ce72eb07f3ac85d
diff --git a/requirements.txt b/requirements.txt index 0b298f8..f7c70e9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,1 +1,1 @@ -EGCG-Core==0.7.3 +EGCG-Core==0.8.1 diff --git a/scripts/assign_container_name.py b/scripts/assign_container_name.py index 98d46c6..2952a84 100644 --- a/scripts/assign_container_name.py +++ b/scripts/assign_container_name.py @@ -3,41 +3,42 @@ from EPPs.common import StepEPP, step_argparser class AssignContainerName(StepEPP): - # assigns the container automatically for the FluidX Transfer from Rack to Plate step using rule of Project+P+9. - # Checks to see if thecontainer name has already been used. + """ + Assigns a container name for, e.g., the FluidX Transfer from Rack to Plate step in the format + '{project_id}P{number}'. Automatically checks previously used container names to ensure a new name is used. + """ def _run(self): - # lists all of the output artifacts for the process. - processOutputs = list(self.process.all_outputs(unique=True)) - - # loops through each artifact, when an analyte type artifact is found it updates the container name with the rule stated above then breaks the loop - # as this only needs to occur once as the step can only process a single rack of tubes from a single project. - for p in processOutputs: + """ + Update each analyte-type artifact with a new, unused container name. This only needs to occur once as the step + can only process a single rack of tubes from a single project. + """ + for p in list(self.process.all_outputs(unique=True)): if p.output_type == 'Analyte': project = p.samples[0].project.name - new_container_name = self.findAvailableContainer(project=project, count=1) + new_container_name = self.find_available_container(project) p.container.name = new_container_name p.container.put() break - - - - def findAvailableContainer(self, project, count): - # checks to see if the first container name is available and then recurses until it finds an available container name - - if count>999: + def find_available_container(self, project, count=1): + """ + Check to see if a container name is available, and recurse with incremented container numbers until an available + container name is found. 
+ :param str project: + :param int count: + """ + if count > 999: raise ValueError('Cannot allocate more than 999 containers') - new_name=project+'P%03d' % (count) + new_name = project + 'P%03d' % count - if self.lims.get_artifacts(containername=new_name) == []: + if not self.lims.get_artifacts(containername=new_name): return new_name else: - return self.findAvailableContainer(project=project, count=count+1) - + return self.find_available_container(project, count=count + 1) def main(): @@ -48,4 +49,4 @@ def main(): if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/scripts/assign_workflow_preseqlab.py b/scripts/assign_workflow_preseqlab.py index 59cc10e..0cd2fa7 100644 --- a/scripts/assign_workflow_preseqlab.py +++ b/scripts/assign_workflow_preseqlab.py @@ -5,12 +5,12 @@ from EPPs.common import StepEPP, step_argparser, get_workflow_stage, find_newest class AssignWorkflowPreSeqLab(StepEPP): def _run(self): - artifact_to_route = set() + artifacts_to_route = set() for art in self.artifacts: sample = art.samples[0] if sample.udf.get("Proceed To SeqLab") and not sample.udf.get("2D Barcode"): # checks to see if sample is in plate or fluidX tube - artifact_to_route.add(sample.artifact) + artifacts_to_route.add(sample.artifact) elif sample.udf.get("Proceed To SeqLab") and sample.udf.get("2D Barcode"): artifact = find_newest_artifact_originating_from( @@ -18,12 +18,12 @@ class AssignWorkflowPreSeqLab(StepEPP): process_type="FluidX Transfer From Rack Into Plate EG 1.0 ST", sample_name=sample.name ) - artifact_to_route.add(artifact) + artifacts_to_route.add(artifact) - if artifact_to_route: + if artifacts_to_route: # Only route artifacts if there are any stage = get_workflow_stage(self.lims, "PreSeqLab EG 6.0", "Sequencing Plate Preparation EG 2.0") - self.lims.route_artifacts(list(artifact_to_route), stage_uri=stage.uri) + self.lims.route_artifacts(list(artifacts_to_route), stage_uri=stage.uri) def main(): @@ -34,4 +34,3 @@ def main(): if __name__ == "__main__": main() - diff --git a/scripts/assign_workflow_receive_sample.py b/scripts/assign_workflow_receive_sample.py index ccc42be..7d31130 100644 --- a/scripts/assign_workflow_receive_sample.py +++ b/scripts/assign_workflow_receive_sample.py @@ -1,35 +1,32 @@ #!/usr/bin/env python from EPPs.common import StepEPP, step_argparser, get_workflow_stage -class AssignWorkflowReceiveSample(StepEPP): - #Assign received plate to either User Prepared Library Batch or Spectramax Picogreen +class AssignWorkflowReceiveSample(StepEPP): + """ + Assigns a received plate to either User Prepared Library Batch or Spectramax Picogreen, depending on the contents of + the UDF 'User Prepared Library' + """ def _run(self): - artifact_to_route_userprepared = set() - artifact_to_route_preseqlab = set() - + artifacts_to_route_userprepared = set() + artifacts_to_route_preseqlab = set() for art in self.artifacts: sample = art.samples[0] if sample.udf.get("User Prepared Library") == "Yes": - artifact_to_route_userprepared.add(art) - # assigns the received sample to User Prepared Library Batch if is a user prepared library + artifacts_to_route_userprepared.add(art) + else: + artifacts_to_route_preseqlab.add(art) - - elif not sample.udf.get("User Prepared Library"): - artifact_to_route_preseqlab.add(art) - # assigns the received sample to PreSeqLab if NOT an user prepared library - - if artifact_to_route_userprepared: + if artifacts_to_route_userprepared: # Only route artifacts if there are any artifacts to go to PCR-Free stage = 
get_workflow_stage(self.lims, "User Prepared Library Batch EG1.0 WF", "User Prepared Library Batch EG 1.0 ST") - self.lims.route_artifacts(list(artifact_to_route_userprepared), stage_uri=stage.uri) + self.lims.route_artifacts(list(artifacts_to_route_userprepared), stage_uri=stage.uri) - if artifact_to_route_preseqlab: + if artifacts_to_route_preseqlab: # Only route artifacts if there are any artifacts to go to Nano stage = get_workflow_stage(self.lims, "PreSeqLab EG 6.0", "Spectramax Picogreen EG 6.0") - self.lims.route_artifacts(list(artifact_to_route_preseqlab), stage_uri=stage.uri) - + self.lims.route_artifacts(list(artifacts_to_route_preseqlab), stage_uri=stage.uri) def main(): diff --git a/scripts/assign_workflow_seqlab_quantstudio.py b/scripts/assign_workflow_seqlab_quantstudio.py index 5c8802e..9f964ce 100644 --- a/scripts/assign_workflow_seqlab_quantstudio.py +++ b/scripts/assign_workflow_seqlab_quantstudio.py @@ -2,65 +2,51 @@ from EPPs.common import StepEPP, step_argparser, get_workflow_stage, find_newest_artifact_originating_from -def get_parent_process_id(art): - return art.parent_process.id - - class AssignWorkflowSeqLabQuantStudio(StepEPP): - '''Assigns plate submitted samples or FluidX derived artifacts to the correct SeqLab Workflow and - assigns to QuantStudio workflow if required based on the contents of submitted sample UDFs. - "Prep Workflow", "Species", "Skip genotyping for (human) sample?"''' - + """ + Assigns plates of submitted samples or FluidX derived artifacts to the correct SeqLab Workflow and assigns to the + QuantStudio workflow if required based on UDFs "Prep Workflow", "Species" and "Skip genotyping for (human) sample?" + """ def _run(self): - artifact_to_route_pcr_free = set() - artifact_to_route_nano = set() - artifact_to_route_quant = set() + artifacts_to_route_pcr_free = set() + artifacts_to_route_nano = set() + artifacts_to_route_quant = set() for art in self.output_artifacts: sample = art.samples[0] if sample.udf.get("Prep Workflow") == "TruSeq PCR-Free DNA Sample Prep": - artifact_to_route_pcr_free.add(art) - # assigns the normalised batch plate to the TruSeq PCR-Free workflow - + artifacts_to_route_pcr_free.add(art) elif sample.udf.get("Prep Workflow") == "TruSeq Nano DNA Sample Prep": - artifact_to_route_nano.add(art) - # assigns the normalised batch plate to the TruSeq Nano workflow - - if sample.udf.get("Species") == "Homo sapiens" and not sample.udf.get("Skip genotyping for (human) sample?") \ - or sample.udf.get("Species") == "Human" and not sample.udf.get( - "Skip genotyping for (human) sample?"): + artifacts_to_route_nano.add(art) - # assigns either the submitted sample plate or the FluidX derived plate to the QuantStudio - # workflow if a human samples and the lab manager has not selected "Skip genotyping for (human) - # sample?" + if sample.udf.get("Species") in ("Homo sapiens", "Human") and not sample.udf.get("Skip genotyping for (human) sample?"): + # assign either the submitted sample plate or the FluidX derived plate to the QuantStudio + # workflow if human samples and the lab manager has not selected "Skip genotyping for (human) sample?" 
if not sample.udf.get("2D Barcode"): - # checks to see if sample is in plate or fluidX tube - artifact_to_route_quant.add(sample.artifact) + # check to see if sample is in plate or fluidX tube + artifacts_to_route_quant.add(sample.artifact) - elif sample.udf.get("2D Barcode"): + else: artifact = find_newest_artifact_originating_from( self.lims, process_type="FluidX Transfer From Rack Into Plate EG 1.0 ST", sample_name=sample.name ) - artifact_to_route_quant.add(artifact) + artifacts_to_route_quant.add(artifact) - if artifact_to_route_pcr_free: - # Only route artifacts if there are any artifacts to go to PCR-Free + if artifacts_to_route_pcr_free: stage = get_workflow_stage(self.lims, "TruSeq PCR-Free DNA Sample Prep", "Visual QC") - self.lims.route_artifacts(list(artifact_to_route_pcr_free), stage_uri=stage.uri) + self.lims.route_artifacts(list(artifacts_to_route_pcr_free), stage_uri=stage.uri) - if artifact_to_route_nano: - # Only route artifacts if there are any artifacts to go to Nano + if artifacts_to_route_nano: stage = get_workflow_stage(self.lims, "TruSeq Nano DNA Sample Prep", "Visual QC") - self.lims.route_artifacts(list(artifact_to_route_nano), stage_uri=stage.uri) + self.lims.route_artifacts(list(artifacts_to_route_nano), stage_uri=stage.uri) - if artifact_to_route_quant: - # Only route artifacts if there are any artifacts to go to Nano + if artifacts_to_route_quant: stage = get_workflow_stage(self.lims, "QuantStudio EG1.0", "QuantStudio Plate Preparation EG1.0") - self.lims.route_artifacts(list(artifact_to_route_quant), stage_uri=stage.uri) + self.lims.route_artifacts(list(artifacts_to_route_quant), stage_uri=stage.uri) def main(): diff --git a/scripts/assign_workflow_user_library.py b/scripts/assign_workflow_user_library.py index 64cb73f..141d7ae 100644 --- a/scripts/assign_workflow_user_library.py +++ b/scripts/assign_workflow_user_library.py @@ -1,9 +1,11 @@ #!/usr/bin/env python from EPPs.common import StepEPP, step_argparser, get_workflow_stage -class AssignWorkflowUserPreparedLibrary(StepEPP): - #Assign plate created in User Prepared Library to either Nano or PCR Free workflow +class AssignWorkflowUserPreparedLibrary(StepEPP): + """ + Assigns a plate created in User Prepared Library to either the Nano or PCR Free workflow. 
+ """ def _run(self): artifact_to_route_pcr_free = set() artifact_to_route_nano = set() @@ -12,25 +14,19 @@ class AssignWorkflowUserPreparedLibrary(StepEPP): sample = art.samples[0] if sample.udf.get("Prep Workflow") == "TruSeq PCR-Free DNA Sample Prep": artifact_to_route_pcr_free.add(art) - # assigns the normalised batch plate to the TruSeq PCR-Free workflow - elif sample.udf.get("Prep Workflow") == "TruSeq Nano DNA Sample Prep": artifact_to_route_nano.add(art) - # assigns the normalised batch plate to the TruSeq Nano workflow if artifact_to_route_pcr_free: - # Only route artifacts if there are any artifacts to go to PCR-Free stage = get_workflow_stage(self.lims, "TruSeq PCR-Free DNA Sample Prep", "SEMI-AUTOMATED - Make and Read qPCR Quant") self.lims.route_artifacts(list(artifact_to_route_pcr_free), stage_uri=stage.uri) if artifact_to_route_nano: - # Only route artifacts if there are any artifacts to go to Nano stage = get_workflow_stage(self.lims, "TruSeq Nano DNA Sample Prep", "SEMI-AUTOMATED - Make LQC & Caliper GX QC") self.lims.route_artifacts(list(artifact_to_route_nano), stage_uri=stage.uri) - def main(): p = step_argparser() args = p.parse_args() diff --git a/scripts/convert_and_dispatch_genotypes.py b/scripts/convert_and_dispatch_genotypes.py index 528565e..4cb8a39 100644 --- a/scripts/convert_and_dispatch_genotypes.py +++ b/scripts/convert_and_dispatch_genotypes.py @@ -1,15 +1,12 @@ #!/usr/bin/env python import csv -import sys from os import remove from os.path import join, dirname, abspath from collections import defaultdict from egcg_core.config import Configuration from egcg_core.app_logging import AppLogger, logging_default as log_cfg - -sys.path.append(dirname(dirname(abspath(__file__)))) -from EPPs.common import StepEPP, step_argparser import EPPs +from EPPs.common import StepEPP, step_argparser etc_path = join(abspath(dirname(EPPs.__file__)), 'etc') snp_cfg = Configuration(join(etc_path, 'SNPs_definition.yml')) diff --git a/scripts/create_reagents_for_run.py b/scripts/create_reagents_for_run.py index a304f56..6e540e4 100644 --- a/scripts/create_reagents_for_run.py +++ b/scripts/create_reagents_for_run.py @@ -57,10 +57,18 @@ class CreateReagentForRun(StepEPP): else: reagent_kits = self.lims.get_reagent_kits(name=reagent_kit_map[name]) if len(reagent_kits) != 1: - raise Exception('Found %s reagent kits for name %s' % (len(reagent_kits), reagent_kit_map[name])) - reagent_lot = ReagentLot.create(self.lims, reagent_kit=reagent_kits[0], name=name, lot_number=lot, expiry_date=time.strftime('%Y-%m,-%d'), status='active') + raise ValueError('Found %s reagent kits for name %s' % (len(reagent_kits), reagent_kit_map[name])) - print('Create reagent %s: %s' % (name, lot)) + ReagentLot.create( + self.lims, + reagent_kit=reagent_kits[0], + name=name, + lot_number=lot, + expiry_date=time.strftime('%Y-%m,-%d'), + status='active' + ) + + print('Created reagent %s: %s' % (name, lot)) def main(): diff --git a/scripts/email_data_release.py b/scripts/email_data_release.py index 80303c4..35c7305 100644 --- a/scripts/email_data_release.py +++ b/scripts/email_data_release.py @@ -5,58 +5,40 @@ from EPPs.config import load_config class DataReleaseEmail(SendMailEPP): + """ + Notifies the relevant teams that data for a project has been released. Also sends a reminder to send a customer + survey depending on UDF values. 
+ """ def _run(self): - if len(self.projects)>1: # check if more than one project in step, only one permitted + if len(self.projects) > 1: raise ValueError('More than one project present in step. Only one project per step permitted') - # Create the message to notify team that data has been released msg = 'Hi,\n\nData for {sample_count} sample(s) has been released for {project} at:\n\n{link}\n\nKind regards,\nClarityX' - - # fill in message with parameters msg = msg.format( - link='https://'+platform.node() + '/clarity/work-details/' + self.step_id[3:], + link='https://' + platform.node() + '/clarity/work-details/' + self.step_id[3:], sample_count=len(self.samples), - project=self.projects[0].name, + project=self.projects[0].name ) - subject = ', '.join([p.name for p in self.projects]) + ': Data Released' - - # Send email to list of persons specified in the projects-facility-finance_only section of config + subject = ', '.join(p.name for p in self.projects) + ': Data Released' self.send_mail(subject, msg, config_name='projects-facility-finance_only') - # if "Request Customer Survey (Final Data Release)" step UDF completed as "Yes" - # send an additional email to notify business team to send customer survey - # and reminder to finance that the final release has occurred - if self.process.udf.get("Request Customer Survey (Final Data Release)") == "Yes": + if self.process.udf.get('Request Customer Survey (Final Data Release)') == 'Yes': msg2 = 'Hi,\n\nThe final data release has occurred for {project}. Please request a customer survey.\n\nKind regards,\nClarityX' + msg2 = msg2.format(project=self.projects[0].name) - # fill in message with parameters - msg2 = msg2.format( - sample_count=len(self.samples), - project=self.projects[0].name, - ) subject2 = self.projects[0].name + ': Request Customer Survey - Final Data Released' self.send_mail(subject2, msg2, config_name='projects-facility-finance-bd_only') def main(): - # Ge the default command line options p = step_argparser() - - # Parse command line options args = p.parse_args() - - # Load the config from the default location load_config() - # Setup the EPP - action = DataReleaseEmail( - args.step_uri, args.username, args.password, args.log_file, - ) - - # Run the EPP + action = DataReleaseEmail(args.step_uri, args.username, args.password, args.log_file) action.run() -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/scripts/email_data_trigger.py b/scripts/email_data_trigger.py index eac8c99..0a3d094 100644 --- a/scripts/email_data_trigger.py +++ b/scripts/email_data_trigger.py @@ -1,26 +1,26 @@ #!/usr/bin/env python -from collections import defaultdict import platform from EPPs.common import step_argparser, SendMailEPP from EPPs.config import load_config class DataReleaseEmailAndUpdateEPP(SendMailEPP): + """Notifies the bioinformatics team to release data for a project.""" def _run(self): - if len(self.projects)>1: + if len(self.projects) > 1: raise ValueError('More than one project present in step. Only one project per step permitted') data_download_contacts = [] # There are up to 5 contacts entered in the step. - for count in range(1,6): - udf_name1 = "Data Download Contact Name "+str(count) - udf_name2 = "Is Contact "+str(count)+" A New or Existing User?" + for count in range(1, 6): + udf_name1 = 'Data Download Contact Name %s' % count + udf_name2 = 'Is Contact %s A New or Existing User?' 
% count if self.process.udf.get(udf_name1): data_download_contacts.append( - '%s (%s)' % (self.process.udf.get(udf_name1), self.process.udf.get(udf_name2) ) + '%s (%s)' % (self.process.udf.get(udf_name1), self.process.udf.get(udf_name2)) ) - # Create the message + msg = '''Hi Bioinformatics, Please release the data for {sample_count} sample(s) from project {project} shown at the link below: @@ -33,37 +33,27 @@ The data contacts are: Kind regards, ClarityX''' - # fill in message with parameters + msg = msg.format( - link='https://'+platform.node() + '/clarity/work-details/' + self.step_id[3:], + link='https://' + platform.node() + '/clarity/work-details/' + self.step_id[3:], sample_count=len(self.samples), project=self.projects[0].name, data_download_contacts='\n'.join(data_download_contacts) ) - subject = ', '.join([p.name for p in self.projects]) + ': Please release data' + subject = ', '.join(p.name for p in self.projects) + ': Please release data' # Send email to list of persons specified in the default section of config self.send_mail(subject, msg) def main(): - # Get the default command line options p = step_argparser() - - # Parse command line options args = p.parse_args() - - # Load the config from the default location load_config() - # Setup the EPP - action = DataReleaseEmailAndUpdateEPP( - args.step_uri, args.username, args.password, args.log_file, - ) - - # Run the EPP + action = DataReleaseEmailAndUpdateEPP(args.step_uri, args.username, args.password, args.log_file) action.run() -if __name__ == "__main__": - main() \ No newline at end of file +if __name__ == '__main__': + main() diff --git a/scripts/email_fluidx_sample_receipt.py b/scripts/email_fluidx_sample_receipt.py index db607f6..0abb1a1 100644 --- a/scripts/email_fluidx_sample_receipt.py +++ b/scripts/email_fluidx_sample_receipt.py @@ -5,26 +5,22 @@ from EPPs.config import load_config class FluidXSampleReceiptEmail(SendMailEPP): + """Sends an email to request FluidX manifest parsing by the project team""" def _run(self): - if len(self.projects) > 1: # check if more than one project in step, only one permitted + if len(self.projects) > 1: raise ValueError('More than one project present in step. 
Only one project per step permitted') - # Create the message to notify the lab team, projects and finance that samples have arrived at the facility msg = 'Hi,\n\n{sample_count} sample(s) have been received for project {project} at:\n\n{link}\n\nKind regards,\nClarityX' - - # fill in message with parameters msg = msg.format( - link='https://'+platform.node() + '/clarity/work-details/' + self.step_id[3:], + link='https://' + platform.node() + '/clarity/work-details/' + self.step_id[3:], sample_count=len(self.samples), project=self.projects[0].name, ) - subject = ', '.join([p.name for p in self.projects]) + ': FluidX Tube Received' + subject = ', '.join(p.name for p in self.projects) + ': FluidX Tube Received' - # Send email to list of persons specified in the projects-lab-finance_only section of config self.send_mail(subject, msg, config_name='projects-lab-finance_only') - # Create the message to request manifest parsing by the project team msg2 = '''Hi, The manifest should now be parsed for project {project} go to the queue for step FluidX Manifest Parsing EG 1.0 ST at: @@ -34,33 +30,19 @@ The manifest should now be parsed for project {project} go to the queue for step Kind regards, ClarityX''' - # fill in message with parameters - msg2 = msg2.format( - link='https://' + platform.node() + '/clarity/queue/752', - project=self.projects[0].name, - ) - subject2 = ', '.join([p.name for p in self.projects]) + ': Parse Manifest Required (FluidX)' + msg2 = msg2.format(link='https://' + platform.node() + '/clarity/queue/752', project=self.projects[0].name) + subject2 = ', '.join(p.name for p in self.projects) + ': Parse Manifest Required (FluidX)' self.send_mail(subject2, msg2, config_name='projects_only') def main(): - # Ge the default command line options p = step_argparser() - - # Parse command line options args = p.parse_args() - - # Load the config from the default location load_config() - # Setup the EPP - action = FluidXSampleReceiptEmail( - args.step_uri, args.username, args.password, args.log_file, - ) - - # Run the EPP + action = FluidXSampleReceiptEmail(args.step_uri, args.username, args.password, args.log_file) action.run() -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/scripts/email_receive_sample.py b/scripts/email_receive_sample.py index 9a5fc96..793c057 100644 --- a/scripts/email_receive_sample.py +++ b/scripts/email_receive_sample.py @@ -5,44 +5,30 @@ from EPPs.config import load_config class ReceiveSampleEmail(SendMailEPP): + """Notifies the relevant teams that samples for a project have been received""" def _run(self): - if len(self.projects)>1: # check if more than one project in step, only one permitted + if len(self.projects) > 1: raise ValueError('More than one project present in step. 
Only one project per step permitted') - # Create the message msg = 'Hi,\n\n{sample_count} sample(s) have been received for {project} at:\n\n{link}\n\nKind regards,\nClarityX' - - # fill in message with parameters msg = msg.format( - link='https://'+platform.node() + '/clarity/work-details/' + self.step_id[3:], + link='https://' + platform.node() + '/clarity/work-details/' + self.step_id[3:], sample_count=len(self.samples), - project=self.projects[0].name, + project=self.projects[0].name ) subject = self.projects[0].name + ': Plate Received' - - # Send email to list of persons specified in the projects-facility-lab-finance_only section of config self.send_mail(subject, msg, config_name='projects-facility-lab-finance_only') def main(): - # Ge the default command line options p = step_argparser() - - # Parse command line options args = p.parse_args() - - # Load the config from the default location load_config() - # Setup the EPP - action = ReceiveSampleEmail( - args.step_uri, args.username, args.password, args.log_file, - ) - - # Run the EPP + action = ReceiveSampleEmail(args.step_uri, args.username, args.password, args.log_file) action.run() -if __name__ == "__main__": - main() \ No newline at end of file +if __name__ == '__main__': + main() diff --git a/scripts/prepare_discard_plate.py b/scripts/prepare_discard_plate.py index 00f1e9b..c7ad4bb 100644 --- a/scripts/prepare_discard_plate.py +++ b/scripts/prepare_discard_plate.py @@ -61,13 +61,15 @@ class FindPlateToRoute(StepEPP): step_artifacts = self.process.all_inputs() self.lims.get_batch(step_artifacts) - # Fetch the sample associated with these artifacts + # Fetch the samples associated with these artifacts samples = [a.samples[0] for a in step_artifacts] self.lims.get_batch(samples) self.info('Found %d Samples in the step', len(samples)) + # Fetch the all the artifacts associated with these samples step_associated_artifacts = fetch_all_artifacts_for_samples(self.lims, samples) self.info('Found %d Analytes (derived samples)', len(step_associated_artifacts)) + # List all the containers and retrieve them containers = list(set([a.container for a in step_associated_artifacts])) batch_limit(self.lims, containers)
User prepared library UDF handling In assign_workflow_receive_sample, the following logic is used: ```python if sample.udf.get('User Prepared Library') == 'Yes': artifacts_to_route_userprepared.add(art) elif not sample.udf.get('User Prepared Library'): artifacts_to_route_preseqlab.add(art) ``` This relies on User Prepared Library being either 'Yes' or not populated, which is expected to be the case in the LIMS, but if there is a sample with the UDF `{'User Prepared Library': 'No'}`, nothing will happen. It would be more robust if the `elif` were an `else`.
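A minimal sketch of the `else`-based routing the issue asks for; `route_artifact` and the `Mock`-built sample are illustrative stand-ins (the repository's real code lives in `assign_workflow_receive_sample`), so only the UDF check itself comes from the issue:

```python
from unittest.mock import Mock

def route_artifact(art, sample, to_userprepared, to_preseqlab):
    # A plain `else` (instead of `elif not sample.udf.get(...)`) also
    # catches an explicit 'No', so no artifact is silently dropped.
    if sample.udf.get('User Prepared Library') == 'Yes':
        to_userprepared.add(art)
    else:
        to_preseqlab.add(art)

userprepared, preseqlab = set(), set()
art = Mock(id='ai2', samples=[Mock(udf={'User Prepared Library': 'No'})])
route_artifact(art, art.samples[0], userprepared, preseqlab)
assert art in preseqlab  # with the original `elif`, nothing happened here
```

This is exactly the case the updated test below exercises: the second fake input now carries `{'User Prepared Library': 'No'}` and is still expected to be routed.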
EdinburghGenomics/clarity_scripts
diff --git a/tests/test_assign_container_name.py b/tests/test_assign_container_name.py index 07b4664..1bf2c61 100644 --- a/tests/test_assign_container_name.py +++ b/tests/test_assign_container_name.py @@ -41,8 +41,8 @@ class TestAssignContainerName(TestEPP): def test_findAvailableContainer(self): - with patch.object(self.epp.lims, 'get_artifacts',side_effect=[['something'], ['something'], []]): - assert self.epp.findAvailableContainer(project='project1', count=1) == 'project1P003' + with patch.object(self.epp.lims, 'get_artifacts', side_effect=[['something'], ['something'], []]): + assert self.epp.find_available_container(project='project1', count=1) == 'project1P003' with patch.object(self.epp.lims, 'get_artifacts', side_effect=[['a']] * 500 + [[]]): - assert self.epp.findAvailableContainer(project='project1', count=1) == 'project1P501' + assert self.epp.find_available_container(project='project1', count=1) == 'project1P501' diff --git a/tests/test_assign_to_workflow_preseqlab.py b/tests/test_assign_to_workflow_preseqlab.py index 7aba75b..00b7614 100644 --- a/tests/test_assign_to_workflow_preseqlab.py +++ b/tests/test_assign_to_workflow_preseqlab.py @@ -1,14 +1,13 @@ -from EPPs.common import StepEPP from scripts.assign_workflow_preseqlab import AssignWorkflowPreSeqLab from tests.test_common import TestEPP, fake_artifact -from unittest.mock import Mock, patch, PropertyMock, MagicMock +from unittest.mock import Mock, patch, PropertyMock def fake_all_inputs(unique=False, resolve=False): - '''Return a list of mocked artifacts which contain sample which contain artifact ... Simple!''' + """Return a list of mocked artifacts which contain samples which contain artifacts ... Simple!""" return ( - Mock(samples=[Mock(artifact=fake_artifact(id='a1'), id='s1', udf={"Proceed To SeqLab": True})]), - Mock(samples=[Mock(artifact=fake_artifact(id='a2'), id='s2', udf={"Proceed To SeqLab": True, "2D Barcode": 'fluidX1'})]) + Mock(samples=[Mock(artifact=fake_artifact('a1'), id='s1', udf={'Proceed To SeqLab': True})]), + Mock(samples=[Mock(artifact=fake_artifact('a2'), id='s2', udf={'Proceed To SeqLab': True, '2D Barcode': 'fluidX1'})]) ) @@ -42,4 +41,3 @@ class TestAssignWorkflowPreSeqLab(TestEPP): pws.assert_called_with(self.epp.lims, 'PreSeqLab EG 6.0', 'Sequencing Plate Preparation EG 2.0') assert sorted([a.id for a in self.epp.lims.route_artifacts.call_args[0][0]]) == ['a1', 'fx2'] assert self.epp.lims.route_artifacts.call_args[1] == {'stage_uri': 'a_uri'} - diff --git a/tests/test_assign_to_workflow_receive_sample.py b/tests/test_assign_to_workflow_receive_sample.py index e9a02e7..fd1492d 100644 --- a/tests/test_assign_to_workflow_receive_sample.py +++ b/tests/test_assign_to_workflow_receive_sample.py @@ -2,13 +2,15 @@ from scripts.assign_workflow_receive_sample import AssignWorkflowReceiveSample from tests.test_common import TestEPP from unittest.mock import Mock, patch, PropertyMock, call + def fake_all_inputs(unique=False, resolve=False): - '''Return a list of mocked artifacts which contain sample with udf''' + """Return a list of mocked artifacts which contain samples with UDFs""" return ( Mock(id='ai1', samples=[Mock(udf={'User Prepared Library': 'Yes'})]), - Mock(id='ai2', samples=[Mock(udf={})]) + Mock(id='ai2', samples=[Mock(udf={'User Prepared Library': 'No'})]) ) + class TestAssignWorkflowReceiveSample(TestEPP): def setUp(self): self.patched_process = patch.object( @@ -33,8 +35,8 @@ class TestAssignWorkflowReceiveSample(TestEPP): self.epp._run() pws.assert_has_calls(( - call(self.epp.lims, 
"User Prepared Library Batch EG1.0 WF", "User Prepared Library Batch EG 1.0 ST"), - call(self.epp.lims, "PreSeqLab EG 6.0", "Spectramax Picogreen EG 6.0") + call(self.epp.lims, 'User Prepared Library Batch EG1.0 WF', 'User Prepared Library Batch EG 1.0 ST'), + call(self.epp.lims, 'PreSeqLab EG 6.0', 'Spectramax Picogreen EG 6.0') )) # first routing (user prepared library) route_args = self.epp.lims.route_artifacts.call_args_list[0] @@ -43,4 +45,4 @@ class TestAssignWorkflowReceiveSample(TestEPP): # second routing (not user prepared library) route_args = self.epp.lims.route_artifacts.call_args_list[1] - assert sorted([a.id for a in route_args[0][0]]) == ['ai2',] + assert sorted([a.id for a in route_args[0][0]]) == ['ai2'] diff --git a/tests/test_assign_to_workflow_seqlab_quantstudio.py b/tests/test_assign_to_workflow_seqlab_quantstudio.py index 9676675..753b9d8 100644 --- a/tests/test_assign_to_workflow_seqlab_quantstudio.py +++ b/tests/test_assign_to_workflow_seqlab_quantstudio.py @@ -1,14 +1,14 @@ from scripts.assign_workflow_seqlab_quantstudio import AssignWorkflowSeqLabQuantStudio from tests.test_common import TestEPP, fake_artifact -from unittest.mock import Mock, patch, PropertyMock, MagicMock, call +from unittest.mock import Mock, patch, PropertyMock, call def fake_all_output(unique=False, resolve=False): - '''Return a list of mocked artifacts which contain sample which contain artifact ... Simple!''' + """Return a list of mocked artifacts which contain samples which contain artifacts... Simple!""" return ( - Mock(id='ao1', samples=[Mock(artifact=fake_artifact(id='a1'), id='s1', udf={"Prep Workflow": "TruSeq PCR-Free DNA Sample Prep", "Species": "Homo sapiens"})]), - Mock(id='ao2', samples=[Mock(artifact=fake_artifact(id='a2'), id='s2', udf={"Prep Workflow": "TruSeq Nano DNA Sample Prep"})]), - Mock(id='ao3', samples=[Mock(artifact=fake_artifact(id='a3'), id='s3', udf={"Prep Workflow": "TruSeq Nano DNA Sample Prep", "Species": "Homo sapiens", "2D Barcode": 'fluidX1'})]) + Mock(id='ao1', samples=[Mock(artifact=fake_artifact('a1'), id='s1', udf={'Prep Workflow': 'TruSeq PCR-Free DNA Sample Prep', 'Species': 'Homo sapiens'})]), + Mock(id='ao2', samples=[Mock(artifact=fake_artifact('a2'), id='s2', udf={'Prep Workflow': 'TruSeq Nano DNA Sample Prep'})]), + Mock(id='ao3', samples=[Mock(artifact=fake_artifact('a3'), id='s3', udf={'Prep Workflow': 'TruSeq Nano DNA Sample Prep', 'Species': 'Homo sapiens', '2D Barcode': 'fluidX1'})]) ) @@ -40,9 +40,9 @@ class TestAssignWorkflowSeqLabQuantStudio(TestEPP): self.epp._run() pws.assert_has_calls(( - call(self.epp.lims, "TruSeq PCR-Free DNA Sample Prep", "Visual QC"), - call(self.epp.lims, "TruSeq Nano DNA Sample Prep", "Visual QC"), - call(self.epp.lims, "QuantStudio EG1.0", "QuantStudio Plate Preparation EG1.0"), + call(self.epp.lims, 'TruSeq PCR-Free DNA Sample Prep', 'Visual QC'), + call(self.epp.lims, 'TruSeq Nano DNA Sample Prep', 'Visual QC'), + call(self.epp.lims, 'QuantStudio EG1.0', 'QuantStudio Plate Preparation EG1.0'), )) # first routing (pcr free) route_args = self.epp.lims.route_artifacts.call_args_list[0] @@ -56,4 +56,3 @@ class TestAssignWorkflowSeqLabQuantStudio(TestEPP): # third routing (quantstudio) route_args = self.epp.lims.route_artifacts.call_args_list[2] assert sorted([a.id for a in route_args[0][0]]) == ['a1', 'fx3'] - diff --git a/tests/test_assign_to_workflow_stage.py b/tests/test_assign_to_workflow_stage.py index 2a34bbf..c3b3817 100644 --- a/tests/test_assign_to_workflow_stage.py +++ 
b/tests/test_assign_to_workflow_stage.py @@ -6,9 +6,6 @@ from scripts import assign_to_workflow_stage class TestAsignWorkflowStage(TestEPP): def setUp(self): - - self.patched_process = patch.object(StepEPP, 'process', new_callable=PropertyMock(return_value=Mock(all_inputs=fake_all_inputs))) - self.patched_lims = patch.object(StepEPP, 'lims', new_callable=PropertyMock) self.epp = assign_to_workflow_stage.AssignWorkflowStage( 'http://server:8080/a_step_uri', 'a_user', @@ -30,18 +27,20 @@ class TestAsignWorkflowStage(TestEPP): only_once=True ) - def test_assign(self): - with patch('scripts.assign_to_workflow_stage.get_workflow_stage', return_value=Mock(uri='a_uri')) as p, \ - self.patched_lims, self.patched_process: + @patch.object(StepEPP, 'lims', new_callable=PropertyMock) + @patch('scripts.assign_to_workflow_stage.get_workflow_stage', return_value=Mock(uri='a_uri')) + def test_assign(self, mocked_workflow_stage, mocked_lims): + with patch.object(StepEPP, 'process', new_callable=PropertyMock(return_value=Mock(all_inputs=fake_all_inputs))): self.epp._run() - p.assert_called_with(self.epp.lims, self.epp.workflow_name, 'a_stage_name') + mocked_workflow_stage.assert_called_with(self.epp.lims, self.epp.workflow_name, 'a_stage_name') exp_artifacts = ['a1', 'a2'] assert sorted([a.id for a in self.epp.lims.route_artifacts.call_args[0][0]]) == exp_artifacts assert self.epp.lims.route_artifacts.call_args[1] == {'stage_uri': 'a_uri'} - with patch('scripts.assign_to_workflow_stage.get_workflow_stage', return_value=Mock(uri='a_uri')) as p, \ - self.patched_lims, self.patched_process: + mocked_workflow_stage.reset_mock() + mocked_lims.reset_mock() + self.epp2._run() - p.assert_called_with(self.epp2.lims, self.epp2.workflow_name, 'a_stage_name') + mocked_workflow_stage.assert_called_with(self.epp2.lims, self.epp2.workflow_name, 'a_stage_name') assert self.epp2.lims.route_artifacts.call_count == 0 diff --git a/tests/test_assign_to_workflow_user_library.py b/tests/test_assign_to_workflow_user_library.py index 232b9a9..fde1ff3 100644 --- a/tests/test_assign_to_workflow_user_library.py +++ b/tests/test_assign_to_workflow_user_library.py @@ -4,10 +4,10 @@ from unittest.mock import Mock, patch, PropertyMock, call def fake_all_output(unique=False, resolve=False): - '''Return a list of mocked artifacts which contain sample which contain artifact ... Simple!''' + """Return a list of mocked artifacts which contain samples which contain artifacts... 
Simple!""" return ( - Mock(id='ao1', samples=[Mock(artifact=fake_artifact(id='a1'), id='s1', udf={"Prep Workflow": "TruSeq PCR-Free DNA Sample Prep"})]), - Mock(id='ao2', samples=[Mock(artifact=fake_artifact(id='a2'), id='s2', udf={"Prep Workflow": "TruSeq Nano DNA Sample Prep"})]), + Mock(id='ao1', samples=[Mock(artifact=fake_artifact('a1'), id='s1', udf={'Prep Workflow': 'TruSeq PCR-Free DNA Sample Prep'})]), + Mock(id='ao2', samples=[Mock(artifact=fake_artifact('a2'), id='s2', udf={'Prep Workflow': 'TruSeq Nano DNA Sample Prep'})]), ) @@ -36,8 +36,8 @@ class TestAssignWorkflowUserPreparedLibrary(TestEPP): self.epp._run() pws.assert_has_calls(( - call(self.epp.lims, "TruSeq PCR-Free DNA Sample Prep", "SEMI-AUTOMATED - Make and Read qPCR Quant"), - call(self.epp.lims, "TruSeq Nano DNA Sample Prep", "SEMI-AUTOMATED - Make LQC & Caliper GX QC"), + call(self.epp.lims, 'TruSeq PCR-Free DNA Sample Prep', 'SEMI-AUTOMATED - Make and Read qPCR Quant'), + call(self.epp.lims, 'TruSeq Nano DNA Sample Prep', 'SEMI-AUTOMATED - Make LQC & Caliper GX QC'), )) # first routing (pcr free) route_args = self.epp.lims.route_artifacts.call_args_list[0] diff --git a/tests/test_common.py b/tests/test_common.py index 4a2ebee..43d6423 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -5,6 +5,7 @@ from unittest.mock import Mock, PropertyMock, patch import EPPs from EPPs.common import StepEPP, RestCommunicationEPP, find_newest_artifact_originating_from + class NamedMock(Mock): @property def name(self): @@ -15,18 +16,18 @@ class MockedSamples(NamedMock): project = NamedMock(real_name='10015AT') -def fake_artifact(id): +def fake_artifact(_id): return Mock( - id=str(id), + id=str(_id), workflow_stages_and_statuses=[(Mock(uri='a_uri'), 'COMPLETE', 'stage1')] ) def fake_all_inputs(unique=False, resolve=False): - """Return a list of mocked artifacts which contain samples which contain artifacts ... Simple!""" + """Return a list of mocked artifacts which contain samples which contain artifacts... 
Simple!""" return ( - Mock(samples=[Mock(artifact=fake_artifact(id='a1'), id='s1')]), - Mock(samples=[Mock(artifact=fake_artifact(id='a2'), id='s2')]) + Mock(samples=[Mock(artifact=fake_artifact('a1'), id='s1')]), + Mock(samples=[Mock(artifact=fake_artifact('a2'), id='s2')]) ) diff --git a/tests/test_convert_and_dispatch_genotypes.py b/tests/test_convert_and_dispatch_genotypes.py index d743099..177010d 100644 --- a/tests/test_convert_and_dispatch_genotypes.py +++ b/tests/test_convert_and_dispatch_genotypes.py @@ -10,6 +10,7 @@ from scripts.convert_and_dispatch_genotypes import GenotypeConversion, UploadVcf def open_files(list_of_files): return [open(f) for f in list_of_files] + class TestGenotypeConversion(TestCommon): test_records = { 'id1': {'test_sample': '0/1', 'SNP': ['chr2', '120', 'id1', 'T', 'C', '.', '.', '.', 'GT']}, @@ -19,8 +20,9 @@ class TestGenotypeConversion(TestCommon): } def setUp(self): - self.geno_conversion = GenotypeConversion(open_files([self.genotype_csv]), self.accufill_log, 'igmm', - self.small_reference_fai, flank_length=600) + self.geno_conversion = GenotypeConversion( + open_files([self.genotype_csv]), self.accufill_log, 'igmm', self.small_reference_fai, flank_length=600 + ) def test_generate_vcf(self): # header_lines = ['##header line1', '##header line2'] @@ -91,14 +93,8 @@ class TestGenotypeConversion(TestCommon): assert find(valid_other_fieldnames, observed_fieldnames) == 'OTHER' - - - class TestUploadVcfToSamples(TestEPP): - - def setUp(self): - self.epp = UploadVcfToSamples( 'http://server:8080/a_step_uri', 'a_user', diff --git a/tests/test_emails.py b/tests/test_emails.py index d6d10aa..b77bc8f 100644 --- a/tests/test_emails.py +++ b/tests/test_emails.py @@ -1,9 +1,6 @@ import os import platform - -import pytest from egcg_core.config import cfg - from EPPs.common import SendMailEPP from scripts.email_data_release import DataReleaseEmail from scripts.email_data_trigger import DataReleaseEmailAndUpdateEPP @@ -14,51 +11,42 @@ from unittest.mock import Mock, patch, PropertyMock class TestEmailEPP(TestEPP): + patch_project_multi = patch.object( + SendMailEPP, + 'projects', + new_callable=PropertyMock(return_value=[NamedMock(real_name='project1'), NamedMock(real_name='project2')]) + ) + + patch_project_single = patch.object( + SendMailEPP, + 'projects', + new_callable=PropertyMock(return_value=[NamedMock(real_name='project1')]) + ) + + patch_samples = patch.object( + SendMailEPP, + 'samples', + new_callable=PropertyMock(return_value=[NamedMock(real_name='sample1'), NamedMock(real_name='sample2')]) + ) + + patch_email = patch('egcg_core.notifications.email.send_email') def setUp(self): super().setUp() cfg.load_config_file(os.path.join(self.etc_path, 'example_clarity_script.yml')) - - project1 = NamedMock(real_name='project1') - project2 = NamedMock(real_name='project2') - - self.patch_project_multi = patch.object( - SendMailEPP, - 'projects', - new_callable=PropertyMock(return_value=[project1, project2]) - ) - - project1 = NamedMock(real_name='project1') - self.patch_project_single = patch.object( - SendMailEPP, - 'projects', - new_callable=PropertyMock(return_value=[project1]) - ) self.patch_process = self.create_patch_process(SendMailEPP) - sample1 = NamedMock(real_name='sample1') - sample2 = NamedMock(real_name='sample2') - self.patch_samples = patch.object( - SendMailEPP, - 'samples', - new_callable=PropertyMock(return_value=[sample1, sample2]) - ) - - self.patch_email = patch('egcg_core.notifications.email.send_email') - - def _test_only_one_project(self, 
epp): - with pytest.raises(ValueError): - with self.patch_project_multi: - epp._run() + def test_only_one_project(self): + try: + with self.assertRaises(ValueError): + with self.patch_project_multi: + self.epp._run() + except NotImplementedError: + print('Skipping test for abstract class: ' + self.epp.__class__.__name__) def create_epp(self, klass): - return klass( - 'http://server:8080/a_step_uri', - 'a_user', - 'a_password', - self.log_file - ) + return klass('http://server:8080/a_step_uri', 'a_user', 'a_password', self.log_file) @staticmethod def create_patch_process(klass, udfs=None): @@ -73,17 +61,18 @@ class TestDataReleaseEmailAndUpdateEPP(TestEmailEPP): def setUp(self): super().setUp() self.epp = self.create_epp(DataReleaseEmailAndUpdateEPP) - self.patch_process = self.create_patch_process(DataReleaseEmailAndUpdateEPP, { + self.patch_process = self.create_patch_process( + DataReleaseEmailAndUpdateEPP, + { 'Data Download Contact Name 1': 'John Doe', 'Data Download Contact Name 2': 'Jane Doe', 'Is Contact 1 A New or Existing User?': 'New User', 'Is Contact 2 A New or Existing User?': 'Existing User' - }) + } + ) def test_send_email(self): - self._test_only_one_project(self.epp) - - with self.patch_project_single, self.patch_process, self.patch_samples, self.patch_email as mocked_send_email: + with self.patch_project_single, self.patch_process, self.patch_samples, self.patch_email as mocked_email: self.epp._run() msg = '''Hi Bioinformatics, @@ -99,7 +88,7 @@ Jane Doe (Existing User) Kind regards, ClarityX''' msg = msg.format(localmachine=platform.node()) - mocked_send_email.assert_called_with( + mocked_email.assert_called_with( msg=msg, subject='project1: Please release data', mailhost='smtp.test.me', @@ -114,13 +103,17 @@ class TestDataReleaseEmail(TestEmailEPP): def setUp(self): super().setUp() self.epp = self.create_epp(DataReleaseEmail) - self.patch_process1 = self.create_patch_process(DataReleaseEmail, {'Request Customer Survey (Final Data Release)': 'No'}) - self.patch_process2 = self.create_patch_process(DataReleaseEmail, {'Request Customer Survey (Final Data Release)': 'Yes'}) + self.patch_process1 = self.create_patch_process( + DataReleaseEmail, + {'Request Customer Survey (Final Data Release)': 'No'} + ) + self.patch_process2 = self.create_patch_process( + DataReleaseEmail, + {'Request Customer Survey (Final Data Release)': 'Yes'} + ) def test_send_email(self): - self._test_only_one_project(self.epp) - - with self.patch_project_single, self.patch_process1, self.patch_samples, self.patch_email as mocked_send_email: + with self.patch_project_single, self.patch_process1, self.patch_samples, self.patch_email as mocked_email: self.epp._run() msg = '''Hi, @@ -132,7 +125,7 @@ Kind regards, ClarityX''' msg = msg.format(localmachine=platform.node()) - mocked_send_email.assert_called_with( + mocked_email.assert_called_with( msg=msg, subject='project1: Data Released', mailhost='smtp.test.me', @@ -170,8 +163,6 @@ class TestFluidXSampleReceiptEmail(TestEmailEPP): self.epp = self.create_epp(FluidXSampleReceiptEmail) def test_send_email(self): - self._test_only_one_project(self.epp) - with self.patch_project_single, self.patch_process, self.patch_samples, self.patch_email as mocked_send_email: self.epp._run() msg = '''Hi, @@ -215,15 +206,14 @@ ClarityX''' strict=True ) + class TestReceiveSampleEmail(TestEmailEPP): def setUp(self): super().setUp() self.epp = self.create_epp(ReceiveSampleEmail) def test_send_email(self): - self._test_only_one_project(self.epp) - - with 
self.patch_project_single, self.patch_process, self.patch_samples, self.patch_email as mocked_send_email: + with self.patch_project_single, self.patch_process, self.patch_samples, self.patch_email as mocked_email: self.epp._run() msg = '''Hi, @@ -234,8 +224,7 @@ https://{localmachine}/clarity/work-details/tep_uri Kind regards, ClarityX''' msg = msg.format(localmachine=platform.node()) - - mocked_send_email.assert_called_with( + mocked_email.assert_called_with( msg=msg, subject='project1: Plate Received', mailhost='smtp.test.me',
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 13 }
0.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
asana==0.6.5 attrs==22.2.0 cached-property==1.5.2 certifi==2021.5.30 -e git+https://github.com/EdinburghGenomics/clarity_scripts.git@37d6a75d81d94652c5c29228f86954b0ea96f8bb#egg=clarity_scripts EGCG-Core==0.7.3 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==2.8 MarkupSafe==2.0.1 oauthlib==3.2.2 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyclarity-lims==0.4.8 pyparsing==3.1.4 pytest==7.0.1 PyYAML==6.0.1 requests==2.14.2 requests-oauthlib==0.6.2 six==1.10.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: clarity_scripts channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - asana==0.6.5 - attrs==22.2.0 - cached-property==1.5.2 - egcg-core==0.7.3 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==2.8 - markupsafe==2.0.1 - oauthlib==3.2.2 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyclarity-lims==0.4.8 - pyparsing==3.1.4 - pytest==7.0.1 - pyyaml==6.0.1 - requests==2.14.2 - requests-oauthlib==0.6.2 - six==1.10.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/clarity_scripts
[ "tests/test_assign_container_name.py::TestAssignContainerName::test_findAvailableContainer", "tests/test_assign_to_workflow_receive_sample.py::TestAssignWorkflowReceiveSample::test_assign" ]
[]
[ "tests/test_assign_container_name.py::TestEPP::test_init", "tests/test_assign_container_name.py::TestAssignContainerName::test_assign_existing_arts", "tests/test_assign_container_name.py::TestAssignContainerName::test_assign_no_arts", "tests/test_assign_container_name.py::TestAssignContainerName::test_init", "tests/test_assign_to_workflow_preseqlab.py::TestEPP::test_init", "tests/test_assign_to_workflow_preseqlab.py::TestAssignWorkflowPreSeqLab::test_assign", "tests/test_assign_to_workflow_preseqlab.py::TestAssignWorkflowPreSeqLab::test_init", "tests/test_assign_to_workflow_receive_sample.py::TestEPP::test_init", "tests/test_assign_to_workflow_receive_sample.py::TestAssignWorkflowReceiveSample::test_init", "tests/test_assign_to_workflow_seqlab_quantstudio.py::TestEPP::test_init", "tests/test_assign_to_workflow_seqlab_quantstudio.py::TestAssignWorkflowSeqLabQuantStudio::test_assign", "tests/test_assign_to_workflow_seqlab_quantstudio.py::TestAssignWorkflowSeqLabQuantStudio::test_init", "tests/test_assign_to_workflow_stage.py::TestEPP::test_init", "tests/test_assign_to_workflow_stage.py::TestAsignWorkflowStage::test_assign", "tests/test_assign_to_workflow_stage.py::TestAsignWorkflowStage::test_init", "tests/test_assign_to_workflow_user_library.py::TestEPP::test_init", "tests/test_assign_to_workflow_user_library.py::TestAssignWorkflowUserPreparedLibrary::test_assign", "tests/test_assign_to_workflow_user_library.py::TestAssignWorkflowUserPreparedLibrary::test_init", "tests/test_common.py::TestEPP::test_init", "tests/test_common.py::TestRestCommunicationEPP::test_interaction", "tests/test_common.py::TestFindNewestArtifactOriginatingFrom::test_find_newest_artifact_originating_from", "tests/test_convert_and_dispatch_genotypes.py::TestEPP::test_init", "tests/test_convert_and_dispatch_genotypes.py::TestGenotypeConversion::test_find_field", "tests/test_convert_and_dispatch_genotypes.py::TestGenotypeConversion::test_generate_vcf", "tests/test_convert_and_dispatch_genotypes.py::TestGenotypeConversion::test_get_genotype_from_call", "tests/test_convert_and_dispatch_genotypes.py::TestGenotypeConversion::test_init_genotype_csv", "tests/test_convert_and_dispatch_genotypes.py::TestGenotypeConversion::test_order_from_fai", "tests/test_convert_and_dispatch_genotypes.py::TestGenotypeConversion::test_parse_QuantStudio_AIF_genotype", "tests/test_convert_and_dispatch_genotypes.py::TestGenotypeConversion::test_parse_genome_fai", "tests/test_convert_and_dispatch_genotypes.py::TestGenotypeConversion::test_vcf_header_from_ref_length", "tests/test_convert_and_dispatch_genotypes.py::TestUploadVcfToSamples::test_init", "tests/test_convert_and_dispatch_genotypes.py::TestUploadVcfToSamples::test_upload", "tests/test_emails.py::TestEPP::test_init", "tests/test_emails.py::TestEmailEPP::test_init", "tests/test_emails.py::TestEmailEPP::test_only_one_project", "tests/test_emails.py::TestDataReleaseEmailAndUpdateEPP::test_init", "tests/test_emails.py::TestDataReleaseEmailAndUpdateEPP::test_only_one_project", "tests/test_emails.py::TestDataReleaseEmailAndUpdateEPP::test_send_email", "tests/test_emails.py::TestDataReleaseEmail::test_init", "tests/test_emails.py::TestDataReleaseEmail::test_only_one_project", "tests/test_emails.py::TestDataReleaseEmail::test_send_email", "tests/test_emails.py::TestFluidXSampleReceiptEmail::test_init", "tests/test_emails.py::TestFluidXSampleReceiptEmail::test_only_one_project", "tests/test_emails.py::TestFluidXSampleReceiptEmail::test_send_email", 
"tests/test_emails.py::TestReceiveSampleEmail::test_init", "tests/test_emails.py::TestReceiveSampleEmail::test_only_one_project", "tests/test_emails.py::TestReceiveSampleEmail::test_send_email" ]
[]
MIT License
2,083
[ "scripts/assign_workflow_user_library.py", "scripts/prepare_discard_plate.py", "scripts/email_fluidx_sample_receipt.py", "scripts/email_receive_sample.py", "scripts/assign_workflow_receive_sample.py", "scripts/assign_container_name.py", "scripts/convert_and_dispatch_genotypes.py", "scripts/create_reagents_for_run.py", "scripts/assign_workflow_seqlab_quantstudio.py", "scripts/email_data_trigger.py", "requirements.txt", "scripts/assign_workflow_preseqlab.py", "scripts/email_data_release.py" ]
[ "scripts/assign_workflow_user_library.py", "scripts/prepare_discard_plate.py", "scripts/email_fluidx_sample_receipt.py", "scripts/email_receive_sample.py", "scripts/assign_workflow_receive_sample.py", "scripts/assign_container_name.py", "scripts/convert_and_dispatch_genotypes.py", "scripts/create_reagents_for_run.py", "scripts/assign_workflow_seqlab_quantstudio.py", "scripts/email_data_trigger.py", "requirements.txt", "scripts/assign_workflow_preseqlab.py", "scripts/email_data_release.py" ]
zopefoundation__RestrictedPython-97
a63f5c3b600b00cb5bbe779b31e75e94413f8390
2018-01-25 11:29:02
a63f5c3b600b00cb5bbe779b31e75e94413f8390
diff --git a/docs/CHANGES.rst b/docs/CHANGES.rst index 91bc291..9af2e61 100644 --- a/docs/CHANGES.rst +++ b/docs/CHANGES.rst @@ -7,6 +7,9 @@ Changes - Warn when using another Python implementation than CPython as it is not safe to use RestrictedPython with other versions than CPyton. See https://bitbucket.org/pypy/pypy/issues/2653 for PyPy. +- Allow to use list comprehensions in the default implementation of + ``RestrictionCapableEval.eval()``. + 4.0b2 (2017-09-15) ------------------ @@ -62,7 +65,7 @@ Changes - Mostly complete rewrite based on Python AST module. [loechel (Alexander Loechel), icemac (Michael Howitz), stephan-hof (Stephan Hofmockel), tlotze (Thomas Lotze)] - + - Support Python versions 3.4 up to 3.6. - switch to pytest diff --git a/src/RestrictedPython/Eval.py b/src/RestrictedPython/Eval.py index 836ea5e..221cd2d 100644 --- a/src/RestrictedPython/Eval.py +++ b/src/RestrictedPython/Eval.py @@ -35,6 +35,11 @@ def default_guarded_getitem(ob, index): return ob[index] +def default_guarded_getiter(ob): + # No restrictions. + return ob + + class RestrictionCapableEval(object): """A base class for restricted code.""" @@ -99,7 +104,8 @@ class RestrictionCapableEval(object): global_scope = { '_getattr_': default_guarded_getattr, - '_getitem_': default_guarded_getitem + '_getitem_': default_guarded_getitem, + '_getiter_': default_guarded_getiter, } global_scope.update(self.globals)
list comprehension expression raises "TypeError: 'NoneType' object is not subscriptable" Tested with Python 3.6.1 and RestrictedPython 4.0b2: ``` (Pdb) RestrictionCapableEval('[item for item in [1,2]]').eval(context) *** TypeError: 'NoneType' object is not subscriptable (Pdb) [item for item in [1,2]] [1, 2] ``` where context is: ``` (Pdb) context {'variables': {'test_run_identifier': 'QA-240dc1b5-f6bd-11e7-888e-080027edbcd8-skin1', 'skin': 'skin1', 'datetime': '2018-01-11T11:49:48.439194', 'base_url': 'http://'}, 'len': <built-in function len>, 'list': <class 'list'>, 'match': <function match at 0x7f2b3c55bd90>} ``` Any suggestions? Thanks in advance.
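A minimal sketch (assuming RestrictedPython 4.x's `compile_restricted` API) of what the patch above supplies: the restricted compiler rewrites a comprehension's iterable into a `_getiter_(...)` call, so evaluation fails unless that guard is present in the globals — which is what the new `default_guarded_getiter` pass-through provides:

```python
from RestrictedPython import compile_restricted

# '[item for item in (1, 2)]' is compiled with the iterable wrapped in
# `_getiter_(...)`, so the guard must exist at evaluation time.
code = compile_restricted('[item for item in (1, 2)]', '<string>', 'eval')
guards = {'_getiter_': lambda ob: ob}  # unrestricted pass-through, as in the fix
assert eval(code, guards) == [1, 2]
```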
zopefoundation/RestrictedPython
diff --git a/tests/test_eval.py b/tests/test_eval.py index 0499a04..d1acf10 100644 --- a/tests/test_eval.py +++ b/tests/test_eval.py @@ -95,3 +95,10 @@ def test_Eval__RestictionCapableEval__eval_1(): ob.globals['c'] = 8 result = ob.eval(dict(a=1, b=2, c=4)) assert result == 11 + + +def test_Eval__RestictionCapableEval__eval__2(): + """It allows to use list comprehensions.""" + ob = RestrictionCapableEval("[item for item in (1, 2)]") + result = ob.eval({}) + assert result == [1, 2]
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
4.02
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[develop,docs,test]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 docutils==0.18.1 fancycompleter==0.9.1 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 isort==5.10.1 Jinja2==3.0.3 MarkupSafe==2.0.1 packaging==21.3 pdbpp==0.10.3 pluggy==1.0.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pyrepl==0.9.0 pytest==7.0.1 pytest-cov==4.0.0 pytest-mock==3.6.1 pytz==2025.2 requests==2.27.1 -e git+https://github.com/zopefoundation/RestrictedPython.git@a63f5c3b600b00cb5bbe779b31e75e94413f8390#egg=RestrictedPython snowballstemmer==2.2.0 Sphinx==5.3.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 wmctrl==0.5 zipp==3.6.0
name: RestrictedPython channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - docutils==0.18.1 - fancycompleter==0.9.1 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isort==5.10.1 - jinja2==3.0.3 - markupsafe==2.0.1 - packaging==21.3 - pdbpp==0.10.3 - pluggy==1.0.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pyrepl==0.9.0 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytz==2025.2 - requests==2.27.1 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - wmctrl==0.5 - zipp==3.6.0 prefix: /opt/conda/envs/RestrictedPython
[ "tests/test_eval.py::test_Eval__RestictionCapableEval__eval__2" ]
[]
[ "tests/test_eval.py::test_init", "tests/test_eval.py::test_init_with_syntax_error", "tests/test_eval.py::test_prepRestrictedCode", "tests/test_eval.py::test_call", "tests/test_eval.py::test_eval", "tests/test_eval.py::test_Eval__RestrictionCapableEval_1", "tests/test_eval.py::test_Eval__RestrictionCapableEval__2", "tests/test_eval.py::test_Eval__RestictionCapableEval__prepUnrestrictedCode_1", "tests/test_eval.py::test_Eval__RestictionCapableEval__prepUnrestrictedCode_2", "tests/test_eval.py::test_Eval__RestictionCapableEval__prepRestrictedCode_1", "tests/test_eval.py::test_Eval__RestictionCapableEval__eval_1" ]
[]
Zope Public License 2.1
2,084
[ "docs/CHANGES.rst", "src/RestrictedPython/Eval.py" ]
[ "docs/CHANGES.rst", "src/RestrictedPython/Eval.py" ]
sdispater__pendulum-174
5a596b0ab75960a8e86700cbe0f9ae6ec4fe1442
2018-01-26 00:08:01
5a596b0ab75960a8e86700cbe0f9ae6ec4fe1442
sdispater: Thanks a lot for this and the detailed explanation! One thing though: Could you base your PR on the `1.x` branch? The `master` branch will soon be holding the work towards the `2.0` release, so it will have some breaking changes. Delgan: Done. 👍
diff --git a/pendulum/pendulum.py b/pendulum/pendulum.py index cbbf94a..6cca7f6 100644 --- a/pendulum/pendulum.py +++ b/pendulum/pendulum.py @@ -1980,16 +1980,10 @@ class Pendulum(Date, datetime.datetime): return(self, ) def _getstate(self, protocol=3): - tz = self.timezone_name - - # Fix for fixed timezones not being properly unpickled - if isinstance(self.tz, FixedTimezone): - tz = self.offset_hours - return ( self.year, self.month, self.day, self.hour, self.minute, self.second, self.microsecond, - tz, + self.tzinfo, self.fold ) diff --git a/pendulum/tz/timezone.py b/pendulum/tz/timezone.py index cb520de..416b352 100644 --- a/pendulum/tz/timezone.py +++ b/pendulum/tz/timezone.py @@ -512,7 +512,7 @@ class FixedTimezone(Timezone): return (dt + self._tzinfo.adjusted_offset).replace(tzinfo=self._tzinfo) def __getinitargs__(self): - return self._offset + return (self._offset, ) class _UTC(FixedTimezone):
Timezone is empty on Windows 10 bash shell, causing an error while unpickling Pendulum objects Hello. This may be related to #161, but I'm not sure. When I run this using the Windows 10 bash shell: ```python import pendulum import pendulum.version import pickle import sys print("Python version:", sys.version) print("Pendulum version:", pendulum.version.VERSION) now = pendulum.now() print(now, now.timezone) pickled = pickle.dumps(now) pickle.loads(pickled) ``` It raises an error (but it works fine with the native Windows 10 Python installation): ``` Python version: 3.6.3 (default, Oct 4 2017, 02:55:45) [GCC 5.4.0 20160609] Pendulum version: 1.3.2 2017-12-11T20:31:49.750107+01:00 <Timezone []> Traceback (most recent call last): File "test.py", line 12, in <module> pickle.loads(pickled) File "/mnt/c/Users/delgan/Desktop/project/env/lib/python3.6/site-packages/pendulum/pendulum.py", line 155, in __init__ self._tz = self._safe_create_datetime_zone(tzinfo) File "/mnt/c/Users/delgan/Desktop/project/env/lib/python3.6/site-packages/pendulum/pendulum.py", line 82, in _safe_create_datetime_zone return cls._timezone(obj) File "/mnt/c/Users/delgan/Desktop/project/env/lib/python3.6/site-packages/pendulum/pendulum.py", line 107, in _timezone return Timezone.load(zone) File "/mnt/c/Users/delgan/Desktop/project/env/lib/python3.6/site-packages/pendulum/tz/timezone.py", line 90, in load utc_transition_times) = Loader.load(name) File "/mnt/c/Users/delgan/Desktop/project/env/lib/python3.6/site-packages/pendulum/tz/loader.py", line 33, in load with tz_file(name) as f: File "/mnt/c/Users/delgan/Desktop/project/env/lib/python3.6/site-packages/pytzdata/__init__.py", line 32, in tz_file filepath = tz_path(name) File "/mnt/c/Users/delgan/Desktop/project/env/lib/python3.6/site-packages/pytzdata/__init__.py", line 62, in tz_path raise ValueError('Invalid timezone') ValueError: Invalid timezone ``` As you can see, the timezone is empty, which causes an exception equivalent to `pendulum.now(tz='')`. Yet, `now()` itself worked fine, as the printed time contains `+01:00`. Do you have any idea where the bug might come from?
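Two mechanics make the patch above work; the sketch below illustrates the second one. It relies on `datetime.tzinfo.__reduce__` pickling a subclass as `(cls, self.__getinitargs__())` and reconstructing it as `cls(*args)` — hence `__getinitargs__` must return a tuple, which is why the patch changes `return self._offset` to `return (self._offset, )`. The other half of the patch pickles `self.tzinfo` itself instead of the zone *name*, so an empty name (as on WSL here) can no longer poison the round trip. The class below is a toy, not pendulum's `FixedTimezone`:

```python
import pickle
from datetime import timedelta, tzinfo

class Fixed(tzinfo):
    """Toy fixed-offset zone; offset is whole seconds east of UTC."""
    def __init__(self, offset):
        self._offset = offset

    def utcoffset(self, dt):
        return timedelta(seconds=self._offset)

    def __getinitargs__(self):
        # tzinfo.__reduce__ does cls(*self.__getinitargs__()), so this
        # must be a 1-tuple; returning the bare int breaks pickling.
        return (self._offset,)

tz = pickle.loads(pickle.dumps(Fixed(3600)))
assert tz.utcoffset(None) == timedelta(seconds=3600)
```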
sdispater/pendulum
diff --git a/tests/pendulum_tests/test_behavior.py b/tests/pendulum_tests/test_behavior.py index 12a1285..76d90ed 100644 --- a/tests/pendulum_tests/test_behavior.py +++ b/tests/pendulum_tests/test_behavior.py @@ -6,6 +6,7 @@ from copy import deepcopy from datetime import datetime, date, time, timedelta from pendulum import Pendulum, timezone from pendulum.tz.timezone import Timezone +from pendulum.tz.loader import Loader from .. import AbstractTestCase @@ -101,6 +102,14 @@ class BehaviorTest(AbstractTestCase): self.assertEqual(dt1, dt2) + def test_pickle_with_empty_tzinfo_name(self): + empty_timezone = Timezone('', *Loader.load('Europe/Paris')) + dt1 = Pendulum(2016, 8, 27, 12, 34, 56, 123456, empty_timezone) + s = pickle.dumps(dt1) + dt2 = pickle.loads(s) + + self.assertEqual(dt1, dt2) + def test_proper_dst(self): dt = pendulum.create(1941, 7, 1, tz='Europe/Amsterdam')
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 1 }, "num_modified_files": 2 }
1.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 exceptiongroup==1.2.2 execnet==2.1.1 iniconfig==2.1.0 packaging==24.2 -e git+https://github.com/sdispater/pendulum.git@5a596b0ab75960a8e86700cbe0f9ae6ec4fe1442#egg=pendulum pluggy==1.5.0 pytest==8.3.5 pytest-asyncio==0.26.0 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-xdist==3.6.1 python-dateutil==2.9.0.post0 pytzdata==2020.1 six==1.17.0 tomli==2.2.1 typing_extensions==4.13.0 tzlocal==5.3.1
name: pendulum channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - exceptiongroup==1.2.2 - execnet==2.1.1 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - pytest-asyncio==0.26.0 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-xdist==3.6.1 - python-dateutil==2.9.0.post0 - pytzdata==2020.1 - six==1.17.0 - tomli==2.2.1 - typing-extensions==4.13.0 - tzlocal==5.3.1 prefix: /opt/conda/envs/pendulum
[ "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_pickle_with_empty_tzinfo_name" ]
[]
[ "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_astimezone", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_combine", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_ctime", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_date", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_deepcopy", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_deepcopy_datetime", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_dst", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_fromordinal", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_fromtimestamp", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_hash", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_isocalendar", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_isoformat", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_isoweekday", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_pickle", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_pickle_timezone", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_pickle_with_integer_tzinfo", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_proper_dst", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_time", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_timetuple", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_timetz", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_toordinal", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_tzname", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_utcfromtimestamp", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_utcoffset", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_utctimetuple", "tests/pendulum_tests/test_behavior.py::BehaviorTest::test_weekday" ]
[]
MIT License
2,085
[ "pendulum/pendulum.py", "pendulum/tz/timezone.py" ]
[ "pendulum/pendulum.py", "pendulum/tz/timezone.py" ]
sigmavirus24__github3.py-776
785562d89a01545e1efe54efc8aba5e8a15cdd18
2018-01-26 09:58:59
785562d89a01545e1efe54efc8aba5e8a15cdd18
diff --git a/github3/models.py b/github3/models.py index ec4bf8bf..88c482de 100644 --- a/github3/models.py +++ b/github3/models.py @@ -31,6 +31,7 @@ class GitHubCore(object): basic attributes and methods to other sub-classes that are very useful to have. """ + _ratelimit_resource = 'core' def __init__(self, json, session=None): if hasattr(session, 'session'): @@ -305,7 +306,7 @@ class GitHubCore(object): :returns: int """ json = self._json(self._get(self._github_url + '/rate_limit'), 200) - core = json.get('resources', {}).get('core', {}) + core = json.get('resources', {}).get(self._ratelimit_resource, {}) self._remaining = core.get('remaining', 0) return self._remaining diff --git a/github3/structs.py b/github3/structs.py index c3af7bd8..e9a25032 100644 --- a/github3/structs.py +++ b/github3/structs.py @@ -129,6 +129,7 @@ class SearchIterator(GitHubIterator): class. For other members and methods, check its parent class. """ + _ratelimit_resource = 'search' def __init__(self, count, url, cls, session, params=None, etag=None, headers=None):
`ratelimit_remaining` and `_remaining` return the wrong number for `search` GitHub API v3 [enforces different rate limits](https://developer.github.com/v3/#rate-limiting) for subsets of API calls. [The `search` API has custom rate limits.](https://developer.github.com/v3/search/#rate-limit) `GitHubCore` has a property `ratelimit_remaining` which requests rate limit information from [endpoint `GET /rate_limit`](https://developer.github.com/v3/rate_limit/). That API call returns information about all rate limits but `GitHubCore.ratelimit_remaining` only exposes the one for the `core` resource and caches it in `GitHubCore._remaining`. **This means that when calling `ratelimit_remaining` or reading `_remaining` on a search class, the wrong number of remaining requests is returned.** In these cases, not the `core` but the `search` rate limit should be made available. One solution to fix this is: - `GitHubCore` gets a new field (e.g. `_ratelimit_resource`) that indicates which kind of rate limit calls to a subclass are counted against. - By default this field is set to `core`. - Sub-classes of `GitHubCore` in `search/` set the field to `search`. - `GitHubCore.ratelimit_remaining` uses that field to determine which rate limit to return and cache: ```diff json = self._json(self._get(self._github_url + '/rate_limit'), 200) - core = json.get('resources', {}).get('core', {}) + core = json.get('resources', {}).get(self._ratelimit_resource, {}) self._remaining = core.get('remaining', 0) return self._remaining ```
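A self-contained sketch of the class-attribute dispatch proposed above (and implemented by the patch); `Core` and `Search` stand in for `GitHubCore` and `SearchIterator`, and the payload mimics the shape of the `GET /rate_limit` response:

```python
class Core:
    _ratelimit_resource = 'core'    # which bucket this class counts against

    def remaining(self, rate_limit_json):
        bucket = rate_limit_json.get('resources', {}).get(self._ratelimit_resource, {})
        return bucket.get('remaining', 0)

class Search(Core):
    _ratelimit_resource = 'search'  # search endpoints have their own bucket

payload = {'resources': {'core': {'remaining': 4999}, 'search': {'remaining': 30}}}
assert Core().remaining(payload) == 4999
assert Search().remaining(payload) == 30  # previously this reported 4999
```

One override of a single class attribute is all a subclass needs; the lookup logic itself stays in the base class.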
sigmavirus24/github3.py
diff --git a/tests/cassettes/GitHubCore_ratelimit_remaining_search.json b/tests/cassettes/GitHubCore_ratelimit_remaining_search.json new file mode 100644 index 00000000..deab066b --- /dev/null +++ b/tests/cassettes/GitHubCore_ratelimit_remaining_search.json @@ -0,0 +1,1 @@ +{"recorded_with": "betamax/0.8.0", "http_interactions": [{"response": {"body": {"base64_string": "H4sIAAAAAAAAA6tWKkotzi8tSk4tVrKqVkrOL0oF0TmZuZklSlZmBjpA+dzEzLzMvHQYtzgVKGNoamhmaWlibGhcq6NUnJpYlJyBpM8QVR+Ei6TPwNwcpC+9KLEgozAHSSOqPjAP3TqgvqLEEpJdWQsAyulOc+oAAAA=", "encoding": "utf-8", "string": ""}, "url": "https://api.github.com/rate_limit", "headers": {"Cache-Control": "no-cache", "Access-Control-Expose-Headers": "ETag, Link, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval", "Access-Control-Allow-Origin": "*", "Content-Security-Policy": "default-src 'none'", "X-GitHub-Media-Type": "github.v3; param=full; format=json", "Content-Encoding": "gzip", "X-Content-Type-Options": "nosniff", "X-RateLimit-Limit": "60", "X-RateLimit-Reset": "1516994313", "Content-Type": "application/json; charset=utf-8", "X-XSS-Protection": "1; mode=block", "Strict-Transport-Security": "max-age=31536000; includeSubdomains; preload", "Date": "Fri, 26 Jan 2018 18:18:33 GMT", "X-Runtime-rack": "0.008218", "Transfer-Encoding": "chunked", "X-GitHub-Request-Id": "D196:12414:2B7A313:507FFAA:5A6B70F9", "Status": "200 OK", "Server": "GitHub.com", "X-Frame-Options": "deny", "X-RateLimit-Remaining": "60"}, "status": {"message": "OK", "code": 200}}, "recorded_at": "2018-01-26T18:18:33", "request": {"body": {"encoding": "utf-8", "string": ""}, "method": "GET", "uri": "https://api.github.com/rate_limit", "headers": {"Connection": "keep-alive", "Content-Type": "application/json", "User-Agent": "github3.py/1.0.0a4", "Accept-Charset": "utf-8", "Accept-Encoding": "gzip, deflate", "Accept": "application/vnd.github.v3.full+json"}}}, {"response": {"body": {"base64_string": "H4sIAAAAAAAAA6tWKkotzi8tSk4tVrKqVkrOL0oF0TmZuZklSlZmBjpA+dzEzLzMvHQYtzgVKGNoamhmaWlibGhcq6NUnJpYlJyBpM8QVR+Ei6TPwNwcpC+9KLEgozAHSSOqPjAP3TqgvqLEEpJdWQsAyulOc+oAAAA=", "encoding": "utf-8", "string": ""}, "url": "https://api.github.com/rate_limit", "headers": {"Cache-Control": "no-cache", "Access-Control-Expose-Headers": "ETag, Link, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval", "Access-Control-Allow-Origin": "*", "Content-Security-Policy": "default-src 'none'", "X-GitHub-Media-Type": "github.v3; param=full; format=json", "Content-Encoding": "gzip", "X-Content-Type-Options": "nosniff", "X-RateLimit-Limit": "60", "X-RateLimit-Reset": "1516994313", "Content-Type": "application/json; charset=utf-8", "X-XSS-Protection": "1; mode=block", "Strict-Transport-Security": "max-age=31536000; includeSubdomains; preload", "Date": "Fri, 26 Jan 2018 18:18:33 GMT", "X-Runtime-rack": "0.010151", "Transfer-Encoding": "chunked", "X-GitHub-Request-Id": "D196:12414:2B7A31F:507FFBB:5A6B70F9", "Status": "200 OK", "Server": "GitHub.com", "X-Frame-Options": "deny", "X-RateLimit-Remaining": "60"}, "status": {"message": "OK", "code": 200}}, "recorded_at": "2018-01-26T18:18:33", "request": {"body": {"encoding": "utf-8", "string": ""}, "method": "GET", "uri": "https://api.github.com/rate_limit", "headers": {"Connection": "keep-alive", "Content-Type": "application/json", "User-Agent": "github3.py/1.0.0a4", "Accept-Charset": "utf-8", 
"Accept-Encoding": "gzip, deflate", "Accept": "application/vnd.github.v3.full+json"}}}]} \ No newline at end of file diff --git a/tests/integration/test_github_core.py b/tests/integration/test_github_core.py index 9446aa58..0ecf6a86 100644 --- a/tests/integration/test_github_core.py +++ b/tests/integration/test_github_core.py @@ -6,3 +6,31 @@ class TestGitHubCore(IntegrationHelper): cassette_name = self.cassette_name('ratelimit_remaining') with self.recorder.use_cassette(cassette_name): assert self.gh.ratelimit_remaining > 0 + + def test_ratelimit_remaining_search(self): + """Test if search iterators return search ratelimit""" + + def _get_ratelimit(resource): + resources = self.gh.rate_limit().get('resources', {}) + rate_limit = resources.get(resource, {}) + return rate_limit.get('remaining', -1) + + cassette_name = self.cassette_name('ratelimit_remaining_search') + + # Run cassette to get correct remaining rate limit from responses. + with self.recorder.use_cassette(cassette_name): + correct_ratelimit_search = _get_ratelimit('search') + correct_ratelimit_core = _get_ratelimit('core') + + # Re-run cassette to test functions under test. + with self.recorder.use_cassette(cassette_name): + result_iterator = self.gh.search_code( + 'HTTPAdapter in:file language:python' + ' repo:kennethreitz/requests' + ) + ratelimit_remaining_search = result_iterator.ratelimit_remaining + ratelimit_remaining_core = self.gh.ratelimit_remaining + + assert ratelimit_remaining_search != ratelimit_remaining_core + assert ratelimit_remaining_core == correct_ratelimit_core + assert ratelimit_remaining_search == correct_ratelimit_search
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[test]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
betamax==0.9.0 betamax-matchers==0.4.0 certifi==2025.1.31 charset-normalizer==3.4.1 coverage==7.8.0 exceptiongroup==1.2.2 -e git+https://github.com/sigmavirus24/github3.py.git@785562d89a01545e1efe54efc8aba5e8a15cdd18#egg=github3.py idna==3.10 iniconfig==2.1.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 pytest-cov==6.0.0 requests==2.32.3 requests-toolbelt==1.0.0 tomli==2.2.1 uritemplate==4.1.1 urllib3==2.3.0
name: github3.py channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - betamax==0.9.0 - betamax-matchers==0.4.0 - certifi==2025.1.31 - charset-normalizer==3.4.1 - coverage==7.8.0 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - pytest-cov==6.0.0 - requests==2.32.3 - requests-toolbelt==1.0.0 - tomli==2.2.1 - uritemplate==4.1.1 - urllib3==2.3.0 prefix: /opt/conda/envs/github3.py
[ "tests/integration/test_github_core.py::TestGitHubCore::test_ratelimit_remaining_search" ]
[]
[ "tests/integration/test_github_core.py::TestGitHubCore::test_ratelimit_remaining" ]
[]
BSD 3-Clause "New" or "Revised" License
2,086
[ "github3/structs.py", "github3/models.py" ]
[ "github3/structs.py", "github3/models.py" ]
pior__pyramid_useragent-3
ca27e28f2b19a2c68a8c3a4a555ee0420b0c382b
2018-01-26 15:45:36
ca27e28f2b19a2c68a8c3a4a555ee0420b0c382b
diff --git a/pyramid_useragent/__init__.py b/pyramid_useragent/__init__.py index a06e6d0..c6a71ac 100644 --- a/pyramid_useragent/__init__.py +++ b/pyramid_useragent/__init__.py @@ -27,7 +27,7 @@ def get_user_agent_parsed(request): return UserAgent(request.user_agent) def get_user_agent_classified(request): - return UserAgentClassifier(request.user_agent) + return UserAgentClassifier(request.user_agent or '') class UserAgentComponent(object):
Fails if request.user_agent is None. Anonymous report on Bitbucket: > In my unit tests using WebTest, request.user_agent comes through as None. This is probably the behavior of pyramid when the User-Agent header is not provided. https://bitbucket.org/pior/pyramid_useragent/issues/3/fails-if-requestuser_agent-is-none
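A minimal sketch of the reported behavior and of the guard adopted by the one-line patch above, using a stand-in request object (`FakeRequest` is illustrative; real pyramid requests expose the same `user_agent` attribute via WebOb):

```
from pyramid_useragent import UserAgentClassifier

class FakeRequest(object):
    # WebTest sends no User-Agent header, so pyramid/WebOb exposes
    # request.user_agent as None -- the exact case reported above.
    user_agent = None

def get_user_agent_classified(request):
    # Coercing the missing header to '' means the parser always
    # receives a string; this mirrors the fix in the patch.
    return UserAgentClassifier(request.user_agent or '')

ua = get_user_agent_classified(FakeRequest())
assert isinstance(ua, UserAgentClassifier)  # same check as the new unit test
```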
pior/pyramid_useragent
diff --git a/pyramid_useragent/tests.py b/pyramid_useragent/tests.py index 1bb7104..4c771a5 100644 --- a/pyramid_useragent/tests.py +++ b/pyramid_useragent/tests.py @@ -28,6 +28,15 @@ class TestPyramidUserAgent(unittest.TestCase): self.assertIsInstance(resp, UserAgentClassifier) self.assertTrue(resp.is_mobile) + def test_no_user_agent(self): + from pyramid_useragent import (get_user_agent_classified, UserAgentClassifier) + + request = mock.Mock() + request.user_agent = None + + resp = get_user_agent_classified(request) + self.assertIsInstance(resp, UserAgentClassifier) + def test_safety(self): from pyramid_useragent import UserAgent
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[testing]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work hupper==1.12.1 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work mock==5.2.0 nose==1.3.7 nosexcover==1.0.11 packaging @ file:///croot/packaging_1734472117206/work PasteDeploy==3.1.0 plaster==1.1.2 plaster-pastedeploy==1.0.1 pluggy @ file:///croot/pluggy_1733169602837/work pyramid==2.0.2 -e git+https://github.com/pior/pyramid_useragent.git@ca27e28f2b19a2c68a8c3a4a555ee0420b0c382b#egg=pyramid_useragent pytest @ file:///croot/pytest_1738938843180/work tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work translationstring==1.4 ua-parser==1.0.1 ua-parser-builtins==0.18.0.post1 user-agents==2.2.0 venusian==3.1.1 WebOb==1.8.9 zope.deprecation==5.1 zope.interface==7.2
name: pyramid_useragent channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - hupper==1.12.1 - mock==5.2.0 - nose==1.3.7 - nosexcover==1.0.11 - pastedeploy==3.1.0 - plaster==1.1.2 - plaster-pastedeploy==1.0.1 - pyramid==2.0.2 - translationstring==1.4 - ua-parser==1.0.1 - ua-parser-builtins==0.18.0.post1 - user-agents==2.2.0 - venusian==3.1.1 - webob==1.8.9 - zope-deprecation==5.1 - zope-interface==7.2 prefix: /opt/conda/envs/pyramid_useragent
[ "pyramid_useragent/tests.py::TestPyramidUserAgent::test_no_user_agent" ]
[ "pyramid_useragent/tests.py::TestPyramidUserAgent::test_components_list_order", "pyramid_useragent/tests.py::TestPyramidUserAgent::test_empty_user_agent", "pyramid_useragent/tests.py::TestPyramidUserAgent::test_maincomponent", "pyramid_useragent/tests.py::TestPyramidUserAgent::test_no_version", "pyramid_useragent/tests.py::TestPyramidUserAgent::test_value_none" ]
[ "pyramid_useragent/tests.py::TestPyramidUserAgent::test_components_comment", "pyramid_useragent/tests.py::TestPyramidUserAgent::test_get_user_agent_classified", "pyramid_useragent/tests.py::TestPyramidUserAgent::test_get_user_agent_parsed", "pyramid_useragent/tests.py::TestPyramidUserAgent::test_safety" ]
[]
BSD License
2,087
[ "pyramid_useragent/__init__.py" ]
[ "pyramid_useragent/__init__.py" ]
dask__dask-3107
35ee4dfea06392b3b359869b945d00202358d296
2018-01-26 16:15:40
de6c2a49d76066abb51085570816322f063fc5c5
diff --git a/dask/array/percentile.py b/dask/array/percentile.py index baa1e5bf0..0fd239e74 100644 --- a/dask/array/percentile.py +++ b/dask/array/percentile.py @@ -14,21 +14,22 @@ from .. import sharedict @wraps(np.percentile) def _percentile(a, q, interpolation='linear'): + n = len(a) if not len(a): - return None + return None, n if isinstance(q, Iterator): q = list(q) if a.dtype.name == 'category': result = np.percentile(a.codes, q, interpolation=interpolation) import pandas as pd - return pd.Categorical.from_codes(result, a.categories, a.ordered) + return pd.Categorical.from_codes(result, a.categories, a.ordered), n if np.issubdtype(a.dtype, np.datetime64): a2 = a.astype('i8') result = np.percentile(a2, q, interpolation=interpolation) - return result.astype(a.dtype) + return result.astype(a.dtype), n if not np.issubdtype(a.dtype, np.number): interpolation = 'nearest' - return np.percentile(a, q, interpolation=interpolation) + return np.percentile(a, q, interpolation=interpolation), n def percentile(a, q, interpolation='linear'): @@ -49,7 +50,7 @@ def percentile(a, q, interpolation='linear'): name2 = 'percentile-' + token dsk2 = {(name2, 0): (merge_percentiles, q, [q] * len(a.chunks[0]), - sorted(dsk), a.chunks[0], interpolation)} + sorted(dsk), interpolation)} dtype = a.dtype if np.issubdtype(dtype, np.integer): @@ -60,7 +61,7 @@ def percentile(a, q, interpolation='linear'): return Array(dsk, name2, chunks=((len(q),),), dtype=dtype) -def merge_percentiles(finalq, qs, vals, Ns, interpolation='lower'): +def merge_percentiles(finalq, qs, vals, interpolation='lower', Ns=None): """ Combine several percentile calculations of different data. Parameters @@ -86,7 +87,7 @@ def merge_percentiles(finalq, qs, vals, Ns, interpolation='lower'): >>> vals = [np.array([1, 2, 3, 4]), np.array([10, 11, 12, 13])] >>> Ns = [100, 100] # Both original arrays had 100 elements - >>> merge_percentiles(finalq, qs, vals, Ns) + >>> merge_percentiles(finalq, qs, vals, Ns=Ns) array([ 1, 2, 3, 4, 10, 11, 12, 13]) """ if isinstance(finalq, Iterator): @@ -94,6 +95,8 @@ def merge_percentiles(finalq, qs, vals, Ns, interpolation='lower'): finalq = np.array(finalq) qs = list(map(list, qs)) vals = list(vals) + if Ns is None: + vals, Ns = zip(*vals) Ns = list(Ns) L = list(zip(*[(q, val, N) for q, val, N in zip(qs, vals, Ns) if N])) @@ -104,7 +107,7 @@ def merge_percentiles(finalq, qs, vals, Ns, interpolation='lower'): # TODO: Perform this check above in percentile once dtype checking is easy # Here we silently change meaning if vals[0].dtype.name == 'category': - result = merge_percentiles(finalq, qs, [v.codes for v in vals], Ns, interpolation) + result = merge_percentiles(finalq, qs, [v.codes for v in vals], interpolation, Ns) import pandas as pd return pd.Categorical.from_codes(result, vals[0].categories, vals[0].ordered) if not np.issubdtype(vals[0].dtype, np.number): diff --git a/dask/dataframe/core.py b/dask/dataframe/core.py index 1e047d977..e2034ae7c 100644 --- a/dask/dataframe/core.py +++ b/dask/dataframe/core.py @@ -3446,14 +3446,12 @@ def quantile(df, q): name = 'quantiles-1-' + token val_dsk = {(name, i): (_percentile, (getattr, key, 'values'), qs) for i, key in enumerate(df.__dask_keys__())} - name2 = 'quantiles-2-' + token - len_dsk = {(name2, i): (len, key) for i, key in enumerate(df.__dask_keys__())} name3 = 'quantiles-3-' + token merge_dsk = {(name3, 0): finalize_tsk((merge_percentiles, qs, [qs] * df.npartitions, - sorted(val_dsk), sorted(len_dsk)))} - dsk = merge(df.dask, val_dsk, len_dsk, merge_dsk) + 
sorted(val_dsk)))} + dsk = merge(df.dask, val_dsk, merge_dsk) return return_type(dsk, name3, meta, new_divisions) diff --git a/dask/dataframe/partitionquantiles.py b/dask/dataframe/partitionquantiles.py index b83dbabc6..aeaf4c2d8 100644 --- a/dask/dataframe/partitionquantiles.py +++ b/dask/dataframe/partitionquantiles.py @@ -410,7 +410,7 @@ def percentiles_summary(df, num_old, num_new, upsample, state): if is_categorical_dtype(data): data = data.codes interpolation = 'nearest' - vals = _percentile(data, qs, interpolation=interpolation) + vals, n = _percentile(data, qs, interpolation=interpolation) if interpolation == 'linear' and np.issubdtype(data.dtype, np.integer): vals = np.round(vals).astype(data.dtype) vals_and_weights = percentiles_to_weights(qs, vals, length) diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index 4ac97da0f..e76dabba2 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -11,6 +11,8 @@ Array - Update error handling when len is called with empty chunks (:issue:`3058`) `Xander Johnson`_ - Fixes a metadata bug with ``store``'s ``return_stored`` option (:pr:`3064`) `John A Kirkham`_ - Fix a bug in ``optimization.fuse_slice`` to properly handle when first input is ``None`` (:pr:`3076`) `James Bourbeau`_ +- Support arrays with unknown chunk sizes in percentile (:pr:`3107`) `Matthew Rocklin`_ + DataFrame +++++++++
BUG: da.percentile silently returns nan for array of unknown length. So the percentile on a normal dask array is fine: ``` In [2]: a = da.from_array(np.random.randn(10), chunks=(10,)) In [3]: a Out[3]: dask.array<array, shape=(10,), dtype=float64, chunksize=(10,)> In [4]: da.percentile(a, 50) Out[4]: dask.array<percentile, shape=(1,), dtype=float64, chunksize=(1,)> In [5]: da.percentile(a, 50).compute() Out[5]: array([ 0.27603973]) ``` but if it has unknown chunk sizes (e.g. from getting the values from a dataframe), it doesn't complain about this, but silently returns a dask array consisting of NaNs: ``` In [6]: ddf = dd.from_pandas(pd.DataFrame(np.random.randn(10, 3)), npartitions=2) In [7]: ddf.values Out[7]: dask.array<values, shape=(nan, 3), dtype=float64, chunksize=(nan, 3)> In [8]: a2 = ddf.values[:, 0] In [9]: a2 Out[9]: dask.array<getitem, shape=(nan,), dtype=float64, chunksize=(nan,)> In [10]: da.percentile(a2, 50) Out[10]: dask.array<percentile, shape=(1,), dtype=float64, chunksize=(1,)> In [11]: da.percentile(a2, 50).compute() Out[11]: array([ nan]) ``` Should this rather raise an error?
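The patch above avoids the silent NaNs by having each chunk's `_percentile` return a `(values, count)` pair, so the merge step learns chunk lengths at compute time instead of relying on (possibly unknown) chunk metadata. A numpy-only sketch of that idea (function names are illustrative, and the weighted merge is an approximation of dask's `merge_percentiles`, not its exact algorithm):

```
import numpy as np

def chunk_percentile(chunk, qs):
    # Report the chunk's length alongside its percentiles, so nothing
    # downstream needs chunk sizes up front.
    if len(chunk) == 0:
        return None, 0
    return np.percentile(chunk, qs), len(chunk)

def merge(qs, vals_and_counts):
    # Drop empty chunks, then weight each chunk's percentiles by its count.
    pairs = [(v, n) for v, n in vals_and_counts if n]
    vals = np.concatenate([v for v, _ in pairs])
    weights = np.concatenate([np.full(len(v), n) for v, n in pairs])
    order = np.argsort(vals)
    vals, weights = vals[order], weights[order]
    cum = (np.cumsum(weights) - 0.5 * weights) / weights.sum()
    return np.interp(np.asarray(qs) / 100.0, cum, vals)

np.random.seed(0)
chunks = [np.random.random(100) for _ in range(10)]
est = merge([50], [chunk_percentile(c, [25, 50, 75]) for c in chunks])
assert 0.3 < est[0] < 0.7  # near the true median of uniform data
```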
dask/dask
diff --git a/dask/array/tests/test_percentiles.py b/dask/array/tests/test_percentiles.py index 1ff01aa6d..f6a9b3408 100644 --- a/dask/array/tests/test_percentiles.py +++ b/dask/array/tests/test_percentiles.py @@ -59,3 +59,16 @@ def test_percentiles_with_scaler_percentile(q): # See #3020 d = da.ones((16,), chunks=(4,)) assert_eq(da.percentile(d, q), np.array([1], dtype=d.dtype)) + + +def test_unknown_chunk_sizes(): + x = da.random.random(1000, chunks=(100,)) + x._chunks = ((np.nan,) * 10,) + + result = da.percentile(x, 50).compute() + assert 0.1 < result < 0.9 + + a, b = da.percentile(x, [40, 60]).compute() + assert 0.1 < a < 0.9 + assert 0.1 < b < 0.9 + assert a < b
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 4 }
0.16
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[complete]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 click==8.0.4 cloudpickle==2.2.1 -e git+https://github.com/dask/dask.git@35ee4dfea06392b3b359869b945d00202358d296#egg=dask distributed==1.20.2 HeapDict==1.0.1 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work locket==1.0.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work msgpack-python==0.5.6 numpy==1.19.5 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pandas==1.1.5 partd==1.2.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==7.0.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 python-dateutil==2.9.0.post0 pytz==2025.2 six==1.17.0 sortedcontainers==2.4.0 tblib==1.7.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work toolz==0.12.0 tornado==6.1 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zict==2.1.0 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: dask channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - click==8.0.4 - cloudpickle==2.2.1 - distributed==1.20.2 - heapdict==1.0.1 - locket==1.0.0 - msgpack-python==0.5.6 - numpy==1.19.5 - pandas==1.1.5 - partd==1.2.0 - psutil==7.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - six==1.17.0 - sortedcontainers==2.4.0 - tblib==1.7.0 - toolz==0.12.0 - tornado==6.1 - zict==2.1.0 prefix: /opt/conda/envs/dask
[ "dask/array/tests/test_percentiles.py::test_unknown_chunk_sizes" ]
[]
[ "dask/array/tests/test_percentiles.py::test_percentile", "dask/array/tests/test_percentiles.py::test_percentiles_with_empty_arrays", "dask/array/tests/test_percentiles.py::test_percentiles_with_scaler_percentile[5]", "dask/array/tests/test_percentiles.py::test_percentiles_with_scaler_percentile[5.00]", "dask/array/tests/test_percentiles.py::test_percentiles_with_scaler_percentile[q2]", "dask/array/tests/test_percentiles.py::test_percentiles_with_scaler_percentile[5.01]" ]
[]
BSD 3-Clause "New" or "Revised" License
2,088
[ "docs/source/changelog.rst", "dask/dataframe/core.py", "dask/dataframe/partitionquantiles.py", "dask/array/percentile.py" ]
[ "docs/source/changelog.rst", "dask/dataframe/core.py", "dask/dataframe/partitionquantiles.py", "dask/array/percentile.py" ]
pydap__pydap-159
4ae73e393b0d52bce9d1cf5571945a9ed884d526
2018-01-26 20:47:09
eb8ee96bdf150642bf2e0603f406d2053af02424
laliberte: I'm going to merge this one right away since it mostly does some basic housekeeping that is necessary for successful builds and it solves a critical bug.
diff --git a/src/pydap/handlers/csv/__init__.py b/src/pydap/handlers/csv/__init__.py index 38ded4c..717079b 100644 --- a/src/pydap/handlers/csv/__init__.py +++ b/src/pydap/handlers/csv/__init__.py @@ -114,7 +114,7 @@ class CSVHandler(BaseHandler): BaseHandler.__init__(self) try: - with open(filepath, 'Ur') as fp: + with open(filepath, 'r') as fp: reader = csv.reader(fp, quoting=csv.QUOTE_NONNUMERIC) vars = next(reader) except Exception as exc: @@ -257,7 +257,7 @@ class CSVData(IterData): def stream(self): """Generator that yield lines of the file.""" try: - with open(self.filepath, 'Ur') as fp: + with open(self.filepath, 'r') as fp: reader = csv.reader(fp, quoting=csv.QUOTE_NONNUMERIC) next(reader) # consume var names for row in reader: diff --git a/src/pydap/handlers/lib.py b/src/pydap/handlers/lib.py index dca98b8..ffef4e8 100644 --- a/src/pydap/handlers/lib.py +++ b/src/pydap/handlers/lib.py @@ -120,7 +120,7 @@ class BaseHandler(object): 'Origin, X-Requested-With, Content-Type') return res(environ, start_response) - except: + except Exception: # should the exception be catched? if environ.get('x-wsgiorg.throw_errors'): raise @@ -456,7 +456,7 @@ def build_filter(expression, template): col = keys.index(token) target = target[token] a = operator.itemgetter(col) - except: + except Exception: raise ConstraintExpressionError( 'Invalid constraint expression: "{expression}" ' '("{id}" is not a valid variable)'.format( @@ -474,7 +474,7 @@ def build_filter(expression, template): def b(row): return value - except: + except Exception: raise ConstraintExpressionError( 'Invalid constraint expression: "{expression}" ' '("{id}" is not valid)'.format( diff --git a/src/pydap/lib.py b/src/pydap/lib.py index ed55eb9..7bfc7ea 100644 --- a/src/pydap/lib.py +++ b/src/pydap/lib.py @@ -125,7 +125,7 @@ def encode(obj): """Return an object encoded to its DAP representation.""" try: return '%.6g' % obj - except: + except Exception: return '"{0}"'.format(obj) diff --git a/src/pydap/model.py b/src/pydap/model.py index 335f0c3..ee9b486 100644 --- a/src/pydap/model.py +++ b/src/pydap/model.py @@ -381,7 +381,7 @@ class StructureType(DapType, Mapping): """Lazy shortcut return children.""" try: return self[attr] - except: + except Exception: return DapType.__getattr__(self, attr) def __contains__(self, key): @@ -408,7 +408,7 @@ class StructureType(DapType, Mapping): if len(splitted) > 1: try: return self[splitted[0]]['.'.join(splitted[1:])] - except KeyError: + except (KeyError, IndexError): return self['.'.join(splitted[1:])] else: raise diff --git a/src/pydap/parsers/__init__.py b/src/pydap/parsers/__init__.py index 7068faa..aab33ed 100644 --- a/src/pydap/parsers/__init__.py +++ b/src/pydap/parsers/__init__.py @@ -74,12 +74,12 @@ def parse_selection(expression, dataset): try: id1 = get_var(dataset, id1) - except: + except Exception: id1 = ast.literal_eval(id1) try: id2 = get_var(dataset, id2) - except: + except Exception: id2 = ast.literal_eval(id2) return id1, op, id2 diff --git a/src/pydap/wsgi/app.py b/src/pydap/wsgi/app.py index b735b10..578187e 100644 --- a/src/pydap/wsgi/app.py +++ b/src/pydap/wsgi/app.py @@ -155,7 +155,7 @@ def alphanum_key(s): def tryint(s): try: return int(s) - except: + except Exception: return s return [tryint(c) for c in re.split('([0-9]+)', s)] diff --git a/src/pydap/wsgi/ssf.py b/src/pydap/wsgi/ssf.py index e85eed4..785c7d4 100644 --- a/src/pydap/wsgi/ssf.py +++ b/src/pydap/wsgi/ssf.py @@ -183,10 +183,10 @@ def eval_function(dataset, function, functions): try: names = re.sub(r'\[.*?\]', 
'', str(token)).split('.') return reduce(operator.getitem, [dataset] + names) - except: + except Exception: try: return ast.literal_eval(token) - except: + except Exception: return token args = map(parse, tokenize(args))
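Most of the patch above is the housekeeping mentioned in the merge comment: every bare `except:` becomes `except Exception:`. The difference is not cosmetic: a bare `except` is equivalent to `except BaseException:` and also swallows `KeyboardInterrupt` and `SystemExit`, which should normally abort the program. A small demonstration using only the standard exception hierarchy:

```
def fragile():
    raise KeyboardInterrupt  # e.g. the user pressed Ctrl-C

try:
    fragile()
except Exception:
    print("not reached: KeyboardInterrupt is not an Exception subclass")
except BaseException:
    print("a bare `except:` would have caught and hidden Ctrl-C here")
```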
variable name error in pydap 3.2.2 I'm having trouble accessing a dataset with pydap 3.2.2 that worked fine in 3.2.1. I think 3.2.2 mangles the variable name. I briefly looked at the source, but it wasn't clear to me where the problem was. pydap 3.2.1 ``` >>> from pydap.client import open_url >>> dataset = open_url('https://nomads.ncdc.noaa.gov/thredds/dodsC/gfs-004/201612/20161201/gfs_4_20161201_0000_003.grb2') >>> tsurf = dataset['Temperature_surface'] >>> tsurf[0, 0, 0] <GridType with array 'Temperature_surface' and maps 'time', 'lat', 'lon'> ``` pydap 3.2.2 ``` >>> from pydap.client import open_url >>> dataset = open_url('https://nomads.ncdc.noaa.gov/thredds/dodsC/gfs-004/201612/20161201/gfs_4_20161201_0000_003.grb2') >>> tsurf = dataset['Temperature_surface'] >>> tsurf[0, 0, 0] Traceback (most recent call last): File "/Users/holmgren/miniconda3/envs/pydap322/lib/python3.6/site-packages/pydap/model.py", line 404, in _getitem_string return self._dict[quote(key)] KeyError: 'Temperature_surface%2ETemperature_surface' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/Users/holmgren/miniconda3/envs/pydap322/lib/python3.6/site-packages/pydap/model.py", line 740, in __getitem__ var.data = self[var.name].data[slice_] File "/Users/holmgren/miniconda3/envs/pydap322/lib/python3.6/site-packages/pydap/handlers/dap.py", line 149, in __getitem__ return dataset[self.id].data File "/Users/holmgren/miniconda3/envs/pydap322/lib/python3.6/site-packages/pydap/model.py", line 425, in __getitem__ return self._getitem_string(key) File "/Users/holmgren/miniconda3/envs/pydap322/lib/python3.6/site-packages/pydap/model.py", line 409, in _getitem_string return self[splitted[0]]['.'.join(splitted[1:])] File "/Users/holmgren/miniconda3/envs/pydap322/lib/python3.6/site-packages/pydap/model.py", line 320, in __getitem__ out.data = self._get_data_index(index) File "/Users/holmgren/miniconda3/envs/pydap322/lib/python3.6/site-packages/pydap/model.py", line 349, in _get_data_index return self._data[index] IndexError: only integers, slices (`:`), ellipsis (`...`), numpy.newaxis (`None`) and integer or boolean arrays are valid indices ```
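The traceback pinpoints the regression: `_getitem_string` retries a dotted name as `self[splitted[0]]['.'.join(splitted[1:])]`, but indexing a leaf variable's numpy data with a string raises `IndexError`, not `KeyError`, so the fallback branch never ran. The patch simply catches both. A toy sketch of the pattern (the classes are illustrative stand-ins, not pydap's model types):

```
import numpy as np

class Leaf(object):
    # Stand-in for pydap's BaseType: item access hits numpy data, so a
    # string index raises IndexError rather than KeyError.
    def __init__(self, data):
        self.data = data
    def __getitem__(self, index):
        return self.data[index]

class Struct(dict):
    # Stand-in for StructureType with dotted-name lookup.
    def __getitem__(self, key):
        try:
            return dict.__getitem__(self, key)
        except KeyError:
            head, _, rest = key.partition('.')
            if not rest:
                raise
            try:
                return self[head][rest]
            except (KeyError, IndexError):  # the fix: catch IndexError too
                return self[rest]

var = Struct()
var['child'] = Leaf(np.array([[[0, 1]]]))
assert var['child.child'] is var['child']  # mirrors the test patch below
```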
pydap/pydap
diff --git a/src/pydap/tests/test_model.py b/src/pydap/tests/test_model.py index c2e8fee..35e7e77 100644 --- a/src/pydap/tests/test_model.py +++ b/src/pydap/tests/test_model.py @@ -248,8 +248,10 @@ def test_StructureType_getitem(): """Test item retrieval.""" var = StructureType("var") child = BaseType("child") + child.data = np.array([[[0, 1]]]) var["child"] = child assert var["child"] is child + assert var["child.child"] is child with pytest.raises(KeyError): var["unloved child"] with pytest.raises(KeyError): diff --git a/src/pydap/tests/test_responses_error.py b/src/pydap/tests/test_responses_error.py index 70e35ab..3c9f06e 100644 --- a/src/pydap/tests/test_responses_error.py +++ b/src/pydap/tests/test_responses_error.py @@ -11,7 +11,7 @@ class TestErrorResponse(unittest.TestCase): # create an exception that would happen in runtime try: 1/0 - except: + except Exception: error = ErrorResponse(sys.exc_info()) req = Request.blank('/')
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 7 }
3.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[server,handlers.netcdf,testing]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest pytest-cov pytest-attrib requests-mock requests", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y libhdf5-serial-dev netcdf-bin libnetcdf-dev" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work beautifulsoup4==4.12.3 certifi==2021.5.30 cftime==1.6.0 charset-normalizer==2.0.12 coards==1.0.5 coverage==6.2 docopt==0.6.2 flake8==5.0.4 gsw==3.0.6 gunicorn==21.2.0 idna==3.10 importlib-metadata==4.2.0 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Jinja2==3.0.3 lxml==5.3.1 MarkupSafe==2.0.1 mccabe==0.7.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work netCDF4==1.6.2 numpy==1.19.5 ordereddict==1.1 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work PasteDeploy==2.1.1 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pycodestyle==2.9.1 -e git+https://github.com/pydap/pydap.git@4ae73e393b0d52bce9d1cf5571945a9ed884d526#egg=Pydap pyflakes==2.5.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-attrib==0.1.3 pytest-cov==4.0.0 requests==2.27.1 requests-mock==1.12.1 six==1.17.0 soupsieve==2.3.2.post1 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 waitress==2.0.0 WebOb==1.8.9 WebTest==3.0.0 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: pydap channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - beautifulsoup4==4.12.3 - cftime==1.6.0 - charset-normalizer==2.0.12 - coards==1.0.5 - coverage==6.2 - docopt==0.6.2 - flake8==5.0.4 - gsw==3.0.6 - gunicorn==21.2.0 - idna==3.10 - importlib-metadata==4.2.0 - jinja2==3.0.3 - lxml==5.3.1 - markupsafe==2.0.1 - mccabe==0.7.0 - netcdf4==1.6.2 - numpy==1.19.5 - ordereddict==1.1 - pastedeploy==2.1.1 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pytest-attrib==0.1.3 - pytest-cov==4.0.0 - requests==2.27.1 - requests-mock==1.12.1 - six==1.17.0 - soupsieve==2.3.2.post1 - tomli==1.2.3 - urllib3==1.26.20 - waitress==2.0.0 - webob==1.8.9 - webtest==3.0.0 prefix: /opt/conda/envs/pydap
[ "src/pydap/tests/test_model.py::test_StructureType_getitem" ]
[]
[ "src/pydap/tests/test_model.py::test_DapType_quote", "src/pydap/tests/test_model.py::test_DapType_attributes", "src/pydap/tests/test_model.py::test_DapType_id", "src/pydap/tests/test_model.py::test_DapType_repr", "src/pydap/tests/test_model.py::test_DapType_id_property", "src/pydap/tests/test_model.py::test_DapType_getattr", "src/pydap/tests/test_model.py::test_DapType_children", "src/pydap/tests/test_model.py::test_BaseType_no_data", "src/pydap/tests/test_model.py::test_BaseType_data_and_dimensions", "src/pydap/tests/test_model.py::test_BaseType_repr", "src/pydap/tests/test_model.py::test_BaseType_dtype", "src/pydap/tests/test_model.py::test_BaseType_shape", "src/pydap/tests/test_model.py::test_BaseType_size", "src/pydap/tests/test_model.py::test_BaseType_ndim", "src/pydap/tests/test_model.py::test_BaseType_copy", "src/pydap/tests/test_model.py::test_BaseType_comparisons", "src/pydap/tests/test_model.py::test_BaseType_sequence_protocol", "src/pydap/tests/test_model.py::test_BaseType_iter_protocol", "src/pydap/tests/test_model.py::test_BaseType_array", "src/pydap/tests/test_model.py::test_StructureType_init", "src/pydap/tests/test_model.py::test_StructureType_instance", "src/pydap/tests/test_model.py::test_StructureType_repr", "src/pydap/tests/test_model.py::test_StructureType_len", "src/pydap/tests/test_model.py::test_StructureType_contains", "src/pydap/tests/test_model.py::test_StructureType_lazy_attribute", "src/pydap/tests/test_model.py::test_StructureType_children", "src/pydap/tests/test_model.py::test_StructureType_setitem", "src/pydap/tests/test_model.py::test_StructureType_getitem_tuple", "src/pydap/tests/test_model.py::test_StructureType_delitem", "src/pydap/tests/test_model.py::test_StructureType_get_data", "src/pydap/tests/test_model.py::test_StructureType_set_data", "src/pydap/tests/test_model.py::test_StructureType_copy", "src/pydap/tests/test_model.py::test_DatasetType_setitem", "src/pydap/tests/test_model.py::test_DatasetType_id", "src/pydap/tests/test_model.py::test_SequenceType_data", "src/pydap/tests/test_model.py::test_SequenceType_len", "src/pydap/tests/test_model.py::test_SequenceType_iterdata", "src/pydap/tests/test_model.py::test_SequenceType_iter", "src/pydap/tests/test_model.py::test_SequenceType_iter_deprecation", "src/pydap/tests/test_model.py::test_SequenceType_items", "src/pydap/tests/test_model.py::test_SequenceType_values", "src/pydap/tests/test_model.py::test_SequenceType_getitem", "src/pydap/tests/test_model.py::test_SequenceType_copy", "src/pydap/tests/test_model.py::test_GridType_repr", "src/pydap/tests/test_model.py::test_GridType_dtype", "src/pydap/tests/test_model.py::test_GridType_shape", "src/pydap/tests/test_model.py::test_GridType_size", "src/pydap/tests/test_model.py::test_GridType_ndim", "src/pydap/tests/test_model.py::test_GridType_len", "src/pydap/tests/test_model.py::test_GridType_getitem", "src/pydap/tests/test_model.py::test_GridType_getitem_not_tuple", "src/pydap/tests/test_model.py::test_GridType_array", "src/pydap/tests/test_model.py::test_GridType_array2", "src/pydap/tests/test_model.py::test_GridType_maps", "src/pydap/tests/test_model.py::test_GridType_dimensions", "src/pydap/tests/test_responses_error.py::TestErrorResponse::test_body", "src/pydap/tests/test_responses_error.py::TestErrorResponse::test_charset", "src/pydap/tests/test_responses_error.py::TestErrorResponse::test_content_type", "src/pydap/tests/test_responses_error.py::TestErrorResponse::test_headers", 
"src/pydap/tests/test_responses_error.py::TestErrorResponse::test_status" ]
[]
MIT License
2,089
[ "src/pydap/handlers/lib.py", "src/pydap/lib.py", "src/pydap/parsers/__init__.py", "src/pydap/model.py", "src/pydap/handlers/csv/__init__.py", "src/pydap/wsgi/ssf.py", "src/pydap/wsgi/app.py" ]
[ "src/pydap/handlers/lib.py", "src/pydap/lib.py", "src/pydap/parsers/__init__.py", "src/pydap/model.py", "src/pydap/handlers/csv/__init__.py", "src/pydap/wsgi/ssf.py", "src/pydap/wsgi/app.py" ]
firebase__firebase-admin-python-122
d2d0060ec805f85a73b6b203d6df1d3c9e74cb8b
2018-01-26 23:00:56
d2d0060ec805f85a73b6b203d6df1d3c9e74cb8b
diff --git a/firebase_admin/__init__.py b/firebase_admin/__init__.py index 0bf08cd..03471a6 100644 --- a/firebase_admin/__init__.py +++ b/firebase_admin/__init__.py @@ -218,15 +218,38 @@ class App(object): self._options = _AppOptions(options) self._lock = threading.RLock() self._services = {} - pid = self._options.get('projectId') + self._project_id = App._lookup_project_id(self._credential, self._options) + + @classmethod + def _lookup_project_id(cls, credential, options): + """Looks up the Firebase project ID associated with an App. + + This method first inspects the app options for a ``projectId`` entry. Then it attempts to + get the project ID from the credential used to initialize the app. If that also fails, + attempts to look up the ``GCLOUD_PROJECT`` environment variable. + + Args: + credential: A Firebase credential instance. + options: A Firebase AppOptions instance. + + Returns: + str: A project ID string or None. + + Raises: + ValueError: If a non-string project ID value is specified. + """ + pid = options.get('projectId') if not pid: try: - pid = self._credential.project_id + pid = credential.project_id except AttributeError: pass if not pid: pid = os.environ.get('GCLOUD_PROJECT') - self._project_id = pid + if pid is not None and not isinstance(pid, six.string_types): + raise ValueError( + 'Invalid project ID: "{0}". project ID must be a string.'.format(pid)) + return pid @property def name(self): diff --git a/firebase_admin/firestore.py b/firebase_admin/firestore.py index 0191c00..1c32368 100644 --- a/firebase_admin/firestore.py +++ b/firebase_admin/firestore.py @@ -28,8 +28,6 @@ except ImportError: raise ImportError('Failed to import the Cloud Firestore library for Python. Make sure ' 'to install the "google-cloud-firestore" module.') -import six - from firebase_admin import _utils @@ -75,7 +73,4 @@ class _FirestoreClient(object): 'Project ID is required to access Firestore. Either set the projectId option, ' 'or use service account credentials. Alternatively, set the GCLOUD_PROJECT ' 'environment variable.') - elif not isinstance(project, six.string_types): - raise ValueError( - 'Invalid project ID: "{0}". project ID must be a string.'.format(project)) return _FirestoreClient(credentials, project) diff --git a/firebase_admin/instance_id.py b/firebase_admin/instance_id.py index 5e4f5d4..70ace55 100644 --- a/firebase_admin/instance_id.py +++ b/firebase_admin/instance_id.py @@ -79,9 +79,6 @@ class _InstanceIdService(object): 'Project ID is required to access Instance ID service. Either set the projectId ' 'option, or use service account credentials. Alternatively, set the ' 'GCLOUD_PROJECT environment variable.') - elif not isinstance(project_id, six.string_types): - raise ValueError( - 'Invalid project ID: "{0}". project ID must be a string.'.format(project_id)) self._project_id = project_id self._client = _http_client.JsonHttpClient( credential=app.credential.get_credential(), base_url=_IID_SERVICE_URL)
Validate Project ID String Globally. We currently check if project_id is a string in each of the service modules. This can be done in one place -- namely when the project_id is first read in the `App.__init__()` method.
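A minimal sketch of the centralized lookup this issue asks for and the patch implements: consult explicit options first, then the credential, then the environment, and validate the type exactly once (`str` below stands in for the `six.string_types` check used in the py2/py3 codebase, and `credential` is any object that may expose `project_id`):

```
import os

def lookup_project_id(credential, options):
    # Resolve and validate the project ID once, at app initialization,
    # instead of re-checking it in every service module.
    pid = options.get('projectId')
    if not pid:
        # Service-account credentials expose project_id; other
        # credential types may not, hence the default of None.
        pid = getattr(credential, 'project_id', None)
    if not pid:
        pid = os.environ.get('GCLOUD_PROJECT')
    if pid is not None and not isinstance(pid, str):
        raise ValueError(
            'Invalid project ID: "{0}". project ID must be a string.'
            .format(pid))
    return pid
```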
firebase/firebase-admin-python
diff --git a/tests/test_app.py b/tests/test_app.py index e4450eb..aaa3f0a 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -317,6 +317,11 @@ class TestFirebaseApp(object): if project_id: os.environ[GCLOUD_PROJECT] = project_id + def test_non_string_project_id(self): + options = {'projectId': {'key': 'not a string'}} + with pytest.raises(ValueError): + firebase_admin.initialize_app(CREDENTIAL, options=options) + def test_app_get(self, init_app): assert init_app is firebase_admin.get_app(init_app.name)
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 3 }
2.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": null, "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==1.4.9 cachetools==5.5.2 certifi==2025.1.31 chardet==5.2.0 charset-normalizer==3.4.1 colorama==0.4.6 coverage==7.8.0 distlib==0.3.9 exceptiongroup==1.2.2 filelock==3.18.0 -e git+https://github.com/firebase/firebase-admin-python.git@d2d0060ec805f85a73b6b203d6df1d3c9e74cb8b#egg=firebase_admin google-api-core==2.24.2 google-auth==2.38.0 google-cloud-core==2.4.3 google-cloud-firestore==2.20.1 google-cloud-storage==3.1.0 google-crc32c==1.7.1 google-resumable-media==2.7.2 googleapis-common-protos==1.69.2 grpcio==1.71.0 grpcio-status==1.71.0 idna==3.10 iniconfig==2.1.0 isort==6.0.1 lazy-object-proxy==1.10.0 mccabe==0.7.0 packaging==24.2 platformdirs==4.3.7 pluggy==1.5.0 proto-plus==1.26.1 protobuf==5.29.4 pyasn1==0.6.1 pyasn1_modules==0.4.2 pylint==1.6.4 pyproject-api==1.9.0 pytest==8.3.5 pytest-cov==6.0.0 requests==2.32.3 rsa==4.9 six==1.17.0 tomli==2.2.1 tox==4.25.0 typing_extensions==4.13.0 urllib3==2.3.0 virtualenv==20.29.3 wrapt==1.17.2
name: firebase-admin-python channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==1.4.9 - cachetools==5.5.2 - certifi==2025.1.31 - chardet==5.2.0 - charset-normalizer==3.4.1 - colorama==0.4.6 - coverage==7.8.0 - distlib==0.3.9 - exceptiongroup==1.2.2 - filelock==3.18.0 - google-api-core==2.24.2 - google-auth==2.38.0 - google-cloud-core==2.4.3 - google-cloud-firestore==2.20.1 - google-cloud-storage==3.1.0 - google-crc32c==1.7.1 - google-resumable-media==2.7.2 - googleapis-common-protos==1.69.2 - grpcio==1.71.0 - grpcio-status==1.71.0 - idna==3.10 - iniconfig==2.1.0 - isort==6.0.1 - lazy-object-proxy==1.10.0 - mccabe==0.7.0 - packaging==24.2 - platformdirs==4.3.7 - pluggy==1.5.0 - proto-plus==1.26.1 - protobuf==5.29.4 - pyasn1==0.6.1 - pyasn1-modules==0.4.2 - pylint==1.6.4 - pyproject-api==1.9.0 - pytest==8.3.5 - pytest-cov==6.0.0 - requests==2.32.3 - rsa==4.9 - six==1.17.0 - tomli==2.2.1 - tox==4.25.0 - typing-extensions==4.13.0 - urllib3==2.3.0 - virtualenv==20.29.3 - wrapt==1.17.2 prefix: /opt/conda/envs/firebase-admin-python
[ "tests/test_app.py::TestFirebaseApp::test_non_string_project_id" ]
[]
[ "tests/test_app.py::TestFirebaseApp::test_default_app_init[cert]", "tests/test_app.py::TestFirebaseApp::test_default_app_init[refreshtoken]", "tests/test_app.py::TestFirebaseApp::test_default_app_init[explicit-appdefault]", "tests/test_app.py::TestFirebaseApp::test_default_app_init[implicit-appdefault]", "tests/test_app.py::TestFirebaseApp::test_non_default_app_init[cert]", "tests/test_app.py::TestFirebaseApp::test_non_default_app_init[refreshtoken]", "tests/test_app.py::TestFirebaseApp::test_non_default_app_init[explicit-appdefault]", "tests/test_app.py::TestFirebaseApp::test_non_default_app_init[implicit-appdefault]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_credential[]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_credential[foo]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_credential[0]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_credential[1]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_credential[cred4]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_credential[cred5]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_credential[cred6]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_credential[True]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_credential[False]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_options[]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_options[0]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_options[1]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_options[options3]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_options[options4]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_options[True]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_options[False]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_name[None]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_name[]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_name[0]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_name[1]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_name[name4]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_name[name5]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_name[name6]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_name[True]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_name[False]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_config_file[firebase_config_empty.json]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_config_file[firebase_config_invalid.json]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_config_file[no_such_file]", "tests/test_app.py::TestFirebaseApp::test_app_init_with_invalid_config_string", "tests/test_app.py::TestFirebaseApp::test_app_init_with_default_config[Environment", "tests/test_app.py::TestFirebaseApp::test_app_init_with_default_config[Invalid", "tests/test_app.py::TestFirebaseApp::test_project_id_from_options[cert]", "tests/test_app.py::TestFirebaseApp::test_project_id_from_options[refreshtoken]", "tests/test_app.py::TestFirebaseApp::test_project_id_from_options[explicit-appdefault]", "tests/test_app.py::TestFirebaseApp::test_project_id_from_options[implicit-appdefault]", 
"tests/test_app.py::TestFirebaseApp::test_project_id_from_credentials", "tests/test_app.py::TestFirebaseApp::test_project_id_from_environment", "tests/test_app.py::TestFirebaseApp::test_no_project_id", "tests/test_app.py::TestFirebaseApp::test_app_get[DefaultApp]", "tests/test_app.py::TestFirebaseApp::test_app_get[CustomApp]", "tests/test_app.py::TestFirebaseApp::test_non_existing_app_get[DefaultApp]", "tests/test_app.py::TestFirebaseApp::test_non_existing_app_get[CustomApp]", "tests/test_app.py::TestFirebaseApp::test_app_get_with_invalid_name[None]", "tests/test_app.py::TestFirebaseApp::test_app_get_with_invalid_name[]", "tests/test_app.py::TestFirebaseApp::test_app_get_with_invalid_name[0]", "tests/test_app.py::TestFirebaseApp::test_app_get_with_invalid_name[1]", "tests/test_app.py::TestFirebaseApp::test_app_get_with_invalid_name[name4]", "tests/test_app.py::TestFirebaseApp::test_app_get_with_invalid_name[name5]", "tests/test_app.py::TestFirebaseApp::test_app_get_with_invalid_name[name6]", "tests/test_app.py::TestFirebaseApp::test_app_get_with_invalid_name[True]", "tests/test_app.py::TestFirebaseApp::test_app_get_with_invalid_name[False]", "tests/test_app.py::TestFirebaseApp::test_invalid_app_delete[None]", "tests/test_app.py::TestFirebaseApp::test_invalid_app_delete[]", "tests/test_app.py::TestFirebaseApp::test_invalid_app_delete[0]", "tests/test_app.py::TestFirebaseApp::test_invalid_app_delete[1]", "tests/test_app.py::TestFirebaseApp::test_invalid_app_delete[app4]", "tests/test_app.py::TestFirebaseApp::test_invalid_app_delete[app5]", "tests/test_app.py::TestFirebaseApp::test_invalid_app_delete[app6]", "tests/test_app.py::TestFirebaseApp::test_invalid_app_delete[True]", "tests/test_app.py::TestFirebaseApp::test_invalid_app_delete[False]", "tests/test_app.py::TestFirebaseApp::test_invalid_app_delete[app9]", "tests/test_app.py::TestFirebaseApp::test_app_delete[DefaultApp]", "tests/test_app.py::TestFirebaseApp::test_app_delete[CustomApp]", "tests/test_app.py::TestFirebaseApp::test_app_services[DefaultApp]", "tests/test_app.py::TestFirebaseApp::test_app_services[CustomApp]", "tests/test_app.py::TestFirebaseApp::test_app_services_invalid_arg[0]", "tests/test_app.py::TestFirebaseApp::test_app_services_invalid_arg[1]", "tests/test_app.py::TestFirebaseApp::test_app_services_invalid_arg[True]", "tests/test_app.py::TestFirebaseApp::test_app_services_invalid_arg[False]", "tests/test_app.py::TestFirebaseApp::test_app_services_invalid_arg[str]", "tests/test_app.py::TestFirebaseApp::test_app_services_invalid_arg[arg5]", "tests/test_app.py::TestFirebaseApp::test_app_services_invalid_arg[arg6]", "tests/test_app.py::TestFirebaseApp::test_app_services_invalid_arg[arg7]", "tests/test_app.py::TestFirebaseApp::test_app_services_invalid_app[DefaultApp]", "tests/test_app.py::TestFirebaseApp::test_app_services_invalid_app[CustomApp]" ]
[]
Apache License 2.0
2,090
[ "firebase_admin/firestore.py", "firebase_admin/__init__.py", "firebase_admin/instance_id.py" ]
[ "firebase_admin/firestore.py", "firebase_admin/__init__.py", "firebase_admin/instance_id.py" ]
pydata__sparse-94
80e9abc41c0e6238eafe20a7696ccfa9a32eec5d
2018-01-27 09:16:07
b9fc91cb68b37757f37881843b2e420a3c07f9be
diff --git a/sparse/dok.py b/sparse/dok.py index 65ede6f..9beaada 100644 --- a/sparse/dok.py +++ b/sparse/dok.py @@ -339,8 +339,11 @@ class DOK(object): raise IndexError('All indices must be slices or integers' ' when setting an item.') + key = tuple(key_list) if value != _zero_of_dtype(self.dtype): - self.data[tuple(key_list)] = value[()] + self.data[key] = value[()] + elif key in self.data: + del self.data[key] def __str__(self): return "<DOK: shape=%s, dtype=%s, nnz=%d>" % (self.shape, self.dtype, self.nnz)
Setting DOK elements to zero doesn't work. Trivial example: ``` >>> import sparse >>> import numpy as np >>> s = sparse.DOK((1,)) >>> s[0] = 1 >>> s[0] 1.0 >>> s[0] = 0 >>> s[0] 1.0 ```
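The patch fixes this by deleting the stored key when an existing entry is assigned zero; only skipping the write leaves the stale value behind. The invariant is easy to see in a toy dict-of-keys container (illustrative, not sparse's `DOK`):

```
class TinyDOK(object):
    # A dict-of-keys store must hold nonzero entries only.
    def __init__(self):
        self.data = {}

    def __setitem__(self, key, value):
        if value != 0:
            self.data[key] = value
        elif key in self.data:
            # Assigning zero over an existing entry removes it; this is
            # the `del self.data[key]` branch added by the patch.
            del self.data[key]

    def __getitem__(self, key):
        return self.data.get(key, 0)

s = TinyDOK()
s[0] = 1
s[0] = 0
assert s[0] == 0 and len(s.data) == 0  # matches the new test_set_zero
```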
pydata/sparse
diff --git a/sparse/tests/test_dok.py b/sparse/tests/test_dok.py index 568c1d0..45c9899 100644 --- a/sparse/tests/test_dok.py +++ b/sparse/tests/test_dok.py @@ -149,3 +149,12 @@ def test_float_dtype(): s = DOK((5,), data) assert s.dtype == np.float32 + + +def test_set_zero(): + s = DOK((1,), dtype=np.uint8) + s[0] = 1 + s[0] = 0 + + assert s[0] == 0 + assert s.nnz == 0
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 1 }, "num_modified_files": 1 }
0.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[tests]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-flake8", "packaging" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 coverage==6.2 distlib==0.3.9 filelock==3.4.1 flake8==5.0.4 importlib-metadata==4.2.0 importlib-resources==5.4.0 iniconfig==1.1.1 mccabe==0.7.0 numpy==1.19.5 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-flake8==1.1.1 scipy==1.5.4 six==1.17.0 -e git+https://github.com/pydata/sparse.git@80e9abc41c0e6238eafe20a7696ccfa9a32eec5d#egg=sparse toml==0.10.2 tomli==1.2.3 tox==3.28.0 typing_extensions==4.1.1 virtualenv==20.16.2 zipp==3.6.0
name: sparse channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - coverage==6.2 - distlib==0.3.9 - filelock==3.4.1 - flake8==5.0.4 - importlib-metadata==4.2.0 - importlib-resources==5.4.0 - iniconfig==1.1.1 - mccabe==0.7.0 - numpy==1.19.5 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-flake8==1.1.1 - scipy==1.5.4 - six==1.17.0 - toml==0.10.2 - tomli==1.2.3 - tox==3.28.0 - typing-extensions==4.1.1 - virtualenv==20.16.2 - zipp==3.6.0 prefix: /opt/conda/envs/sparse
[ "sparse/tests/test_dok.py::test_setitem[shape0-index0-0.7537312134103704]", "sparse/tests/test_dok.py::test_setitem[shape1-index1-0.28988226468720524]", "sparse/tests/test_dok.py::test_setitem[shape3-1-0.6816505138699129]", "sparse/tests/test_dok.py::test_setitem[shape4-index4-0.5721138246976042]", "sparse/tests/test_dok.py::test_setitem[shape5-index5-0.9172128110666324]", "sparse/tests/test_dok.py::test_setitem[shape9-index9-0.27975249563410365]", "sparse/tests/test_dok.py::test_setitem[shape11-index11-0.07900577341304127]", "sparse/tests/test_dok.py::test_setitem[shape13-index13-0.08694272894736244]", "sparse/tests/test_dok.py::test_set_zero" ]
[ "sparse/__init__.py::flake-8::FLAKE8", "sparse/coo.py::flake-8::FLAKE8", "sparse/dok.py::flake-8::FLAKE8", "sparse/slicing.py::flake-8::FLAKE8", "sparse/utils.py::flake-8::FLAKE8", "sparse/tests/test_coo.py::flake-8::FLAKE8", "sparse/tests/test_dok.py::flake-8::FLAKE8" ]
[ "sparse/coo.py::sparse.coo.COO", "sparse/coo.py::sparse.coo.COO.T", "sparse/coo.py::sparse.coo.COO.__len__", "sparse/coo.py::sparse.coo.COO.density", "sparse/coo.py::sparse.coo.COO.dot", "sparse/coo.py::sparse.coo.COO.dtype", "sparse/coo.py::sparse.coo.COO.from_numpy", "sparse/coo.py::sparse.coo.COO.from_scipy_sparse", "sparse/coo.py::sparse.coo.COO.linear_loc", "sparse/coo.py::sparse.coo.COO.max", "sparse/coo.py::sparse.coo.COO.maybe_densify", "sparse/coo.py::sparse.coo.COO.min", "sparse/coo.py::sparse.coo.COO.nbytes", "sparse/coo.py::sparse.coo.COO.ndim", "sparse/coo.py::sparse.coo.COO.nnz", "sparse/coo.py::sparse.coo.COO.prod", "sparse/coo.py::sparse.coo.COO.reduce", "sparse/coo.py::sparse.coo.COO.reshape", "sparse/coo.py::sparse.coo.COO.size", "sparse/coo.py::sparse.coo.COO.sort_indices", "sparse/coo.py::sparse.coo.COO.sum", "sparse/coo.py::sparse.coo.COO.sum_duplicates", "sparse/coo.py::sparse.coo.COO.todense", "sparse/coo.py::sparse.coo.COO.transpose", "sparse/dok.py::sparse.dok.DOK", "sparse/dok.py::sparse.dok.DOK.from_coo", "sparse/dok.py::sparse.dok.DOK.from_numpy", "sparse/dok.py::sparse.dok.DOK.ndim", "sparse/dok.py::sparse.dok.DOK.nnz", "sparse/dok.py::sparse.dok.DOK.to_coo", "sparse/dok.py::sparse.dok.DOK.todense", "sparse/slicing.py::sparse.slicing.check_index", "sparse/slicing.py::sparse.slicing.normalize_index", "sparse/slicing.py::sparse.slicing.normalize_slice", "sparse/slicing.py::sparse.slicing.posify_index", "sparse/slicing.py::sparse.slicing.replace_ellipsis", "sparse/slicing.py::sparse.slicing.sanitize_index", "sparse/utils.py::sparse.utils.random", "sparse/tests/test_coo.py::test_reductions[True-None-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-None-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-0-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-0-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-1-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-1-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-2-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-2-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-axis4-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-axis4-min-kwargs4-eqkwargs4]", 
"sparse/tests/test_coo.py::test_reductions[False-None-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-None-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-0-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-0-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-1-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-1-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-2-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-2-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-axis4-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-axis4-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-prod-kwargs3-eqkwargs3]", 
"sparse/tests/test_coo.py::test_ufunc_reductions[True-2-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_transpose[None]", "sparse/tests/test_coo.py::test_transpose[axis1]", "sparse/tests/test_coo.py::test_transpose[axis2]", "sparse/tests/test_coo.py::test_transpose[axis3]", "sparse/tests/test_coo.py::test_transpose[axis4]", "sparse/tests/test_coo.py::test_transpose[axis5]", "sparse/tests/test_coo.py::test_transpose[axis6]", "sparse/tests/test_coo.py::test_transpose_error[axis0]", "sparse/tests/test_coo.py::test_transpose_error[axis1]", "sparse/tests/test_coo.py::test_transpose_error[axis2]", "sparse/tests/test_coo.py::test_transpose_error[axis3]", "sparse/tests/test_coo.py::test_transpose_error[axis4]", "sparse/tests/test_coo.py::test_transpose_error[axis5]", "sparse/tests/test_coo.py::test_reshape[a0-b0]", "sparse/tests/test_coo.py::test_reshape[a1-b1]", "sparse/tests/test_coo.py::test_reshape[a2-b2]", "sparse/tests/test_coo.py::test_reshape[a3-b3]", "sparse/tests/test_coo.py::test_reshape[a4-b4]", 
"sparse/tests/test_coo.py::test_reshape[a5-b5]", "sparse/tests/test_coo.py::test_reshape[a6-b6]", "sparse/tests/test_coo.py::test_reshape[a7-b7]", "sparse/tests/test_coo.py::test_reshape[a8-b8]", "sparse/tests/test_coo.py::test_reshape[a9-b9]", "sparse/tests/test_coo.py::test_large_reshape", "sparse/tests/test_coo.py::test_reshape_same", "sparse/tests/test_coo.py::test_to_scipy_sparse", "sparse/tests/test_coo.py::test_tensordot[a_shape0-b_shape0-axes0]", "sparse/tests/test_coo.py::test_tensordot[a_shape1-b_shape1-axes1]", "sparse/tests/test_coo.py::test_tensordot[a_shape2-b_shape2-axes2]", "sparse/tests/test_coo.py::test_tensordot[a_shape3-b_shape3-axes3]", "sparse/tests/test_coo.py::test_tensordot[a_shape4-b_shape4-axes4]", "sparse/tests/test_coo.py::test_tensordot[a_shape5-b_shape5-axes5]", "sparse/tests/test_coo.py::test_tensordot[a_shape6-b_shape6-axes6]", "sparse/tests/test_coo.py::test_tensordot[a_shape7-b_shape7-axes7]", "sparse/tests/test_coo.py::test_tensordot[a_shape8-b_shape8-axes8]", "sparse/tests/test_coo.py::test_tensordot[a_shape9-b_shape9-0]", "sparse/tests/test_coo.py::test_dot", "sparse/tests/test_coo.py::test_elemwise[expm1]", "sparse/tests/test_coo.py::test_elemwise[log1p]", "sparse/tests/test_coo.py::test_elemwise[sin]", "sparse/tests/test_coo.py::test_elemwise[tan]", "sparse/tests/test_coo.py::test_elemwise[sinh]", "sparse/tests/test_coo.py::test_elemwise[tanh]", "sparse/tests/test_coo.py::test_elemwise[floor]", "sparse/tests/test_coo.py::test_elemwise[ceil]", "sparse/tests/test_coo.py::test_elemwise[sqrt]", "sparse/tests/test_coo.py::test_elemwise[conjugate0]", "sparse/tests/test_coo.py::test_elemwise[round_]", "sparse/tests/test_coo.py::test_elemwise[rint]", "sparse/tests/test_coo.py::test_elemwise[<lambda>0]", "sparse/tests/test_coo.py::test_elemwise[conjugate1]", "sparse/tests/test_coo.py::test_elemwise[conjugate2]", "sparse/tests/test_coo.py::test_elemwise[<lambda>1]", "sparse/tests/test_coo.py::test_elemwise[abs]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-ne]", "sparse/tests/test_coo.py::test_auto_densification_fails[pow]", 
"sparse/tests/test_coo.py::test_auto_densification_fails[truediv]", "sparse/tests/test_coo.py::test_auto_densification_fails[floordiv]", "sparse/tests/test_coo.py::test_auto_densification_fails[ge]", "sparse/tests/test_coo.py::test_auto_densification_fails[le]", "sparse/tests/test_coo.py::test_auto_densification_fails[eq]", "sparse/tests/test_coo.py::test_auto_densification_fails[mod]", "sparse/tests/test_coo.py::test_op_scipy_sparse[mul]", "sparse/tests/test_coo.py::test_op_scipy_sparse[add]", "sparse/tests/test_coo.py::test_op_scipy_sparse[sub]", "sparse/tests/test_coo.py::test_op_scipy_sparse[gt]", "sparse/tests/test_coo.py::test_op_scipy_sparse[lt]", "sparse/tests/test_coo.py::test_op_scipy_sparse[ne]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-mul-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-add-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-sub-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-pow-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-truediv-3]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-floordiv-4]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-gt-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-lt--5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-ne-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-ge-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-le--3]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-eq-1]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-mod-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-mul-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-add-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-sub-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-pow-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-truediv-3]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-floordiv-4]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-gt-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-lt--5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-ne-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-ge-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-le--3]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-eq-1]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-mod-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-mul-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-add-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-sub-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-gt--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-lt-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-ne-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-ge--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-le-3]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-eq-1]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-mul-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-add-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-sub-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-gt--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-lt-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-ne-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-ge--5]", 
"sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-le-3]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-eq-1]", "sparse/tests/test_coo.py::test_scalar_densification_fails[add-5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[sub--5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[pow--3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[truediv-0]", "sparse/tests/test_coo.py::test_scalar_densification_fails[floordiv-0]", "sparse/tests/test_coo.py::test_scalar_densification_fails[gt--5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[lt-5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[ne-1]", "sparse/tests/test_coo.py::test_scalar_densification_fails[ge--3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[le-3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[eq-0]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-xor]", "sparse/tests/test_coo.py::test_bitshift_binary[shape0-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape0-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape1-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape1-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape2-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape2-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape3-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape3-rshift]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape1-and_]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape3-and_]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape0-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape0-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape1-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape1-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape2-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape2-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape3-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape3-rshift]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape0-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape1-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape2-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape3-invert]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape0-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape0-xor]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape1-or_]", 
"sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape1-xor]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape2-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape2-xor]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape3-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape3-xor]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape0-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape0-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape1-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape1-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape2-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape2-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape3-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape3-rshift]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-xor]", "sparse/tests/test_coo.py::test_numpy_mixed_binary[shape0-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_binary[shape1-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_binary[shape2-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_binary[shape3-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_binary_bitwise[shape0-and_]", "sparse/tests/test_coo.py::test_numpy_mixed_binary_bitwise[shape1-and_]", "sparse/tests/test_coo.py::test_numpy_mixed_binary_bitwise[shape2-and_]", "sparse/tests/test_coo.py::test_numpy_mixed_binary_bitwise[shape3-and_]", "sparse/tests/test_coo.py::test_elemwise_binary_empty", "sparse/tests/test_coo.py::test_gt", "sparse/tests/test_coo.py::test_slicing[0]", "sparse/tests/test_coo.py::test_slicing[1]", "sparse/tests/test_coo.py::test_slicing[-1]", "sparse/tests/test_coo.py::test_slicing[index3]", "sparse/tests/test_coo.py::test_slicing[index4]", "sparse/tests/test_coo.py::test_slicing[index5]", "sparse/tests/test_coo.py::test_slicing[index6]", "sparse/tests/test_coo.py::test_slicing[index7]", "sparse/tests/test_coo.py::test_slicing[index8]", "sparse/tests/test_coo.py::test_slicing[index9]", "sparse/tests/test_coo.py::test_slicing[index10]", "sparse/tests/test_coo.py::test_slicing[index11]", "sparse/tests/test_coo.py::test_slicing[index12]", "sparse/tests/test_coo.py::test_slicing[index13]", "sparse/tests/test_coo.py::test_slicing[index14]", "sparse/tests/test_coo.py::test_slicing[index15]", "sparse/tests/test_coo.py::test_slicing[index16]", "sparse/tests/test_coo.py::test_slicing[index17]", "sparse/tests/test_coo.py::test_slicing[index18]", "sparse/tests/test_coo.py::test_slicing[index19]", 
"sparse/tests/test_coo.py::test_slicing[index20]", "sparse/tests/test_coo.py::test_slicing[index21]", "sparse/tests/test_coo.py::test_slicing[index22]", "sparse/tests/test_coo.py::test_slicing[index23]", "sparse/tests/test_coo.py::test_slicing[index24]", "sparse/tests/test_coo.py::test_slicing[index25]", "sparse/tests/test_coo.py::test_slicing[index26]", "sparse/tests/test_coo.py::test_slicing[index27]", "sparse/tests/test_coo.py::test_slicing[index28]", "sparse/tests/test_coo.py::test_slicing[index29]", "sparse/tests/test_coo.py::test_slicing[index30]", "sparse/tests/test_coo.py::test_slicing[index31]", "sparse/tests/test_coo.py::test_slicing[index32]", "sparse/tests/test_coo.py::test_slicing[index33]", "sparse/tests/test_coo.py::test_slicing[index34]", "sparse/tests/test_coo.py::test_slicing[index35]", "sparse/tests/test_coo.py::test_slicing[index36]", "sparse/tests/test_coo.py::test_slicing[index37]", "sparse/tests/test_coo.py::test_slicing[index38]", "sparse/tests/test_coo.py::test_slicing[index39]", "sparse/tests/test_coo.py::test_slicing[index40]", "sparse/tests/test_coo.py::test_slicing[index41]", "sparse/tests/test_coo.py::test_slicing[index42]", "sparse/tests/test_coo.py::test_slicing[index43]", "sparse/tests/test_coo.py::test_slicing[index44]", "sparse/tests/test_coo.py::test_custom_dtype_slicing", "sparse/tests/test_coo.py::test_slicing_errors[index0]", "sparse/tests/test_coo.py::test_slicing_errors[index1]", "sparse/tests/test_coo.py::test_slicing_errors[index2]", "sparse/tests/test_coo.py::test_slicing_errors[5]", "sparse/tests/test_coo.py::test_slicing_errors[-5]", "sparse/tests/test_coo.py::test_slicing_errors[foo]", "sparse/tests/test_coo.py::test_slicing_errors[index6]", "sparse/tests/test_coo.py::test_canonical", "sparse/tests/test_coo.py::test_concatenate", "sparse/tests/test_coo.py::test_concatenate_mixed[stack-0]", "sparse/tests/test_coo.py::test_concatenate_mixed[stack-1]", "sparse/tests/test_coo.py::test_concatenate_mixed[concatenate-0]", "sparse/tests/test_coo.py::test_concatenate_mixed[concatenate-1]", "sparse/tests/test_coo.py::test_stack[0-shape0]", "sparse/tests/test_coo.py::test_stack[0-shape1]", "sparse/tests/test_coo.py::test_stack[0-shape2]", "sparse/tests/test_coo.py::test_stack[1-shape0]", "sparse/tests/test_coo.py::test_stack[1-shape1]", "sparse/tests/test_coo.py::test_stack[1-shape2]", "sparse/tests/test_coo.py::test_stack[-1-shape0]", "sparse/tests/test_coo.py::test_stack[-1-shape1]", "sparse/tests/test_coo.py::test_stack[-1-shape2]", "sparse/tests/test_coo.py::test_large_concat_stack", "sparse/tests/test_coo.py::test_coord_dtype", "sparse/tests/test_coo.py::test_addition", "sparse/tests/test_coo.py::test_addition_not_ok_when_large_and_sparse", "sparse/tests/test_coo.py::test_broadcasting[shape10-shape20-add]", "sparse/tests/test_coo.py::test_broadcasting[shape10-shape20-mul]", "sparse/tests/test_coo.py::test_broadcasting[shape11-shape21-add]", "sparse/tests/test_coo.py::test_broadcasting[shape11-shape21-mul]", "sparse/tests/test_coo.py::test_broadcasting[shape12-shape22-add]", "sparse/tests/test_coo.py::test_broadcasting[shape12-shape22-mul]", "sparse/tests/test_coo.py::test_broadcasting[shape13-shape23-add]", "sparse/tests/test_coo.py::test_broadcasting[shape13-shape23-mul]", "sparse/tests/test_coo.py::test_broadcasting[shape14-shape24-add]", "sparse/tests/test_coo.py::test_broadcasting[shape14-shape24-mul]", "sparse/tests/test_coo.py::test_broadcasting[shape15-shape25-add]", "sparse/tests/test_coo.py::test_broadcasting[shape15-shape25-mul]", 
"sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape10-shape20-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape11-shape21-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape12-shape22-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape13-shape23-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape14-shape24-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape15-shape25-mul]", "sparse/tests/test_coo.py::test_broadcast_to[shape10-shape20]", "sparse/tests/test_coo.py::test_broadcast_to[shape11-shape21]", "sparse/tests/test_coo.py::test_broadcast_to[shape12-shape22]", "sparse/tests/test_coo.py::test_scalar_multiplication[2]", "sparse/tests/test_coo.py::test_scalar_multiplication[2.5]", "sparse/tests/test_coo.py::test_scalar_multiplication[scalar2]", "sparse/tests/test_coo.py::test_scalar_multiplication[scalar3]", "sparse/tests/test_coo.py::test_scalar_exponentiation", "sparse/tests/test_coo.py::test_create_with_lists_of_tuples", "sparse/tests/test_coo.py::test_sizeof", "sparse/tests/test_coo.py::test_scipy_sparse_interface", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[coo]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[csr]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[dok]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[csc]", "sparse/tests/test_coo.py::test_cache_csr", "sparse/tests/test_coo.py::test_empty_shape", "sparse/tests/test_coo.py::test_single_dimension", "sparse/tests/test_coo.py::test_raise_dense", "sparse/tests/test_coo.py::test_large_sum", "sparse/tests/test_coo.py::test_add_many_sparse_arrays", "sparse/tests/test_coo.py::test_caching", "sparse/tests/test_coo.py::test_scalar_slicing", "sparse/tests/test_coo.py::test_triul[shape0-0]", "sparse/tests/test_coo.py::test_triul[shape1-1]", "sparse/tests/test_coo.py::test_triul[shape2--1]", "sparse/tests/test_coo.py::test_triul[shape3--2]", "sparse/tests/test_coo.py::test_triul[shape4-1000]", "sparse/tests/test_coo.py::test_empty_reduction", "sparse/tests/test_coo.py::test_random_shape[0.1-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.1-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.1-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape2]", "sparse/tests/test_coo.py::test_two_random_unequal", "sparse/tests/test_coo.py::test_two_random_same_seed", "sparse/tests/test_coo.py::test_random_sorted", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-rvs-int]", 
"sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_scalar_shape_construction", "sparse/tests/test_coo.py::test_len", "sparse/tests/test_coo.py::test_density", "sparse/tests/test_coo.py::test_size", "sparse/tests/test_coo.py::test_np_array", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape2]", "sparse/tests/test_dok.py::test_convert_to_coo", "sparse/tests/test_dok.py::test_convert_from_coo", "sparse/tests/test_dok.py::test_convert_from_numpy", "sparse/tests/test_dok.py::test_convert_to_numpy", "sparse/tests/test_dok.py::test_construct[2-data0]", "sparse/tests/test_dok.py::test_construct[shape1-data1]", "sparse/tests/test_dok.py::test_construct[shape2-data2]", "sparse/tests/test_dok.py::test_getitem[0.1-shape0]", "sparse/tests/test_dok.py::test_getitem[0.1-shape1]", "sparse/tests/test_dok.py::test_getitem[0.1-shape2]", "sparse/tests/test_dok.py::test_getitem[0.3-shape0]", "sparse/tests/test_dok.py::test_getitem[0.3-shape1]", "sparse/tests/test_dok.py::test_getitem[0.3-shape2]", "sparse/tests/test_dok.py::test_getitem[0.5-shape0]", "sparse/tests/test_dok.py::test_getitem[0.5-shape1]", "sparse/tests/test_dok.py::test_getitem[0.5-shape2]", "sparse/tests/test_dok.py::test_getitem[0.7-shape0]", "sparse/tests/test_dok.py::test_getitem[0.7-shape1]", "sparse/tests/test_dok.py::test_getitem[0.7-shape2]", "sparse/tests/test_dok.py::test_setitem[shape2-index2-value2]", "sparse/tests/test_dok.py::test_setitem[shape6-index6-value6]", "sparse/tests/test_dok.py::test_setitem[shape7-index7-value7]", "sparse/tests/test_dok.py::test_setitem[shape8-index8-value8]", "sparse/tests/test_dok.py::test_setitem[shape10-index10-value10]", "sparse/tests/test_dok.py::test_setitem[shape12-index12-value12]", "sparse/tests/test_dok.py::test_default_dtype", 
"sparse/tests/test_dok.py::test_int_dtype", "sparse/tests/test_dok.py::test_float_dtype" ]
[]
BSD 3-Clause "New" or "Revised" License
2,091
[ "sparse/dok.py" ]
[ "sparse/dok.py" ]
tech-teach__marshmallowjson-3
b8a2e3edf36dc7c65b73ed108371e1b2743a4b8e
2018-01-28 23:48:38
b8a2e3edf36dc7c65b73ed108371e1b2743a4b8e
diff --git a/.travis.yml b/.travis.yml index 4f0901a..71234d5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,26 +1,29 @@ -# Config file for automatic testing at travis-ci.org -# This file will be regenerated if you run travis_pypi_setup.py - -language: python -python: - - 3.6 - - 3.5 - -# command to install dependencies, e.g. pip install -r requirements.txt --use-mirrors -install: pip install -U tox-travis - -# command to run tests, e.g. python setup.py test -script: tox - -# After you create the Github repo and add it to Travis, run the -# travis_pypi_setup.py script to finish PyPI deployment setup +# This file was autogenerated and will overwrite each time you run travis_pypi_setup.py deploy: + password: + secure: !!binary | + VnZ0MytIcHhSRkxWdGFqYnd6WGo5eWZxN0E0UE9kRm5QNTFZR3FQcXQ3MFEvakp2OXhkMnZwbVcr + MElOVVNjZFVqdUljUUpCa2NrbXdPZUVzL1FuNThBRVhJRjhCMVNSL0FGTFV1bW9DTEU0YmgzcHgw + d2VSdk8rZXpkUFgxdmx0TFF4bWdLR2xHVG4yN3RlSEtTdVR6eVVWTkNzSGwrKzB5a0VSeFBPODBC + NDl4S0EvbjVPQk9YSkFYZzNXODUvaDRwUTQ0Z2NhSHg3bTdZSTBGSytRZGJTZHRWTjZrUEV1R3hJ + MlNSdkhQdVdmWjhhY0Q5eXJSVmtVRk5iUldzNTZUeEI3TUp0ajkxdEJTdGZLdTM0Z2ZITGNXdTNp + M1dQUVl3UmlZUFNFUjZvMnVZZzFsR1k3ZmJhM01ZZUVGdnRYZER3YndUcEh6T1kyYnlSd1ptSlhr + N3VCOUw1dlNLa1hyd0VOcXgyaU12Wm5jMVhNbkRqcTNnOHYvUk5XSWVoSnFoMWN4ZGtkNHhPREty + enpJMUNZbGc5b0FaL1JSYVVvR3ZuNkRtYVN5aTU2U2NZZTJWaUlld1E3Zm13eEpKQVBmRzBMY2RO + QUkrU0tCUmVqenl6bHlBSndwS20wRU1kOUx4dlRoQTVydzlwS0pVSkYzN20xWHJGUU9OK29nOUFU + YzZKK3puSUtQRmVXTWlHUU5xL1RRZUI5YTZPcnRZQmxZWjY2ZldhelppOEVXL01PSUlXQnNDU0Zm + VG9VSXNDSDAxTFNKczc0MzRjdVJORWZON1FhOVVDcnh0MGVNcnNDTVRGMWxyV28vbW9jODU0TXlZ + bmV2UlFtOHVxa0k4Q2JaTStvM0pDV2lCQXV6MStVZjdaR1R2OThlcFVvYkN1WGhZY00xTU1nd1E9 provider: pypi distributions: sdist bdist_wheel user: odarbelaeze - password: - secure: PLEASE_REPLACE_ME - on: - tags: true - repo: odarbelaeze/marshmallowjson + true: python: 3.6 + repo: odarbelaeze/marshmallowjson + tags: true +install: pip install -U tox-travis +language: python +python: +- 3.6 +- 3.5 +script: tox diff --git a/marshmallowjson/cli.py b/marshmallowjson/cli.py index 8cdfbe7..b19d723 100644 --- a/marshmallowjson/cli.py +++ b/marshmallowjson/cli.py @@ -1,14 +1,41 @@ """Console script for marshmallowjson.""" import click +import collections +import json +import sys + + +def fail(kind, type_, name): + click.echo(click.style( + '{kind} is not a known type in {type_}.{name}'.format( + kind=kind, + type_=type_, + name=name, + ), + fg='red' + )) + sys.exit(1) @click.command() -def main(args=None): - """Console script for marshmallowjson.""" - click.echo("Replace this message by putting your code into " - "marshmallowjson.cli.main") - click.echo("See click documentation at http://click.pocoo.org/") [email protected]('definition', type=click.File('r')) +def main(definition): + """Validate an schema for marshmallow json""" + known = set('string boolean uuid number integer decimal'.split()) + definitions = json.load(definition, object_pairs_hook=collections.OrderedDict) + for type_, schema in definitions.items(): + for name, field in schema.items(): + kind = field['kind'] + if kind == 'list': + items = field['items'] + if items not in known: + fail(items, type_, name) + continue + if kind not in known: + fail(kind, type_, name) + known.add(type_) + click.echo(click.style('All clear', fg='green')) if __name__ == "__main__":
validate json schema dependency order It is necessary to work with a valid JSON schema; to get this, create a JSON validator for this structure:

```json
{
  "Identifier": {
    "catalog": {"kind": "str", "required": true, "doc": "Que vaina es esto"},
    "entry": {"kind": "str", "required": true},
    "uuid": {"kind": "uuid", "required": true}
  },
  "General": {
    "identifier": {"kind": "object", "schema": "Identifier", "required": true},
    "title": {"kind": "str", "required": true},
    "keywords": {"kind": "list", "items": "str", "required": false}
  },
  "LearningObject": {
    "general": {"kind": "object", "schema": "General", "required": false}
  }
}
```
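For readers of the issue, a minimal sketch of the ordered-dependency check being requested follows (an illustration only — the actual fix is the `marshmallowjson/cli.py` change in the patch above; the primitive-type set and error wording mirror that patch, and `object`/`schema` references from the example are, as in the patch's tests, not handled here):

```python
import collections
import json

PRIMITIVES = {"string", "boolean", "uuid", "number", "integer", "decimal"}

def validate(text):
    """Check every field kind against primitives and previously defined types."""
    definitions = json.loads(text, object_pairs_hook=collections.OrderedDict)
    known = set(PRIMITIVES)
    for type_name, fields in definitions.items():
        for field_name, field in fields.items():
            # Lists are validated by their item type rather than 'list' itself.
            kind = field["items"] if field["kind"] == "list" else field["kind"]
            if kind not in known:
                raise ValueError("{} is not a known type in {}.{}".format(
                    kind, type_name, field_name))
        known.add(type_name)  # later definitions may now reference this one
```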
tech-teach/marshmallowjson
diff --git a/tests/data/basic.json b/tests/data/basic.json new file mode 100644 index 0000000..93aaa83 --- /dev/null +++ b/tests/data/basic.json @@ -0,0 +1,20 @@ +{ + "StringType": { + "field": { + "kind": "string", + "required": false + } + }, + "NumberType": { + "field": { + "kind": "number", + "required": false + } + }, + "BooleanType": { + "field": { + "kind": "boolean", + "required": false + } + } +} diff --git a/tests/data/list.json b/tests/data/list.json new file mode 100644 index 0000000..a08f382 --- /dev/null +++ b/tests/data/list.json @@ -0,0 +1,15 @@ +{ + "StringType": { + "field": { + "kind": "string", + "required": false + } + }, + "ListOfString": { + "field": { + "kind": "list", + "items": "StringType", + "required": false + } + } +} diff --git a/tests/data/unknown.json b/tests/data/unknown.json new file mode 100644 index 0000000..7bd8f98 --- /dev/null +++ b/tests/data/unknown.json @@ -0,0 +1,9 @@ +{ + "Type": { + "field": { + "kind": "Unknown", + "required": false, + "doc": "Unknow is nowhere near the type definitions, that's an error" + } + } +} diff --git a/tests/test_marshmallowjson.py b/tests/test_marshmallowjson.py index 8a3236c..62824c1 100644 --- a/tests/test_marshmallowjson.py +++ b/tests/test_marshmallowjson.py @@ -1,5 +1,6 @@ """Tests for `marshmallowjson` package.""" +import os import pytest from click.testing import CliRunner @@ -9,27 +10,45 @@ from marshmallowjson import cli @pytest.fixture -def response(): - """Sample pytest fixture. +def unknown(): + root = os.path.dirname(__file__) + return os.path.join(root, 'data/unknown.json') - See more at: http://doc.pytest.org/en/latest/fixture.html - """ - # import requests - # return requests.get('https://github.com/audreyr/cookiecutter-pypackage') + [email protected] +def basic(): + root = os.path.dirname(__file__) + return os.path.join(root, 'data/basic.json') + + [email protected] +def list_schema(): + root = os.path.dirname(__file__) + return os.path.join(root, 'data/list.json') + + +def test_error_when_using_unknown_type(unknown): + runner = CliRunner() + result = runner.invoke(cli.main, [unknown]) + assert result.exit_code == 1, result.output + assert 'Unknown is not a known type in Type.field' in result.output + + +def test_all_basic_types_are_allowed(basic): + runner = CliRunner() + result = runner.invoke(cli.main, [basic]) + assert result.exit_code == 0, result.output -def test_content(response): - """Sample pytest test function with the pytest fixture as an argument.""" - # from bs4 import BeautifulSoup - # assert 'GitHub' in BeautifulSoup(response.content).title.string +def test_lists_are_allowed(list_schema): + runner = CliRunner() + result = runner.invoke(cli.main, [list_schema]) + assert result.exit_code == 0, result.output def test_command_line_interface(): """Test the CLI.""" runner = CliRunner() - result = runner.invoke(cli.main) - assert result.exit_code == 0 - assert 'marshmallowjson.cli.main' in result.output help_result = runner.invoke(cli.main, ['--help']) assert help_result.exit_code == 0 assert '--help Show this message and exit.' in help_result.output
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 1 }, "num_modified_files": 2 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2021.5.30 click==8.0.4 importlib-metadata==4.8.3 -e git+https://github.com/tech-teach/marshmallowjson.git@b8a2e3edf36dc7c65b73ed108371e1b2743a4b8e#egg=marshmallowjson typing_extensions==4.1.1 zipp==3.6.0
name: marshmallowjson channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - click==8.0.4 - importlib-metadata==4.8.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/marshmallowjson
[ "tests/test_marshmallowjson.py::test_error_when_using_unknown_type", "tests/test_marshmallowjson.py::test_all_basic_types_are_allowed", "tests/test_marshmallowjson.py::test_lists_are_allowed" ]
[]
[ "tests/test_marshmallowjson.py::test_command_line_interface", "tests/test_marshmallowjson.py::test_avoid_warning" ]
[]
MIT License
2,094
[ ".travis.yml", "marshmallowjson/cli.py" ]
[ ".travis.yml", "marshmallowjson/cli.py" ]
spesmilo__electrum-3794
743ef9ec8f1e69c56f587359f00de19f4f05ff0a
2018-01-29 01:07:55
743ef9ec8f1e69c56f587359f00de19f4f05ff0a
diff --git a/lib/transaction.py b/lib/transaction.py index 3fd17b2b3..95cb593cc 100644 --- a/lib/transaction.py +++ b/lib/transaction.py @@ -45,6 +45,14 @@ class SerializationError(Exception): """ Thrown when there's a problem deserializing or serializing """ +class UnknownTxinType(Exception): + pass + + +class NotRecognizedRedeemScript(Exception): + pass + + class BCDataStream(object): def __init__(self): self.input = None @@ -302,10 +310,23 @@ def parse_scriptSig(d, _bytes): if match_decoded(decoded, match): item = decoded[0][1] if item[0] == 0: + # segwit embedded into p2sh + # witness version 0 + # segwit embedded into p2sh d['address'] = bitcoin.hash160_to_p2sh(bitcoin.hash_160(item)) - d['type'] = 'p2wpkh-p2sh' if len(item) == 22 else 'p2wsh-p2sh' + if len(item) == 22: + d['type'] = 'p2wpkh-p2sh' + elif len(item) == 34: + d['type'] = 'p2wsh-p2sh' + else: + print_error("unrecognized txin type", bh2u(item)) + elif opcodes.OP_1 <= item[0] <= opcodes.OP_16: + # segwit embedded into p2sh + # witness version 1-16 + pass else: - # payto_pubkey + # assert item[0] == 0x30 + # pay-to-pubkey d['type'] = 'p2pk' d['address'] = "(pubkey)" d['signatures'] = [bh2u(item)] @@ -361,7 +382,7 @@ def parse_redeemScript(s): match_multisig = [ op_m ] + [opcodes.OP_PUSHDATA4]*n + [ op_n, opcodes.OP_CHECKMULTISIG ] if not match_decoded(dec2, match_multisig): print_error("cannot find address in input script", bh2u(s)) - return + raise NotRecognizedRedeemScript() x_pubkeys = [bh2u(x[1]) for x in dec2[1:-2]] pubkeys = [safe_parse_pubkey(x) for x in x_pubkeys] redeemScript = multisig_script(pubkeys, m) @@ -430,21 +451,40 @@ def parse_witness(vds, txin): if n == 0xffffffff: txin['value'] = vds.read_uint64() n = vds.read_compact_size() + # now 'n' is the number of items in the witness w = list(bh2u(vds.read_bytes(vds.read_compact_size())) for i in range(n)) + + add_w = lambda x: var_int(len(x) // 2) + x + txin['witness'] = var_int(n) + ''.join(add_w(i) for i in w) + + # FIXME: witness version > 0 will probably fail here. + # For native segwit, we would need the scriptPubKey of the parent txn + # to determine witness program version, and properly parse the witness. + # In case of p2sh-segwit, we can tell based on the scriptSig in this txn. + # The code below assumes witness version 0. + # p2sh-segwit should work in that case; for native segwit we need to tell + # between p2wpkh and p2wsh; we do this based on number of witness items, + # hence (FIXME) p2wsh with n==2 (maybe n==1 ?) will probably fail. + # If v==0 and n==2, we need parent scriptPubKey to distinguish between p2wpkh and p2wsh. 
if txin['type'] == 'coinbase': pass - elif n > 2: + elif txin['type'] == 'p2wsh-p2sh' or n > 2: + try: + m, n, x_pubkeys, pubkeys, witnessScript = parse_redeemScript(bfh(w[-1])) + except NotRecognizedRedeemScript: + raise UnknownTxinType() txin['signatures'] = parse_sig(w[1:-1]) - m, n, x_pubkeys, pubkeys, witnessScript = parse_redeemScript(bfh(w[-1])) txin['num_sig'] = m txin['x_pubkeys'] = x_pubkeys txin['pubkeys'] = pubkeys txin['witnessScript'] = witnessScript - else: + elif txin['type'] == 'p2wpkh-p2sh' or n == 2: txin['num_sig'] = 1 txin['x_pubkeys'] = [w[1]] txin['pubkeys'] = [safe_parse_pubkey(w[1])] txin['signatures'] = parse_sig([w[0]]) + else: + raise UnknownTxinType() def parse_output(vds, i): d = {} @@ -474,7 +514,12 @@ def deserialize(raw): if is_segwit: for i in range(n_vin): txin = d['inputs'][i] - parse_witness(vds, txin) + try: + parse_witness(vds, txin) + except UnknownTxinType: + txin['type'] = 'unknown' + # FIXME: GUI might show 'unknown' address (e.g. for a non-multisig p2wsh) + continue # segwit-native script if not txin.get('scriptSig'): if txin['num_sig'] == 1: @@ -674,7 +719,9 @@ class Transaction: witness_script = multisig_script(pubkeys, txin['num_sig']) witness = var_int(n) + '00' + ''.join(add_w(x) for x in sig_list) + add_w(witness_script) else: - raise BaseException('wrong txin type') + witness = txin.get('witness', None) + if not witness: + raise BaseException('wrong txin type:', txin['type']) if self.is_txin_complete(txin) or estimate_size: value_field = '' else: @@ -683,7 +730,7 @@ class Transaction: @classmethod def is_segwit_input(cls, txin): - return cls.is_segwit_inputtype(txin['type']) + return cls.is_segwit_inputtype(txin['type']) or bool(txin.get('witness', False)) @classmethod def is_segwit_inputtype(cls, txin_type):
[Synchronizer] cannot deserialize transaction, skipping The transaction which can not be deserialized is this [e9933221a150f78f9f224899f8568ff6422ffcc28ca3d53d87936368ff7c4b1d](https://blockchain.info/tx/e9933221a150f78f9f224899f8568ff6422ffcc28ca3d53d87936368ff7c4b1d), which looks like this serialized: `0100000000010160f84fdcda039c3ca1b20038adea2d49a53db92f7c467e8def13734232bb610804000000232200202814720f16329ab81cb8867c4d447bd13255931f23e6655944c9ada1797fcf88ffffffff0ba3dcfc04000000001976a91488124a57c548c9e7b1dd687455af803bd5765dea88acc9f44900000000001976a914da55045a0ccd40a56ce861946d13eb861eb5f2d788ac49825e000000000017a914ca34d4b190e36479aa6e0023cfe0a8537c6aa8dd87680c0d00000000001976a914651102524c424b2e7c44787c4f21e4c54dffafc088acf02fa9000000000017a914ee6c596e6f7066466d778d4f9ba633a564a6e95d874d250900000000001976a9146ca7976b48c04fd23867748382ee8401b1d27c2988acf5119600000000001976a914cf47d5dcdba02fd547c600697097252d38c3214a88ace08a12000000000017a914017bef79d92d5ec08c051786bad317e5dd3befcf87e3d76201000000001976a9148ec1b88b66d142bcbdb42797a0fd402c23e0eec288ac718f6900000000001976a914e66344472a224ce6f843f2989accf435ae6a808988ac65e51300000000001976a914cad6717c13a2079066f876933834210ebbe68c3f88ac0347304402201a4907c4706104320313e182ecbb1b265b2d023a79586671386de86bb47461590220472c3db9fc99a728ebb9b555a72e3481d20b181bd059a9c1acadfb853d90c96c01210338a46f2a54112fef8803c8478bc17e5f8fc6a5ec276903a946c1fafb2e3a8b181976a914eda8660085bf607b82bd18560ca8f3a9ec49178588ac00000000` Because the TX is skipped, the addresses that belong to my wallet involving that tx show as having a balance of zero and I can't get the funds out of them. I'm using Electrum 3.0.2, but was able to reproduce it in 3.0.5. As a quick hack, I replaced the empty `return` from [this line](https://github.com/spesmilo/electrum/blob/master/lib/transaction.py#L364) with `return [m, n, None, None, None]`, but that is not quite a fix, although it seems to work for me for now. I tried to debug the issue, but got lost in the bitcoin and segwit protocol.
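To make the failure mode concrete, here is a short decode of that input against the raw hex above (a sketch assuming standard Bitcoin script encoding; the variable names are illustrative, not Electrum's API):

```python
# scriptSig of the only input: a single 34-byte push.
script_sig = bytes.fromhex(
    "2200202814720f16329ab81cb8867c4d447bd13255931f23e6655944c9ada1797fcf88"
)
item = script_sig[1:]                        # the pushed item
assert item[0] == 0x00 and len(item) == 34   # OP_0 + 32-byte program
# => a witness v0 P2WSH program nested in P2SH ("p2wsh-p2sh").

# Last of the three witness items (the witness script), 25 bytes:
witness_script = bytes.fromhex(
    "76a914eda8660085bf607b82bd18560ca8f3a9ec49178588ac"
)
# OP_DUP OP_HASH160 <20-byte hash> OP_EQUALVERIFY OP_CHECKSIG -- a
# p2pkh-style script, not multisig, so the multisig-only
# parse_redeemScript() finds no match and deserialization aborts.
```

This is exactly the case the patch above converts into `NotRecognizedRedeemScript`/`UnknownTxinType` (marking the input `'unknown'`) instead of crashing.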
spesmilo/electrum
diff --git a/lib/tests/test_transaction.py b/lib/tests/test_transaction.py index 99c6d001b..609006cdd 100644 --- a/lib/tests/test_transaction.py +++ b/lib/tests/test_transaction.py @@ -231,6 +231,10 @@ class TestTransaction(unittest.TestCase): tx = transaction.Transaction('010000000001010d350cefa29138de18a2d63a93cffda63721b07a6ecfa80a902f9514104b55ca0000000000fdffffff012a4a824a00000000160014b869999d342a5d42d6dc7af1efc28456da40297a024730440220475bb55814a52ea1036919e4408218c693b8bf93637b9f54c821b5baa3b846e102207276ed7a79493142c11fb01808a4142bbdd525ae7bdccdf8ecb7b8e3c856b4d90121024cdeaca7a53a7e23a1edbe9260794eaa83063534b5f111ee3c67d8b0cb88f0eec8010000') self.assertEqual('51087ece75c697cc872d2e643d646b0f3e1f2666fa1820b7bff4343d50dd680e', tx.txid()) + def test_txid_input_p2wsh_p2sh_not_multisig(self): + tx = transaction.Transaction('0100000000010160f84fdcda039c3ca1b20038adea2d49a53db92f7c467e8def13734232bb610804000000232200202814720f16329ab81cb8867c4d447bd13255931f23e6655944c9ada1797fcf88ffffffff0ba3dcfc04000000001976a91488124a57c548c9e7b1dd687455af803bd5765dea88acc9f44900000000001976a914da55045a0ccd40a56ce861946d13eb861eb5f2d788ac49825e000000000017a914ca34d4b190e36479aa6e0023cfe0a8537c6aa8dd87680c0d00000000001976a914651102524c424b2e7c44787c4f21e4c54dffafc088acf02fa9000000000017a914ee6c596e6f7066466d778d4f9ba633a564a6e95d874d250900000000001976a9146ca7976b48c04fd23867748382ee8401b1d27c2988acf5119600000000001976a914cf47d5dcdba02fd547c600697097252d38c3214a88ace08a12000000000017a914017bef79d92d5ec08c051786bad317e5dd3befcf87e3d76201000000001976a9148ec1b88b66d142bcbdb42797a0fd402c23e0eec288ac718f6900000000001976a914e66344472a224ce6f843f2989accf435ae6a808988ac65e51300000000001976a914cad6717c13a2079066f876933834210ebbe68c3f88ac0347304402201a4907c4706104320313e182ecbb1b265b2d023a79586671386de86bb47461590220472c3db9fc99a728ebb9b555a72e3481d20b181bd059a9c1acadfb853d90c96c01210338a46f2a54112fef8803c8478bc17e5f8fc6a5ec276903a946c1fafb2e3a8b181976a914eda8660085bf607b82bd18560ca8f3a9ec49178588ac00000000') + self.assertEqual('e9933221a150f78f9f224899f8568ff6422ffcc28ca3d53d87936368ff7c4b1d', tx.txid()) + class NetworkMock(object):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 1 }
2.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc python3-pyqt5 pyqt5-dev-tools protobuf-compiler" ], "python": "3.9", "reqs_path": [ "contrib/requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2017.11.5 chardet==3.0.4 dnspython==1.15.0 ecdsa==0.13 -e git+https://github.com/spesmilo/electrum.git@743ef9ec8f1e69c56f587359f00de19f4f05ff0a#egg=Electrum exceptiongroup==1.2.2 idna==2.6 iniconfig==2.1.0 jsonrpclib-pelix==0.3.1 packaging==24.2 pbkdf2==1.3 pluggy==1.5.0 protobuf==3.5.0.post1 pyaes==1.6.1 PySocks==1.6.7 pytest==8.3.5 qrcode==5.3 requests==2.18.4 six==1.11.0 tomli==2.2.1 urllib3==1.22
name: electrum channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2017.11.5 - chardet==3.0.4 - dnspython==1.15.0 - ecdsa==0.13 - exceptiongroup==1.2.2 - idna==2.6 - iniconfig==2.1.0 - jsonrpclib-pelix==0.3.1 - packaging==24.2 - pbkdf2==1.3 - pluggy==1.5.0 - protobuf==3.5.0.post1 - pyaes==1.6.1 - pysocks==1.6.7 - pytest==8.3.5 - qrcode==5.3 - requests==2.18.4 - six==1.11.0 - tomli==2.2.1 - urllib3==1.22 prefix: /opt/conda/envs/electrum
[ "lib/tests/test_transaction.py::TestTransaction::test_txid_input_p2wsh_p2sh_not_multisig" ]
[]
[ "lib/tests/test_transaction.py::TestBCDataStream::test_bytes", "lib/tests/test_transaction.py::TestBCDataStream::test_compact_size", "lib/tests/test_transaction.py::TestBCDataStream::test_string", "lib/tests/test_transaction.py::TestTransaction::test_errors", "lib/tests/test_transaction.py::TestTransaction::test_estimated_output_size", "lib/tests/test_transaction.py::TestTransaction::test_parse_xpub", "lib/tests/test_transaction.py::TestTransaction::test_tx_signed", "lib/tests/test_transaction.py::TestTransaction::test_tx_signed_segwit", "lib/tests/test_transaction.py::TestTransaction::test_tx_unsigned", "lib/tests/test_transaction.py::TestTransaction::test_txid_coinbase_to_p2pk", "lib/tests/test_transaction.py::TestTransaction::test_txid_coinbase_to_p2pkh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2pk_to_p2pkh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2pk_to_p2sh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2pk_to_p2wpkh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2pkh_to_p2pkh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2pkh_to_p2sh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2pkh_to_p2wpkh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2sh_to_p2pkh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2sh_to_p2sh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2sh_to_p2wpkh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2wpkh_to_p2pkh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2wpkh_to_p2sh", "lib/tests/test_transaction.py::TestTransaction::test_txid_p2wpkh_to_p2wpkh", "lib/tests/test_transaction.py::TestTransaction::test_txid_segwit_coinbase_to_p2pk", "lib/tests/test_transaction.py::TestTransaction::test_txid_segwit_coinbase_to_p2pkh", "lib/tests/test_transaction.py::TestTransaction::test_version_field" ]
[]
MIT License
2,095
[ "lib/transaction.py" ]
[ "lib/transaction.py" ]
geopandas__geopandas-656
f70a66918df086bd0fca37f03e160224f400fe14
2018-01-29 03:56:04
caefd7562a5cfd80cc86b37796a22f4bfa3aa9d2
codecov[bot]: Codecov report on #656: merging into master will **not change** coverage; the diff coverage is `100%`.

```diff
@@           Coverage Diff           @@
##           master     #656   +/-  ##
=======================================
  Coverage   94.83%   94.83%
=======================================
  Files          14       14
  Lines        1065     1065
=======================================
  Hits         1010     1010
  Misses         55       55
```

Impacted file: `geopandas/plotting.py` — `93.93% <100%> (ø)`.

jdmcbr: @martinfleis @jorisvandenbossche I got this at least to a mergeable state since I saw it came up recently. It seems like something along these lines would still be useful, though maybe in a better way than what I did here.

martinfleis: I fully support this PR. Keeping these plotting methods part of our public API was probably never intended, and we do not approach them as such anyway.

martinfleis: I have merged in changes from master and added a test for the DeprecationWarning. Once green we are ready to merge, as there were no objections and it is not even a breaking change (for now).
diff --git a/geopandas/plotting.py b/geopandas/plotting.py index f915a569..6b0b096d 100644 --- a/geopandas/plotting.py +++ b/geopandas/plotting.py @@ -6,6 +6,21 @@ import pandas as pd import geopandas +def deprecated(new): + """Helper to provide deprecation warning.""" + + def old(*args, **kwargs): + warnings.warn( + "{} is intended for internal ".format(new.__name__[1:]) + + "use only, and will be deprecated.", + DeprecationWarning, + stacklevel=2, + ) + new(*args, **kwargs) + + return old + + def _flatten_multi_geoms(geoms, prefix="Multi"): """ Returns Series like geoms and index, except that any Multi geometries @@ -17,7 +32,6 @@ def _flatten_multi_geoms(geoms, prefix="Multi"): Returns ------- - components : list of geometry component_index : index array @@ -40,7 +54,7 @@ def _flatten_multi_geoms(geoms, prefix="Multi"): return components, np.array(component_index) -def plot_polygon_collection( +def _plot_polygon_collection( ax, geoms, values=None, color=None, cmap=None, vmin=None, vmax=None, **kwargs ): """ @@ -48,32 +62,25 @@ def plot_polygon_collection( Parameters ---------- - ax : matplotlib.axes.Axes where shapes will be plotted - geoms : a sequence of `N` Polygons and/or MultiPolygons (can be mixed) values : a sequence of `N` values, optional Values will be mapped to colors using vmin/vmax/cmap. They should have 1:1 correspondence with the geometries (not their components). Otherwise follows `color` / `facecolor` kwargs. - edgecolor : single color or sequence of `N` colors Color for the edge of the polygons - facecolor : single color or sequence of `N` colors Color to fill the polygons. Cannot be used together with `values`. - color : single color or sequence of `N` colors Sets both `edgecolor` and `facecolor` - **kwargs Additional keyword arguments passed to the collection Returns ------- - collection : matplotlib.collections.Collection that was plotted """ @@ -130,7 +137,10 @@ def plot_polygon_collection( return collection -def plot_linestring_collection( +plot_polygon_collection = deprecated(_plot_polygon_collection) + + +def _plot_linestring_collection( ax, geoms, values=None, color=None, cmap=None, vmin=None, vmax=None, **kwargs ): """ @@ -138,25 +148,19 @@ def plot_linestring_collection( Parameters ---------- - ax : matplotlib.axes.Axes where shapes will be plotted - geoms : a sequence of `N` LineStrings and/or MultiLineStrings (can be mixed) - values : a sequence of `N` values, optional Values will be mapped to colors using vmin/vmax/cmap. They should have 1:1 correspondence with the geometries (not their components). - color : single color or sequence of `N` colors Cannot be used together with `values`. Returns ------- - collection : matplotlib.collections.Collection that was plotted - """ from matplotlib.collections import LineCollection from matplotlib.colors import is_color_like @@ -194,7 +198,10 @@ def plot_linestring_collection( return collection -def plot_point_collection( +plot_linestring_collection = deprecated(_plot_linestring_collection) + + +def _plot_point_collection( ax, geoms, values=None, @@ -264,6 +271,9 @@ def plot_point_collection( return collection +plot_point_collection = deprecated(_plot_point_collection) + + def plot_series(s, cmap=None, color=None, ax=None, figsize=None, **style_kwds): """ Plot a GeoSeries. 
@@ -369,7 +379,7 @@ def plot_series(s, cmap=None, color=None, ax=None, figsize=None, **style_kwds): facecolor = color values_ = values[poly_idx] if cmap else None - plot_polygon_collection( + _plot_polygon_collection( ax, polys, values_, facecolor=facecolor, cmap=cmap, **style_kwds ) @@ -377,7 +387,7 @@ def plot_series(s, cmap=None, color=None, ax=None, figsize=None, **style_kwds): lines = expl_series[line_idx] if not lines.empty: values_ = values[line_idx] if cmap else None - plot_linestring_collection( + _plot_linestring_collection( ax, lines, values_, color=color, cmap=cmap, **style_kwds ) @@ -385,7 +395,9 @@ def plot_series(s, cmap=None, color=None, ax=None, figsize=None, **style_kwds): points = expl_series[point_idx] if not points.empty: values_ = values[point_idx] if cmap else None - plot_point_collection(ax, points, values_, color=color, cmap=cmap, **style_kwds) + _plot_point_collection( + ax, points, values_, color=color, cmap=cmap, **style_kwds + ) plt.draw() return ax @@ -616,7 +628,7 @@ def plot_dataframe( polys = expl_series[poly_idx & np.invert(nan_idx)] subset = values[poly_idx & np.invert(nan_idx)] if not polys.empty: - plot_polygon_collection( + _plot_polygon_collection( ax, polys, subset, vmin=mn, vmax=mx, cmap=cmap, **style_kwds ) @@ -624,7 +636,7 @@ def plot_dataframe( lines = expl_series[line_idx & np.invert(nan_idx)] subset = values[line_idx & np.invert(nan_idx)] if not lines.empty: - plot_linestring_collection( + _plot_linestring_collection( ax, lines, subset, vmin=mn, vmax=mx, cmap=cmap, **style_kwds ) @@ -635,7 +647,7 @@ def plot_dataframe( if isinstance(markersize, np.ndarray): markersize = np.take(markersize, multiindex, axis=0) markersize = markersize[point_idx & np.invert(nan_idx)] - plot_point_collection( + _plot_point_collection( ax, points, subset, @@ -746,7 +758,6 @@ def _mapclassify_choro(values, scheme, **classification_kwds): binning Binning objects that holds the Series with values replaced with class identifier and the bins. - """ try: import mapclassify.classifiers as classifiers
`geopandas.plotting.plot_point_collection` raises an `AttributeError` when passed bare geometries instead of a `GeoSeries`. Calling `plot_point_collection(ax, shapely.geometry.Point(1, 2))` fails at line 193 (`x = geoms.x.values`) with `AttributeError: 'float' object has no attribute 'values'`, and calling `plot_point_collection(ax, [shapely.geometry.Point(1, 2), shapely.geometry.Point(3, 3)])` fails at the same line with `AttributeError: 'list' object has no attribute 'x'`.
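A minimal sketch of the failure mode and the working call (not part of the record; assumes a standard geopandas/shapely/matplotlib setup, and after the patch above the public name additionally emits a `DeprecationWarning`):

```python
# The helper reads geoms.x.values, so it needs a GeoSeries, whose .x/.y
# accessors are pandas Series exposing .values.
import matplotlib.pyplot as plt
from shapely.geometry import Point
from geopandas import GeoSeries
from geopandas.plotting import plot_point_collection

fig, ax = plt.subplots()

# Works: a GeoSeries of points.
plot_point_collection(ax, GeoSeries([Point(1, 2), Point(3, 3)]))

# Fails as in the report:
# plot_point_collection(ax, Point(1, 2))                 # 'float' has no .values
# plot_point_collection(ax, [Point(1, 2), Point(3, 3)])  # 'list' has no .x
```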
geopandas/geopandas
diff --git a/geopandas/tests/test_plotting.py b/geopandas/tests/test_plotting.py index 306e3b6c..5ffacf1c 100644 --- a/geopandas/tests/test_plotting.py +++ b/geopandas/tests/test_plotting.py @@ -744,40 +744,40 @@ class TestPlotCollections: # failing with matplotlib 1.4.3 (edge stays black even when specified) pytest.importorskip("matplotlib", "1.5.0") - from geopandas.plotting import plot_point_collection + from geopandas.plotting import _plot_point_collection, plot_point_collection from matplotlib.collections import PathCollection fig, ax = plt.subplots() - coll = plot_point_collection(ax, self.points) + coll = _plot_point_collection(ax, self.points) assert isinstance(coll, PathCollection) ax.cla() # default: single default matplotlib color - coll = plot_point_collection(ax, self.points) + coll = _plot_point_collection(ax, self.points) _check_colors(self.N, coll.get_facecolors(), [MPL_DFT_COLOR] * self.N) # edgecolor depends on matplotlib version # _check_colors(self.N, coll.get_edgecolors(), [MPL_DFT_COLOR]*self.N) ax.cla() # specify single other color - coll = plot_point_collection(ax, self.points, color="g") + coll = _plot_point_collection(ax, self.points, color="g") _check_colors(self.N, coll.get_facecolors(), ["g"] * self.N) _check_colors(self.N, coll.get_edgecolors(), ["g"] * self.N) ax.cla() # specify edgecolor/facecolor - coll = plot_point_collection(ax, self.points, facecolor="g", edgecolor="r") + coll = _plot_point_collection(ax, self.points, facecolor="g", edgecolor="r") _check_colors(self.N, coll.get_facecolors(), ["g"] * self.N) _check_colors(self.N, coll.get_edgecolors(), ["r"] * self.N) ax.cla() # list of colors - coll = plot_point_collection(ax, self.points, color=["r", "g", "b"]) + coll = _plot_point_collection(ax, self.points, color=["r", "g", "b"]) _check_colors(self.N, coll.get_facecolors(), ["r", "g", "b"]) _check_colors(self.N, coll.get_edgecolors(), ["r", "g", "b"]) ax.cla() - coll = plot_point_collection( + coll = _plot_point_collection( ax, self.points, color=[(0.5, 0.5, 0.5, 0.5), (0.1, 0.2, 0.3, 0.5), (0.4, 0.5, 0.6, 0.5)], @@ -796,14 +796,18 @@ class TestPlotCollections: # not a color with pytest.raises(TypeError): - plot_point_collection(ax, self.points, color="not color") + _plot_point_collection(ax, self.points, color="not color") + + # check DeprecationWarning + with pytest.warns(DeprecationWarning): + plot_point_collection(ax, self.points) def test_points_values(self): - from geopandas.plotting import plot_point_collection + from geopandas.plotting import _plot_point_collection # default colormap fig, ax = plt.subplots() - coll = plot_point_collection(ax, self.points, self.values) + coll = _plot_point_collection(ax, self.points, self.values) fig.canvas.draw_idle() cmap = plt.get_cmap() expected_colors = cmap(np.arange(self.N) / (self.N - 1)) @@ -812,36 +816,39 @@ class TestPlotCollections: # _check_colors(self.N, coll.get_edgecolors(), expected_colors) def test_linestrings(self): - from geopandas.plotting import plot_linestring_collection + from geopandas.plotting import ( + _plot_linestring_collection, + plot_linestring_collection, + ) from matplotlib.collections import LineCollection fig, ax = plt.subplots() - coll = plot_linestring_collection(ax, self.lines) + coll = _plot_linestring_collection(ax, self.lines) assert isinstance(coll, LineCollection) ax.cla() # default: single default matplotlib color - coll = plot_linestring_collection(ax, self.lines) + coll = _plot_linestring_collection(ax, self.lines) _check_colors(self.N, coll.get_color(), 
[MPL_DFT_COLOR] * self.N) ax.cla() # specify single other color - coll = plot_linestring_collection(ax, self.lines, color="g") + coll = _plot_linestring_collection(ax, self.lines, color="g") _check_colors(self.N, coll.get_colors(), ["g"] * self.N) ax.cla() # specify edgecolor / facecolor - coll = plot_linestring_collection(ax, self.lines, facecolor="g", edgecolor="r") + coll = _plot_linestring_collection(ax, self.lines, facecolor="g", edgecolor="r") _check_colors(self.N, coll.get_facecolors(), ["g"] * self.N) _check_colors(self.N, coll.get_edgecolors(), ["r"] * self.N) ax.cla() # list of colors - coll = plot_linestring_collection(ax, self.lines, color=["r", "g", "b"]) + coll = _plot_linestring_collection(ax, self.lines, color=["r", "g", "b"]) _check_colors(self.N, coll.get_colors(), ["r", "g", "b"]) ax.cla() - coll = plot_linestring_collection( + coll = _plot_linestring_collection( ax, self.lines, color=[(0.5, 0.5, 0.5, 0.5), (0.1, 0.2, 0.3, 0.5), (0.4, 0.5, 0.6, 0.5)], @@ -854,7 +861,7 @@ class TestPlotCollections: ax.cla() # pass through of kwargs - coll = plot_linestring_collection(ax, self.lines, linestyle="--", linewidth=1) + coll = _plot_linestring_collection(ax, self.lines, linestyle="--", linewidth=1) exp_ls = _style_to_linestring_onoffseq("dashed", 1) res_ls = coll.get_linestyle()[0] assert res_ls[0] == exp_ls[0] @@ -863,15 +870,19 @@ class TestPlotCollections: # not a color with pytest.raises(TypeError): - plot_linestring_collection(ax, self.lines, color="not color") + _plot_linestring_collection(ax, self.lines, color="not color") + + # check DeprecationWarning + with pytest.warns(DeprecationWarning): + plot_linestring_collection(ax, self.lines) def test_linestrings_values(self): - from geopandas.plotting import plot_linestring_collection + from geopandas.plotting import _plot_linestring_collection fig, ax = plt.subplots() # default colormap - coll = plot_linestring_collection(ax, self.lines, self.values) + coll = _plot_linestring_collection(ax, self.lines, self.values) fig.canvas.draw_idle() cmap = plt.get_cmap() expected_colors = cmap(np.arange(self.N) / (self.N - 1)) @@ -879,7 +890,7 @@ class TestPlotCollections: ax.cla() # specify colormap - coll = plot_linestring_collection(ax, self.lines, self.values, cmap="RdBu") + coll = _plot_linestring_collection(ax, self.lines, self.values, cmap="RdBu") fig.canvas.draw_idle() cmap = plt.get_cmap("RdBu") expected_colors = cmap(np.arange(self.N) / (self.N - 1)) @@ -887,7 +898,7 @@ class TestPlotCollections: ax.cla() # specify vmin/vmax - coll = plot_linestring_collection(ax, self.lines, self.values, vmin=3, vmax=5) + coll = _plot_linestring_collection(ax, self.lines, self.values, vmin=3, vmax=5) fig.canvas.draw_idle() cmap = plt.get_cmap() expected_colors = cmap([0]) @@ -895,33 +906,33 @@ class TestPlotCollections: ax.cla() def test_polygons(self): - from geopandas.plotting import plot_polygon_collection + from geopandas.plotting import _plot_polygon_collection, plot_polygon_collection from matplotlib.collections import PatchCollection fig, ax = plt.subplots() - coll = plot_polygon_collection(ax, self.polygons) + coll = _plot_polygon_collection(ax, self.polygons) assert isinstance(coll, PatchCollection) ax.cla() # default: single default matplotlib color - coll = plot_polygon_collection(ax, self.polygons) + coll = _plot_polygon_collection(ax, self.polygons) _check_colors(self.N, coll.get_facecolor(), [MPL_DFT_COLOR] * self.N) _check_colors(self.N, coll.get_edgecolor(), ["k"] * self.N) ax.cla() # default: color sets both facecolor and 
edgecolor - coll = plot_polygon_collection(ax, self.polygons, color="g") + coll = _plot_polygon_collection(ax, self.polygons, color="g") _check_colors(self.N, coll.get_facecolor(), ["g"] * self.N) _check_colors(self.N, coll.get_edgecolor(), ["g"] * self.N) ax.cla() # default: color can be passed as a list - coll = plot_polygon_collection(ax, self.polygons, color=["g", "b", "r"]) + coll = _plot_polygon_collection(ax, self.polygons, color=["g", "b", "r"]) _check_colors(self.N, coll.get_facecolor(), ["g", "b", "r"]) _check_colors(self.N, coll.get_edgecolor(), ["g", "b", "r"]) ax.cla() - coll = plot_polygon_collection( + coll = _plot_polygon_collection( ax, self.polygons, color=[(0.5, 0.5, 0.5, 0.5), (0.1, 0.2, 0.3, 0.5), (0.4, 0.5, 0.6, 0.5)], @@ -939,28 +950,31 @@ class TestPlotCollections: ax.cla() # only setting facecolor keeps default for edgecolor - coll = plot_polygon_collection(ax, self.polygons, facecolor="g") + coll = _plot_polygon_collection(ax, self.polygons, facecolor="g") _check_colors(self.N, coll.get_facecolor(), ["g"] * self.N) _check_colors(self.N, coll.get_edgecolor(), ["k"] * self.N) ax.cla() # custom facecolor and edgecolor - coll = plot_polygon_collection(ax, self.polygons, facecolor="g", edgecolor="r") + coll = _plot_polygon_collection(ax, self.polygons, facecolor="g", edgecolor="r") _check_colors(self.N, coll.get_facecolor(), ["g"] * self.N) _check_colors(self.N, coll.get_edgecolor(), ["r"] * self.N) ax.cla() # not a color with pytest.raises(TypeError): - plot_polygon_collection(ax, self.polygons, color="not color") + _plot_polygon_collection(ax, self.polygons, color="not color") + # check DeprecationWarning + with pytest.warns(DeprecationWarning): + plot_polygon_collection(ax, self.polygons) def test_polygons_values(self): - from geopandas.plotting import plot_polygon_collection + from geopandas.plotting import _plot_polygon_collection fig, ax = plt.subplots() # default colormap, edge is still black by default - coll = plot_polygon_collection(ax, self.polygons, self.values) + coll = _plot_polygon_collection(ax, self.polygons, self.values) fig.canvas.draw_idle() cmap = plt.get_cmap() exp_colors = cmap(np.arange(self.N) / (self.N - 1)) @@ -970,7 +984,7 @@ class TestPlotCollections: ax.cla() # specify colormap - coll = plot_polygon_collection(ax, self.polygons, self.values, cmap="RdBu") + coll = _plot_polygon_collection(ax, self.polygons, self.values, cmap="RdBu") fig.canvas.draw_idle() cmap = plt.get_cmap("RdBu") exp_colors = cmap(np.arange(self.N) / (self.N - 1)) @@ -978,7 +992,7 @@ class TestPlotCollections: ax.cla() # specify vmin/vmax - coll = plot_polygon_collection(ax, self.polygons, self.values, vmin=3, vmax=5) + coll = _plot_polygon_collection(ax, self.polygons, self.values, vmin=3, vmax=5) fig.canvas.draw_idle() cmap = plt.get_cmap() exp_colors = cmap([0]) @@ -986,7 +1000,7 @@ class TestPlotCollections: ax.cla() # override edgecolor - coll = plot_polygon_collection(ax, self.polygons, self.values, edgecolor="g") + coll = _plot_polygon_collection(ax, self.polygons, self.values, edgecolor="g") fig.canvas.draw_idle() cmap = plt.get_cmap() exp_colors = cmap(np.arange(self.N) / (self.N - 1))
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 1 }, "num_modified_files": 1 }
0.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "black", "flake8", "descartes", "matplotlib", "rtree" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==25.3.0 black==25.1.0 certifi==2025.1.31 click==8.1.8 click-plugins==1.1.1 cligj==0.7.2 contourpy==1.3.0 cycler==0.12.1 Cython==3.0.12 descartes==1.1.0 exceptiongroup==1.2.2 fiona==1.10.1 flake8==7.2.0 fonttools==4.56.0 -e git+https://github.com/geopandas/geopandas.git@f70a66918df086bd0fca37f03e160224f400fe14#egg=geopandas importlib_metadata==8.6.1 importlib_resources==6.5.2 iniconfig==2.1.0 kiwisolver==1.4.7 matplotlib==3.9.4 mccabe==0.7.0 mypy-extensions==1.0.0 numpy==2.0.2 packaging==24.2 pandas==2.2.3 pathspec==0.12.1 pillow==11.1.0 platformdirs==4.3.7 pluggy==1.5.0 pycodestyle==2.13.0 pyflakes==3.3.2 pyparsing==3.2.3 pyproj==3.6.1 pytest==8.3.5 python-dateutil==2.9.0.post0 pytz==2025.2 rtree==1.4.0 shapely==2.0.7 six==1.17.0 tomli==2.2.1 typing_extensions==4.13.0 tzdata==2025.2 zipp==3.21.0
name: geopandas channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==25.3.0 - black==25.1.0 - certifi==2025.1.31 - click==8.1.8 - click-plugins==1.1.1 - cligj==0.7.2 - contourpy==1.3.0 - cycler==0.12.1 - cython==3.0.12 - descartes==1.1.0 - exceptiongroup==1.2.2 - fiona==1.10.1 - flake8==7.2.0 - fonttools==4.56.0 - importlib-metadata==8.6.1 - importlib-resources==6.5.2 - iniconfig==2.1.0 - kiwisolver==1.4.7 - matplotlib==3.9.4 - mccabe==0.7.0 - mypy-extensions==1.0.0 - numpy==2.0.2 - packaging==24.2 - pandas==2.2.3 - pathspec==0.12.1 - pillow==11.1.0 - platformdirs==4.3.7 - pluggy==1.5.0 - pycodestyle==2.13.0 - pyflakes==3.3.2 - pyparsing==3.2.3 - pyproj==3.6.1 - pytest==8.3.5 - python-dateutil==2.9.0.post0 - pytz==2025.2 - rtree==1.4.0 - shapely==2.0.7 - six==1.17.0 - tomli==2.2.1 - typing-extensions==4.13.0 - tzdata==2025.2 - zipp==3.21.0 prefix: /opt/conda/envs/geopandas
[ "geopandas/tests/test_plotting.py::TestPlotCollections::test_points", "geopandas/tests/test_plotting.py::TestPlotCollections::test_points_values" ]
[ "geopandas/tests/test_plotting.py::TestPointPlotting::test_legend", "geopandas/tests/test_plotting.py::TestPointPlotting::test_multipoints", "geopandas/tests/test_plotting.py::TestLineStringPlotting::test_single_color", "geopandas/tests/test_plotting.py::TestLineStringPlotting::test_style_kwargs", "geopandas/tests/test_plotting.py::TestLineStringPlotting::test_subplots_norm", "geopandas/tests/test_plotting.py::TestLineStringPlotting::test_multilinestrings", "geopandas/tests/test_plotting.py::TestPolygonZPlotting::test_plot", "geopandas/tests/test_plotting.py::TestGeometryCollectionPlotting::test_colors", "geopandas/tests/test_plotting.py::TestGeometryCollectionPlotting::test_values", "geopandas/tests/test_plotting.py::TestNonuniformGeometryPlotting::test_colors", "geopandas/tests/test_plotting.py::TestNonuniformGeometryPlotting::test_style_kwargs", "geopandas/tests/test_plotting.py::TestPlotCollections::test_linestrings", "geopandas/tests/test_plotting.py::TestPlotCollections::test_linestrings_values", "geopandas/tests/test_plotting.py::TestPlotCollections::test_polygons", "geopandas/tests/test_plotting.py::TestPlotCollections::test_polygons_values", "geopandas/tests/test_plotting.py::test_column_values" ]
[ "geopandas/tests/test_plotting.py::TestPointPlotting::test_figsize", "geopandas/tests/test_plotting.py::TestPointPlotting::test_default_colors", "geopandas/tests/test_plotting.py::TestPointPlotting::test_colormap", "geopandas/tests/test_plotting.py::TestPointPlotting::test_single_color", "geopandas/tests/test_plotting.py::TestPointPlotting::test_markersize", "geopandas/tests/test_plotting.py::TestPointPlotting::test_style_kwargs", "geopandas/tests/test_plotting.py::TestPointPlotting::test_subplots_norm", "geopandas/tests/test_plotting.py::TestPointPlotting::test_empty_plot", "geopandas/tests/test_plotting.py::TestPointPlotting::test_misssing", "geopandas/tests/test_plotting.py::TestPointZPlotting::test_plot" ]
[]
BSD 3-Clause "New" or "Revised" License
2,096
[ "geopandas/plotting.py" ]
[ "geopandas/plotting.py" ]
pydata__sparse-96
b9fc91cb68b37757f37881843b2e420a3c07f9be
2018-01-29 07:07:22
b9fc91cb68b37757f37881843b2e420a3c07f9be
diff --git a/docs/operations.rst b/docs/operations.rst index 0e0b5e9..7e4ef2b 100644 --- a/docs/operations.rst +++ b/docs/operations.rst @@ -288,6 +288,6 @@ All of the following will raise an :obj:`IndexError`, like in Numpy 1.13 and lat Other Operations ---------------- :obj:`COO` arrays support a number of other common operations. Among them are -:obj:`dot`, :obj:`tensordot`, :obj:`concatenate` and :obj:`stack`, -:obj:`COO.transpose` and :obj:`COO.reshape`. You can view the full list on the -API reference page for :obj:`sparse` +:obj:`dot <COO.dot>`, :obj:`tensordot <COO.tensordot>`, :obj:`concatenate <COO.concatenate>` +and :obj:`stack <COO.stack>`, :obj:`COO.transpose <COO.transpose>` and :obj:`reshape <COO.reshape>`. +You can view the full list on the API reference page for :obj:`sparse`. diff --git a/sparse/coo.py b/sparse/coo.py index d85045c..71dd450 100644 --- a/sparse/coo.py +++ b/sparse/coo.py @@ -1738,7 +1738,17 @@ def dot(a, b): raise NotImplementedError( "Cannot perform dot product on types %s, %s" % (type(a), type(b))) - return tensordot(a, b, axes=((a.ndim - 1,), (b.ndim - 2,))) + + if a.ndim == 1 and b.ndim == 1: + return (a * b).sum() + + a_axis = -1 + b_axis = -2 + + if b.ndim == 1: + b_axis = -1 + + return tensordot(a, b, axes=(a_axis, b_axis)) def _dot(a, b):
Dot should distinguish between vectors and matrices. For example, if `x` is a vector and `y` is a matrix, `dot(x, y)` should recognize the difference and return a vector; likewise for a matrix–vector product, and for two vectors (which should reduce to a scalar).
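For reference, a short NumPy sketch (illustrative, not from the record) of the `dot` shape rules the issue asks `sparse` to mirror — two vectors reduce to a scalar, and a 1-D operand drops its axis from the result, which is exactly what the patch encodes via `(a * b).sum()` and the `b_axis = -1` special case:

```python
import numpy as np

a = np.arange(5)          # 1-D: vector
B = np.ones((5, 6))       # 2-D: matrix
C = np.ones((4, 5))

np.dot(a, a)              # vector . vector -> scalar
np.dot(a, B).shape        # vector . matrix -> (6,)
np.dot(C, a).shape        # matrix . vector -> (4,)
np.dot(C, B).shape        # matrix . matrix -> (4, 6)
```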
pydata/sparse
diff --git a/sparse/tests/test_coo.py b/sparse/tests/test_coo.py index ae3889c..b7fab97 100644 --- a/sparse/tests/test_coo.py +++ b/sparse/tests/test_coo.py @@ -158,10 +158,16 @@ def test_tensordot(a_shape, b_shape, axes): # assert isinstance(sparse.tensordot(a, sb, axes), COO) -def test_dot(): - import operator - sa = sparse.random((3, 4, 5), density=0.5) - sb = sparse.random((5, 6), density=0.5) [email protected]('a_shape, b_shape', [ + ((3, 4, 5), (5, 6)), + ((4, 5), (5, 6)), + ((5,), (5, 6)), + ((4, 5), (5,)), + ((5,), (5,)), +]) +def test_dot(a_shape, b_shape): + sa = sparse.random(a_shape, density=0.5) + sb = sparse.random(b_shape, density=0.5) a = sa.todense() b = sb.todense()
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
0.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[tests]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-flake8", "packaging" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 coverage==6.2 distlib==0.3.9 filelock==3.4.1 flake8==5.0.4 importlib-metadata==4.2.0 importlib-resources==5.4.0 iniconfig==1.1.1 mccabe==0.7.0 numpy==1.19.5 packaging==21.3 platformdirs==2.4.0 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-flake8==1.1.1 scipy==1.5.4 six==1.17.0 -e git+https://github.com/pydata/sparse.git@b9fc91cb68b37757f37881843b2e420a3c07f9be#egg=sparse toml==0.10.2 tomli==1.2.3 tox==3.28.0 typing_extensions==4.1.1 virtualenv==20.16.2 zipp==3.6.0
name: sparse channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - coverage==6.2 - distlib==0.3.9 - filelock==3.4.1 - flake8==5.0.4 - importlib-metadata==4.2.0 - importlib-resources==5.4.0 - iniconfig==1.1.1 - mccabe==0.7.0 - numpy==1.19.5 - packaging==21.3 - platformdirs==2.4.0 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-flake8==1.1.1 - scipy==1.5.4 - six==1.17.0 - toml==0.10.2 - tomli==1.2.3 - tox==3.28.0 - typing-extensions==4.1.1 - virtualenv==20.16.2 - zipp==3.6.0 prefix: /opt/conda/envs/sparse
[ "sparse/tests/test_dok.py::test_setitem[shape0-index0-0.5826552618712125]", "sparse/tests/test_dok.py::test_setitem[shape1-index1-0.7460147285439795]", "sparse/tests/test_dok.py::test_setitem[shape3-1-0.46952317151598433]", "sparse/tests/test_dok.py::test_setitem[shape4-index4-0.9996877370935534]", "sparse/tests/test_dok.py::test_setitem[shape5-index5-0.2113884330951875]", "sparse/tests/test_dok.py::test_setitem[shape9-index9-0.33929608062088334]", "sparse/tests/test_dok.py::test_setitem[shape11-index11-0.967812819317291]", "sparse/tests/test_dok.py::test_setitem[shape13-index13-0.8106431250735691]" ]
[ "sparse/__init__.py::flake-8::FLAKE8", "sparse/compatibility.py::flake-8::FLAKE8", "sparse/coo.py::flake-8::FLAKE8", "sparse/dok.py::flake-8::FLAKE8", "sparse/slicing.py::flake-8::FLAKE8", "sparse/sparse_array.py::flake-8::FLAKE8", "sparse/utils.py::flake-8::FLAKE8", "sparse/tests/test_coo.py::flake-8::FLAKE8", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func2]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func3]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func4]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[func5]", "sparse/tests/test_dok.py::flake-8::FLAKE8" ]
[ "sparse/coo.py::sparse.coo.COO", "sparse/coo.py::sparse.coo.COO.T", "sparse/coo.py::sparse.coo.COO.__len__", "sparse/coo.py::sparse.coo.COO.dot", "sparse/coo.py::sparse.coo.COO.dtype", "sparse/coo.py::sparse.coo.COO.from_numpy", "sparse/coo.py::sparse.coo.COO.from_scipy_sparse", "sparse/coo.py::sparse.coo.COO.linear_loc", "sparse/coo.py::sparse.coo.COO.max", "sparse/coo.py::sparse.coo.COO.maybe_densify", "sparse/coo.py::sparse.coo.COO.min", "sparse/coo.py::sparse.coo.COO.nbytes", "sparse/coo.py::sparse.coo.COO.nnz", "sparse/coo.py::sparse.coo.COO.prod", "sparse/coo.py::sparse.coo.COO.reduce", "sparse/coo.py::sparse.coo.COO.reshape", "sparse/coo.py::sparse.coo.COO.sort_indices", "sparse/coo.py::sparse.coo.COO.sum", "sparse/coo.py::sparse.coo.COO.sum_duplicates", "sparse/coo.py::sparse.coo.COO.todense", "sparse/coo.py::sparse.coo.COO.transpose", "sparse/dok.py::sparse.dok.DOK", "sparse/dok.py::sparse.dok.DOK.from_coo", "sparse/dok.py::sparse.dok.DOK.from_numpy", "sparse/dok.py::sparse.dok.DOK.nnz", "sparse/dok.py::sparse.dok.DOK.to_coo", "sparse/dok.py::sparse.dok.DOK.todense", "sparse/slicing.py::sparse.slicing.check_index", "sparse/slicing.py::sparse.slicing.normalize_index", "sparse/slicing.py::sparse.slicing.normalize_slice", "sparse/slicing.py::sparse.slicing.posify_index", "sparse/slicing.py::sparse.slicing.replace_ellipsis", "sparse/slicing.py::sparse.slicing.sanitize_index", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.density", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.ndim", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.nnz", "sparse/sparse_array.py::sparse.sparse_array.SparseArray.size", "sparse/utils.py::sparse.utils.random", "sparse/tests/test_coo.py::test_reductions[True-None-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-None-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-0-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-0-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-1-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-1-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-2-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[True-2-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[True-axis4-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[True-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[True-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[True-axis4-prod-kwargs3-eqkwargs3]", 
"sparse/tests/test_coo.py::test_reductions[True-axis4-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-None-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-None-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-0-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-0-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-1-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-1-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-2-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-2-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_reductions[False-axis4-max-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_reductions[False-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_reductions[False-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_reductions[False-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_reductions[False-axis4-min-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-None-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-0-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-1-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-sum-kwargs2-eqkwargs2]", 
"sparse/tests/test_coo.py::test_ufunc_reductions[True-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-2-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[True-axis4-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-None-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-0-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-1-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-2-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-amax-kwargs0-eqkwargs0]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-sum-kwargs1-eqkwargs1]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-sum-kwargs2-eqkwargs2]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-prod-kwargs3-eqkwargs3]", "sparse/tests/test_coo.py::test_ufunc_reductions[False-axis4-amin-kwargs4-eqkwargs4]", "sparse/tests/test_coo.py::test_transpose[None]", "sparse/tests/test_coo.py::test_transpose[axis1]", "sparse/tests/test_coo.py::test_transpose[axis2]", "sparse/tests/test_coo.py::test_transpose[axis3]", "sparse/tests/test_coo.py::test_transpose[axis4]", "sparse/tests/test_coo.py::test_transpose[axis5]", "sparse/tests/test_coo.py::test_transpose[axis6]", "sparse/tests/test_coo.py::test_transpose_error[axis0]", "sparse/tests/test_coo.py::test_transpose_error[axis1]", "sparse/tests/test_coo.py::test_transpose_error[axis2]", "sparse/tests/test_coo.py::test_transpose_error[axis3]", "sparse/tests/test_coo.py::test_transpose_error[axis4]", "sparse/tests/test_coo.py::test_transpose_error[axis5]", "sparse/tests/test_coo.py::test_reshape[a0-b0]", "sparse/tests/test_coo.py::test_reshape[a1-b1]", "sparse/tests/test_coo.py::test_reshape[a2-b2]", 
"sparse/tests/test_coo.py::test_reshape[a3-b3]", "sparse/tests/test_coo.py::test_reshape[a4-b4]", "sparse/tests/test_coo.py::test_reshape[a5-b5]", "sparse/tests/test_coo.py::test_reshape[a6-b6]", "sparse/tests/test_coo.py::test_reshape[a7-b7]", "sparse/tests/test_coo.py::test_reshape[a8-b8]", "sparse/tests/test_coo.py::test_reshape[a9-b9]", "sparse/tests/test_coo.py::test_large_reshape", "sparse/tests/test_coo.py::test_reshape_same", "sparse/tests/test_coo.py::test_to_scipy_sparse", "sparse/tests/test_coo.py::test_tensordot[a_shape0-b_shape0-axes0]", "sparse/tests/test_coo.py::test_tensordot[a_shape1-b_shape1-axes1]", "sparse/tests/test_coo.py::test_tensordot[a_shape2-b_shape2-axes2]", "sparse/tests/test_coo.py::test_tensordot[a_shape3-b_shape3-axes3]", "sparse/tests/test_coo.py::test_tensordot[a_shape4-b_shape4-axes4]", "sparse/tests/test_coo.py::test_tensordot[a_shape5-b_shape5-axes5]", "sparse/tests/test_coo.py::test_tensordot[a_shape6-b_shape6-axes6]", "sparse/tests/test_coo.py::test_tensordot[a_shape7-b_shape7-axes7]", "sparse/tests/test_coo.py::test_tensordot[a_shape8-b_shape8-axes8]", "sparse/tests/test_coo.py::test_tensordot[a_shape9-b_shape9-0]", "sparse/tests/test_coo.py::test_dot[a_shape0-b_shape0]", "sparse/tests/test_coo.py::test_dot[a_shape1-b_shape1]", "sparse/tests/test_coo.py::test_dot[a_shape2-b_shape2]", "sparse/tests/test_coo.py::test_dot[a_shape3-b_shape3]", "sparse/tests/test_coo.py::test_dot[a_shape4-b_shape4]", "sparse/tests/test_coo.py::test_elemwise[expm1]", "sparse/tests/test_coo.py::test_elemwise[log1p]", "sparse/tests/test_coo.py::test_elemwise[sin]", "sparse/tests/test_coo.py::test_elemwise[tan]", "sparse/tests/test_coo.py::test_elemwise[sinh]", "sparse/tests/test_coo.py::test_elemwise[tanh]", "sparse/tests/test_coo.py::test_elemwise[floor]", "sparse/tests/test_coo.py::test_elemwise[ceil]", "sparse/tests/test_coo.py::test_elemwise[sqrt]", "sparse/tests/test_coo.py::test_elemwise[conjugate0]", "sparse/tests/test_coo.py::test_elemwise[round_]", "sparse/tests/test_coo.py::test_elemwise[rint]", "sparse/tests/test_coo.py::test_elemwise[<lambda>0]", "sparse/tests/test_coo.py::test_elemwise[conjugate1]", "sparse/tests/test_coo.py::test_elemwise[conjugate2]", "sparse/tests/test_coo.py::test_elemwise[<lambda>1]", "sparse/tests/test_coo.py::test_elemwise[abs]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape0-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape1-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-add]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape2-ne]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-mul]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-add]", 
"sparse/tests/test_coo.py::test_elemwise_binary[shape3-sub]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-gt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-lt]", "sparse/tests/test_coo.py::test_elemwise_binary[shape3-ne]", "sparse/tests/test_coo.py::test_auto_densification_fails[pow]", "sparse/tests/test_coo.py::test_auto_densification_fails[truediv]", "sparse/tests/test_coo.py::test_auto_densification_fails[floordiv]", "sparse/tests/test_coo.py::test_auto_densification_fails[ge]", "sparse/tests/test_coo.py::test_auto_densification_fails[le]", "sparse/tests/test_coo.py::test_auto_densification_fails[eq]", "sparse/tests/test_coo.py::test_auto_densification_fails[mod]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-mul-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-add-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-sub-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-pow-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-truediv-3]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-floordiv-4]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-gt-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-lt--5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-ne-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-ge-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-le--3]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-eq-1]", "sparse/tests/test_coo.py::test_elemwise_scalar[True-mod-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-mul-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-add-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-sub-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-pow-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-truediv-3]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-floordiv-4]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-gt-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-lt--5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-ne-0]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-ge-5]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-le--3]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-eq-1]", "sparse/tests/test_coo.py::test_elemwise_scalar[False-mod-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-mul-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-add-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-sub-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-gt--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-lt-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-ne-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-ge--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-le-3]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[True-eq-1]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-mul-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-add-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-sub-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-gt--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-lt-5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-ne-0]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-ge--5]", "sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-le-3]", 
"sparse/tests/test_coo.py::test_leftside_elemwise_scalar[False-eq-1]", "sparse/tests/test_coo.py::test_scalar_densification_fails[add-5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[sub--5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[pow--3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[truediv-0]", "sparse/tests/test_coo.py::test_scalar_densification_fails[floordiv-0]", "sparse/tests/test_coo.py::test_scalar_densification_fails[gt--5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[lt-5]", "sparse/tests/test_coo.py::test_scalar_densification_fails[ne-1]", "sparse/tests/test_coo.py::test_scalar_densification_fails[ge--3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[le-3]", "sparse/tests/test_coo.py::test_scalar_densification_fails[eq-0]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape0-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape1-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape2-xor]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-and_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-or_]", "sparse/tests/test_coo.py::test_bitwise_binary[shape3-xor]", "sparse/tests/test_coo.py::test_bitshift_binary[shape0-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape0-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape1-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape1-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape2-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape2-rshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape3-lshift]", "sparse/tests/test_coo.py::test_bitshift_binary[shape3-rshift]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape1-and_]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_scalar[shape3-and_]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape0-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape0-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape1-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape1-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape2-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape2-rshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape3-lshift]", "sparse/tests/test_coo.py::test_bitshift_scalar[shape3-rshift]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape0-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape1-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape2-invert]", "sparse/tests/test_coo.py::test_unary_bitwise_densification_fails[shape3-invert]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape0-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape0-xor]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape1-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape1-xor]", 
"sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape2-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape2-xor]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape3-or_]", "sparse/tests/test_coo.py::test_binary_bitwise_densification_fails[shape3-xor]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape0-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape0-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape1-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape1-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape2-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape2-rshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape3-lshift]", "sparse/tests/test_coo.py::test_binary_bitshift_densification_fails[shape3-rshift]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape0-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape1-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape2-xor]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-and_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-or_]", "sparse/tests/test_coo.py::test_bitwise_binary_bool[shape3-xor]", "sparse/tests/test_coo.py::test_numpy_mixed_binary[shape0-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_binary[shape1-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_binary[shape2-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_binary[shape3-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_binary_bitwise[shape0-and_]", "sparse/tests/test_coo.py::test_numpy_mixed_binary_bitwise[shape1-and_]", "sparse/tests/test_coo.py::test_numpy_mixed_binary_bitwise[shape2-and_]", "sparse/tests/test_coo.py::test_numpy_mixed_binary_bitwise[shape3-and_]", "sparse/tests/test_coo.py::test_elemwise_binary_empty", "sparse/tests/test_coo.py::test_gt", "sparse/tests/test_coo.py::test_slicing[0]", "sparse/tests/test_coo.py::test_slicing[1]", "sparse/tests/test_coo.py::test_slicing[-1]", "sparse/tests/test_coo.py::test_slicing[index3]", "sparse/tests/test_coo.py::test_slicing[index4]", "sparse/tests/test_coo.py::test_slicing[index5]", "sparse/tests/test_coo.py::test_slicing[index6]", "sparse/tests/test_coo.py::test_slicing[index7]", "sparse/tests/test_coo.py::test_slicing[index8]", "sparse/tests/test_coo.py::test_slicing[index9]", "sparse/tests/test_coo.py::test_slicing[index10]", "sparse/tests/test_coo.py::test_slicing[index11]", "sparse/tests/test_coo.py::test_slicing[index12]", "sparse/tests/test_coo.py::test_slicing[index13]", "sparse/tests/test_coo.py::test_slicing[index14]", "sparse/tests/test_coo.py::test_slicing[index15]", "sparse/tests/test_coo.py::test_slicing[index16]", "sparse/tests/test_coo.py::test_slicing[index17]", "sparse/tests/test_coo.py::test_slicing[index18]", "sparse/tests/test_coo.py::test_slicing[index19]", "sparse/tests/test_coo.py::test_slicing[index20]", "sparse/tests/test_coo.py::test_slicing[index21]", 
"sparse/tests/test_coo.py::test_slicing[index22]", "sparse/tests/test_coo.py::test_slicing[index23]", "sparse/tests/test_coo.py::test_slicing[index24]", "sparse/tests/test_coo.py::test_slicing[index25]", "sparse/tests/test_coo.py::test_slicing[index26]", "sparse/tests/test_coo.py::test_slicing[index27]", "sparse/tests/test_coo.py::test_slicing[index28]", "sparse/tests/test_coo.py::test_slicing[index29]", "sparse/tests/test_coo.py::test_slicing[index30]", "sparse/tests/test_coo.py::test_slicing[index31]", "sparse/tests/test_coo.py::test_slicing[index32]", "sparse/tests/test_coo.py::test_slicing[index33]", "sparse/tests/test_coo.py::test_slicing[index34]", "sparse/tests/test_coo.py::test_slicing[index35]", "sparse/tests/test_coo.py::test_slicing[index36]", "sparse/tests/test_coo.py::test_slicing[index37]", "sparse/tests/test_coo.py::test_slicing[index38]", "sparse/tests/test_coo.py::test_slicing[index39]", "sparse/tests/test_coo.py::test_slicing[index40]", "sparse/tests/test_coo.py::test_slicing[index41]", "sparse/tests/test_coo.py::test_slicing[index42]", "sparse/tests/test_coo.py::test_slicing[index43]", "sparse/tests/test_coo.py::test_slicing[index44]", "sparse/tests/test_coo.py::test_custom_dtype_slicing", "sparse/tests/test_coo.py::test_slicing_errors[index0]", "sparse/tests/test_coo.py::test_slicing_errors[index1]", "sparse/tests/test_coo.py::test_slicing_errors[index2]", "sparse/tests/test_coo.py::test_slicing_errors[5]", "sparse/tests/test_coo.py::test_slicing_errors[-5]", "sparse/tests/test_coo.py::test_slicing_errors[foo]", "sparse/tests/test_coo.py::test_slicing_errors[index6]", "sparse/tests/test_coo.py::test_canonical", "sparse/tests/test_coo.py::test_concatenate", "sparse/tests/test_coo.py::test_concatenate_mixed[stack-0]", "sparse/tests/test_coo.py::test_concatenate_mixed[stack-1]", "sparse/tests/test_coo.py::test_concatenate_mixed[concatenate-0]", "sparse/tests/test_coo.py::test_concatenate_mixed[concatenate-1]", "sparse/tests/test_coo.py::test_stack[0-shape0]", "sparse/tests/test_coo.py::test_stack[0-shape1]", "sparse/tests/test_coo.py::test_stack[0-shape2]", "sparse/tests/test_coo.py::test_stack[1-shape0]", "sparse/tests/test_coo.py::test_stack[1-shape1]", "sparse/tests/test_coo.py::test_stack[1-shape2]", "sparse/tests/test_coo.py::test_stack[-1-shape0]", "sparse/tests/test_coo.py::test_stack[-1-shape1]", "sparse/tests/test_coo.py::test_stack[-1-shape2]", "sparse/tests/test_coo.py::test_large_concat_stack", "sparse/tests/test_coo.py::test_coord_dtype", "sparse/tests/test_coo.py::test_addition", "sparse/tests/test_coo.py::test_addition_not_ok_when_large_and_sparse", "sparse/tests/test_coo.py::test_broadcasting[shape10-shape20-add]", "sparse/tests/test_coo.py::test_broadcasting[shape10-shape20-mul]", "sparse/tests/test_coo.py::test_broadcasting[shape11-shape21-add]", "sparse/tests/test_coo.py::test_broadcasting[shape11-shape21-mul]", "sparse/tests/test_coo.py::test_broadcasting[shape12-shape22-add]", "sparse/tests/test_coo.py::test_broadcasting[shape12-shape22-mul]", "sparse/tests/test_coo.py::test_broadcasting[shape13-shape23-add]", "sparse/tests/test_coo.py::test_broadcasting[shape13-shape23-mul]", "sparse/tests/test_coo.py::test_broadcasting[shape14-shape24-add]", "sparse/tests/test_coo.py::test_broadcasting[shape14-shape24-mul]", "sparse/tests/test_coo.py::test_broadcasting[shape15-shape25-add]", "sparse/tests/test_coo.py::test_broadcasting[shape15-shape25-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape10-shape20-mul]", 
"sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape11-shape21-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape12-shape22-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape13-shape23-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape14-shape24-mul]", "sparse/tests/test_coo.py::test_numpy_mixed_broadcasting[shape15-shape25-mul]", "sparse/tests/test_coo.py::test_broadcast_to[shape10-shape20]", "sparse/tests/test_coo.py::test_broadcast_to[shape11-shape21]", "sparse/tests/test_coo.py::test_broadcast_to[shape12-shape22]", "sparse/tests/test_coo.py::test_scalar_multiplication[2]", "sparse/tests/test_coo.py::test_scalar_multiplication[2.5]", "sparse/tests/test_coo.py::test_scalar_multiplication[scalar2]", "sparse/tests/test_coo.py::test_scalar_multiplication[scalar3]", "sparse/tests/test_coo.py::test_scalar_exponentiation", "sparse/tests/test_coo.py::test_create_with_lists_of_tuples", "sparse/tests/test_coo.py::test_sizeof", "sparse/tests/test_coo.py::test_scipy_sparse_interface", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[coo]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[csr]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[dok]", "sparse/tests/test_coo.py::test_scipy_sparse_interaction[csc]", "sparse/tests/test_coo.py::test_op_scipy_sparse[mul]", "sparse/tests/test_coo.py::test_op_scipy_sparse[add]", "sparse/tests/test_coo.py::test_op_scipy_sparse[sub]", "sparse/tests/test_coo.py::test_op_scipy_sparse[gt]", "sparse/tests/test_coo.py::test_op_scipy_sparse[lt]", "sparse/tests/test_coo.py::test_op_scipy_sparse[ne]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[add]", "sparse/tests/test_coo.py::test_op_scipy_sparse_left[sub]", "sparse/tests/test_coo.py::test_cache_csr", "sparse/tests/test_coo.py::test_empty_shape", "sparse/tests/test_coo.py::test_single_dimension", "sparse/tests/test_coo.py::test_raise_dense", "sparse/tests/test_coo.py::test_large_sum", "sparse/tests/test_coo.py::test_add_many_sparse_arrays", "sparse/tests/test_coo.py::test_caching", "sparse/tests/test_coo.py::test_scalar_slicing", "sparse/tests/test_coo.py::test_triul[shape0-0]", "sparse/tests/test_coo.py::test_triul[shape1-1]", "sparse/tests/test_coo.py::test_triul[shape2--1]", "sparse/tests/test_coo.py::test_triul[shape3--2]", "sparse/tests/test_coo.py::test_triul[shape4-1000]", "sparse/tests/test_coo.py::test_empty_reduction", "sparse/tests/test_coo.py::test_random_shape[0.1-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.1-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.1-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.3-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.5-shape2]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape0]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape1]", "sparse/tests/test_coo.py::test_random_shape[0.7-shape2]", "sparse/tests/test_coo.py::test_two_random_unequal", "sparse/tests/test_coo.py::test_two_random_same_seed", "sparse/tests/test_coo.py::test_random_sorted", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape0-<lambda>-bool]", 
"sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.0-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.01-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.1-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape0-<lambda>-bool]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-None-float64]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-rvs-int]", "sparse/tests/test_coo.py::test_random_rvs[0.2-shape1-<lambda>-bool]", "sparse/tests/test_coo.py::test_scalar_shape_construction", "sparse/tests/test_coo.py::test_len", "sparse/tests/test_coo.py::test_density", "sparse/tests/test_coo.py::test_size", "sparse/tests/test_coo.py::test_np_array", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.1-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.3-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.5-shape2]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape0]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape1]", "sparse/tests/test_dok.py::test_random_shape_nnz[0.7-shape2]", "sparse/tests/test_dok.py::test_convert_to_coo", "sparse/tests/test_dok.py::test_convert_from_coo", "sparse/tests/test_dok.py::test_convert_from_numpy", "sparse/tests/test_dok.py::test_convert_to_numpy", "sparse/tests/test_dok.py::test_construct[2-data0]", "sparse/tests/test_dok.py::test_construct[shape1-data1]", "sparse/tests/test_dok.py::test_construct[shape2-data2]", "sparse/tests/test_dok.py::test_getitem[0.1-shape0]", "sparse/tests/test_dok.py::test_getitem[0.1-shape1]", "sparse/tests/test_dok.py::test_getitem[0.1-shape2]", "sparse/tests/test_dok.py::test_getitem[0.3-shape0]", "sparse/tests/test_dok.py::test_getitem[0.3-shape1]", "sparse/tests/test_dok.py::test_getitem[0.3-shape2]", "sparse/tests/test_dok.py::test_getitem[0.5-shape0]", "sparse/tests/test_dok.py::test_getitem[0.5-shape1]", "sparse/tests/test_dok.py::test_getitem[0.5-shape2]", "sparse/tests/test_dok.py::test_getitem[0.7-shape0]", "sparse/tests/test_dok.py::test_getitem[0.7-shape1]", "sparse/tests/test_dok.py::test_getitem[0.7-shape2]", "sparse/tests/test_dok.py::test_setitem[shape2-index2-value2]", 
"sparse/tests/test_dok.py::test_setitem[shape6-index6-value6]", "sparse/tests/test_dok.py::test_setitem[shape7-index7-value7]", "sparse/tests/test_dok.py::test_setitem[shape8-index8-value8]", "sparse/tests/test_dok.py::test_setitem[shape10-index10-value10]", "sparse/tests/test_dok.py::test_setitem[shape12-index12-value12]", "sparse/tests/test_dok.py::test_default_dtype", "sparse/tests/test_dok.py::test_int_dtype", "sparse/tests/test_dok.py::test_float_dtype", "sparse/tests/test_dok.py::test_set_zero" ]
[]
BSD 3-Clause "New" or "Revised" License
2,097
[ "docs/operations.rst", "sparse/coo.py" ]
[ "docs/operations.rst", "sparse/coo.py" ]
OpenNMT__OpenNMT-tf-59
cb66148199d33cb7087d0da297590681514faf6d
2018-01-29 16:43:32
cb66148199d33cb7087d0da297590681514faf6d
diff --git a/README.md b/README.md index a37f315e..7a6dfd11 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ and all of the above can be used simultaneously to train novel and complex archi OpenNMT-tf is also compatible with some of the best TensorFlow features: -* asynchronous distributed training +* replicated and distributed training * monitoring with [TensorBoard](https://www.tensorflow.org/get_started/summaries_and_tensorboard) * inference with [TensorFlow Serving](https://www.tensorflow.org/serving/) and the TensorFlow C++ API. diff --git a/bin/average_checkpoints.py b/bin/average_checkpoints.py index 26faed37..97d3b473 100644 --- a/bin/average_checkpoints.py +++ b/bin/average_checkpoints.py @@ -34,7 +34,7 @@ def main(): help="The model directory containing the checkpoints.") parser.add_argument("--output_dir", required=True, help="The output directory where the averaged checkpoint will be saved.") - parser.add_argument("--max_count", default=8, + parser.add_argument("--max_count", type=int, default=8, help="The maximal number of checkpoints to average.") args = parser.parse_args() diff --git a/bin/main.py b/bin/main.py index 9c128bb0..52792cd1 100644 --- a/bin/main.py +++ b/bin/main.py @@ -74,13 +74,14 @@ def load_model(model_dir, model_file=None): return model -def train(estimator, model, config): +def train(estimator, model, config, num_devices=1): """Runs training. Args: estimator: A `tf.estimator.Estimator`. model: A `opennmt.models.Model`. config: The configuration. + num_devices: The number of devices used for training. """ if "eval" not in config: config["eval"] = {} @@ -132,6 +133,7 @@ def train(estimator, model, config): config["data"]["train_features_file"], labels_file=config["data"]["train_labels_file"], batch_type=train_batch_type, + batch_multiplier=num_devices, bucket_width=config["train"].get("bucket_width", 5), sample_buffer_size=config["train"].get( "sample_buffer_size", default_sample_buffer_size), @@ -241,6 +243,8 @@ def main(): parser.add_argument("--checkpoint_path", default=None, help=("Checkpoint or directory to use for inference or export " "(when a directory is set, the latest checkpoint is used).")) + parser.add_argument("--num_gpus", type=int, default=1, + help="Number of GPUs to use for in-graph replication.") parser.add_argument("--chief_host", default="", help="hostname:port of the chief worker (for distributed training).") parser.add_argument("--worker_hosts", default="", @@ -286,8 +290,11 @@ def main(): tf.logging.info("Creating model directory %s", config["model_dir"]) os.makedirs(config["model_dir"]) - session_config = tf.ConfigProto() - session_config.gpu_options.allow_growth = args.gpu_allow_growth + session_config = tf.ConfigProto( + allow_soft_placement=True, + log_device_placement=False, + gpu_options=tf.GPUOptions( + allow_growth=args.gpu_allow_growth)) run_config = tf.estimator.RunConfig( model_dir=config["model_dir"], @@ -309,7 +316,7 @@ def main(): model = load_model(config["model_dir"], model_file=args.model) estimator = tf.estimator.Estimator( - model, + model.model_fn(num_devices=args.num_gpus), config=run_config, params=config["params"]) @@ -320,7 +327,7 @@ def main(): if args.run == "train": if args.data_dir: config["data"] = _prefix_paths(args.data_dir, config["data"]) - train(estimator, model, config) + train(estimator, model, config, num_devices=args.num_gpus) elif args.run == "infer": if not args.features_file: parser.error("--features_file is required for inference.") diff --git a/docs/package/opennmt.utils.parallel.rst 
b/docs/package/opennmt.utils.parallel.rst new file mode 100644 index 00000000..c85542c7 --- /dev/null +++ b/docs/package/opennmt.utils.parallel.rst @@ -0,0 +1,7 @@ +opennmt\.utils\.parallel module +=============================== + +.. automodule:: opennmt.utils.parallel + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/package/opennmt.utils.rst b/docs/package/opennmt.utils.rst index f82c1cf3..123c73cf 100644 --- a/docs/package/opennmt.utils.rst +++ b/docs/package/opennmt.utils.rst @@ -20,6 +20,7 @@ Submodules opennmt.utils.losses opennmt.utils.misc opennmt.utils.optim + opennmt.utils.parallel opennmt.utils.position opennmt.utils.reducer opennmt.utils.transformer diff --git a/docs/training.md b/docs/training.md index 9b21e822..3558687c 100644 --- a/docs/training.md +++ b/docs/training.md @@ -18,9 +18,23 @@ then open the URL displayed in the shell to monitor and visualize several data, * word embeddings * decoder sampling probability -## Distributed +## Replicated training -OpenNMT-tf supports asynchronous distributed training. The user should set on the command line: +OpenNMT-tf training can make use of multiple GPUs with *in-graph replication*. In this mode, the main section of the graph is replicated over multiple devices and batches are processed in parallel. The resulting graph is equivalent to training with batches `N` times larger, where `N` is the number of used GPUs. + +For example, if your machine has 4 GPUs, simply add the `--num_gpus` option: + +```bash +python -m bin.main train [...] --num_gpus 4 +``` + +Note that evaluation and inference will run on a single device. + +## Distributed training + +OpenNMT-tf also supports asynchronous distributed training with *between-graph replication*. In this mode, each graph replica processes a batch independently, computes the gradients, and asynchronously updates a shared set of parameters. + +To enable distributed training, the user should set on the command line: * a **chief worker** host that runs a training loop and manages checkpoints, summaries, etc. * a list of **worker** hosts that run a training loop @@ -32,6 +46,6 @@ Then a training instance should be started on each host with a selected task, e. CUDA_VISIBLE_DEVICES=0 python -m bin.main train [...] --ps_hosts localhost:2222 --chief_host localhost:2223 --worker_hosts localhost:2224,localhost:2225 --task_type worker --task_index 1 ``` -will start the worker 1 on the current machine and first GPU. +will start the worker 1 on the current machine and first GPU. By setting `CUDA_VISIBLE_DEVICES` correctly, asynchronous distributed training can be run on a single multi-GPU machine. -For more details, see the documentation of [`tf.estimator.train_and_evaluate`](https://www.tensorflow.org/versions/r1.4/api_docs/python/tf/estimator/train_and_evaluate). Also see [tensorflow/ecosystem](https://github.com/tensorflow/ecosystem) to integrate distributed training with open-source frameworks like Docker or Kubernetes. +For more details, see the documentation of [`tf.estimator.train_and_evaluate`](https://www.tensorflow.org/api_docs/python/tf/estimator/train_and_evaluate). Also see [tensorflow/ecosystem](https://github.com/tensorflow/ecosystem) to integrate distributed training with open-source frameworks like Docker or Kubernetes. 
diff --git a/opennmt/decoders/self_attention_decoder.py b/opennmt/decoders/self_attention_decoder.py index c45ecbba..b5d37865 100644 --- a/opennmt/decoders/self_attention_decoder.py +++ b/opennmt/decoders/self_attention_decoder.py @@ -46,26 +46,42 @@ class SelfAttentionDecoder(Decoder): self.relu_dropout = relu_dropout self.position_encoder = position_encoder - def _init_cache(self, memory, memory_sequence_length): + def _build_memory_mask(self, memory, memory_sequence_length=None): + if memory_sequence_length is None: + return None + else: + return transformer.build_sequence_mask( + memory_sequence_length, + num_heads=self.num_heads, + maximum_length=tf.shape(memory)[1], + dtype=memory.dtype) + + def _init_cache(self, memory, memory_sequence_length=None): cache = { "memory": memory, - "memory_sequence_length": memory_sequence_length + "memory_mask": self._build_memory_mask( + memory, memory_sequence_length=memory_sequence_length) } batch_size = tf.shape(memory)[0] depth = memory.get_shape().as_list()[-1] - for l in range(self.num_layers): - keys = tf.zeros([batch_size, 0, depth]) - values = tf.zeros([batch_size, 0, depth]) - - # Ensure shape invariance for tf.while_loop. - keys._shape = tf.TensorShape([None, None, depth]) # pylint: disable=protected-access - values._shape = tf.TensorShape([None, None, depth]) # pylint: disable=protected-access + def _create_placeholder(shape, loop_shape=None): + placeholder = tf.zeros(shape) + if loop_shape is not None: + placeholder._shape = tf.TensorShape(loop_shape) # pylint: disable=protected-access + return placeholder + for l in range(self.num_layers): cache["layer_{}".format(l)] = { - "keys": keys, - "values": values + "self_keys": _create_placeholder( + [batch_size, 0, depth], loop_shape=[None, None, depth]), + "self_values": _create_placeholder( + [batch_size, 0, depth], loop_shape=[None, None, depth]), + "memory_keys": _create_placeholder( + [batch_size, 0, depth], loop_shape=[None, None, depth]), + "memory_values": _create_placeholder( + [batch_size, 0, depth], loop_shape=[None, None, depth]), } return cache @@ -75,13 +91,14 @@ class SelfAttentionDecoder(Decoder): def _impl(ids, step, cache): inputs = embedding_fn(ids[:, -1:]) + inputs *= self.num_units**0.5 inputs = self.position_encoder.apply_one(inputs, step + 1) outputs = self._self_attention_stack( inputs, mode=mode, cache=cache, memory=cache["memory"], - memory_sequence_length=cache["memory_sequence_length"]) + memory_sequence_length=None) outputs = outputs[:, -1:, :] logits = tf.layers.dense(outputs, vocab_size) return logits, cache @@ -109,23 +126,22 @@ class SelfAttentionDecoder(Decoder): num_heads=self.num_heads, maximum_length=tf.shape(inputs)[1], dtype=inputs.dtype) - if memory_sequence_length is not None: - memory_mask = transformer.build_sequence_mask( - memory_sequence_length, - num_heads=self.num_heads, - maximum_length=tf.shape(memory)[1], - dtype=memory.dtype) + if memory is not None: + if cache is not None: + memory_mask = cache["memory_mask"] + elif memory_sequence_length is not None: + memory_mask = self._build_memory_mask( + memory, memory_sequence_length=memory_sequence_length) for l in range(self.num_layers): layer_name = "layer_{}".format(l) layer_cache = cache[layer_name] if cache is not None else None with tf.variable_scope(layer_name): with tf.variable_scope("masked_multi_head"): - inputs_norm = transformer.norm(inputs) encoded = transformer.multi_head_attention( self.num_heads, - inputs_norm, - inputs_norm, + transformer.norm(inputs), + None, mode, 
num_units=self.num_units, mask=decoder_mask, @@ -145,6 +161,7 @@ class SelfAttentionDecoder(Decoder): memory, mode, mask=memory_mask, + cache=layer_cache, dropout=self.attention_dropout) context = transformer.drop_and_add( encoded, @@ -182,6 +199,7 @@ class SelfAttentionDecoder(Decoder): if sampling_probability is not None: raise ValueError("Scheduled sampling is not supported with SelfAttentionDecoder") + inputs *= self.num_units**0.5 if self.position_encoder is not None: inputs = self.position_encoder(inputs, sequence_length=sequence_length) @@ -213,7 +231,7 @@ class SelfAttentionDecoder(Decoder): inputs = tf.expand_dims(start_tokens, 1) lengths = tf.zeros([batch_size], dtype=tf.int32) log_probs = tf.zeros([batch_size]) - cache = self._init_cache(memory, memory_sequence_length) + cache = self._init_cache(memory, memory_sequence_length=memory_sequence_length) symbols_to_logits_fn = self._symbols_to_logits_fn(embedding, vocab_size, mode) @@ -282,7 +300,7 @@ class SelfAttentionDecoder(Decoder): mode=tf.estimator.ModeKeys.PREDICT, memory=None, memory_sequence_length=None): - cache = self._init_cache(memory, memory_sequence_length) + cache = self._init_cache(memory, memory_sequence_length=memory_sequence_length) symbols_to_logits_fn = self._symbols_to_logits_fn(embedding, vocab_size, mode) outputs, log_probs = beam_search( diff --git a/opennmt/encoders/self_attention_encoder.py b/opennmt/encoders/self_attention_encoder.py index a556f366..3f3677fb 100644 --- a/opennmt/encoders/self_attention_encoder.py +++ b/opennmt/encoders/self_attention_encoder.py @@ -46,6 +46,7 @@ class SelfAttentionEncoder(Encoder): self.position_encoder = position_encoder def encode(self, inputs, sequence_length=None, mode=tf.estimator.ModeKeys.TRAIN): + inputs *= self.num_units**0.5 if self.position_encoder is not None: inputs = self.position_encoder(inputs, sequence_length=sequence_length) @@ -64,11 +65,10 @@ class SelfAttentionEncoder(Encoder): for l in range(self.num_layers): with tf.variable_scope("layer_{}".format(l)): with tf.variable_scope("multi_head"): - inputs_norm = transformer.norm(inputs) context = transformer.multi_head_attention( self.num_heads, - inputs_norm, - inputs_norm, + transformer.norm(inputs), + None, mode, num_units=self.num_units, mask=mask, diff --git a/opennmt/models/model.py b/opennmt/models/model.py index 39456578..7d36e9af 100644 --- a/opennmt/models/model.py +++ b/opennmt/models/model.py @@ -10,6 +10,29 @@ import tensorflow as tf from opennmt.utils.optim import optimize from opennmt.utils.misc import add_dict_to_collection, item_or_tuple +from opennmt.utils.parallel import GraphDispatcher + + +def filter_irregular_batches(multiple): + """Transformation that filters out batches based on their size. + + Args: + multiple: The divisor of the batch size. + + Returns: + A ``tf.data.Dataset`` transformation. + """ + def _apply_fn(dataset): + """Transformation function.""" + + def _predicate(*x): + flat = tf.contrib.framework.nest.flatten(x) + batch_size = tf.shape(flat[0])[0] + return tf.equal(tf.mod(batch_size, multiple), 0) + + return dataset.filter(_predicate) + + return _apply_fn @six.add_metaclass(abc.ABCMeta) @@ -19,48 +42,94 @@ class Model(object): def __init__(self, name): self.name = name - def __call__(self, features, labels, params, mode, config): - """Creates the model. + def model_fn(self, num_devices=1): + """Returns the model function. + + Args: + num_devices: The number of devices used for training. 
See Also: ``tf.estimator.Estimator`` 's ``model_fn`` argument for more details about arguments and the returned value. """ - if mode == tf.estimator.ModeKeys.TRAIN: - self._register_word_counters(features, labels) - - with tf.variable_scope(self.name, initializer=self._initializer(params)) as model_scope: - outputs, predictions = self._build(features, labels, params, mode, config) + dispatcher = GraphDispatcher(num_devices) + + def _loss_op(features, labels, params, mode, config): + """Single callable to compute the loss.""" + logits, _ = self._build(features, labels, params, mode, config) + return self._compute_loss(features, labels, logits, params, mode) + + def _normalize_loss(num, den=None): + """Normalizes the loss.""" + if isinstance(num, list): # Sharded mode. + if den is not None: + assert isinstance(den, list) + return tf.add_n(num) / tf.add_n(den) + else: + return tf.reduce_mean(num) + elif den is not None: + return num / den + else: + return num - if predictions is not None: - # Register predictions in a collection so that hooks can easily fetch them. - add_dict_to_collection("predictions", predictions) + def _extract_loss(loss): + """Extracts and summarizes the loss.""" + if not isinstance(loss, tuple): + actual_loss = _normalize_loss(loss) + tboard_loss = actual_loss + else: + actual_loss = _normalize_loss(loss[0], den=loss[1]) + tboard_loss = _normalize_loss(loss[0], den=loss[2]) if len(loss) > 2 else actual_loss + tf.summary.scalar("loss", tboard_loss) + return actual_loss - if mode != tf.estimator.ModeKeys.PREDICT: - with tf.variable_scope(model_scope): - loss = self._compute_loss(features, labels, outputs, params, mode) + def _model_fn(features, labels, params, mode, config): + """model_fn implementation.""" + if mode == tf.estimator.ModeKeys.TRAIN: + self._register_word_counters(features, labels) - if isinstance(loss, tuple): - loss, display_loss = loss - else: - display_loss = loss + features_shards = dispatcher.shard(features) + labels_shards = dispatcher.shard(labels) - tf.summary.scalar("loss", display_loss) + with tf.variable_scope(self.name, initializer=self._initializer(params)): + losses_shards = dispatcher( + _loss_op, features_shards, labels_shards, params, mode, config) - if mode == tf.estimator.ModeKeys.TRAIN: + loss = _extract_loss(losses_shards) train_op = optimize(loss, params) return tf.estimator.EstimatorSpec( - mode, loss=loss, train_op=train_op) - else: + mode, + loss=loss, + train_op=train_op) + elif mode == tf.estimator.ModeKeys.EVAL: + with tf.variable_scope(self.name): + logits, predictions = self._build(features, labels, params, mode, config) + loss = self._compute_loss(features, labels, logits, params, mode) + + loss = _extract_loss(loss) eval_metric_ops = self._compute_metrics(features, labels, predictions) + if predictions is not None: + # Register predictions in a collection so that hooks can easily fetch them. 
+ add_dict_to_collection("predictions", predictions) + return tf.estimator.EstimatorSpec( - mode, loss=loss, eval_metric_ops=eval_metric_ops) - else: - export_outputs = {} - export_outputs[tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = \ - tf.estimator.export.PredictOutput(predictions) - return tf.estimator.EstimatorSpec( - mode, predictions=predictions, export_outputs=export_outputs) + mode, + loss=loss, + eval_metric_ops=eval_metric_ops) + elif mode == tf.estimator.ModeKeys.PREDICT: + with tf.variable_scope(self.name): + _, predictions = self._build(features, labels, params, mode, config) + + export_outputs = {} + export_outputs[tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = ( + tf.estimator.export.PredictOutput(predictions)) + + return tf.estimator.EstimatorSpec( + mode, + predictions=predictions, + export_outputs=export_outputs) + + return _model_fn def _initializer(self, params): """Returns the global initializer for this model. @@ -268,6 +337,7 @@ class Model(object): features_file, labels_file=None, batch_type="examples", + batch_multiplier=1, bucket_width=None, sample_buffer_size=None, maximum_features_length=None, @@ -306,6 +376,7 @@ class Model(object): labels, maximum_features_length=maximum_features_length, maximum_labels_length=maximum_labels_length)) + batch_size = batch_size * batch_multiplier if mode == tf.estimator.ModeKeys.TRAIN and bucket_width is not None: # Form batches with sequences of similar lengths to improve efficiency. @@ -333,7 +404,10 @@ class Model(object): if bucket_width > 1: key += 1 # For bucket_width == 1, key 0 is unassigned. size = batch_size // (key * bucket_width) - return tf.to_int64(tf.maximum(size, 1)) + if batch_multiplier > 1: + # Make the window size a multiple of batch_multiplier. + size = size + batch_multiplier - size % batch_multiplier + return tf.to_int64(tf.maximum(size, batch_multiplier)) if batch_type == "examples": batchify_fn = tf.contrib.data.group_by_window( @@ -352,6 +426,8 @@ class Model(object): padded_shapes=padded_shapes) if mode == tf.estimator.ModeKeys.TRAIN: + if batch_multiplier > 1: + dataset = dataset.apply(filter_irregular_batches(batch_multiplier)) dataset = dataset.repeat() iterator = dataset.make_initializable_iterator() @@ -370,6 +446,7 @@ class Model(object): features_file, labels_file=None, batch_type="examples", + batch_multiplier=1, bucket_width=None, sample_buffer_size=None, maximum_features_length=None, @@ -387,6 +464,8 @@ class Model(object): labels_file: The file containing output labels. batch_type: The training batching strategy to use: can be "examples" or "tokens". + batch_multiplier: The batch size multiplier to prepare splitting across + replicated graph parts. bucket_width: The width of the length buckets to select batch candidates from. ``None`` to not constrain batch formation. sample_buffer_size: The number of elements from which to sample. 
@@ -417,6 +496,7 @@ class Model(object): features_file, labels_file=labels_file, batch_type=batch_type, + batch_multiplier=batch_multiplier, bucket_width=bucket_width, sample_buffer_size=sample_buffer_size, maximum_features_length=maximum_features_length, diff --git a/opennmt/models/sequence_tagger.py b/opennmt/models/sequence_tagger.py index 4a3e7a75..20155bd5 100644 --- a/opennmt/models/sequence_tagger.py +++ b/opennmt/models/sequence_tagger.py @@ -134,7 +134,9 @@ class SequenceTagger(Model): outputs, tf.cast(labels["tags_id"], tf.int32), length) - return tf.reduce_mean(-log_likelihood) + loss = tf.reduce_sum(-log_likelihood) + loss_normalizer = tf.shape(log_likelihood)[0] + return loss, loss_normalizer else: return cross_entropy_sequence_loss( outputs, diff --git a/opennmt/utils/losses.py b/opennmt/utils/losses.py index a44c02df..5d7efbdd 100644 --- a/opennmt/utils/losses.py +++ b/opennmt/utils/losses.py @@ -26,7 +26,7 @@ def cross_entropy_sequence_loss(logits, label_smoothing=0.0, average_in_time=False, mode=tf.estimator.ModeKeys.TRAIN): - """Computes the reduced cross entropy loss of sequences. + """Computes the cross entropy loss of sequences. Args: logits: The unscaled probabilities. @@ -37,7 +37,7 @@ def cross_entropy_sequence_loss(logits, mode: A ``tf.estimator.ModeKeys`` mode. Returns: - A tuple with the loss and the token level loss. + A tuple (cumulated loss, loss normalizer, token-level normalizer). """ batch_size = tf.shape(logits)[0] max_time = tf.shape(logits)[1] @@ -46,18 +46,20 @@ def cross_entropy_sequence_loss(logits, weights = tf.sequence_mask( sequence_length, maxlen=max_time, dtype=cross_entropy.dtype) loss = tf.reduce_sum(cross_entropy * weights) - loss_per_token = loss / tf.reduce_sum(weights) + loss_token_normalizer = tf.reduce_sum(weights) if average_in_time or mode != tf.estimator.ModeKeys.TRAIN: - return loss_per_token, loss_per_token + loss_normalizer = loss_token_normalizer else: - return loss / tf.cast(batch_size, loss.dtype), loss_per_token + loss_normalizer = tf.cast(batch_size, loss.dtype) + + return loss, loss_normalizer, loss_token_normalizer def cross_entropy_loss(logits, labels, label_smoothing=0.0, mode=tf.estimator.ModeKeys.TRAIN): - """Computes the reduced cross entropy loss. + """Computes the cross entropy loss. Args: logits: The unscaled probabilities. @@ -66,8 +68,9 @@ def cross_entropy_loss(logits, mode: A ``tf.estimator.ModeKeys`` mode. Returns: - The loss. + The cumulated loss and the loss normalizer. """ cross_entropy = _softmax_cross_entropy(logits, labels, label_smoothing, mode) - loss = tf.reduce_mean(cross_entropy) - return loss + loss = tf.reduce_sum(cross_entropy) + loss_normalizer = tf.shape(cross_entropy)[0] + return loss, loss_normalizer diff --git a/opennmt/utils/optim.py b/opennmt/utils/optim.py index 75fe515d..d41f9632 100644 --- a/opennmt/utils/optim.py +++ b/opennmt/utils/optim.py @@ -117,4 +117,5 @@ def optimize(loss, params): summaries=[ "learning_rate", "global_gradient_norm", - ]) + ], + colocate_gradients_with_ops=True) diff --git a/opennmt/utils/parallel.py b/opennmt/utils/parallel.py new file mode 100644 index 00000000..97db3cd5 --- /dev/null +++ b/opennmt/utils/parallel.py @@ -0,0 +1,175 @@ +"""Utilities to run execution in parallel.""" + +import six + +import tensorflow as tf + +from tensorflow.python.client import device_lib + + +class GraphDispatcher(object): + """Helper class to replicate graph parts on multiple devices and dispatch + sharded batches. 
+ """ + + def __init__(self, num_devices): + """Initializes the dispatcher. + + Args: + num_devices: The number of devices to dispatch on. + + Raises: + ValueError: if the number of visible devices is lower than + :obj:`num_devices`. + """ + devices = [x.name for x in device_lib.list_local_devices() if x.device_type == "GPU"] + + if not devices: + self._n = 1 + self._devices = [None] + elif len(devices) < num_devices: + raise ValueError("Only %d devices are visible but %d were requested" + % (len(devices), num_replicas)) + else: + self._n = num_devices + self._devices = devices[:self._n] + + def shard(self, data): + """Shards a structure of ``tf.Tensor`` for dispatching. + + Args: + data: A ``tf.Tensor`` of dictionary of ``tf.Tensor``. + + Returns: + A list of the same ``tf.Tensor`` structure. + """ + return split_batch(data, self._n) + + def repeat(self, data): + """Ensures that the object is dispatchable list. + + Args: + data: The object to convert. + + Returns: + :obj:`data` if it is valid list or a list where :obj:`data` is replicated. + + Raises: + ValueError: if :obj:`data` is a non dispatchable list. + """ + if isinstance(data, list): + if len(data) != self._n: + raise ValueError("List arguments must contain %d elements, saw %d instead" + % (self._n, len(data))) + return data + else: + return [data] * self._n + + def _parallel_args(self, *args, **kwargs): + """Makes each argument dispatchable.""" + if args: + parallel_args = [self.repeat(arg) for arg in args] + parallel_args = [list(arg) for arg in zip(*parallel_args)] + else: + parallel_args = [[] for _ in range(self._n)] + parallel_kwargs = [{} for _ in range(self._n)] + for k, v in six.iteritems(kwargs): + values = self.repeat(v) + for i in range(self._n): + parallel_kwargs[i][k] = values[i] + return parallel_args, parallel_kwargs + + def __call__(self, fun, *args, **kwargs): + """Dispatches :obj:`fun` calls accross devices. + + * Each argument must either not be a list or a list with length the + number of devices used for dispatching. + * Variables are copied in a daisy chain fashion between devices + (credits to Tensor2Tensor). + + Args: + fun: A callable. + *args: The callable arguments. + **kwargs: The callable keyword arguments. + + Returns: + The sharded outputs of :obj:`fun`. + """ + funs = self.repeat(fun) + args, kwargs = self._parallel_args(*args, **kwargs) + + outputs = [] + cache = {} + tensor_to_var = {} + + for i, device in enumerate(self._devices): + + # pylint: disable=cell-var-from-loop + def _daisy_chain_getter(getter, name, *args, **kwargs): + """Get a variable and cache in a daisy chain.""" + # Copyright 2017 The Tensor2Tensor Authors. + # Licensed under the Apache License, Version 2.0 + device_var_key = (device, name) + if device_var_key in cache: + # if we have the variable on the correct device, return it. 
+ return cache[device_var_key] + if name in cache: + # if we have it on a different device, copy it from the last device + last_device_v = cache[name] + var = tensor_to_var[last_device_v] + v = tf.identity(last_device_v) + else: + var = getter(name, *args, **kwargs) + v = tf.identity(var._ref()) # pylint: disable=protected-access + + # keep track of the original variable + tensor_to_var[v] = var + v.read_value = lambda: tf.identity(v) + v.assign_sub = var.assign_sub + # update the cache + cache[name] = v + cache[device_var_key] = v + return v + + with tf.name_scope("parallel_{}".format(i)): + with tf.variable_scope( + tf.get_variable_scope(), + reuse=True if i > 0 else None, + custom_getter=_daisy_chain_getter): + if device is None: + outputs.append(funs[i](*args[i], **kwargs[i])) + else: + with tf.device(device): + outputs.append(funs[i](*args[i], **kwargs[i])) + + # If the function returned a tuple, also return a tuple of sharded results. + if isinstance(outputs[0], tuple): + outputs = tuple(list(output) for output in zip(*outputs)) + + return outputs + + +def split_batch(data, num_shards): + """Split data into shards.""" + + def _split_dictionary(dictionary): + """Split a dictionary into shards.""" + shards = [{} for _ in range(num_shards)] + for name, tensor in six.iteritems(dictionary): + if isinstance(tensor, tf.SparseTensor): + for i, shard in enumerate(tf.sparse_split(sp_input=tensor, num_split=num_shards, axis=0)): + shards[i][name] = shard + else: + for i, shard in enumerate(tf.split(tensor, num_shards)): + shards[i][name] = shard + return shards + + with tf.name_scope("split_inputs"): + if data is None: + data_shards = None + elif isinstance(data, dict): + data_shards = _split_dictionary(data) + else: + data_shards = tf.split(data, num_shards) + + return data_shards diff --git a/opennmt/utils/transformer.py b/opennmt/utils/transformer.py index 3fe296e9..961ec3ee 100644 --- a/opennmt/utils/transformer.py +++ b/opennmt/utils/transformer.py @@ -76,6 +76,20 @@ def build_future_mask(sequence_length, mask = tf.reshape(mask, [-1, num_heads, tf.shape(mask)[1], tf.shape(mask)[2]]) return mask +def fused_projection(inputs, num_units, num_outputs=1): + """Projects the same input into multiple output spaces. + + Args: + inputs: The inputs to project. + num_units: The number of output units of each space. + num_outputs: The number of output spaces. + + Returns: + :obj:`num_outputs` ``tf.Tensor`` of depth :obj:`num_units`. + """ + return tf.split( + tf.layers.conv1d(inputs, num_units * num_outputs, 1), num_outputs, axis=2) + def split_heads(inputs, num_heads): """Splits a tensor in depth. @@ -103,14 +117,13 @@ def combine_heads(inputs): inputs = tf.reshape(inputs, [tf.shape(inputs)[0], tf.shape(inputs)[1], -1]) return inputs -def scaled_dot_attention(queries, - keys, - values, - mode, - mask=None, - dropout=0.0): - """Computes the scaled dot-product attention as described - in https://arxiv.org/abs/1706.03762. +def dot_product_attention(queries, + keys, + values, + mode, + mask=None, + dropout=0.0): + """Computes the dot product attention. Args: queries: The sequence of queries. A tensor of shape :math:`[B, T_1, ...]`. @@ -124,9 +137,8 @@ def scaled_dot_attention(queries, Returns: A tuple ``(context vector, attention vector)``. """ - # Scaled dot-product between queries and keys. + # Dot product between queries and keys. 
dot = tf.matmul(queries, keys, transpose_b=True) - dot = tf.div(dot, tf.sqrt(tf.cast(tf.shape(keys)[-1], dot.dtype))) if mask is not None: dot = dot * mask + ((1.0 - mask) * dot.dtype.min) @@ -159,6 +171,7 @@ def multi_head_attention(num_heads, num_heads: The number of attention heads. queries: The sequence of queries. A tensor of shape :math:`[B, T_1, ...]`. memory: The sequence to attend. A tensor of shape :math:`[B, T_2, ...]`. + If ``None``, computes self-attention. mode: A ``tf.estimator.ModeKeys`` mode. num_units: The number of hidden units. If not set, it is set to the input dimension. @@ -175,21 +188,34 @@ def multi_head_attention(num_heads, raise ValueError("Multi head attention requires that num_units is a" " multiple of {}".format(num_heads)) - queries = tf.layers.conv1d(queries, num_units, 1) - memory = tf.layers.conv1d(memory, num_units * 2, 1) - keys, values = tf.split(memory, [num_units, num_units], axis=2) - - if cache is not None: - keys = tf.concat([cache["keys"], keys], axis=1) - values = tf.concat([cache["values"], values], axis=1) - cache["keys"] = keys - cache["values"] = values + if memory is None: + queries, keys, values = fused_projection(queries, num_units, num_outputs=3) + + if cache is not None: + keys = tf.concat([cache["self_keys"], keys], axis=1) + values = tf.concat([cache["self_values"], values], axis=1) + cache["self_keys"] = keys + cache["self_values"] = values + else: + queries = tf.layers.conv1d(queries, num_units, 1) + + if cache is not None: + keys, values = tf.cond( + tf.equal(tf.shape(cache["memory_keys"])[1], 0), + true_fn=lambda: fused_projection(memory, num_units, num_outputs=2), + false_fn=lambda: [cache["memory_keys"], cache["memory_values"]]) + cache["memory_keys"] = keys + cache["memory_values"] = values + else: + keys, values = fused_projection(memory, num_units, num_outputs=2) queries = split_heads(queries, num_heads) keys = split_heads(keys, num_heads) values = split_heads(values, num_heads) - heads, _ = scaled_dot_attention( + queries *= (num_units // num_heads)**-0.5 + + heads, _ = dot_product_attention( queries, keys, values,
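The transformer.py hunk above folds the `1/sqrt(depth)` factor into the queries (`queries *= (num_units // num_heads)**-0.5`) and renames `scaled_dot_attention` to `dot_product_attention`. As a sanity check, here is a minimal NumPy sketch, separate from the OpenNMT-tf code and with made-up shapes, showing that the two formulations produce identical attention scores:

```python
import numpy as np

# Pre-scaling the queries by depth**-0.5 is algebraically the same as
# dividing the query/key dot product by sqrt(depth).
rng = np.random.default_rng(0)
depth = 64
queries = rng.standard_normal((2, 5, depth))  # [batch, query_len, depth]
keys = rng.standard_normal((2, 7, depth))     # [batch, key_len, depth]

# Old formulation: scale the score matrix.
scaled_scores = np.matmul(queries, keys.transpose(0, 2, 1)) / np.sqrt(depth)

# New formulation: pre-scale the queries, then take a plain dot product.
prescaled_scores = np.matmul(queries * depth**-0.5, keys.transpose(0, 2, 1))

assert np.allclose(scaled_scores, prescaled_scores)
```

One plausible motivation, not stated in the patch itself, is that pre-scaling touches the smaller `[batch, query_len, depth]` tensor instead of the `[batch, query_len, key_len]` score matrix.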
Multi-GPU synchronous training

Distributed training is currently asynchronous. It should also support a local multi-GPU synchronous mode.
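The patch above implements this synchronous mode by sharding each batch across replicas (`GraphDispatcher.shard`, backed by `split_batch`) and averaging the sharded losses. A rough stand-alone sketch of the sharding idea, in plain NumPy rather than the `tf.split`-based original (the feature names and sizes are invented for illustration):

```python
import numpy as np

def split_batch(features, num_shards):
    # Toy stand-in for the tf.split-based sharding: cut every array in a
    # feature dict into num_shards pieces along the batch dimension.
    shards = [{} for _ in range(num_shards)]
    for name, array in features.items():
        for i, piece in enumerate(np.split(array, num_shards, axis=0)):
            shards[i][name] = piece
    return shards

# A batch of size 8 over 2 devices: each replica sees batch size 4. This
# is why the training batch size is multiplied by the device count and
# batches with a non-divisible size are filtered out upstream.
batch = {'ids': np.arange(16).reshape(8, 2), 'length': np.full(8, 2)}
for shard in split_batch(batch, num_shards=2):
    print({name: piece.shape for name, piece in shard.items()})
```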
OpenNMT/OpenNMT-tf
diff --git a/opennmt/tests/transformer_test.py b/opennmt/tests/transformer_test.py index 079b35df..4f8182a4 100644 --- a/opennmt/tests/transformer_test.py +++ b/opennmt/tests/transformer_test.py @@ -121,7 +121,7 @@ class TransformerTest(tf.test.TestCase): keys = values mask = transformer.build_sequence_mask(values_length, num_heads=num_heads) - context, attn = transformer.scaled_dot_attention( + context, attn = transformer.dot_product_attention( queries, keys, values, @@ -152,7 +152,7 @@ class TransformerTest(tf.test.TestCase): shape=(None, num_heads, None, depth)) mask = transformer.build_future_mask(queries_length, num_heads=num_heads) - context, attn = transformer.scaled_dot_attention( + context, attn = transformer.dot_product_attention( queries, queries, queries,
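A detail worth noting in the losses.py change of the patch above: returning the cumulated loss together with its normalizers, instead of a pre-reduced mean, lets `_normalize_loss` compute `tf.add_n(num) / tf.add_n(den)` across shards. That global weighted mean differs from averaging per-shard means whenever shards carry different token counts. A small NumPy illustration with invented numbers:

```python
import numpy as np

# Two shards with unequal token counts (possible after length bucketing).
shard_losses = [12.0, 3.0]  # per-shard summed cross entropy
shard_tokens = [6.0, 1.0]   # per-shard loss normalizers

# Global weighted mean, as in _normalize_loss: sum(num) / sum(den).
global_mean = sum(shard_losses) / sum(shard_tokens)  # 15 / 7, about 2.143

# Naive alternative: mean of the per-shard means.
mean_of_means = np.mean([l / t for l, t in zip(shard_losses, shard_tokens)])  # 2.5

print(global_mean, mean_of_means)  # the two disagree for unequal shards
```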
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 12 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[TensorFlow]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "nose2", "pytest" ], "pre_install": null, "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work bleach==1.5.0 certifi==2021.5.30 dataclasses==0.8 enum34==1.1.10 html5lib==0.9999999 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Markdown==3.3.7 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work nose2==0.13.0 numpy==1.19.5 -e git+https://github.com/OpenNMT/OpenNMT-tf.git@cb66148199d33cb7087d0da297590681514faf6d#egg=OpenNMT_tf packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work protobuf==3.19.6 py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 PyYAML==6.0.1 six==1.17.0 tensorflow==1.4.0 tensorflow-tensorboard==0.4.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work Werkzeug==2.0.3 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: OpenNMT-tf channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - bleach==1.5.0 - dataclasses==0.8 - enum34==1.1.10 - html5lib==0.9999999 - markdown==3.3.7 - nose2==0.13.0 - numpy==1.19.5 - protobuf==3.19.6 - pyyaml==6.0.1 - six==1.17.0 - tensorflow==1.4.0 - tensorflow-tensorboard==0.4.0 - werkzeug==2.0.3 prefix: /opt/conda/envs/OpenNMT-tf
[ "opennmt/tests/transformer_test.py::TransformerTest::testMaskedScaledDotAttention", "opennmt/tests/transformer_test.py::TransformerTest::testScaledDotAttention" ]
[]
[ "opennmt/tests/transformer_test.py::TransformerTest::testBuildFutureMask", "opennmt/tests/transformer_test.py::TransformerTest::testBuildFutureMaskWithMaxLen", "opennmt/tests/transformer_test.py::TransformerTest::testBuildSequenceMask", "opennmt/tests/transformer_test.py::TransformerTest::testBuildSequenceMaskWithMaxLen", "opennmt/tests/transformer_test.py::TransformerTest::testTileSequenceLength", "opennmt/tests/transformer_test.py::TransformerTest::test_session" ]
[]
MIT License
2,098
[ "docs/package/opennmt.utils.rst", "opennmt/models/sequence_tagger.py", "opennmt/encoders/self_attention_encoder.py", "bin/main.py", "opennmt/models/model.py", "opennmt/utils/losses.py", "docs/training.md", "opennmt/utils/transformer.py", "README.md", "docs/package/opennmt.utils.parallel.rst", "opennmt/decoders/self_attention_decoder.py", "opennmt/utils/optim.py", "opennmt/utils/parallel.py", "bin/average_checkpoints.py" ]
[ "docs/package/opennmt.utils.rst", "opennmt/models/sequence_tagger.py", "opennmt/encoders/self_attention_encoder.py", "bin/main.py", "opennmt/models/model.py", "opennmt/utils/losses.py", "docs/training.md", "opennmt/utils/transformer.py", "README.md", "docs/package/opennmt.utils.parallel.rst", "opennmt/decoders/self_attention_decoder.py", "opennmt/utils/optim.py", "opennmt/utils/parallel.py", "bin/average_checkpoints.py" ]
CartoDB__cartoframes-368
6977ce422f9c1ac4e45f70d10438c36b34f52bd9
2018-01-29 19:21:37
3f73c6e380983a820e7703bebea0b752618aa722
diff --git a/cartoframes/layer.py b/cartoframes/layer.py index f3173cb8..6ddaa641 100644 --- a/cartoframes/layer.py +++ b/cartoframes/layer.py @@ -408,7 +408,7 @@ class QueryLayer(AbstractLayer): ]).format(col=self.color, query=self.orig_query) agg_func = '\'CDB_Math_Mode(cf_value_{})\''.format(self.color) self.scheme = { - 'bins': ','.join(str(i) for i in range(1, 11)), + 'bins': [str(i) for i in range(1, 11)], 'name': (self.scheme.get('name') if self.scheme else 'Bold'), 'bin_method': '', }
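The fix is a single line: `scheme['bins']` becomes a list of strings instead of one comma-joined string. The snippet below only contrasts the two values; the downstream templating that turns `scheme` into CartoCSS is not shown in this record, so the connection to the stray conditional is inferred from the issue:

```python
# Pre-fix value: a single comma-joined string.
bins_as_string = ','.join(str(i) for i in range(1, 11))
# Post-fix value: a list with one entry per bin.
bins_as_list = [str(i) for i in range(1, 11)]

print(bins_as_string)  # 1,2,3,4,5,6,7,8,9,10
print(bins_as_list)    # ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10']

# Code that iterates over `bins` to build one rule per bin sees a single
# element in the first case and ten in the second.
print(len(bins_as_list))  # 10
```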
Extra conditional shows up in CartoCSS for category torque maps. It may be happening in others...
CartoDB/cartoframes
diff --git a/test/test_layer.py b/test/test_layer.py index 806afe2d..e24117b4 100644 --- a/test/test_layer.py +++ b/test/test_layer.py @@ -192,7 +192,7 @@ class TestQueryLayer(unittest.TestCase): ql._setup([BaseMap(), ql], 1) self.assertDictEqual(ql.scheme, dict(name='Antique', bin_method='', - bins=','.join(str(i) for i in range(1, 11)))) + bins=[str(i) for i in range(1, 11)])) # expect category maps query with open('qlayerquery.txt', 'w') as f: f.write(ql.query)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 2, "test_score": 1 }, "num_modified_files": 1 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 appdirs==1.4.4 attrs==22.2.0 Babel==2.11.0 backcall==0.2.0 carto==1.11.3 -e git+https://github.com/CartoDB/cartoframes.git@6977ce422f9c1ac4e45f70d10438c36b34f52bd9#egg=cartoframes certifi==2021.5.30 charset-normalizer==2.0.12 decorator==5.1.1 docutils==0.18.1 future==1.0.0 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 ipython==7.16.3 ipython-genutils==0.2.0 jedi==0.17.2 Jinja2==3.0.3 MarkupSafe==2.0.1 numpy==1.19.5 packaging==21.3 pandas==1.1.5 parso==0.7.1 pexpect==4.9.0 pickleshare==0.7.5 pluggy==1.0.0 pockets==0.9.1 prompt-toolkit==3.0.36 ptyprocess==0.7.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pyrestcli==0.6.11 pytest==7.0.1 python-dateutil==2.9.0.post0 pytz==2025.2 requests==2.27.1 Shapely==1.8.5.post1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-napoleon==0.7 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 tqdm==4.64.1 traitlets==4.3.3 typing_extensions==4.1.1 urllib3==1.26.20 wcwidth==0.2.13 webcolors==1.7 zipp==3.6.0
name: cartoframes channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - appdirs==1.4.4 - attrs==22.2.0 - babel==2.11.0 - backcall==0.2.0 - carto==1.11.3 - charset-normalizer==2.0.12 - decorator==5.1.1 - docutils==0.18.1 - future==1.0.0 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - ipython==7.16.3 - ipython-genutils==0.2.0 - jedi==0.17.2 - jinja2==3.0.3 - markupsafe==2.0.1 - numpy==1.19.5 - packaging==21.3 - pandas==1.1.5 - parso==0.7.1 - pexpect==4.9.0 - pickleshare==0.7.5 - pluggy==1.0.0 - pockets==0.9.1 - prompt-toolkit==3.0.36 - ptyprocess==0.7.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pyrestcli==0.6.11 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - pytz==2025.2 - requests==2.27.1 - shapely==1.8.5.post1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-napoleon==0.7 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - tqdm==4.64.1 - traitlets==4.3.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - wcwidth==0.2.13 - webcolors==1.7 - zipp==3.6.0 prefix: /opt/conda/envs/cartoframes
[ "test/test_layer.py::TestQueryLayer::test_querylayer_time_category" ]
[]
[ "test/test_layer.py::TestAbstractLayer::test_class", "test/test_layer.py::TestLayer::test_layer_setup_dataframe", "test/test_layer.py::TestBaseMap::test_basemap_invalid", "test/test_layer.py::TestBaseMap::test_basemap_source", "test/test_layer.py::TestQueryLayer::test_querylayer_colors", "test/test_layer.py::TestQueryLayer::test_querylayer_get_cartocss", "test/test_layer.py::TestQueryLayer::test_querylayer_size_and_time", "test/test_layer.py::TestQueryLayer::test_querylayer_size_column_key", "test/test_layer.py::TestQueryLayer::test_querylayer_size_default", "test/test_layer.py::TestQueryLayer::test_querylayer_size_defaults", "test/test_layer.py::TestQueryLayer::test_querylayer_time_default", "test/test_layer.py::TestQueryLayer::test_querylayer_time_errors", "test/test_layer.py::TestQueryLayer::test_querylayer_time_numeric" ]
[]
BSD 3-Clause "New" or "Revised" License
2,099
[ "cartoframes/layer.py" ]
[ "cartoframes/layer.py" ]
google__mobly-392
a9f2bda0ac5a6a1b11c794522daa8fd97ca0d0b0
2018-01-29 23:58:55
95286a01a566e056d44acfa9577a45bc7f37f51d
xpconanfan: Review status: 0 of 4 files reviewed at latest revision, all discussions resolved, some commit checks failed. --- *[mobly/controllers/android_device_lib/adb.py, line 180 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBjvbMANdfbOTNM2-U:-LCBjvbMANdfbOTNM2-V:b7l1db) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L180)):* > ```Python > raise AdbError(cmd=args, stdout=out, stderr=err, ret_code=ret) > > def _stream_cmd(self, args, shell, level, handler): > ``` `_execute_and_process_stdout` --- *[mobly/controllers/android_device_lib/adb.py, line 203 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBfPD94fYurH-HZEyD:-LCBfPD94fYurH-HZEyE:bpp6stj) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L203)):* > ```Python > shell=shell, > bufsize=1) > out_buffer = [] > ``` not needed --- *[mobly/controllers/android_device_lib/adb.py, line 209 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBiHuR8rnHRDx-bEdS:-LCBiHuR8rnHRDx-bEdT:b-l6mwxj) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L209)):* > ```Python > if line: > line = line.decode('utf-8') > logging.log(level, line.rstrip()) > ``` remove --- *[mobly/controllers/android_device_lib/adb.py, line 211 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBiJcUEBtCnoGwoaZG:-LCBiJcUEBtCnoGwoaZH:b3jasbe) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L211)):* > ```Python > logging.log(level, line.rstrip()) > out_buffer.append(line) > if handler: > ``` handler is required --- *[mobly/controllers/android_device_lib/adb.py, line 212 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBiOS_24Z1patPnBHE:-LCBiOSa35mms-i-UJpX:b-oend6p) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L212)):* > ```Python > out_buffer.append(line) > if handler: > handler(line) > ``` what if the handler throw an error? --- *[mobly/controllers/android_device_lib/adb.py, line 215 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBjs9dDT6Wm_UZyWS9:-LCBjs9eAmZg3ACbCv61:bpp6stj) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L215)):* > ```Python > else: > (eof_out, eof_err) = proc.communicate() > out_buffer.append(eof_out) > ``` not needed --- *[mobly/controllers/android_device_lib/adb.py, line 220 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBj6MG7ZrCpiqLI7Tl:-LCBj6MG7ZrCpiqLI7Tm:b-uchag3) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L220)):* > ```Python > ret = proc.returncode > if ret == 0: > return out > ``` return stderr if there is any? --- *[mobly/controllers/android_device_lib/adb.py, line 222 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBj8bhDhNpC3DuUuHC:-LCBj8bhDhNpC3DuUuHD:bl10liu) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L222)):* > ```Python > return out > else: > raise AdbError(cmd=args, stdout=out, stderr=err, ret_code=ret) > ``` stdout doesn't need to be in this error right? 
--- winterfroststrom: Need to add unit tests/test some process failure scenarios. --- Review status: 0 of 4 files reviewed at latest revision, 8 unresolved discussions. --- *[mobly/controllers/android_device_lib/adb.py, line 180 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBjvbMANdfbOTNM2-U:-LCG8a7C60ls5L4qKTud:b-896fix) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L180)):* (Previously, xpconanfan (Ang Li) wrote… `_execute_and_process_stdout`) Done. --- *[mobly/controllers/android_device_lib/adb.py, line 203 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBfPD94fYurH-HZEyD:-LCG8bMF--aVh_9tUFq_:b-896fix) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L203)):* (Previously, xpconanfan (Ang Li) wrote… not needed) Done. --- *[mobly/controllers/android_device_lib/adb.py, line 209 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBiHuR8rnHRDx-bEdS:-LCG8dDm9nm_1gm5GdqO:b-896fix) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L209)):* (Previously, xpconanfan (Ang Li) wrote… remove) Done. --- *[mobly/controllers/android_device_lib/adb.py, line 211 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBiJcUEBtCnoGwoaZG:-LCG8eD9Eyqaj__UoZFU:b-896fix) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L211)):* (Previously, xpconanfan (Ang Li) wrote… handler is required) Done. --- *[mobly/controllers/android_device_lib/adb.py, line 212 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBiOS_24Z1patPnBHE:-LCG8jHS41rApna13Htx:b-896fix) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L212)):* (Previously, xpconanfan (Ang Li) wrote… what if the handler throw an error?) Done. --- *[mobly/controllers/android_device_lib/adb.py, line 215 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBjs9dDT6Wm_UZyWS9:-LCG8kKD2Hmn-IbJqBvA:b-896fix) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L215)):* (Previously, xpconanfan (Ang Li) wrote… not needed) Done. --- *[mobly/controllers/android_device_lib/adb.py, line 220 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBj6MG7ZrCpiqLI7Tl:-LCG8lc_61VKdtc3WOBK:b-896fix) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L220)):* (Previously, xpconanfan (Ang Li) wrote… return stderr if there is any?) Done. 
--- *[mobly/controllers/android_device_lib/adb.py, line 222 at r2](https://reviewable.io/reviews/google/mobly/392#-LCBj8bhDhNpC3DuUuHC:-LCG8nL5-zQtHHr02Nr6:b-896fix) ([raw file](https://github.com/google/mobly/blob/0497c3b77272c047ac5aee2bef53ef4ccca3586c/mobly/controllers/android_device_lib/adb.py#L222)):* (Previously, xpconanfan (Ang Li) wrote… stdout doesn't need to be in this error right?) Done. --- winterfroststrom: Added unit tests and tested when process dies randomly. --- Review status: 0 of 4 files reviewed at latest revision, 8 unresolved discussions. --- xpconanfan: Review status: 0 of 4 files reviewed at latest revision, all discussions resolved. --- *[mobly/controllers/android_device_lib/adb.py, line 206 at r3](https://reviewable.io/reviews/google/mobly/392#-LC_oUj11zZI2SW6BFxY:-LC_oUj11zZI2SW6BFxZ:bfcm7qw) ([raw file](https://github.com/google/mobly/blob/f4f42130704c02f0cb40b35670586bca7d6abd87/mobly/controllers/android_device_lib/adb.py#L206)):* > ```Python > shell=shell, > bufsize=1) > err = '' > ``` not needed? --- *[mobly/controllers/android_device_lib/adb.py, line 221 at r3](https://reviewable.io/reviews/google/mobly/392#-LC_khCQERM8OBURZEIG:-LC_khCQERM8OBURZEIH:b-bj5aq6) ([raw file](https://github.com/google/mobly/blob/f4f42130704c02f0cb40b35670586bca7d6abd87/mobly/controllers/android_device_lib/adb.py#L221)):* > ``` > [elided] > ``` What is elided?... --- winterfroststrom: Review status: 0 of 4 files reviewed at latest revision, 2 unresolved discussions. --- *[mobly/controllers/android_device_lib/adb.py, line 206 at r3](https://reviewable.io/reviews/google/mobly/392#-LC_oUj11zZI2SW6BFxY:-LCa2y-22qhSnK9V6gwj:b-896fix) ([raw file](https://github.com/google/mobly/blob/f4f42130704c02f0cb40b35670586bca7d6abd87/mobly/controllers/android_device_lib/adb.py#L206)):* (Previously, xpconanfan (Ang Li) wrote… not needed?) Done. --- *[mobly/controllers/android_device_lib/adb.py, line 221 at r3](https://reviewable.io/reviews/google/mobly/392#-LC_khCQERM8OBURZEIG:-LCa2HHCD0FDbh1fefYe:bta5lbs) ([raw file](https://github.com/google/mobly/blob/f4f42130704c02f0cb40b35670586bca7d6abd87/mobly/controllers/android_device_lib/adb.py#L221)):* (Previously, xpconanfan (Ang Li) wrote… > ``` > [elided] > ``` What is elided?...) it's just to indicate that stdout is missing, would you prefer empty string or None instead? --- xpconanfan: Review status: 0 of 4 files reviewed at latest revision, 2 unresolved discussions. 
--- *[mobly/controllers/android_device_lib/adb.py, line 221 at r3](https://reviewable.io/reviews/google/mobly/392#-LC_khCQERM8OBURZEIG:-LCaOWPv9OG92CD17Pzd:bs8gkfc) ([raw file](https://github.com/google/mobly/blob/f4f42130704c02f0cb40b35670586bca7d6abd87/mobly/controllers/android_device_lib/adb.py#L221)):* (Previously, winterfroststrom wrote… it's just to indicate that stdout is missing, would you prefer empty string or None instead?) Should we add a more specific msg explaining that stdout has already been piped through the handler? --- *[mobly/controllers/android_device_lib/adb.py, line 331 at r4](https://reviewable.io/reviews/google/mobly/392#-LCaOs5ZBoq-5AmkNi-D:-LCaOs5ZBoq-5AmkNi-E:bx7a4dg) ([raw file](https://github.com/google/mobly/blob/a39e767452e1ba5220345ebec5f521d945d20eea/mobly/controllers/android_device_lib/adb.py#L331)):* > ```Python > runner: string, the test runner name, which defaults to > DEFAULT_INSTRUMENTATION_RUNNER. > handler: func, a function to parse the instrumentation output line > ``` "optional. When specified, does xxx" --- winterfroststrom: Review status: 0 of 4 files reviewed at latest revision, 3 unresolved discussions. --- *[mobly/controllers/android_device_lib/adb.py, line 221 at r3](https://reviewable.io/reviews/google/mobly/392#-LC_khCQERM8OBURZEIG:-LCaUIpY3umLkm0YdjzJ:b-896fix) ([raw file](https://github.com/google/mobly/blob/f4f42130704c02f0cb40b35670586bca7d6abd87/mobly/controllers/android_device_lib/adb.py#L221)):* (Previously, xpconanfan (Ang Li) wrote… Should we add a more specific msg explaining that stdout has already been piped through the handler?) Done. --- *[mobly/controllers/android_device_lib/adb.py, line 331 at r4](https://reviewable.io/reviews/google/mobly/392#-LCaOs5ZBoq-5AmkNi-D:-LCaUKbUA152l5z1U3Ky:b-896fix) ([raw file](https://github.com/google/mobly/blob/a39e767452e1ba5220345ebec5f521d945d20eea/mobly/controllers/android_device_lib/adb.py#L331)):* (Previously, xpconanfan (Ang Li) wrote… "optional. When specified, does xxx") Done. --- xpconanfan: Review status: 0 of 4 files reviewed at latest revision, all discussions resolved. --- *[mobly/controllers/android_device_lib/adb.py, line 336 at r5](https://reviewable.io/reviews/google/mobly/392#-LCac0mS4I0glk8XzpA8:-LCabwlT-XOBIGhkg8H0:b-bxk46c) ([raw file](https://github.com/google/mobly/blob/c37138c24f1393dc30dffa9bdbf41db6fd5f7181/mobly/controllers/android_device_lib/adb.py#L336)):* > ``` > generated; otherwise if not specified, the stdout is simply > returned once the instrumentation is finished. > ``` "otherwise" is sufficient. remove "if not specified". --- xpconanfan: Review status: 0 of 4 files reviewed at latest revision, 1 unresolved discussion. 
--- *[mobly/base_instrumentation_test.py, line 284 at r5](https://reviewable.io/reviews/google/mobly/392#-LCacEVA7shTjMVPehlV:-LCacEVA7shTjMVPehlW:b9c5inx) ([raw file](https://github.com/google/mobly/blob/c37138c24f1393dc30dffa9bdbf41db6fd5f7181/mobly/base_instrumentation_test.py#L284)):* > ```Python > > Attributes: > begin_time: string or None, optional timestamp for when the test > ``` "or None" is not needed? --- winterfroststrom: Review status: 0 of 4 files reviewed at latest revision, 2 unresolved discussions. --- *[mobly/base_instrumentation_test.py, line 284 at r5](https://reviewable.io/reviews/google/mobly/392#-LCacEVA7shTjMVPehlV:-LCaeAvNAb_3fda-yQG8:b-896fix) ([raw file](https://github.com/google/mobly/blob/c37138c24f1393dc30dffa9bdbf41db6fd5f7181/mobly/base_instrumentation_test.py#L284)):* (Previously, xpconanfan (Ang Li) wrote… "or None" is not needed?) Done. --- *[mobly/controllers/android_device_lib/adb.py, line 336 at r5](https://reviewable.io/reviews/google/mobly/392#-LCac0mS4I0glk8XzpA8:-LCaeBet0-UgDN6bB7v_:b-896fix) ([raw file](https://github.com/google/mobly/blob/c37138c24f1393dc30dffa9bdbf41db6fd5f7181/mobly/controllers/android_device_lib/adb.py#L336)):* (Previously, xpconanfan (Ang Li) wrote… > ``` > generated; otherwise if not specified, the stdout is simply > returned once the instrumentation is finished. > ``` "otherwise" is sufficient. remove "if not specified".) Done. --- xpconanfan: :lgtm: --- Reviewed 2 of 4 files at r3, 2 of 2 files at r6. Review status: all files reviewed at latest revision, all discussions resolved.
diff --git a/mobly/controllers/android_device_lib/adb.py b/mobly/controllers/android_device_lib/adb.py index 3abcaa9..90dcd0b 100644 --- a/mobly/controllers/android_device_lib/adb.py +++ b/mobly/controllers/android_device_lib/adb.py @@ -181,6 +181,47 @@ class AdbProxy(object): else: raise AdbError(cmd=args, stdout=out, stderr=err, ret_code=ret) + def _execute_and_process_stdout(self, args, shell, handler): + """Executes adb commands and processes the stdout with a handler. + + Args: + args: string or list of strings, program arguments. + See subprocess.Popen() documentation. + shell: bool, True to run this command through the system shell, + False to invoke it directly. See subprocess.Popen() docs. + handler: func, a function to handle adb stdout line by line. + + Returns: + The stderr of the adb command run if exit code is 0. + + Raises: + AdbError: The adb command exit code is not 0. + """ + proc = subprocess.Popen( + args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=shell, + bufsize=1) + try: + while proc.poll() is None: + line = proc.stdout.readline() + if line: + handler(line) + else: + break + finally: + (_, err) = proc.communicate() + ret = proc.returncode + if ret == 0: + return err + else: + raise AdbError( + cmd=args, + stdout='[elided, processed via handler]', + stderr=err, + ret_code=ret) + def _construct_adb_cmd(self, raw_name, args, shell): """Constructs an adb command with arguments for a subprocess call. @@ -225,6 +266,12 @@ class AdbProxy(object): adb_cmd, shell=shell, timeout=timeout, stderr=stderr) return out + def _execute_adb_and_process_stdout(self, name, args, shell, handler): + adb_cmd = self._construct_adb_cmd(name, args, shell=shell) + out = self._execute_and_process_stdout( + adb_cmd, shell=shell, handler=handler) + return out + def getprop(self, prop_name): """Get a property of the device. @@ -262,7 +309,7 @@ class AdbProxy(object): return self._exec_adb_cmd( 'forward', args, shell, timeout=None, stderr=None) - def instrument(self, package, options=None, runner=None): + def instrument(self, package, options=None, runner=None, handler=None): """Runs an instrumentation command on the device. This is a convenience wrapper to avoid parameter formatting. @@ -284,9 +331,14 @@ class AdbProxy(object): class. runner: string, the test runner name, which defaults to DEFAULT_INSTRUMENTATION_RUNNER. + handler: optional func, when specified the function is used to parse + the instrumentation stdout line by line as the output is + generated; otherwise, the stdout is simply returned once the + instrumentation is finished. Returns: - The output of instrumentation command. + The stdout of instrumentation command or the stderr if the handler + is set. """ if runner is None: runner = DEFAULT_INSTRUMENTATION_RUNNER @@ -302,7 +354,17 @@ class AdbProxy(object): options_string, package, runner) logging.info('AndroidDevice|%s: Executing adb shell %s', self.serial, instrumentation_command) - return self.shell(instrumentation_command) + if handler is None: + # Flow kept for backwards-compatibility reasons + self._exec_adb_cmd( + 'shell', + instrumentation_command, + shell=False, + timeout=None, + stderr=None) + else: + return self._execute_adb_and_process_stdout( + 'shell', instrumentation_command, shell=False, handler=handler) def __getattr__(self, name): def adb_call(args=None, shell=False, timeout=None, stderr=None):
Add timing information for instrumentation tests
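For context, a minimal usage sketch of the streaming `handler` parameter that the patch above adds to `AdbProxy.instrument` (it assumes `device` is an already-constructed mobly `AndroidDevice`; the package name is a placeholder):

```python
import logging

def print_line(raw_line):
    # The handler receives each stdout line as bytes while the
    # instrumentation command is still running, which is what enables
    # online parsing (and the timing capture added by this change).
    logging.info(raw_line.rstrip().decode('utf-8'))

# `device` is assumed to exist; per the updated docstring, when a
# handler is given, instrument() returns the command's stderr rather
# than its stdout.
stderr = device.adb.instrument('com.my.package.test', handler=print_line)
```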
google/mobly
diff --git a/mobly/base_instrumentation_test.py b/mobly/base_instrumentation_test.py index bb72075..32258ca 100644 --- a/mobly/base_instrumentation_test.py +++ b/mobly/base_instrumentation_test.py @@ -19,6 +19,7 @@ from enum import Enum from mobly import base_test from mobly import records from mobly import signals +from mobly import utils class _InstrumentationStructurePrefixes(object): @@ -280,6 +281,8 @@ class _InstrumentationBlock(object): needs to be done for those. Attributes: + begin_time: string, optional timestamp for when the test corresponding + to the instrumentation block began. current_key: string, the current key that is being parsed, default to _InstrumentationKnownStatusKeys.STREAM. error_message: string, an error message indicating that something @@ -302,6 +305,10 @@ class _InstrumentationBlock(object): self.state = state self.prefix = prefix self.previous_instrumentation_block = previous_instrumentation_block + if previous_instrumentation_block: + # The parser never needs lookback for two previous blocks, + # so unset to allow previous blocks to get garbage collected. + previous_instrumentation_block.previous_instrumentation_block = None self._empty = True self.error_message = '' @@ -319,6 +326,8 @@ class _InstrumentationBlock(object): } self.unknown_keys = defaultdict(list) + self.begin_time = None + @property def is_empty(self): """Deteremines whether or not anything has been parsed with this @@ -371,6 +380,8 @@ class _InstrumentationBlock(object): _InstrumentationStructurePrefixes.STATUS_CODE, status_code_line, ) + if self.status_code == _InstrumentationStatusCodes.START: + self.begin_time = utils.get_current_epoch_time() def set_key(self, structure_prefix, key_line): """Sets the current key for the instrumentation block. @@ -440,11 +451,14 @@ class _InstrumentationBlock(object): self.state = new_state return self else: - return _InstrumentationBlock( + next_block = _InstrumentationBlock( state=new_state, prefix=self.prefix, previous_instrumentation_block=self, ) + if self.status_code in _InstrumentationStatusCodeCategories.TIMING: + next_block.begin_time = self.begin_time + return next_block class _InstrumentationBlockFormatter(object): @@ -466,6 +480,7 @@ class _InstrumentationBlockFormatter(object): for key, value in instrumentation_block.unknown_keys.items(): self._unknown_keys[key] = '\n'.join( instrumentation_block.unknown_keys[key]).rstrip() + self._begin_time = instrumentation_block.begin_time def _get_name(self): """Gets the method name of the test method for the instrumentation @@ -589,6 +604,9 @@ class _InstrumentationBlockFormatter(object): t_name=self._get_full_name(), t_class=mobly_test_class, ) + if self._begin_time: + tr_record.begin_time = self._begin_time + if self._is_failed(): tr_record.test_fail( e=signals.TestFailure(details=details, extras=extras)) @@ -923,17 +941,20 @@ class BaseInstrumentationTestClass(base_test.BaseTestClass): TestError if the instrumentation run crashed or if parsing the output failed. 
""" - instrumentation_output = device.adb.instrument( + # Dictionary hack to allow overwriting the instrumentation_block in the + # parse_instrumentation closure + instrumentation_block = [_InstrumentationBlock(prefix=prefix)] + + def parse_instrumentation(raw_line): + line = raw_line.rstrip().decode('utf-8') + logging.info(line) + instrumentation_block[0] = self._parse_line( + instrumentation_block[0], line) + + stderr = device.adb.instrument( package=package, options=options, runner=runner, - ).decode('utf-8') - logging.info('Outputting instrumentation test log...') - logging.info(instrumentation_output) - - # TODO(winterfrosts): Implement online output generation and parsing. - instrumentation_block = _InstrumentationBlock(prefix=prefix) - for line in instrumentation_output.splitlines(): - instrumentation_block = self._parse_line(instrumentation_block, - line) - return self._finish_parsing(instrumentation_block) + handler=parse_instrumentation) + + return self._finish_parsing(instrumentation_block[0]) diff --git a/tests/mobly/base_instrumentation_test_test.py b/tests/mobly/base_instrumentation_test_test.py index 3908015..64cd09c 100755 --- a/tests/mobly/base_instrumentation_test_test.py +++ b/tests/mobly/base_instrumentation_test_test.py @@ -26,6 +26,8 @@ from mobly.base_instrumentation_test import _InstrumentationStructurePrefixes from mobly.base_instrumentation_test import BaseInstrumentationTestClass from mobly import config_parser from mobly import signals +from mobly.controllers import android_device +from mobly.controllers.android_device_lib import adb # A mock test package for instrumentation. MOCK_TEST_PACKAGE = 'com.my.package.test' @@ -56,10 +58,14 @@ class MockInstrumentationTest(BaseInstrumentationTestClass): super(MockInstrumentationTest, self).__init__(mock_test_run_configs) def run_mock_instrumentation_test(self, instrumentation_output, prefix): - mock_device = mock.Mock() - mock_device.adb = mock.Mock() - mock_device.adb.instrument = mock.MagicMock( - return_value=instrumentation_output) + def fake_instrument(package, options=None, runner=None, handler=None): + for line in instrumentation_output.splitlines(): + handler(line) + return instrumentation_output + + mock_device = mock.Mock(spec=android_device.AndroidDevice) + mock_device.adb = mock.Mock(spec=adb.AdbProxy) + mock_device.adb.instrument = fake_instrument return self.run_instrumentation_test( mock_device, MOCK_TEST_PACKAGE, prefix=prefix) @@ -161,7 +167,8 @@ class BaseInstrumentationTestTest(unittest.TestCase): expected_skipped=[], expected_completed_and_passed=False, expected_has_error=False, - prefix=None): + prefix=None, + expected_executed_times=[]): result = self.run_instrumentation_test( self.convert_to_raw_output(instrumentation_output), prefix=prefix) if expected_has_error: @@ -178,6 +185,12 @@ class BaseInstrumentationTestTest(unittest.TestCase): for actual_test, expected_test in zip(result.skipped, expected_skipped): self.assert_equal_test(actual_test, expected_test) + if expected_executed_times: + for actual_test, expected_time in zip(result.executed, + expected_executed_times): + (expected_begin_time, expected_end_time) = expected_time + self.assertEquals(actual_test.begin_time, expected_begin_time) + self.assertEquals(actual_test.end_time, expected_end_time) def test_run_instrumentation_test_with_invalid_syntax(self): instrumentation_output = """\ @@ -256,7 +269,8 @@ INSTRUMENTATION_STATUS_CODE: -1 logged_format = mock_call[1][0] self.assertIsInstance(logged_format, str) - def 
test_run_instrumentation_test_with_passing_test(self): + @mock.patch('mobly.utils.get_current_epoch_time') + def test_run_instrumentation_test_with_passing_test(self, mock_get_time): instrumentation_output = """\ INSTRUMENTATION_STATUS: numtests=1 INSTRUMENTATION_STATUS: stream= @@ -285,10 +299,12 @@ INSTRUMENTATION_CODE: -1 expected_executed = [ ('com.my.package.test.BasicTest#basicTest', signals.TestPass), ] + mock_get_time.side_effect = [13, 51] self.assert_run_instrumentation_test( instrumentation_output, expected_executed=expected_executed, - expected_completed_and_passed=True) + expected_completed_and_passed=True, + expected_executed_times=[(13, 51)]) def test_run_instrumentation_test_with_random_whitespace(self): instrumentation_output = """\ @@ -690,7 +706,8 @@ INSTRUMENTATION_CODE: -1""" expected_skipped=expected_skipped, expected_completed_and_passed=True) - def test_run_instrumentation_test_with_crashed_test(self): + @mock.patch('mobly.utils.get_current_epoch_time') + def test_run_instrumentation_test_with_crashed_test(self, mock_get_time): instrumentation_output = """\ INSTRUMENTATION_STATUS: class=com.my.package.test.BasicTest INSTRUMENTATION_STATUS: current=1 @@ -705,12 +722,15 @@ INSTRUMENTATION_CODE: 0""" expected_executed = [ ('com.my.package.test.BasicTest#crashTest', signals.TestError), ] + mock_get_time.side_effect = [67, 942] self.assert_run_instrumentation_test( instrumentation_output, expected_executed=expected_executed, - expected_has_error=True) + expected_has_error=True, + expected_executed_times=[(67, 942)]) - def test_run_instrumentation_test_with_crashing_test(self): + @mock.patch('mobly.utils.get_current_epoch_time') + def test_run_instrumentation_test_with_crashing_test(self, mock_get_time): instrumentation_output = """\ INSTRUMENTATION_STATUS: class=com.my.package.test.BasicTest INSTRUMENTATION_STATUS: current=1 @@ -742,10 +762,13 @@ INSTRUMENTATION_CODE: -1""" ('com.my.package.test.BasicTest#crashAndRecover2Test', signals.TestError), ] + mock_get_time.side_effect = [16, 412, 4143, 6547] + # TODO(winterfrosts): Fix this issue with overlapping timing self.assert_run_instrumentation_test( instrumentation_output, expected_executed=expected_executed, - expected_completed_and_passed=True) + expected_completed_and_passed=True, + expected_executed_times=[(16, 4143), (412, 6547)]) def test_run_instrumentation_test_with_runner_setup_crash(self): instrumentation_output = """\ @@ -782,7 +805,8 @@ INSTRUMENTATION_CODE: 0 expected_executed=expected_executed, expected_has_error=True) - def test_run_instrumentation_test_with_multiple_tests(self): + @mock.patch('mobly.utils.get_current_epoch_time') + def test_run_instrumentation_test_with_multiple_tests(self, mock_get_time): instrumentation_output = """\ INSTRUMENTATION_STATUS: class=com.my.package.test.BasicTest INSTRUMENTATION_STATUS: current=1 @@ -1066,10 +1090,12 @@ INSTRUMENTATION_CODE: -1""" signals.TestSkip), ('com.my.package.test.BasicTest#ignoredTest', signals.TestSkip), ] + mock_get_time.side_effect = [54, 64, -1, -1, -1, -1, 89, 94] self.assert_run_instrumentation_test( instrumentation_output, expected_executed=expected_executed, - expected_skipped=expected_skipped) + expected_skipped=expected_skipped, + expected_executed_times=[(54, 64), (89, 94)]) def test__Instrumentation_block_set_key_on_multiple_equals_sign(self): value = "blah=blah, blah2=blah2, blah=2=1=2" diff --git a/tests/mobly/controllers/android_device_lib/adb_test.py b/tests/mobly/controllers/android_device_lib/adb_test.py index 8004728..1c75a9d 
100755 --- a/tests/mobly/controllers/android_device_lib/adb_test.py +++ b/tests/mobly/controllers/android_device_lib/adb_test.py @@ -67,6 +67,20 @@ class AdbTest(unittest.TestCase): mock_proc.returncode = 0 return (mock_psutil_process, mock_popen) + def _mock_execute_and_process_stdout_process(self, mock_popen): + # the created proc object in adb._execute_and_process_stdout() + mock_proc = mock.Mock() + mock_popen.return_value = mock_proc + + mock_popen.return_value.poll.return_value = None + mock_popen.return_value.stdout.readline.side_effect = [''] + + mock_proc.communicate = mock.Mock( + return_value=(MOCK_DEFAULT_STDOUT.encode('utf-8'), + MOCK_DEFAULT_STDERR.encode('utf-8'))) + mock_proc.returncode = 0 + return mock_popen + @mock.patch('mobly.controllers.android_device_lib.adb.subprocess.Popen') @mock.patch('mobly.controllers.android_device_lib.adb.psutil.Process') def test_exec_cmd_no_timeout_success(self, mock_psutil_process, @@ -124,6 +138,73 @@ class AdbTest(unittest.TestCase): adb.AdbProxy()._exec_cmd( ['fake_cmd'], shell=False, timeout=-1, stderr=None) + @mock.patch('mobly.controllers.android_device_lib.adb.subprocess.Popen') + def test_execute_and_process_stdout_reads_stdout(self, mock_popen): + self._mock_execute_and_process_stdout_process(mock_popen) + mock_popen.return_value.stdout.readline.side_effect = ['1', '2', ''] + mock_handler = mock.MagicMock() + + err = adb.AdbProxy()._execute_and_process_stdout( + ['fake_cmd'], shell=False, handler=mock_handler) + self.assertEqual(mock_handler.call_count, 2) + mock_handler.assert_any_call('1') + mock_handler.assert_any_call('2') + + @mock.patch('mobly.controllers.android_device_lib.adb.subprocess.Popen') + def test_execute_and_process_stdout_when_cmd_exits(self, mock_popen): + self._mock_execute_and_process_stdout_process(mock_popen) + mock_popen.return_value.poll.side_effect = [None, None, None, 0] + mock_popen.return_value.stdout.readline.return_value = '123' + mock_handler = mock.MagicMock() + + err = adb.AdbProxy()._execute_and_process_stdout( + ['fake_cmd'], shell=False, handler=mock_handler) + + @mock.patch('mobly.controllers.android_device_lib.adb.subprocess.Popen') + def test_execute_and_process_stdout_when_cmd_eof(self, mock_popen): + self._mock_execute_and_process_stdout_process(mock_popen) + mock_popen.return_value.poll.return_value = None + mock_popen.return_value.stdout.readline.side_effect = [ + '1', '2', '3', '' + ] + mock_handler = mock.MagicMock() + + err = adb.AdbProxy()._execute_and_process_stdout( + ['fake_cmd'], shell=False, handler=mock_handler) + + @mock.patch('mobly.controllers.android_device_lib.adb.subprocess.Popen') + def test_execute_and_process_stdout_returns_stderr(self, mock_popen): + self._mock_execute_and_process_stdout_process(mock_popen) + + err = adb.AdbProxy()._execute_and_process_stdout( + ['fake_cmd'], shell=False, handler=mock.MagicMock()) + self.assertEqual(MOCK_DEFAULT_STDERR, err.decode('utf-8')) + + @mock.patch('mobly.controllers.android_device_lib.adb.subprocess.Popen') + def test_execute_and_process_stdout_raises_adb_error(self, mock_popen): + self._mock_execute_and_process_stdout_process(mock_popen) + mock_popen.return_value.returncode = 1 + + with self.assertRaisesRegex(adb.AdbError, + 'Error executing adb cmd .*'): + err = adb.AdbProxy()._execute_and_process_stdout( + ['fake_cmd'], shell=False, handler=mock.MagicMock()) + + @mock.patch('mobly.controllers.android_device_lib.adb.subprocess.Popen') + def test_execute_and_process_stdout_when_handler_crash(self, mock_popen): + 
self._mock_execute_and_process_stdout_process(mock_popen) + mock_popen.return_value.stdout.readline.side_effect = [ + '1', '2', '3', '' + ] + mock_handler = mock.MagicMock() + mock_handler.side_effect = ['', TypeError('fake crash'), '', ''] + + with self.assertRaisesRegex(TypeError, 'fake crash'): + err = adb.AdbProxy()._execute_and_process_stdout( + ['fake_cmd'], shell=False, handler=mock_handler) + + mock_popen.return_value.communicate.assert_called_once_with() + def test_construct_adb_cmd(self): adb_cmd = adb.AdbProxy()._construct_adb_cmd( 'shell', 'arg1', shell=False) @@ -255,6 +336,24 @@ class AdbTest(unittest.TestCase): mock_exec_cmd.assert_called_once_with( mock_adb_cmd, shell=True, timeout=None, stderr=None) + def test_execute_adb_and_process_stdout_formats_command(self): + with mock.patch.object(adb.AdbProxy, '_execute_and_process_stdout' + ) as mock_execute_and_process_stdout: + with mock.patch.object( + adb.AdbProxy, + '_construct_adb_cmd') as mock_construct_adb_cmd: + mock_adb_cmd = mock.MagicMock() + mock_adb_args = mock.MagicMock() + mock_handler = mock.MagicMock() + mock_construct_adb_cmd.return_value = mock_adb_cmd + + adb.AdbProxy()._execute_adb_and_process_stdout( + 'shell', mock_adb_args, shell=False, handler=mock_handler) + mock_construct_adb_cmd.assert_called_once_with( + 'shell', mock_adb_args, shell=False) + mock_execute_and_process_stdout.assert_called_once_with( + mock_adb_cmd, shell=False, handler=mock_handler) + @mock.patch('mobly.controllers.android_device_lib.adb.subprocess.Popen') @mock.patch('mobly.controllers.android_device_lib.adb.psutil.Process') def test_exec_adb_cmd_with_stderr_pipe(self, mock_psutil_process, @@ -276,7 +375,6 @@ class AdbTest(unittest.TestCase): the basic case. """ with mock.patch.object(adb.AdbProxy, '_exec_cmd') as mock_exec_cmd: - mock_exec_cmd.return_value = MOCK_DEFAULT_COMMAND_OUTPUT adb.AdbProxy().instrument(MOCK_INSTRUMENTATION_PACKAGE) mock_exec_cmd.assert_called_once_with( ['adb', 'shell', MOCK_BASIC_INSTRUMENTATION_COMMAND], @@ -289,7 +387,6 @@ class AdbTest(unittest.TestCase): with a runner specified. """ with mock.patch.object(adb.AdbProxy, '_exec_cmd') as mock_exec_cmd: - mock_exec_cmd.return_value = MOCK_DEFAULT_COMMAND_OUTPUT adb.AdbProxy().instrument( MOCK_INSTRUMENTATION_PACKAGE, runner=MOCK_INSTRUMENTATION_RUNNER) @@ -304,7 +401,6 @@ class AdbTest(unittest.TestCase): with options. """ with mock.patch.object(adb.AdbProxy, '_exec_cmd') as mock_exec_cmd: - mock_exec_cmd.return_value = MOCK_DEFAULT_COMMAND_OUTPUT adb.AdbProxy().instrument( MOCK_INSTRUMENTATION_PACKAGE, options=MOCK_INSTRUMENTATION_OPTIONS) @@ -314,6 +410,61 @@ class AdbTest(unittest.TestCase): timeout=None, stderr=None) + def test_instrument_with_handler(self): + """Verifies the AndroidDevice object's instrument command is correct + with a handler passed in. + """ + + def mock_handler(raw_line): + pass + + with mock.patch.object(adb.AdbProxy, '_execute_and_process_stdout' + ) as mock_execute_and_process_stdout: + adb.AdbProxy().instrument( + MOCK_INSTRUMENTATION_PACKAGE, handler=mock_handler) + mock_execute_and_process_stdout.assert_called_once_with( + ['adb', 'shell', MOCK_BASIC_INSTRUMENTATION_COMMAND], + shell=False, + handler=mock_handler) + + def test_instrument_with_handler_with_runner(self): + """Verifies the AndroidDevice object's instrument command is correct + with a handler passed in and a runner specified. 
+ """ + + def mock_handler(raw_line): + pass + + with mock.patch.object(adb.AdbProxy, '_execute_and_process_stdout' + ) as mock_execute_and_process_stdout: + adb.AdbProxy().instrument( + MOCK_INSTRUMENTATION_PACKAGE, + runner=MOCK_INSTRUMENTATION_RUNNER, + handler=mock_handler) + mock_execute_and_process_stdout.assert_called_once_with( + ['adb', 'shell', MOCK_RUNNER_INSTRUMENTATION_COMMAND], + shell=False, + handler=mock_handler) + + def test_instrument_with_handler_with_options(self): + """Verifies the AndroidDevice object's instrument command is correct + with a handler passed in and options. + """ + + def mock_handler(raw_line): + pass + + with mock.patch.object(adb.AdbProxy, '_execute_and_process_stdout' + ) as mock_execute_and_process_stdout: + adb.AdbProxy().instrument( + MOCK_INSTRUMENTATION_PACKAGE, + options=MOCK_INSTRUMENTATION_OPTIONS, + handler=mock_handler) + mock_execute_and_process_stdout.assert_called_once_with( + ['adb', 'shell', MOCK_OPTIONS_INSTRUMENTATION_COMMAND], + shell=False, + handler=mock_handler) + def test_cli_cmd_to_string(self): cmd = ['"adb"', 'a b', 'c//'] self.assertEqual(adb.cli_cmd_to_string(cmd), '\'"adb"\' \'a b\' c//')
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
1.7
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "mock", "pytz" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup==1.2.2 future==1.0.0 iniconfig==2.1.0 -e git+https://github.com/google/mobly.git@a9f2bda0ac5a6a1b11c794522daa8fd97ca0d0b0#egg=mobly mock==1.0.1 packaging==24.2 pluggy==1.5.0 portpicker==1.6.0 psutil==7.0.0 pyserial==3.5 pytest==8.3.5 pytz==2025.2 PyYAML==6.0.2 timeout-decorator==0.5.0 tomli==2.2.1
name: mobly channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - future==1.0.0 - iniconfig==2.1.0 - mock==1.0.1 - packaging==24.2 - pluggy==1.5.0 - portpicker==1.6.0 - psutil==7.0.0 - pyserial==3.5 - pytest==8.3.5 - pytz==2025.2 - pyyaml==6.0.2 - timeout-decorator==0.5.0 - tomli==2.2.1 prefix: /opt/conda/envs/mobly
[ "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_execute_adb_and_process_stdout_formats_command", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_execute_and_process_stdout_raises_adb_error", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_execute_and_process_stdout_reads_stdout", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_execute_and_process_stdout_returns_stderr", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_execute_and_process_stdout_when_cmd_eof", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_execute_and_process_stdout_when_cmd_exits", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_execute_and_process_stdout_when_handler_crash", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_instrument_with_handler", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_instrument_with_handler_with_options", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_instrument_with_handler_with_runner" ]
[]
[ "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test__Instrumentation_block_set_key_on_multiple_equals_sign", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_parse_instrumentation_options_with_mixed_user_params", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_parse_instrumentation_options_with_no_instrumentation_params", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_parse_instrumentation_options_with_no_user_params", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_parse_instrumentation_options_with_only_instrumentation_params", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_logs_correctly", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_assumption_failure_test", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_crashed_test", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_crashing_test", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_failing_test", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_ignored_test", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_invalid_syntax", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_missing_runner", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_missing_test_package", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_multiple_tests", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_no_output", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_no_tests", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_passing_test", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_prefix_test", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_random_whitespace", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_runner_setup_crash", "tests/mobly/base_instrumentation_test_test.py::BaseInstrumentationTestTest::test_run_instrumentation_test_with_runner_teardown_crash", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_cli_cmd_to_string", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_list", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_one_arg_command", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_one_arg_command_list", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_one_command", 
"tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_serial", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_serial_with_list", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_shell_true", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_shell_true_with_auto_quotes", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_shell_true_with_list", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_shell_true_with_one_arg_command", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_shell_true_with_one_arg_command_list", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_shell_true_with_one_command", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_shell_true_with_serial", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_shell_true_with_serial_with_list", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_construct_adb_cmd_with_special_characters", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_exec_adb_cmd", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_exec_adb_cmd_formats_command", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_exec_adb_cmd_formats_command_with_shell_true", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_exec_adb_cmd_with_shell_true", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_exec_adb_cmd_with_stderr_pipe", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_exec_cmd_error_no_timeout", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_exec_cmd_no_timeout_success", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_exec_cmd_timed_out", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_exec_cmd_with_negative_timeout_value", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_exec_cmd_with_timeout_success", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_forward", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_has_shell_command_called_correctly", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_has_shell_command_with_existing_command", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_has_shell_command_with_missing_command_on_newer_devices", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_has_shell_command_with_missing_command_on_older_devices", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_instrument_with_options", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_instrument_with_runner", "tests/mobly/controllers/android_device_lib/adb_test.py::AdbTest::test_instrument_without_parameters" ]
[]
Apache License 2.0
2,100
[ "mobly/controllers/android_device_lib/adb.py" ]
[ "mobly/controllers/android_device_lib/adb.py" ]
tox-dev__tox-756
1d6ca4b5c488c9c8ba10d25cb79ca89c254cd786
2018-01-30 11:10:25
a52e3519cb3333d5b53e9741a7d63efa0709d184
codecov[bot]: # [Codecov](https://codecov.io/gh/tox-dev/tox/pull/756?src=pr&el=h1) Report
> Merging [#756](https://codecov.io/gh/tox-dev/tox/pull/756?src=pr&el=desc) into [master](https://codecov.io/gh/tox-dev/tox/commit/1d6ca4b5c488c9c8ba10d25cb79ca89c254cd786?src=pr&el=desc) will **decrease** coverage by `0.04%`.
> The diff coverage is `100%`.

```diff
@@            Coverage Diff             @@
##           master     #756      +/-   ##
==========================================
- Coverage   94.88%   94.84%   -0.05%
==========================================
  Files          11       11
  Lines        2386     2386
==========================================
- Hits         2264     2263       -1
- Misses        122      123       +1
```

| [Impacted Files](https://codecov.io/gh/tox-dev/tox/pull/756?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [tox/\_\_init\_\_.py](https://codecov.io/gh/tox-dev/tox/pull/756/diff?src=pr&el=tree#diff-dG94L19faW5pdF9fLnB5) | `93.33% <100%> (ø)` | :arrow_up: |
| [tox/\_pytestplugin.py](https://codecov.io/gh/tox-dev/tox/pull/756/diff?src=pr&el=tree#diff-dG94L19weXRlc3RwbHVnaW4ucHk=) | `96% <0%> (-0.45%)` | :arrow_down: |

> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
diff --git a/changelog/755.bugfix.rst b/changelog/755.bugfix.rst new file mode 100644 index 00000000..cbd625a6 --- /dev/null +++ b/changelog/755.bugfix.rst @@ -0,0 +1,1 @@ +fix #755 by reverting the ``cmdline`` import to the old location and changing the entry point instead - by @fschulze diff --git a/setup.py b/setup.py index 4d91d9f6..6fc8eb6a 100644 --- a/setup.py +++ b/setup.py @@ -43,7 +43,7 @@ def main(): author='holger krekel', author_email='[email protected]', packages=['tox'], - entry_points={'console_scripts': ['tox=tox:cmdline', + entry_points={'console_scripts': ['tox=tox.session:run_main', 'tox-quickstart=tox._quickstart:main']}, python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', setup_requires=['setuptools_scm'], diff --git a/tox/__init__.py b/tox/__init__.py index b7a16315..ba64b356 100644 --- a/tox/__init__.py +++ b/tox/__init__.py @@ -52,6 +52,6 @@ class exception: super(exception.MinVersionError, self).__init__(message) -from .session import run_main as cmdline # noqa +from .session import main as cmdline # noqa __all__ = ('hookspec', 'hookimpl', 'cmdline', 'exception', '__version__')
tox 3.0.0rc1 breaks ``devpi test``

From https://github.com/tox-dev/tox/issues/728#issuecomment-361198048

The change of ``cmdline`` in f85b827c54de0f2531f69fc0c4883c8f4f3c8ecc broke ``devpi test``. It's easy to fix on the devpi side, but I'm not sure whether it will also break detox; I wasn't able to get the detox tests running when I tried. Maybe it would be better to revert ``cmdline`` back to ``tox.session.main`` and change the entry point to ``tox.session.run_main`` instead.

cc @gaborbernat @obestwalter
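After the revert, ``tox.cmdline`` is ``tox.session.main`` again, which accepts an explicit argument list. A minimal sketch of the downstream call pattern this preserves (mirroring the updated test in the test patch below; ``--help`` makes the argument parser exit):

```python
import tox

try:
    # Downstream tools such as devpi pass argv explicitly; after the
    # revert, tox.cmdline(args) accepts that list again instead of
    # reading sys.argv.
    tox.cmdline(['caller_script', '--help'])
except SystemExit as exc:
    print('tox exited with code', exc.code)
```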
tox-dev/tox
diff --git a/doc/example/pytest.rst b/doc/example/pytest.rst index 521c741d..f871090d 100644 --- a/doc/example/pytest.rst +++ b/doc/example/pytest.rst @@ -95,6 +95,17 @@ importable from somewhere then your ``pytest`` invocation may end up importing the package from the checkout directory rather than the installed package. +This issue may be characterised by pytest test-collection error messages, in python 3.x environments, that look like: + +.. code-block:: shell + + import file mismatch: + imported module 'myproj.foo.tests.test_foo' has this __file__ attribute: + /home/myuser/repos/myproj/build/lib/myproj/foo/tests/test_foo.py + which is not the same as the test file we want to collect: + /home/myuser/repos/myproj/myproj/foo/tests/test_foo.py + HINT: remove __pycache__ / .pyc files and/or use a unique basename for your test file modules + There are a few ways to prevent this. With installed tests (the tests packages are known to ``setup.py``), a diff --git a/tests/test_z_cmdline.py b/tests/test_z_cmdline.py index 1a38001b..746032cb 100644 --- a/tests/test_z_cmdline.py +++ b/tests/test_z_cmdline.py @@ -2,7 +2,6 @@ import os import platform import re import subprocess -import sys import py import pytest @@ -901,6 +900,5 @@ def test_tox_quickstart_script(): def test_tox_cmdline(monkeypatch): - monkeypatch.setattr(sys, 'argv', ['caller_script', '--help']) with pytest.raises(SystemExit): - tox.cmdline() + tox.cmdline(['caller_script', '--help'])
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_git_commit_hash", "has_added_files", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 2 }
3.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[testing]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-mock", "pytest-timeout", "pytest-xdist" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 distlib==0.3.9 exceptiongroup==1.2.2 execnet==2.1.1 filelock==3.18.0 iniconfig==2.1.0 packaging==24.2 platformdirs==4.3.7 pluggy==0.13.1 py==1.11.0 pytest==7.4.4 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-timeout==2.3.1 pytest-xdist==3.6.1 six==1.17.0 tomli==2.2.1 -e git+https://github.com/tox-dev/tox.git@1d6ca4b5c488c9c8ba10d25cb79ca89c254cd786#egg=tox virtualenv==20.29.3
name: tox channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - distlib==0.3.9 - exceptiongroup==1.2.2 - execnet==2.1.1 - filelock==3.18.0 - iniconfig==2.1.0 - packaging==24.2 - platformdirs==4.3.7 - pluggy==0.13.1 - py==1.11.0 - pytest==7.4.4 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-timeout==2.3.1 - pytest-xdist==3.6.1 - six==1.17.0 - tomli==2.2.1 - tox==3.0.0rc1 - virtualenv==20.29.3 prefix: /opt/conda/envs/tox
[ "tests/test_z_cmdline.py::test_tox_cmdline" ]
[ "tests/test_z_cmdline.py::test_report_protocol", "tests/test_z_cmdline.py::test__resolve_pkg", "tests/test_z_cmdline.py::test_minversion", "tests/test_z_cmdline.py::test_notoxini_help_still_works", "tests/test_z_cmdline.py::test_envdir_equals_toxini_errors_out", "tests/test_z_cmdline.py::test_venv_special_chars_issue252", "tests/test_z_cmdline.py::test_unknown_environment", "tests/test_z_cmdline.py::test_separate_sdist_no_sdistfile" ]
[ "tests/test_z_cmdline.py::test__resolve_pkg_doubledash", "tests/test_z_cmdline.py::TestSession::test_make_sdist", "tests/test_z_cmdline.py::TestSession::test_make_sdist_distshare", "tests/test_z_cmdline.py::TestSession::test_log_pcall", "tests/test_z_cmdline.py::TestSession::test_summary_status", "tests/test_z_cmdline.py::TestSession::test_getvenv", "tests/test_z_cmdline.py::test_notoxini_help_ini_still_works", "tests/test_z_cmdline.py::test_run_custom_install_command_error", "tests/test_z_cmdline.py::test_unknown_interpreter_and_env", "tests/test_z_cmdline.py::test_unknown_interpreter", "tests/test_z_cmdline.py::test_skip_platform_mismatch", "tests/test_z_cmdline.py::test_skip_unknown_interpreter", "tests/test_z_cmdline.py::test_skip_unknown_interpreter_result_json", "tests/test_z_cmdline.py::test_unknown_dep", "tests/test_z_cmdline.py::test_skip_sdist", "tests/test_z_cmdline.py::test_minimal_setup_py_empty", "tests/test_z_cmdline.py::test_minimal_setup_py_comment_only", "tests/test_z_cmdline.py::test_minimal_setup_py_non_functional", "tests/test_z_cmdline.py::test_sdist_fails", "tests/test_z_cmdline.py::test_no_setup_py_exits", "tests/test_z_cmdline.py::test_package_install_fails", "tests/test_z_cmdline.py::test_toxuone_env", "tests/test_z_cmdline.py::test_different_config_cwd", "tests/test_z_cmdline.py::test_json", "tests/test_z_cmdline.py::test_developz", "tests/test_z_cmdline.py::test_usedevelop", "tests/test_z_cmdline.py::test_usedevelop_mixed", "tests/test_z_cmdline.py::test_test_usedevelop[.]", "tests/test_z_cmdline.py::test_test_usedevelop[src]", "tests/test_z_cmdline.py::test_alwayscopy", "tests/test_z_cmdline.py::test_alwayscopy_default", "tests/test_z_cmdline.py::test_empty_activity_ignored", "tests/test_z_cmdline.py::test_empty_activity_shown_verbose", "tests/test_z_cmdline.py::test_test_piphelp", "tests/test_z_cmdline.py::test_notest", "tests/test_z_cmdline.py::test_PYC", "tests/test_z_cmdline.py::test_env_VIRTUALENV_PYTHON", "tests/test_z_cmdline.py::test_sdistonly", "tests/test_z_cmdline.py::test_separate_sdist", "tests/test_z_cmdline.py::test_sdist_latest", "tests/test_z_cmdline.py::test_installpkg", "tests/test_z_cmdline.py::test_envsitepackagesdir", "tests/test_z_cmdline.py::test_envsitepackagesdir_skip_missing_issue280", "tests/test_z_cmdline.py::test_verbosity[]", "tests/test_z_cmdline.py::test_verbosity[-v]", "tests/test_z_cmdline.py::test_verbosity[-vv]", "tests/test_z_cmdline.py::test_envtmpdir", "tests/test_z_cmdline.py::test_missing_env_fails", "tests/test_z_cmdline.py::test_tox_quickstart_script" ]
[ "tests/test_z_cmdline.py::test_tox_console_script" ]
MIT License
2,102
[ "setup.py", "tox/__init__.py", "changelog/755.bugfix.rst" ]
[ "setup.py", "tox/__init__.py", "changelog/755.bugfix.rst" ]
pyout__pyout-33
059e9a40c3579885991c0b3e4e1a597108b33869
2018-01-30 22:02:21
5c6e7d9ea60e63704333e554567561df1123bd3f
diff --git a/CHANGELOG.md b/CHANGELOG.md index bfdb874..ac34470 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,12 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. TODO Summary ### Added + +- A new style attribute, "transform", has been added to the schema. + + This feature can be used to provide a function that takes a field + value and returns a transformed field value. + ### Changed ### Deprecated ### Fixed diff --git a/pyout/elements.py b/pyout/elements.py index 628c16d..160a1f8 100644 --- a/pyout/elements.py +++ b/pyout/elements.py @@ -11,7 +11,7 @@ schema = { "type": "string", "enum": ["left", "right", "center"], "default": "left", - "scope": "table"}, + "scope": "column"}, "bold": { "description": "Whether text is bold", "oneOf": [{"type": "boolean"}, @@ -46,12 +46,13 @@ schema = { "max": {"type": ["integer", "null"]}, "min": {"type": ["integer", "null"]}}}], "default": "auto", - "scope": "table"}, + "scope": "column"}, "styles": { "type": "object", "properties": {"align": {"$ref": "#/definitions/align"}, "bold": {"$ref": "#/definitions/bold"}, "color": {"$ref": "#/definitions/color"}, + "transform": {"$ref": "#/definitions/transform"}, "underline": {"$ref": "#/definitions/underline"}, "width": {"$ref": "#/definitions/width"}}, "additionalProperties": False}, @@ -72,7 +73,14 @@ schema = { "description": "Map a value to a style", "type": "object", "properties": {"label": {"type": "object"}}, - "additionalProperties": False} + "additionalProperties": False}, + "transform": { + "description": """An arbitrary function. + This function will be called with the (unprocessed) field + value as the single argument and should return a + transformed value. Note: This function should not have + side-effects because it may be called multiple times.""", + "scope": "field"} }, "type": "object", "properties": { diff --git a/pyout/field.py b/pyout/field.py index 8e81204..9dee4fc 100644 --- a/pyout/field.py +++ b/pyout/field.py @@ -1,48 +1,99 @@ """Define a "field" based on a sequence of processor functions. """ +from itertools import chain +from collections import defaultdict class Field(object): - """Format, process, and render tabular fields. + """Render values based on a list of processors. A Field instance is a template for a string that is defined by its - width, text alignment, and its "processors". When a field is - called with a value, it renders the value as a string with the - specified width and text alignment. Before this string is - returned, it is passed through the chain of processors. The - rendered string is the result returned by the last processor. + width, text alignment, and its "processors". + + When a field is called with a value, the value is rendered in + three steps. + + pre -> format -> post + + During the first step, the value is fed through the list of + pre-format processor functions. The result of this value is then + formatted as a string with the specified width and alignment. + Finally, this result is fed through the list of the post-format + processors. The rendered string is the result returned by the + last processor Parameters ---------- - width : int - align : {'left', 'right', 'center'} + width : int, optional + align : {'left', 'right', 'center'}, optional + default_keys, other_keys : sequence, optional + Together, these define the "registered" set of processor keys + that can be used in the `pre` and `post` dicts. Any key given + to the `add` method or instance call must be contained in one + of these collections. 
+ + The processor lists for `default_keys` is used when the + instance is called without a list of `keys`. `other_keys` + defines additional keys that can be passed as the `keys` + argument to the instance call. Attributes ---------- width : int - align : str - processors : dict - Each key maps to a list of processors. The keys "core" and - "default" must always be present. When an instance object is - called, the rendered result is always sent through the "core" - processors. It will then be sent through the "default" - processors unless another key is provided as the optional - `which` argument. - - A processor should take two positional arguments, the value - that is being rendered and the current result. Its return - value will be passed to the next processor as the current - result. + registered_keys : set + Set of available keys. + default_keys : list + Defines which processor lists are called by default and in + what order. The values can be overridden by the `keys` + argument when calling the instance. + pre, post : dict of lists + These map each registered key to a list of processors. + Conceptually, `pre` and `post` are a list of functions that + form a pipeline, but they are structured as a dict of lists to + allow different processors to be grouped by key. By + specifying keys, the caller can control which groups are + "enabled". """ _align_values = {"left": "<", "right": ">", "center": "^"} - def __init__(self, width=10, align="left"): + def __init__(self, width=10, align="left", + default_keys=None, other_keys=None): self._width = width self._align = align self._fmt = self._build_format() - self.processors = {"core": [], "default": []} + self.default_keys = default_keys or [] + self.registered_keys = set(chain(self.default_keys, other_keys or [])) + + self.pre = defaultdict(list) + self.post = defaultdict(list) + + def _check_if_registered(self, key): + if key not in self.registered_keys: + raise ValueError( + "key '{}' was not specified at initialization".format(key)) + + def add(self, kind, key, *values): + """Add processor functions. + + Parameters + ---------- + kind : {"pre", "post"} + key : str + A registered key. Add the functions (in order) to this + key's list of processors. + *values : callables + Processors to add. + """ + if kind == "pre": + procs = self.pre + elif kind == "post": + procs = self.post + else: + raise ValueError("kind is not 'pre' or 'post'") + self._check_if_registered(key) + procs[key].extend(values) @property def width(self): @@ -57,22 +108,52 @@ class Field(object): align = self._align_values[self._align] return "".join(["{:", align, str(self.width), "}"]) - def __call__(self, value, which="default"): + def _format(self, _, result): + """Wrap format call as a two-argument processor function. + """ + return self._fmt.format(result) + + def __call__(self, value, keys=None, exclude_post=False): """Render `value` by feeding it through the processors. Parameters ---------- value : str - which : str, optional - A key for the `processors` attribute that indicates the - list of processors to use in addition to the "core" list. + keys : sequence, optional + These lists define which processor lists are called and in + what order. If not specified, the `default_keys` + attribute will be used. + exclude_post : bool, optional + Whether to return the vaue after the format step rather + than feeding it through post-format processors. 
""" - result = self._fmt.format(value) - for fn in self.processors["core"] + self.processors[which]: + if keys is None: + keys = self.default_keys + for key in keys: + self._check_if_registered(key) + + pre_funcs = chain(*(self.pre[k] for k in keys)) + if exclude_post: + post_funcs = [] + else: + post_funcs = chain(*(self.post[k] for k in keys)) + + funcs = chain(pre_funcs, [self._format], post_funcs) + result = value + for fn in funcs: result = fn(value, result) return result +class StyleFunctionError(Exception): + """Signal that a style function failed. + """ + def __init__(self, function): + msg = ("Style transform {} raised an exception. " + "See above.".format(function)) + super(StyleFunctionError, self).__init__(msg) + + class StyleProcessors(object): """A base class for generating Field.processors for styled output. @@ -138,6 +219,17 @@ class StyleProcessors(object): return result[:length] return truncate_fn + @staticmethod + def transform(function): + """Return a processor for a style's "transform" function. + """ + def transform_fn(_, result): + try: + return function(result) + except: + raise StyleFunctionError(function) + return transform_fn + def by_key(self, key): """Return a processor for the style given by `key`. @@ -234,8 +326,24 @@ class StyleProcessors(object): return keys[0] raise ValueError("Type of `value` could not be determined") - def from_style(self, column_style): - """Yield processors based on `column_style`. + def pre_from_style(self, column_style): + """Yield pre-format processors based on `column_style`. + + Parameters + ---------- + column_style : dict + A style where the top-level keys correspond to style + attributes such as "bold" or "color". + + Returns + ------- + A generator object. + """ + if "transform" in column_style: + yield self.transform(column_style["transform"]) + + def post_from_style(self, column_style): + """Yield post-format processors based on `column_style`. Parameters ---------- diff --git a/pyout/tabular.py b/pyout/tabular.py index f33549c..d205a79 100644 --- a/pyout/tabular.py +++ b/pyout/tabular.py @@ -47,10 +47,10 @@ class TermProcessors(StyleProcessors): return result return maybe_reset_fn - def from_style(self, column_style): - """Call StyleProcessors.from_style, adding a Terminal-specific reset. + def post_from_style(self, column_style): + """A Terminal-specific reset to StyleProcessors.post_from_style. 
""" - for proc in super(TermProcessors, self).from_style(column_style): + for proc in super(TermProcessors, self).post_from_style(column_style): yield proc yield self._maybe_reset() @@ -175,7 +175,7 @@ class Tabular(object): for column in self._columns: cstyle = self._style[column] - procs = [] + core_procs = [] style_width = cstyle["width"] is_auto = style_width == "auto" or _safe_get(style_width, "auto") @@ -187,17 +187,26 @@ class Tabular(object): if wmax is not None: marker = _safe_get(style_width, "marker", True) - procs = [self._tproc.truncate(wmax, marker)] + core_procs = [self._tproc.truncate(wmax, marker)] elif is_auto is False: raise ValueError("No 'width' specified") else: width = style_width - procs = [self._tproc.truncate(width)] - - field = Field(width=width, align=cstyle["align"]) - field.processors["core"] = procs - field.processors["default"] = list(self._tproc.from_style(cstyle)) - + core_procs = [self._tproc.truncate(width)] + + # We are creating a distinction between "core" processors, + # that we always want to be active and "default" + # processors that we want to be active unless there's an + # overriding style (i.e., a header is being written or the + # `style` argument to __call__ is specified). + field = Field(width=width, align=cstyle["align"], + default_keys=["core", "default"], + other_keys=["override"]) + field.add("pre", "default", + *(self._tproc.pre_from_style(cstyle))) + field.add("post", "core", *core_procs) + field.add("post", "default", + *(self._tproc.post_from_style(cstyle))) self._fields[column] = field @property @@ -233,12 +242,15 @@ class Tabular(object): return self._seq_to_dict return self._attrs_to_dict - def _set_widths(self, row): + def _set_widths(self, row, proc_group): """Update auto-width Fields based on `row`. Parameters ---------- row : dict + proc_group : {'default', 'override'} + Whether to consider 'default' or 'override' key for pre- + and post-format processors. Raises ------ @@ -248,12 +260,21 @@ class Tabular(object): rewrite = False for column in self._columns: if column in self._autowidth_columns: - value_width = len(str(row[column])) + field = self._fields[column] + # If we've added style transform function as + # pre-format processors, we want to measure the width + # of their result rather than the raw value. + if field.pre[proc_group]: + value = field(row[column], keys=[proc_group], + exclude_post=True) + else: + value = row[column] + value_width = len(str(value)) wmax = self._autowidth_columns[column]["max"] - if value_width > self._fields[column].width: - if wmax is None or self._fields[column].width < wmax: + if value_width > field.width: + if wmax is None or field.width < wmax: rewrite = True - self._fields[column].width = value_width + field.width = value_width if rewrite: raise RewritePrevious @@ -281,24 +302,76 @@ class Tabular(object): if self._lock: self._lock.release() - def _writerow(self, row, style=None, adopt=True): - fields = self._fields + def _style_proc_group(self, style, adopt=True): + """Return whether group is "default" or "override". + In the case of "override", the self._fields pre-format and + post-format processors will be set under the "override" key. 
+ """ + fields = self._fields if style is not None: + if adopt: + style = elements.adopt(self._style, style) elements.validate(style) - rowstyle = elements.adopt(self._style, style) if adopt else style for column in self._columns: - fields[column].processors["row"] = list( - self._tproc.from_style(rowstyle[column])) - proc_key = "row" + fields[column].add( + "pre", "override", + *(self._tproc.pre_from_style(style[column]))) + fields[column].add( + "post", "override", + *(self._tproc.post_from_style(style[column]))) + return "override" else: - proc_key = "default" + return "default" - proc_fields = [fields[c](row[c], proc_key) for c in self._columns] + def _writerow(self, row, proc_keys=None): + proc_fields = [self._fields[c](row[c], keys=proc_keys) + for c in self._columns] self.term.stream.write( self._style["separator_"].join(proc_fields) + "\n") + def _check_and_write(self, row, style, adopt=True, + repaint=True, no_write=False): + """Main internal entry point for writing a row. + + Parameters + ---------- + row : dict + Data to write. + style : dict or None + Overridding style or None. + adopt : bool, optional + If true, overlay `style` on top of `self._style`. + Otherwise, treat `style` as a standalone style. + repaint : bool, optional + Whether to repaint of width check reports that previous + widths are stale. + no_write : bool, optional + Do the check but don't write. Instead, return the + processor keys that can be used to call self._writerow + directly. + """ + repainted = False + proc_group = self._style_proc_group(style, adopt=adopt) + try: + self._set_widths(row, proc_group) + except RewritePrevious: + if repaint: + self._repaint() + repainted = True + + if proc_group == "override": + # Override the "default" processor key. + proc_keys = ["core", "override"] + else: + # Use the set of processors defined by _setup_fields. + proc_keys = None + + if no_write: + return proc_keys, repainted + self._writerow(row, proc_keys) + def _maybe_write_header(self): if self._style["header_"] is None: return @@ -310,13 +383,10 @@ class Tabular(object): else: row = dict(zip(self._columns, self._columns)) - try: - self._set_widths(row) - except RewritePrevious: - # We're at the header, so there aren't any previous lines - # to update. - pass - self._writerow(row, style=self._style["header_"], adopt=False) + # Set repaint=False because we're at the header, so there + # aren't any previous lines to update. + self._check_and_write(row, self._style["header_"], + adopt=False, repaint=False) @staticmethod def _strip_callables(row): @@ -415,12 +485,7 @@ class Tabular(object): with self._write_lock(): if not self._rows: self._maybe_write_header() - - try: - self._set_widths(row) - except RewritePrevious: - self._repaint() - self._writerow(row, style=style) + self._check_and_write(row, style) self._rows.append(row) if callables: self._start_callables(row, callables) @@ -499,10 +564,10 @@ class Tabular(object): idx = len(self._rows) - nback self._rows[idx].update(values) - try: - self._set_widths(self._rows[idx]) - except RewritePrevious: - self._repaint() - else: + # Set no_write=True because there is no reason to go back + # and rewrite row if we've already repainted. + keys, repainted = self._check_and_write(self._rows[idx], style, + no_write=True) + if not repainted: with self._moveback(nback): - self._writerow(self._rows[idx], style) + self._writerow(self._rows[idx], keys)
Allow specifying in the style to humanize the value

E.g. as DataLad does for sizes. It could also be time, etc. I think the easiest would be to simply allow callbacks that control how the value is formatted (a `format_value` attribute? maybe it is already implemented?), since humanize provides all of these functions:
```shell
$> python -c 'import humanize; print(dir(humanize))'
['Fraction', 'N_', 'P_', 'VERSION', '__all__', '__builtins__', '__doc__', '__file__', '__name__', '__package__', '__path__', 'activate', 'apnumber', 'compat', 'deactivate', 'filesize', 'fractional', 'human_powers', 'i18n', 'intcomma', 'intword', 'naturaldate', 'naturalday', 'naturaldelta', 'naturalsize', 'naturaltime', 'number', 'ordinal', 'powers', 're', 'suffixes', 'time', 'x']
```
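A minimal sketch of how the `transform` style key added in the patch above could serve this request; the `humanize.naturalsize` formatter and the column names here are illustrative assumptions, not part of the original report:

```python
from collections import OrderedDict

import humanize
from pyout import Tabular

# The callable under "transform" receives the raw value and returns the
# string to render, so any humanize function can act as the formatter.
out = Tabular(style={"size": {"transform": humanize.naturalsize}})
out(OrderedDict([("name", "foo.dat"), ("size", 1234567)]))    # size -> "1.2 MB"
out(OrderedDict([("name", "bar.dat"), ("size", 987654321)]))  # size -> "987.7 MB"
```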
pyout/pyout
diff --git a/pyout/tests/test_field.py b/pyout/tests/test_field.py index fd4209b..2a36ff1 100644 --- a/pyout/tests/test_field.py +++ b/pyout/tests/test_field.py @@ -15,17 +15,26 @@ def test_field_update(): def test_field_processors(): - field = Field(width=6, align="center") + def pre(_, result): + return result.upper() - def proc1(_, result): + def post1(_, result): return "AAA" + result - def proc2(_, result): + def post2(_, result): return result + "ZZZ" - field.processors["default"] = [proc1, proc2] + field = Field(width=6, align="center", + default_keys=["some_key", "another_key"]) + field.add("pre", "some_key", pre) + field.add("post", "another_key", *[post1, post2]) + assert field("ok") == "AAA OK ZZZ" - assert field("ok") == "AAA ok ZZZ" + with pytest.raises(ValueError): + field.add("not pre or post", "k") + + with pytest.raises(ValueError): + field.add("pre", "not registered key") def test_truncate_mark_true(): diff --git a/pyout/tests/test_tabular.py b/pyout/tests/test_tabular.py index 873932b..ae8b507 100644 --- a/pyout/tests/test_tabular.py +++ b/pyout/tests/test_tabular.py @@ -9,6 +9,7 @@ from mock import patch import pytest from pyout import Tabular +from pyout.field import StyleFunctionError # TestTerminal, unicode_cap, and unicode_parm are copied from # blessings' tests. @@ -599,6 +600,85 @@ def test_tabular_write_intervals_bold(): assert eq_repr(fd.getvalue(), expected) +@patch("pyout.tabular.Terminal", TestTerminal) +def test_tabular_write_transform(): + fd = StringIO() + out = Tabular(style={"val": {"transform": lambda x: x[::-1]}}, + stream=fd) + out(OrderedDict([("name", "foo"), + ("val", "330")])) + out(OrderedDict([("name", "bar"), + ("val", "780")])) + + expected = ("foo 033\n" + "bar 087\n") + assert eq_repr(fd.getvalue(), expected) + + +@patch("pyout.tabular.Terminal", TestTerminal) +def test_tabular_write_transform_with_header(): + fd = StringIO() + out = Tabular(style={"header_": {}, + "name": {"width": 4}, + "val": {"transform": lambda x: x[::-1]}}, + stream=fd) + out(OrderedDict([("name", "foo"), + ("val", "330")])) + out(OrderedDict([("name", "bar"), + ("val", "780")])) + + expected = ("name val\n" + "foo 033\n" + "bar 087\n") + assert eq_repr(fd.getvalue(), expected) + + +@patch("pyout.tabular.Terminal", TestTerminal) +def test_tabular_write_transform_autowidth(): + fd = StringIO() + out = Tabular(style={"val": {"transform": lambda x: x * 2}}, + stream=fd) + out(OrderedDict([("name", "foo"), + ("val", "330")])) + out(OrderedDict([("name", "bar"), + ("val", "7800")])) + + lines = fd.getvalue().splitlines() + assert len([ln for ln in lines if ln.endswith("foo 330330 ")]) == 1 + assert len([ln for ln in lines if ln.endswith("bar 78007800")]) == 1 + + +@patch("pyout.tabular.Terminal", TestTerminal) +def test_tabular_write_transform_on_header(): + fd = StringIO() + out = Tabular(style={"header_": {"transform": str.upper}, + "name": {"width": 4}, + "val": {"width": 3}}, + stream=fd) + out(OrderedDict([("name", "foo"), + ("val", "330")])) + out(OrderedDict([("name", "bar"), + ("val", "780")])) + + expected = ("NAME VAL\n" + "foo 330\n" + "bar 780\n") + assert eq_repr(fd.getvalue(), expected) + + +@patch("pyout.tabular.Terminal", TestTerminal) +def test_tabular_write_transform_func_error(): + fd = StringIO() + out = Tabular(style={"name": {"width": 4}, + "val": {"transform": lambda x: x[::-1]}}, + stream=fd) + # The transform function receives the data as given, so it fails + # trying to index an integer. 
+ with pytest.raises(StyleFunctionError): + out(OrderedDict([("name", "foo"), + ("val", 330)])) + + @patch("pyout.tabular.Terminal", TestTerminal) def test_tabular_write_width_truncate_long(): fd = StringIO()
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 4 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[full]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work blessings==1.7 certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work jsonschema==3.2.0 mock==5.2.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work -e git+https://github.com/pyout/pyout.git@059e9a40c3579885991c0b3e4e1a597108b33869#egg=pyout pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pyrsistent==0.18.0 pytest==6.2.4 pytest-timeout==2.1.0 six==1.17.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: pyout channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - blessings==1.7 - jsonschema==3.2.0 - mock==5.2.0 - pyrsistent==0.18.0 - pytest-timeout==2.1.0 - six==1.17.0 prefix: /opt/conda/envs/pyout
[ "pyout/tests/test_field.py::test_field_base", "pyout/tests/test_field.py::test_field_update", "pyout/tests/test_field.py::test_field_processors", "pyout/tests/test_field.py::test_truncate_mark_true", "pyout/tests/test_field.py::test_truncate_mark_string", "pyout/tests/test_field.py::test_truncate_mark_short", "pyout/tests/test_field.py::test_truncate_nomark", "pyout/tests/test_field.py::test_style_value_type", "pyout/tests/test_field.py::test_style_processor_translate", "pyout/tests/test_tabular.py::test_tabular_write_color", "pyout/tests/test_tabular.py::test_tabular_write_columns_from_orderdict_row", "pyout/tests/test_tabular.py::test_tabular_write_columns_orderdict_mapping[sequence]", "pyout/tests/test_tabular.py::test_tabular_write_columns_orderdict_mapping[dict]", "pyout/tests/test_tabular.py::test_tabular_write_data_as_list", "pyout/tests/test_tabular.py::test_tabular_write_header", "pyout/tests/test_tabular.py::test_tabular_write_data_as_object", "pyout/tests/test_tabular.py::test_tabular_write_different_data_types_same_output", "pyout/tests/test_tabular.py::test_tabular_write_header_with_style", "pyout/tests/test_tabular.py::test_tabular_nondefault_separator", "pyout/tests/test_tabular.py::test_tabular_write_data_as_list_no_columns", "pyout/tests/test_tabular.py::test_tabular_write_style_override", "pyout/tests/test_tabular.py::test_tabular_default_style", "pyout/tests/test_tabular.py::test_tabular_write_multicolor", "pyout/tests/test_tabular.py::test_tabular_write_align", "pyout/tests/test_tabular.py::test_tabular_rewrite", "pyout/tests/test_tabular.py::test_tabular_rewrite_notfound", "pyout/tests/test_tabular.py::test_tabular_rewrite_multi_id", "pyout/tests/test_tabular.py::test_tabular_rewrite_multi_value", "pyout/tests/test_tabular.py::test_tabular_rewrite_with_ids_property", "pyout/tests/test_tabular.py::test_tabular_rewrite_auto_width", "pyout/tests/test_tabular.py::test_tabular_rewrite_data_as_list", "pyout/tests/test_tabular.py::test_tabular_repaint", "pyout/tests/test_tabular.py::test_tabular_repaint_with_header", "pyout/tests/test_tabular.py::test_tabular_write_label_color", "pyout/tests/test_tabular.py::test_tabular_write_label_bold", "pyout/tests/test_tabular.py::test_tabular_write_label_bold_false", "pyout/tests/test_tabular.py::test_tabular_write_intervals_color", "pyout/tests/test_tabular.py::test_tabular_write_intervals_color_open_ended", "pyout/tests/test_tabular.py::test_tabular_write_intervals_color_outside_intervals", "pyout/tests/test_tabular.py::test_tabular_write_intervals_bold", "pyout/tests/test_tabular.py::test_tabular_write_transform", "pyout/tests/test_tabular.py::test_tabular_write_transform_with_header", "pyout/tests/test_tabular.py::test_tabular_write_transform_autowidth", "pyout/tests/test_tabular.py::test_tabular_write_transform_on_header", "pyout/tests/test_tabular.py::test_tabular_write_transform_func_error", "pyout/tests/test_tabular.py::test_tabular_write_width_truncate_long", "pyout/tests/test_tabular.py::test_tabular_write_autowidth", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_with_header", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max[marker=True]", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max[marker=False]", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max[marker=\\u2026]", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max_with_header", 
"pyout/tests/test_tabular.py::test_tabular_write_autowidth_different_data_types_same_output", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_auto_false_exception", "pyout/tests/test_tabular.py::test_tabular_write_callable_values", "pyout/tests/test_tabular.py::test_tabular_write_callable_values_multi_return", "pyout/tests/test_tabular.py::test_tabular_write_callable_values_multicol_key_infer_column[result=tuple]", "pyout/tests/test_tabular.py::test_tabular_write_callable_values_multicol_key_infer_column[result=dict]", "pyout/tests/test_tabular.py::test_tabular_write_wait_noop_if_nothreads" ]
[]
[]
[]
MIT License
2,103
[ "pyout/field.py", "pyout/tabular.py", "pyout/elements.py", "CHANGELOG.md" ]
[ "pyout/field.py", "pyout/tabular.py", "pyout/elements.py", "CHANGELOG.md" ]
automl__SMAC3-389
4e3816d77bc3d2b5406bb71b2fe93da2831ebb0f
2018-01-31 23:40:47
f710fa60dbf2c64e42ce14aa0eb529f92378560a
diff --git a/examples/branin/restore_state.py b/examples/branin/restore_state.py index 12cb6b51d..55cce7426 100644 --- a/examples/branin/restore_state.py +++ b/examples/branin/restore_state.py @@ -23,12 +23,15 @@ def main(): 'deterministic' : True, 'output_dir' : 'restore_me'} original_scenario = Scenario(orig_scen_dict) - smac = SMAC(scenario=original_scenario) + smac = SMAC(scenario=original_scenario, + run_id=1) smac.optimize() print("\n########## BUDGET EXHAUSTED! Restoring optimization: ##########\n") - # Now the output is in the folder 'restore_me' + # Now the output is in the folder 'restore_me/run_1' (or whatever run_id has + # been passed to the SMAC-object above) + old_output_dir = os.path.join(original_scenario.output_dir, 'run_1') # # We could simply modify the scenario-object, stored in # 'smac.solver.scenario' and start optimization again: @@ -43,22 +46,18 @@ def main(): 'output_dir' : 'restored'}) # We load the runhistory, ... - rh_path = os.path.join(original_scenario.output_dir, "runhistory.json") + rh_path = os.path.join(old_output_dir, "runhistory.json") runhistory = RunHistory(aggregate_func=None) runhistory.load_json(rh_path, new_scenario.cs) # ... stats, ... - stats_path = os.path.join(original_scenario.output_dir, "stats.json") + stats_path = os.path.join(old_output_dir, "stats.json") stats = Stats(new_scenario) stats.load(stats_path) # ... and trajectory. - traj_path = os.path.join(original_scenario.output_dir, "traj_aclib2.json") + traj_path = os.path.join(old_output_dir, "traj_aclib2.json") trajectory = TrajLogger.read_traj_aclib_format( fn=traj_path, cs=new_scenario.cs) incumbent = trajectory[-1]["incumbent"] - # Because we changed the output_dir, we might want to copy the old - # trajectory-file (runhistory and stats will be complete) - new_traj_path = os.path.join(new_scenario.output_dir, "traj_aclib2.json") - shutil.copy(traj_path, new_traj_path) # Now we can initialize SMAC with the recovered objects and restore the # state where we left off. By providing stats and a restore_incumbent, SMAC @@ -66,7 +65,14 @@ def main(): smac = SMAC(scenario=new_scenario, runhistory=runhistory, stats=stats, - restore_incumbent=incumbent) + restore_incumbent=incumbent, + run_id=1) + # Because we changed the output_dir, we might want to copy the old + # trajectory-file (runhistory and stats will be complete, but trajectory is + # written sequentially) + new_traj_path = os.path.join(new_scenario.output_dir, "run_1", "traj_aclib2.json") + shutil.copy(traj_path, new_traj_path) + smac.optimize() if "__main__" == __name__: diff --git a/smac/facade/smac_facade.py b/smac/facade/smac_facade.py index 3368bac94..5b315dec3 100644 --- a/smac/facade/smac_facade.py +++ b/smac/facade/smac_facade.py @@ -128,15 +128,26 @@ class SMAC(object): aggregate_func = average_cost self.scenario = scenario - self.output_dir = create_output_directory(scenario, run_id) + self.output_dir = "" + if not restore_incumbent: + self.output_dir = create_output_directory(scenario, run_id) + elif scenario.output_dir is not None: + # output-directory is created in CLI when restoring from a + # folder. calling the function again in the facade results in two + # folders being created: run_X and run_X.OLD. if we are + # restoring, the output-folder exists already and we omit creating it, + # but set the self-output_dir to the dir. + # necessary because we want to write traj to new output-dir in CLI. 
+ self.output_dir = os.path.join(scenario.output_dir, + "run_%d" % (run_id)) if ( scenario.deterministic is True and getattr(scenario, 'tuner_timeout', None) is None and scenario.run_obj == 'quality' ): self.logger.info('Optimizing a deterministic scenario for ' - 'qualitiy without a tuner timeout - will make ' - 'SMAC deterministi!') + 'quality without a tuner timeout - will make ' + 'SMAC deterministic!') scenario.intensification_percentage = 1e-10 scenario.write() @@ -175,7 +186,7 @@ class SMAC(object): # initial acquisition function if acquisition_function is None: acquisition_function = EI(model=model) - + # inject model if necessary if acquisition_function.model is None: acquisition_function.model = model diff --git a/smac/smac_cli.py b/smac/smac_cli.py index b2382c0c7..4bab14f49 100644 --- a/smac/smac_cli.py +++ b/smac/smac_cli.py @@ -64,21 +64,19 @@ class SMACCLI(object): stats = None incumbent = None - # Restore state (needs to be before scenario-creation!) - if args_.restore_state: - root_logger.debug("Restoring state from %s...", args_.restore_state) - rh, stats, traj_list_aclib, traj_list_old = self.restore_state_before_scen(args_) - # Create scenario-object scen = Scenario(args_.scenario_file, misc_args) - # Restore state (continued, needs to be after scenario-creation!) + # Restore state if args_.restore_state: + root_logger.debug("Restoring state from %s...", args_.restore_state) + rh, stats, traj_list_aclib, traj_list_old = self.restore_state(scen, args_) + scen.output_dir_for_this_run = create_output_directory( scen, args_.seed, root_logger, ) scen.write() - stats, incumbent = self.restore_state_after_scen(scen, stats, + incumbent = self.restore_state_after_output_dir(scen, stats, traj_list_aclib, traj_list_old) if args_.warmstart_runhistory: @@ -129,12 +127,9 @@ class SMACCLI(object): except (TAEAbortException, FirstRunCrashedException) as err: self.logger.error(err) - def restore_state_before_scen(self, args_): + def restore_state(self, scen, args_): """Read in files for state-restoration: runhistory, stats, trajectory. """ - # Construct dummy-scenario for object-creation (mainly cs is needed) - tmp_scen = InputReader().read_scenario_file(args_.scenario_file) - tmp_scen = Scenario(tmp_scen, cmd_args={'output_dir':''}) # Check for folder and files rh_path = os.path.join(args_.restore_state, "runhistory.json") stats_path = os.path.join(args_.restore_state, "stats.json") @@ -145,9 +140,9 @@ class SMACCLI(object): raise FileNotFoundError("Could not find folder from which to restore.") # Load runhistory and stats rh = RunHistory(aggregate_func=None) - rh.load_json(rh_path, tmp_scen.cs) + rh.load_json(rh_path, scen.cs) self.logger.debug("Restored runhistory from %s", rh_path) - stats = Stats(tmp_scen) # Need to inject actual scenario later for output_dir! + stats = Stats(scen) stats.load(stats_path) self.logger.debug("Restored stats from %s", stats_path) with open(traj_path_aclib, 'r') as traj_fn: @@ -156,13 +151,11 @@ class SMACCLI(object): traj_list_old = traj_fn.readlines() return rh, stats, traj_list_aclib, traj_list_old - def restore_state_after_scen(self, scen, stats, traj_list_aclib, - traj_list_old): - """Finish processing files for state-restoration. 
The actual scenario - needs to be injected into stats, as well as the trajectory dealt with - (it is read in, but needs to be written to new output-folder after - scenario is constructed.""" - stats.scenario = scen # inject actual scen for output_dir + def restore_state_after_output_dir(self, scen, stats, traj_list_aclib, + traj_list_old): + """Finish processing files for state-restoration. Trajectory + is read in, but needs to be written to new output-folder. Therefore, the + output-dir is created. This needs to be considered in the SMAC-facade.""" # write trajectory-list traj_path_aclib = os.path.join(scen.output_dir, "traj_aclib2.json") traj_path_old = os.path.join(scen.output_dir, "traj_old.csv") @@ -171,8 +164,9 @@ class SMACCLI(object): with open(traj_path_old, 'w') as traj_fn: traj_fn.writelines(traj_list_old) # read trajectory to retrieve incumbent + # TODO replace this with simple traj_path_aclib? trajectory = TrajLogger.read_traj_aclib_format(fn=traj_path_aclib, cs=scen.cs) incumbent = trajectory[-1]["incumbent"] self.logger.debug("Restored incumbent %s from %s", incumbent, traj_path_aclib) - return stats, incumbent + return incumbent
Path to scenario folder to restore state

Hi, I was running the example on restoring the state for the branin function and I ran into this error:
```
INFO:smac.scenario.scenario.Scenario:Output to restored
Output directory= restore_me/run_1
['traj_old.csv', 'scenario.txt', 'runhistory.json', 'param_config_space.pcs', 'stats.json', 'traj_aclib2.json']
Traceback (most recent call last):
  File "restore_state.py", line 77, in <module>
    main()
  File "restore_state.py", line 64, in main
    shutil.copy(traj_path, new_traj_path)
  File "/usr/lib/python3.5/shutil.py", line 235, in copy
    copyfile(src, dst, follow_symlinks=follow_symlinks)
  File "/usr/lib/python3.5/shutil.py", line 115, in copyfile
    with open(dst, 'wb') as fdst:
FileNotFoundError: [Errno 2] No such file or directory: 'restored/traj_aclib2.json'
```
I figure the issue comes from this line:
https://github.com/automl/SMAC3/blob/56a5af9e3666ae54f7c126b62ae44852af65842d/examples/branin/restore_state.py#L46
The output scenario folder should be redefined as `output_dir = os.path.join(original_scenario.output_dir, 'run_1')`, right?
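A minimal sketch of the suggested correction, matching the fix in the patch above (`original_scenario` is assumed to be the Scenario object from the example script):

```python
import os

# The per-run output lives in a 'run_<run_id>' subfolder, so restore paths
# must be built from there rather than from the scenario's top-level output_dir.
old_output_dir = os.path.join(original_scenario.output_dir, 'run_1')
rh_path = os.path.join(old_output_dir, 'runhistory.json')
stats_path = os.path.join(old_output_dir, 'stats.json')
traj_path = os.path.join(old_output_dir, 'traj_aclib2.json')
```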
automl/SMAC3
diff --git a/test/test_cli/test_restore_state.py b/test/test_cli/test_restore_state.py index 8bcc3f066..1459b5fa9 100644 --- a/test/test_cli/test_restore_state.py +++ b/test/test_cli/test_restore_state.py @@ -40,6 +40,7 @@ class TestSMACCLI(unittest.TestCase): for output_dir in self.output_dirs: if output_dir: shutil.rmtree(output_dir, ignore_errors=True) + #pass os.chdir(self.current_dir) @attr('slow') @@ -100,7 +101,12 @@ class TestSMACCLI(unittest.TestCase): # Increase limit and run for 10 (so 5 more) by using restore_state testargs = ["python", "scripts/smac", "--restore_state", self.output_one, "--scenario_file", - self.scenario_one, "--verbose", "DEBUG"] + self.scenario_two, "--verbose", "DEBUG"] with mock.patch.object(sys, 'argv', testargs): self.smaccli.main_cli() + self.assertTrue(os.path.exists(self.output_one)) + self.assertFalse(os.path.exists(self.output_one + '.OLD')) + self.assertTrue(os.path.exists(self.output_two)) + self.assertFalse(os.path.exists(self.output_two + '.OLD')) +
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 3 }
0.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y build-essential swig" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 ConfigSpace==0.4.19 Cython==3.0.12 docutils==0.18.1 filelock==3.4.1 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==3.0.3 joblib==1.1.1 MarkupSafe==2.0.1 nose==1.3.7 numpy==1.19.5 packaging==21.3 pluggy==1.0.0 psutil==7.0.0 py==1.11.0 Pygments==2.14.0 pynisher==0.6.4 pyparsing==3.1.4 pyrfr==0.8.2 pytest==7.0.1 pytz==2025.2 requests==2.27.1 scikit-learn==0.24.2 scipy==1.5.4 six==1.17.0 -e git+https://github.com/automl/SMAC3.git@4e3816d77bc3d2b5406bb71b2fe93da2831ebb0f#egg=smac snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-rtd-theme==2.0.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 threadpoolctl==3.1.0 tomli==1.2.3 typing==3.7.4.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: SMAC3 channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - configspace==0.4.19 - cython==3.0.12 - docutils==0.18.1 - filelock==3.4.1 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==3.0.3 - joblib==1.1.1 - markupsafe==2.0.1 - nose==1.3.7 - numpy==1.19.5 - packaging==21.3 - pluggy==1.0.0 - psutil==7.0.0 - py==1.11.0 - pygments==2.14.0 - pynisher==0.6.4 - pyparsing==3.1.4 - pyrfr==0.8.2 - pytest==7.0.1 - pytz==2025.2 - requests==2.27.1 - scikit-learn==0.24.2 - scipy==1.5.4 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-rtd-theme==2.0.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - threadpoolctl==3.1.0 - tomli==1.2.3 - typing==3.7.4.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/SMAC3
[ "test/test_cli/test_restore_state.py::TestSMACCLI::test_same_dir" ]
[]
[ "test/test_cli/test_restore_state.py::TestSMACCLI::test_illegal_input", "test/test_cli/test_restore_state.py::TestSMACCLI::test_missing_dir", "test/test_cli/test_restore_state.py::TestSMACCLI::test_run_and_restore" ]
[]
BSD 3-Clause License
2,105
[ "smac/smac_cli.py", "smac/facade/smac_facade.py", "examples/branin/restore_state.py" ]
[ "smac/smac_cli.py", "smac/facade/smac_facade.py", "examples/branin/restore_state.py" ]
NeuralEnsemble__python-neo-475
c66dbfe3e625607e13a74dcdc409c255d89dc608
2018-02-01 12:57:12
f0285a7ab15ff6535d3e6736e0163c4fa6aea091
diff --git a/neo/io/spike2io.py b/neo/io/spike2io.py index 405f98f0..d21f3fc4 100644 --- a/neo/io/spike2io.py +++ b/neo/io/spike2io.py @@ -5,7 +5,7 @@ from neo.rawio.spike2rawio import Spike2RawIO class Spike2IO(Spike2RawIO, BaseFromRaw): - _prefered_signal_group_mode = 'split-all' + _prefered_signal_group_mode = 'group-by-same-units' def __init__(self, filename): Spike2RawIO.__init__(self, filename=filename) diff --git a/neo/rawio/spike2rawio.py b/neo/rawio/spike2rawio.py index f14b1f60..08ef148a 100644 --- a/neo/rawio/spike2rawio.py +++ b/neo/rawio/spike2rawio.py @@ -53,6 +53,8 @@ class Spike2RawIO(BaseRawIO): info['datetime_detail'] = 0 info['datetime_year'] = 0 + self._time_factor = info['us_per_time'] * info['dtime_base'] + self._channel_infos = [] for chan_id in range(info['channels']): fid.seek(512 + 140 * chan_id) @@ -88,36 +90,106 @@ class Spike2RawIO(BaseRawIO): self._channel_infos.append(chan_info) - # get data blocks index + # get data blocks index for all channel + # run through all data block of of channel to prepare chan to block maps self._memmap = np.memmap(self.filename, dtype='u1', offset=0, mode='r') - self._data_blocks = [] + self._all_data_blocks = {} + self._by_seg_data_blocks = {} for c, chan_info in enumerate(self._channel_infos): data_blocks = [] ind = chan_info['firstblock'] for b in range(chan_info['blocks']): - block_info = self._memmap[ind:ind + 20].view(blockHeaderDesciption)[0] - data_blocks.append((ind, block_info['items'], 0)) + data_blocks.append((ind, block_info['items'], 0, + block_info['start_time'], block_info['end_time'])) ind = block_info['succ_block'] data_blocks = np.array(data_blocks, dtype=[( - 'pos', 'int32'), ('size', 'int32'), ('cumsum', 'int32')]) - data_blocks['cumsum'][1:] = np.cumsum(data_blocks['size'][:-1]) - + 'pos', 'int32'), ('size', 'int32'), ('cumsum', 'int32'), + ('start_time', 'int32'), ('end_time', 'int32')]) data_blocks['pos'] += 20 # 20 is ths header size - self._data_blocks.append(data_blocks) - + self._all_data_blocks[c] = data_blocks + self._by_seg_data_blocks[c] = [] + + # For all signal channel detect gaps between data block (pause in rec) so new Segment. + # then check that all channel have the same gaps. + # this part is tricky because we need to check that all channel have same pause. 
+ all_gaps_block_ind = {} + for c, chan_info in enumerate(self._channel_infos): + if chan_info['kind'] in [1, 9]: + data_blocks = self._all_data_blocks[c] + sig_size = np.sum(self._all_data_blocks[chan_id]['size']) + if sig_size>0: + sample_interval = chan_info['divide'] * info['time_per_adc'] + # detect gaps + inter_block_sizes = data_blocks['start_time'][1:] - data_blocks['end_time'][:-1] + gaps_block_ind, = np.nonzero(inter_block_sizes>sample_interval) + all_gaps_block_ind[c] = gaps_block_ind + + # find t_start/t_stop for each seg based on gaps indexe + self._sig_t_starts = {} + self._sig_t_stops = {} + if len(all_gaps_block_ind) == 0: + # this means no signal channels + nb_segment = 1 + # loop over event/spike channel to get the min/max time + t_start, t_stop = None, None + for chan_id, chan_info in enumerate(self._channel_infos): + data_blocks = self._all_data_blocks[chan_id] + if data_blocks.size > 0: + # if t_start is None or data_blocks[0]['start_time']<t_start: + # t_start = data_blocks[0]['start_time'] + if t_stop is None or data_blocks[-1]['end_time']>t_stop: + t_stop = data_blocks[-1]['end_time'] + # self._seg_t_starts = [t_start] + self._seg_t_starts = [0] + self._seg_t_stops = [t_stop] + else: + all_nb_seg = np.array([v.size+1 for v in all_gaps_block_ind.values()]) + assert np.all(all_nb_seg[0]==all_nb_seg), \ + 'Signal channel have differents pause so diffrents nb_segment' + nb_segment = int(all_nb_seg[0]) + + for chan_id, gaps_block_ind in all_gaps_block_ind.items(): + data_blocks = self._all_data_blocks[chan_id] + self._sig_t_starts[chan_id] = [] + self._sig_t_stops[chan_id] = [] + + for seg_ind in range(nb_segment): + if seg_ind==0: + fisrt_bl = 0 + else: + fisrt_bl = gaps_block_ind[seg_ind-1] + 1 + self._sig_t_starts[chan_id].append(data_blocks[fisrt_bl]['start_time']) + + if seg_ind<nb_segment-1: + last_bl = gaps_block_ind[seg_ind] + else: + last_bl = data_blocks.size - 1 + + self._sig_t_stops[chan_id].append(data_blocks[last_bl]['end_time']) + + in_seg_data_block = data_blocks[fisrt_bl:last_bl+1] + in_seg_data_block['cumsum'][1:] = np.cumsum(in_seg_data_block['size'][:-1]) + self._by_seg_data_blocks[chan_id].append(in_seg_data_block) + + self._seg_t_starts = [] + self._seg_t_stops = [] + for seg_ind in range(nb_segment): + # there is a small delay between all channel so take the max/min for t_start/t_stop + t_start = min(self._sig_t_starts[chan_id][seg_ind] for chan_id in self._sig_t_starts) + t_stop = max(self._sig_t_stops[chan_id][seg_ind] for chan_id in self._sig_t_stops) + self._seg_t_starts.append(t_start) + self._seg_t_stops.append(t_stop) + # create typed channels sig_channels = [] unit_channels = [] event_channels = [] - all_signal_length = [] # this is incredible but shape difer channel to channel!!! 
self.internal_unit_ids = {} - self._spike_sounts = {} for chan_id, chan_info in enumerate(self._channel_infos): - if chan_info['kind'] in [1, 6, 7, 9]: if self.take_ideal_sampling_rate: sampling_rate = info['ideal_rate'] @@ -134,8 +206,7 @@ class Spike2RawIO(BaseRawIO): if chan_info['kind'] in [1, 9]: # AnalogSignal - sig_size = np.sum(self._data_blocks[chan_id]['size']) - if sig_size == 0: + if chan_id not in self._sig_t_starts: continue units = chan_info['unit'] if chan_info['kind'] == 1: # int16 @@ -150,8 +221,6 @@ class Spike2RawIO(BaseRawIO): sig_channels.append((name, chan_id, sampling_rate, sig_dtype, units, gain, offset, group_id)) - all_signal_length.append(sig_size) - elif chan_info['kind'] in [2, 3, 4, 5, 8]: # Event event_channels.append((name, chan_id, 'event')) @@ -170,76 +239,41 @@ class Spike2RawIO(BaseRawIO): if self.ced_units: # this is a hudge pain because need # to jump over all blocks - nb_spike_by_ids = {} - data_blocks = self._data_blocks[chan_id] + data_blocks = self._all_data_blocks[chan_id] dt = get_channel_dtype(chan_info) + unit_ids = set() for bl in range(data_blocks.size): ind0 = data_blocks[bl]['pos'] ind1 = data_blocks[bl]['size'] * dt.itemsize + ind0 raw_data = self._memmap[ind0:ind1].view(dt) marker = raw_data['marker'] & 255 - for unit_id in np.unique(marker): - nb_spike = nb_spike_by_ids.get(unit_id, 0) - nb_spike += np.sum(marker == unit_id) - nb_spike_by_ids[unit_id] = nb_spike + unit_ids.update(np.unique(marker)) + unit_ids = sorted(list(unit_ids)) else: # All spike from one channel are group in one SpikeTrain - nb_spike_by_ids = {'all': data_blocks['size'].sum()} - for unit_id in sorted(nb_spike_by_ids.keys()): + unit_ids = ['all'] + for unit_id in unit_ids: unit_index = len(unit_channels) self.internal_unit_ids[unit_index] = (chan_id, unit_id) - self._spike_sounts[unit_index] = nb_spike_by_ids[unit_id] _id = "ch{}#{}".format(chan_id, unit_id) unit_channels.append((name, _id, wf_units, wf_gain, wf_offset, wf_left_sweep, wf_sampling_rate)) - + sig_channels = np.array(sig_channels, dtype=_signal_channel_dtype) - print(sig_channels) unit_channels = np.array(unit_channels, dtype=_unit_channel_dtype) event_channels = np.array(event_channels, dtype=_event_channel_dtype) - - if len(sig_channels) > 0: - sampling_rate = np.unique(sig_channels['sampling_rate']) - assert sampling_rate.size == 1 - self._sampling_rate = float(sampling_rate[0]) - self._signal_length = min(all_signal_length) - - all_kind = [self._channel_infos[chan_id]['kind'] for chan_id in sig_channels['id']] - all_kind = np.unique(all_kind) - assert all_kind.size == 1, 'IO only support when all channel have the same dtype' - - if all_kind[0] == 1: - self._sig_dtype = np.dtype('int16') - elif all_kind[0] == 9: - self._sig_dtype = np.dtype('float32') - - self._time_factor = self._global_info['us_per_time'] * self._global_info['dtime_base'] - - # t_stop: best between events, spikes and signals + if len(sig_channels) > 0: - t_stop_sig = self._signal_length / self._sampling_rate - else: - t_stop_sig = 0. - - t_stop_ev = 0. 
- for chan_id, chan_info in enumerate(self._channel_infos): - if chan_info['kind'] in [1, 9, 0]: - continue - data_blocks = self._data_blocks[chan_id] - if data_blocks.size > 0: - dt = get_channel_dtype(chan_info) - ind0 = data_blocks[-1]['pos'] - ind1 = data_blocks[-1]['size'] * dt.itemsize + ind0 - raw_data = self._memmap[ind0:ind1].view(dt) - last_time = raw_data['tick'][-1] * self._time_factor - if last_time > t_stop_ev: - t_stop_ev = last_time - self._t_stop = max(t_stop_sig, t_stop_ev) - + # signal channel can different sampling_rate/dtype/t_start/signal_length... + # grouping them is difficults, so each channe = one group + + sig_channels['group_id'] = np.arange(sig_channels.size) + self._sig_dtypes = {s['group_id']: np.dtype(s['dtype']) for s in sig_channels} + # fille into header dict self.header = {} self.header['nb_block'] = 1 - self.header['nb_segment'] = [1] + self.header['nb_segment'] = [nb_segment] self.header['signal_channels'] = sig_channels self.header['unit_channels'] = unit_channels self.header['event_channels'] = event_channels @@ -273,26 +307,33 @@ class Spike2RawIO(BaseRawIO): return self.filename def _segment_t_start(self, block_index, seg_index): - return 0. + return self._seg_t_starts[seg_index] * self._time_factor def _segment_t_stop(self, block_index, seg_index): - return self._t_stop + return self._seg_t_stops[seg_index] * self._time_factor def _get_signal_size(self, block_index, seg_index, channel_indexes): - return self._signal_length + assert len(channel_indexes) == 1 + chan_id = self.header['signal_channels'][channel_indexes[0]]['id'] + sig_size = np.sum(self._by_seg_data_blocks[chan_id][seg_index]['size']) + return sig_size def _get_signal_t_start(self, block_index, seg_index, channel_indexes): - return 0. + assert len(channel_indexes) == 1 + chan_id = self.header['signal_channels'][channel_indexes[0]]['id'] + return self._sig_t_starts[chan_id][seg_index] * self._time_factor def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, channel_indexes): if i_start is None: i_start = 0 if i_stop is None: - i_stop = self._signal_length + i_stop = self._get_signal_size(block_index, seg_index, channel_indexes) - dt = self._sig_dtype - if channel_indexes is None: - channel_indexes = np.arange(self.header['signal_channels'].size) + assert len(channel_indexes) == 1 + chan_index = channel_indexes[0] + chan_id = self.header['signal_channels'][chan_index]['id'] + group_id = self.header['signal_channels'][channel_indexes[0]]['group_id'] + dt = self._sig_dtypes[group_id] raw_signals = np.zeros((i_stop - i_start, len(channel_indexes)), dtype=dt) for c, channel_index in enumerate(channel_indexes): @@ -302,7 +343,7 @@ class Spike2RawIO(BaseRawIO): # indexes. So this make the job too difficult. chan_header = self.header['signal_channels'][channel_index] chan_id = chan_header['id'] - data_blocks = self._data_blocks[chan_id] + data_blocks = self._by_seg_data_blocks[chan_id][seg_index] # loop over data blocks and get chunks bl0 = np.searchsorted(data_blocks['cumsum'], i_start, side='left') @@ -316,28 +357,52 @@ class Spike2RawIO(BaseRawIO): # right border # be carfull that bl could be both bl0 and bl1!! 
border = data.size - (i_stop - data_blocks[bl]['cumsum']) - data = data[:-border] + if border>0: + data = data[:-border] if bl == bl0: # left border border = i_start - data_blocks[bl]['cumsum'] data = data[border:] raw_signals[ind:data.size + ind, c] = data ind += data.size - return raw_signals - - def _get_internal_timestamp_(self, chan_id, t_start, t_stop, other_field=None, marker_filter=None): + + def _count_in_time_slice(self, seg_index, chan_id, lim0, lim1, marker_filter=None): + # count event or spike in time slice + data_blocks = self._all_data_blocks[chan_id] + chan_info = self._channel_infos[chan_id] + dt = get_channel_dtype(chan_info) + nb = 0 + for bl in range(data_blocks.size): + ind0 = data_blocks[bl]['pos'] + ind1 = data_blocks[bl]['size'] * dt.itemsize + ind0 + raw_data = self._memmap[ind0:ind1].view(dt) + ts = raw_data['tick'] + keep = (ts >= lim0) & (ts <= lim1) + if marker_filter is not None: + keep2 = (raw_data['marker'] & 255) == marker_filter + keep = keep & keep2 + nb += np.sum(keep) + if ts[-1] > lim1: + break + return nb + + def _get_internal_timestamp_(self, seg_index, chan_id, + t_start, t_stop, other_field=None, marker_filter=None): chan_info = self._channel_infos[chan_id] - data_blocks = self._data_blocks[chan_id] + # data_blocks = self._by_seg_data_blocks[chan_id][seg_index] + data_blocks = self._all_data_blocks[chan_id] dt = get_channel_dtype(chan_info) if t_start is None: - lim0 = 0 + # lim0 = 0 + lim0 = self._seg_t_starts[seg_index] else: lim0 = int(t_start / self._time_factor) if t_stop is None: - lim1 = 2**32 + # lim1 = 2**32 + lim1 = self._seg_t_stops[seg_index] else: lim1 = int(t_stop / self._time_factor) @@ -374,7 +439,15 @@ class Spike2RawIO(BaseRawIO): return timestamps, othervalues def _spike_count(self, block_index, seg_index, unit_index): - return self._spike_sounts[unit_index] + chan_id, unit_id = self.internal_unit_ids[unit_index] + if self.ced_units: + marker_filter = unit_id + else: + marker_filter = None + lim0 = self._seg_t_starts[seg_index] + lim1 = self._seg_t_stops[seg_index] + return self._count_in_time_slice(seg_index, chan_id, + lim0, lim1, marker_filter=marker_filter) def _get_spike_timestamps(self, block_index, seg_index, unit_index, t_start, t_stop): unit_header = self.header['unit_channels'][unit_index] @@ -385,7 +458,7 @@ class Spike2RawIO(BaseRawIO): else: marker_filter = None - spike_timestamps = self._get_internal_timestamp_( + spike_timestamps = self._get_internal_timestamp_(seg_index, chan_id, t_start, t_stop, marker_filter=marker_filter) return spike_timestamps @@ -404,8 +477,8 @@ class Spike2RawIO(BaseRawIO): else: marker_filter = None - timestamps, waveforms = self._get_internal_timestamp_(chan_id, t_start, t_stop, - other_field='waveform', marker_filter=marker_filter) + timestamps, waveforms = self._get_internal_timestamp_(seg_index, chan_id, + t_start, t_stop, other_field='waveform', marker_filter=marker_filter) waveforms = waveforms.reshape(timestamps.size, 1, -1) @@ -414,9 +487,9 @@ class Spike2RawIO(BaseRawIO): def _event_count(self, block_index, seg_index, event_channel_index): event_header = self.header['event_channels'][event_channel_index] chan_id = int(event_header['id']) # because set to string in header - data_blocks = self._data_blocks[chan_id] - nb_event = data_blocks['size'].sum() - return nb_event + lim0 = self._seg_t_starts[seg_index] + lim1 = self._seg_t_stops[seg_index] + return self._count_in_time_slice(seg_index, chan_id, lim0, lim1, marker_filter=None) def _get_event_timestamps(self, block_index, 
seg_index, event_channel_index, t_start, t_stop): event_header = self.header['event_channels'][event_channel_index] @@ -424,13 +497,14 @@ class Spike2RawIO(BaseRawIO): chan_info = self._channel_infos[chan_id] if chan_info['kind'] == 5: - timestamps, labels = self._get_internal_timestamp_( + timestamps, labels = self._get_internal_timestamp_(seg_index, chan_id, t_start, t_stop, other_field='marker') elif chan_info['kind'] == 8: - timestamps, labels = self._get_internal_timestamp_( + timestamps, labels = self._get_internal_timestamp_(seg_index, chan_id, t_start, t_stop, other_field='label') else: - timestamps = self._get_internal_timestamp_(chan_id, t_start, t_stop, other_field=None) + timestamps = self._get_internal_timestamp_(seg_index, + chan_id, t_start, t_stop, other_field=None) labels = np.zeros(timestamps.size, dtype='U') labels = labels.astype('U') @@ -478,11 +552,11 @@ def get_channel_dtype(chan_info): dt = [('tick', 'i4'), ('marker', 'i4')] elif chan_info['kind'] in [6]: # AdcMark data (waveform) dt = [('tick', 'i4'), ('marker', 'i4'), - #~ ('adc', 'S%d' % chan_info['n_extra'])] + # ('adc', 'S%d' % chan_info['n_extra'])] ('waveform', 'int16', chan_info['n_extra'] // 2)] elif chan_info['kind'] in [7]: # RealMark data (waveform) dt = [('tick', 'i4'), ('marker', 'i4'), - #~ ('real', 'S%d' % chan_info['n_extra'])] + # ('real', 'S%d' % chan_info['n_extra'])] ('waveform', 'float32', chan_info['n_extra'] // 4)] elif chan_info['kind'] in [8]: # TextMark data dt = [('tick', 'i4'), ('marker', 'i4'),
Hitting Spike2IO assertion on sampling_rate

I'm trying to import a Spike2 .smr file using `Spike2IO(filename=filename)` but am hitting an assertion:
```
Traceback (most recent call last):
  File "G:/Python/libs/Playground/src/playground2.py", line 3, in <module>
    reader = Spike2IO(filename=r'G:\Python\1-9-2018-mouse-left-odor mixture-10 trials.smr')
  File "E:\Python\Python35-x64\lib\site-packages\neo\io\spike2io.py", line 10, in __init__
    BaseFromRaw.__init__(self, filename)
  File "E:\Python\Python35-x64\lib\site-packages\neo\io\basefromrawio.py", line 71, in __init__
    self.parse_header()
  File "E:\Python\Python35-x64\lib\site-packages\neo\rawio\baserawio.py", line 155, in parse_header
    self._parse_header()
  File "E:\Python\Python35-x64\lib\site-packages\neo\rawio\spike2rawio.py", line 199, in _parse_header
    assert sampling_rate.size==1
AssertionError
```
So I printed the value of `sampling_rate` and it's `[ 1000. 2000. 10000.]`. Is the reader unable to use channels with different sampling rates?
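A minimal sketch of reading such a file once the reader groups channels by matching units instead of asserting a single rate, mirroring the usage in the test patch below (the filename is a placeholder):

```python
from neo.io import Spike2IO

reader = Spike2IO(filename='multi_sampling.smr')  # placeholder path
block = reader.read_block(signal_group_mode='group-by-same-units')
for seg in block.segments:
    for sig in seg.analogsignals:
        # Each signal group keeps its own length and rate, e.g. 1 kHz,
        # 2 kHz and 10 kHz channels coexisting within one segment.
        print(sig.shape, sig.sampling_rate)
```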
NeuralEnsemble/python-neo
diff --git a/neo/rawio/tests/test_spike2rawio.py b/neo/rawio/tests/test_spike2rawio.py index 320c3826..88f271ff 100644 --- a/neo/rawio/tests/test_spike2rawio.py +++ b/neo/rawio/tests/test_spike2rawio.py @@ -17,6 +17,7 @@ class TestSpike2RawIO(BaseTestRawIO, unittest.TestCase, ): 'File_spike2_2.smr', 'File_spike2_3.smr', '130322-1LY.smr', # this is for bug 182 + 'multi_sampling.smr', # this is for bug 466 ] entities_to_test = files_to_download diff --git a/neo/test/iotest/test_spike2io.py b/neo/test/iotest/test_spike2io.py index 842116cb..8dd200a3 100644 --- a/neo/test/iotest/test_spike2io.py +++ b/neo/test/iotest/test_spike2io.py @@ -8,19 +8,52 @@ from __future__ import absolute_import, division import unittest +import quantities as pq + from neo.io import Spike2IO from neo.test.iotest.common_io_test import BaseTestIO class TestSpike2IO(BaseTestIO, unittest.TestCase, ): ioclass = Spike2IO - files_to_test = ['File_spike2_1.smr', - 'File_spike2_2.smr', - 'File_spike2_3.smr', - '130322-1LY.smr', # this is for bug 182 - ] + files_to_test = [ + 'File_spike2_1.smr', + 'File_spike2_2.smr', + 'File_spike2_3.smr', + '130322-1LY.smr', # this is for bug 182 + 'multi_sampling.smr', # this is for bug 466 + ] files_to_download = files_to_test + def test_multi_sampling(self): + """ + Some file can have several sampling_rate. + This one contain 3 differents signals sampling rate + """ + filename = self.get_filename_path('multi_sampling.smr') + reader = Spike2IO(filename=filename) + bl = reader.read_block(signal_group_mode = 'group-by-same-units') + assert len(bl.segments) == 10 + seg =bl.segments[0] + + # 7 group_id one per channel + assert len(seg.analogsignals) == 7 + + # 1 channel for 1kHz + assert seg.analogsignals[0].shape == (14296, 1) + assert seg.analogsignals[0].sampling_rate == 1000*pq.Hz + + # 4 channel for 2kHz + for c in range(1, 5): + assert seg.analogsignals[c].shape == (28632, 1) + assert seg.analogsignals[c].sampling_rate == 2000*pq.Hz + + # 2 channel for 10kHz + for c in range(5, 7): + assert seg.analogsignals[c].shape == (114618, 1) + assert seg.analogsignals[c].sampling_rate == 10000*pq.Hz + + if __name__ == "__main__": unittest.main()
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 importlib-metadata==4.8.3 iniconfig==1.1.1 -e git+https://github.com/NeuralEnsemble/python-neo.git@c66dbfe3e625607e13a74dcdc409c255d89dc608#egg=neo nose==1.3.7 numpy==1.19.5 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 quantities==0.13.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: python-neo channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - nose==1.3.7 - numpy==1.19.5 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - quantities==0.13.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/python-neo
[ "neo/rawio/tests/test_spike2rawio.py::TestSpike2RawIO::test_read_all", "neo/test/iotest/test_spike2io.py::TestSpike2IO::test_assert_readed_neo_object_is_compliant", "neo/test/iotest/test_spike2io.py::TestSpike2IO::test_multi_sampling", "neo/test/iotest/test_spike2io.py::TestSpike2IO::test_readed_with_lazy_is_compliant" ]
[]
[ "neo/test/iotest/test_spike2io.py::TestSpike2IO::test_load_lazy_objects", "neo/test/iotest/test_spike2io.py::TestSpike2IO::test_read_then_write", "neo/test/iotest/test_spike2io.py::TestSpike2IO::test_write_then_read" ]
[]
BSD 3-Clause "New" or "Revised" License
2,106
[ "neo/io/spike2io.py", "neo/rawio/spike2rawio.py" ]
[ "neo/io/spike2io.py", "neo/rawio/spike2rawio.py" ]
paris-saclay-cds__specio-44
e966bc2b7f0955631517780272b8ebd62f6c6a1b
2018-02-01 18:25:43
e966bc2b7f0955631517780272b8ebd62f6c6a1b
codecov[bot]:
# [Codecov](https://codecov.io/gh/paris-saclay-cds/specio/pull/44?src=pr&el=h1) Report
> Merging [#44](https://codecov.io/gh/paris-saclay-cds/specio/pull/44?src=pr&el=desc) into [master](https://codecov.io/gh/paris-saclay-cds/specio/commit/e966bc2b7f0955631517780272b8ebd62f6c6a1b?src=pr&el=desc) will **increase** coverage by `0.02%`.
> The diff coverage is `100%`.

[![Impacted file tree graph](https://codecov.io/gh/paris-saclay-cds/specio/pull/44/graphs/tree.svg?src=pr&token=IO4kafGqN0&height=150&width=650)](https://codecov.io/gh/paris-saclay-cds/specio/pull/44?src=pr&el=tree)

```diff
@@            Coverage Diff             @@
##           master      #44      +/-   ##
==========================================
+ Coverage   94.58%   94.61%   +0.02%     
==========================================
  Files          26       26              
  Lines        1219     1225       +6     
==========================================
+ Hits         1153     1159       +6     
  Misses         66       66              
```

| [Impacted Files](https://codecov.io/gh/paris-saclay-cds/specio/pull/44?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [specio/core/functions.py](https://codecov.io/gh/paris-saclay-cds/specio/pull/44/diff?src=pr&el=tree#diff-c3BlY2lvL2NvcmUvZnVuY3Rpb25zLnB5) | `93.1% <100%> (+0.12%)` | :arrow_up: |
| [specio/core/tests/test\_functions.py](https://codecov.io/gh/paris-saclay-cds/specio/pull/44/diff?src=pr&el=tree#diff-c3BlY2lvL2NvcmUvdGVzdHMvdGVzdF9mdW5jdGlvbnMucHk=) | `100% <100%> (ø)` | :arrow_up: |

------

[Continue to review full report at Codecov](https://codecov.io/gh/paris-saclay-cds/specio/pull/44?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/paris-saclay-cds/specio/pull/44?src=pr&el=footer). Last update [e966bc2...9678af8](https://codecov.io/gh/paris-saclay-cds/specio/pull/44?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
diff --git a/specio/core/functions.py b/specio/core/functions.py index c47f79d..8185e05 100644 --- a/specio/core/functions.py +++ b/specio/core/functions.py @@ -144,7 +144,7 @@ def _validate_filenames(uri): return sorted(glob.glob(os.path.expanduser(uri))) -def _zip_spectrum(spectrum): +def _zip_spectrum(spectrum, tol_wavelength): """Compress if possible several Spectrum into a single one. Parameters @@ -152,6 +152,10 @@ def _zip_spectrum(spectrum): spectrum : list of Spectrum The list of Spectrum to zip. + tol_wavelength : float + Tolerance to merge spectrum when their wavelength are slightly + different. + Returns ------- zipped_spectrum : Spectrum or list of Spectrum @@ -166,7 +170,8 @@ def _zip_spectrum(spectrum): wavelength = spectrum[0].wavelength try: consistent_wavelength = [np.allclose(sp.wavelength, - wavelength) + wavelength, + atol=tol_wavelength) for sp in spectrum] if not all(consistent_wavelength): return spectrum @@ -194,7 +199,7 @@ def _zip_spectrum(spectrum): return output_spectrum -def specread(uri, format=None, **kwargs): +def specread(uri, format=None, tol_wavelength=1e-5, **kwargs): """Read spectra in a given format. Reads spectrum from the specified file. Returns a list or a @@ -215,6 +220,10 @@ def specread(uri, format=None, **kwargs): The format to use to read the file. By default specio selects the appropriate for you based on the filename and its contents. + tol_wavelength : float, optional + Tolerance to merge spectrum when their wavelength are slightly + different. + kwargs : dict Further keyword arguments are passed to the reader. See :func:`.help` to see what arguments are available for a particular format. @@ -241,7 +250,7 @@ def specread(uri, format=None, **kwargs): spectrum = _get_reader_get_data(uri, format, **kwargs) if isinstance(spectrum, list): - spectrum = _zip_spectrum(spectrum) + spectrum = _zip_spectrum(spectrum, tol_wavelength) return spectrum
Add an argument to read_csv to merge wavelengths
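A minimal sketch of the keyword added in the patch above (the glob pattern and tolerance value are illustrative, and the top-level `specread` import is assumed):

```python
from specio import specread

# Spectra whose wavelength axes agree within tol_wavelength are merged
# into a single Spectrum; otherwise a list of Spectrum objects is returned.
spectra = specread('spectra_*.csv', tol_wavelength=1e-2)
```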
paris-saclay-cds/specio
diff --git a/specio/core/tests/test_functions.py b/specio/core/tests/test_functions.py index 0cfd489..049cdbb 100644 --- a/specio/core/tests/test_functions.py +++ b/specio/core/tests/test_functions.py @@ -87,14 +87,25 @@ def _generate_list_spectrum(*args): for _ in range(n_spectrum)] +def _generate_list_spectrum_close_wavelength(*args): + n_wavelength = 5 + tol = 1e-3 + wavelength = np.arange(5) + np.random.uniform(low=-tol, high=tol) + return Spectrum(np.random.random(n_wavelength), + wavelength, + None) + + @pytest.mark.parametrize( - "side_effect,spectra_type,spectra_shape", - [(_generate_spectrum_identical_wavelength, Spectrum, (10, 5)), - (_generate_spectrum_different_wavelength_size, list, 10), - (_generate_spectrum_different_wavelength, list, 10), - (_generate_list_spectrum, list, 30)]) -def test_specread_consitent_wavelength(side_effect, spectra_type, - spectra_shape, mocker): + "side_effect,tol_wavelength,spectra_type,spectra_shape", + [(_generate_spectrum_identical_wavelength, 1e-5, Spectrum, (10, 5)), + (_generate_spectrum_different_wavelength_size, 1e-5, list, 10), + (_generate_spectrum_different_wavelength, 1e-5, list, 10), + (_generate_list_spectrum, 1e-5, list, 30), + (_generate_list_spectrum_close_wavelength, 1e-2, Spectrum, (10, 5)), + (_generate_list_spectrum_close_wavelength, 1e-5, list, 10)]) +def test_specread_consitent_wavelength(side_effect, tol_wavelength, + spectra_type, spectra_shape, mocker): # emulate that we read several file mocker.patch('specio.core.functions._validate_filenames', return_value=['filename' for _ in range(10)]) @@ -103,7 +114,7 @@ def test_specread_consitent_wavelength(side_effect, spectra_type, side_effect=side_effect) # emulate the spectrum reading - spectra = specread('') + spectra = specread('', tol_wavelength=tol_wavelength) assert isinstance(spectra, spectra_type) if isinstance(spectra, Spectrum): assert spectra.amplitudes.shape == spectra_shape
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 1 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[tests,docs]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-mock", "sphinx", "sphinx-gallery", "sphinx_rtd_theme", "numpydoc", "matplotlib" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 cycler==0.11.0 docutils==0.18.1 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==3.0.3 kiwisolver==1.3.1 MarkupSafe==2.0.1 matplotlib==3.3.4 numpy==1.19.5 numpydoc==1.1.0 packaging==21.3 Pillow==8.4.0 pluggy==1.0.0 py==1.11.0 Pygments==2.14.0 pyopenms==2.6.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-mock==3.6.1 python-dateutil==2.9.0.post0 pytz==2025.2 requests==2.27.1 six==1.17.0 snowballstemmer==2.2.0 spc @ git+https://github.com/glemaitre/spc.git@44b67d49e1e4fe9364e7cbce9a93086037703511 -e git+https://github.com/paris-saclay-cds/specio.git@e966bc2b7f0955631517780272b8ebd62f6c6a1b#egg=specio Sphinx==5.3.0 sphinx-gallery==0.10.0 sphinx-rtd-theme==2.0.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: specio channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - cycler==0.11.0 - docutils==0.18.1 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==3.0.3 - kiwisolver==1.3.1 - markupsafe==2.0.1 - matplotlib==3.3.4 - numpy==1.19.5 - numpydoc==1.1.0 - packaging==21.3 - pillow==8.4.0 - pluggy==1.0.0 - py==1.11.0 - pygments==2.14.0 - pyopenms==2.6.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - python-dateutil==2.9.0.post0 - pytz==2025.2 - requests==2.27.1 - six==1.17.0 - snowballstemmer==2.2.0 - spc==0.4.0 - sphinx==5.3.0 - sphinx-gallery==0.10.0 - sphinx-rtd-theme==2.0.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/specio
[ "specio/core/tests/test_functions.py::test_specread_consitent_wavelength[_generate_spectrum_identical_wavelength-1e-05-Spectrum-spectra_shape0]", "specio/core/tests/test_functions.py::test_specread_consitent_wavelength[_generate_spectrum_different_wavelength_size-1e-05-list-10]", "specio/core/tests/test_functions.py::test_specread_consitent_wavelength[_generate_spectrum_different_wavelength-1e-05-list-10]", "specio/core/tests/test_functions.py::test_specread_consitent_wavelength[_generate_list_spectrum-1e-05-list-30]", "specio/core/tests/test_functions.py::test_specread_consitent_wavelength[_generate_list_spectrum_close_wavelength-0.01-Spectrum-spectra_shape4]", "specio/core/tests/test_functions.py::test_specread_consitent_wavelength[_generate_list_spectrum_close_wavelength-1e-05-list-10]" ]
[ "specio/core/tests/test_functions.py::test_get_reader_error[ValueError-Could", "specio/core/tests/test_functions.py::test_get_reader_error[OSError-No", "specio/core/tests/test_functions.py::test_get_reader_error[IndexError-No" ]
[ "specio/core/tests/test_functions.py::test_help", "specio/core/tests/test_functions.py::test_get_reader", "specio/core/tests/test_functions.py::test_specread_single_file" ]
[]
BSD 3-Clause "New" or "Revised" License
2,107
[ "specio/core/functions.py" ]
[ "specio/core/functions.py" ]
dpkp__kafka-python-1364
08a7fb7b754a754c6c64e96d4ba5c4f56cf38a5f
2018-02-01 19:09:05
618c5051493693c1305aa9f08e8a0583d5fcf0e3
dpkp:
> Wow, this is a lot more work than I remembered when I investigated how Java fixed this... is that because you're also porting in other updates from the Java client? Or is my memory just faulty?

This is just 3949. The upstream PR is here: https://github.com/apache/kafka/pull/1762
diff --git a/kafka/cluster.py b/kafka/cluster.py index d646fdf..1ab4218 100644 --- a/kafka/cluster.py +++ b/kafka/cluster.py @@ -291,6 +291,13 @@ class ClusterMetadata(object): for listener in self._listeners: listener(self) + if self.need_all_topic_metadata: + # the listener may change the interested topics, + # which could cause another metadata refresh. + # If we have already fetched all topics, however, + # another fetch should be unnecessary. + self._need_update = False + def add_listener(self, listener): """Add a callback function to be called on each metadata update""" self._listeners.add(listener) diff --git a/kafka/consumer/fetcher.py b/kafka/consumer/fetcher.py index afb8f52..f9fcb37 100644 --- a/kafka/consumer/fetcher.py +++ b/kafka/consumer/fetcher.py @@ -326,9 +326,6 @@ class Fetcher(six.Iterator): max_records = self.config['max_poll_records'] assert max_records > 0 - if self._subscriptions.needs_partition_assignment: - return {}, False - drained = collections.defaultdict(list) records_remaining = max_records @@ -397,9 +394,6 @@ class Fetcher(six.Iterator): def _message_generator(self): """Iterate over fetched_records""" - if self._subscriptions.needs_partition_assignment: - raise StopIteration('Subscription needs partition assignment') - while self._next_partition_records or self._completed_fetches: if not self._next_partition_records: diff --git a/kafka/consumer/group.py b/kafka/consumer/group.py index 0224d16..1c1f1e8 100644 --- a/kafka/consumer/group.py +++ b/kafka/consumer/group.py @@ -644,6 +644,11 @@ class KafkaConsumer(six.Iterator): timeout_ms = min(timeout_ms, self._coordinator.time_to_next_poll()) self._client.poll(timeout_ms=timeout_ms) + # after the long poll, we should check whether the group needs to rebalance + # prior to returning data so that the group can stabilize faster + if self._coordinator.need_rejoin(): + return {} + records, _ = self._fetcher.fetched_records(max_records) return records @@ -1055,6 +1060,11 @@ class KafkaConsumer(six.Iterator): poll_ms = 0 self._client.poll(timeout_ms=poll_ms) + # after the long poll, we should check whether the group needs to rebalance + # prior to returning data so that the group can stabilize faster + if self._coordinator.need_rejoin(): + continue + # We need to make sure we at least keep up with scheduled tasks, # like heartbeats, auto-commits, and metadata refreshes timeout_at = self._next_timeout() diff --git a/kafka/consumer/subscription_state.py b/kafka/consumer/subscription_state.py index 3d4dfef..10d722e 100644 --- a/kafka/consumer/subscription_state.py +++ b/kafka/consumer/subscription_state.py @@ -68,7 +68,6 @@ class SubscriptionState(object): self._group_subscription = set() self._user_assignment = set() self.assignment = dict() - self.needs_partition_assignment = False self.listener = None # initialize to true for the consumers to fetch offset upon starting up @@ -172,7 +171,6 @@ class SubscriptionState(object): log.info('Updating subscribed topics to: %s', topics) self.subscription = set(topics) self._group_subscription.update(topics) - self.needs_partition_assignment = True # Remove any assigned partitions which are no longer subscribed to for tp in set(self.assignment.keys()): @@ -192,12 +190,12 @@ class SubscriptionState(object): raise IllegalStateError(self._SUBSCRIPTION_EXCEPTION_MESSAGE) self._group_subscription.update(topics) - def mark_for_reassignment(self): + def reset_group_subscription(self): + """Reset the group's subscription to only contain topics subscribed by this consumer.""" if 
self._user_assignment: raise IllegalStateError(self._SUBSCRIPTION_EXCEPTION_MESSAGE) assert self.subscription is not None, 'Subscription required' self._group_subscription.intersection_update(self.subscription) - self.needs_partition_assignment = True def assign_from_user(self, partitions): """Manually assign a list of TopicPartitions to this consumer. @@ -220,18 +218,17 @@ class SubscriptionState(object): if self.subscription is not None: raise IllegalStateError(self._SUBSCRIPTION_EXCEPTION_MESSAGE) - self._user_assignment.clear() - self._user_assignment.update(partitions) + if self._user_assignment != set(partitions): + self._user_assignment = set(partitions) - for partition in partitions: - if partition not in self.assignment: - self._add_assigned_partition(partition) + for partition in partitions: + if partition not in self.assignment: + self._add_assigned_partition(partition) - for tp in set(self.assignment.keys()) - self._user_assignment: - del self.assignment[tp] + for tp in set(self.assignment.keys()) - self._user_assignment: + del self.assignment[tp] - self.needs_partition_assignment = False - self.needs_fetch_committed_offsets = True + self.needs_fetch_committed_offsets = True def assign_from_subscribed(self, assignments): """Update the assignment to the specified partitions @@ -245,16 +242,18 @@ class SubscriptionState(object): assignments (list of TopicPartition): partitions to assign to this consumer instance. """ - if self.subscription is None: + if not self.partitions_auto_assigned(): raise IllegalStateError(self._SUBSCRIPTION_EXCEPTION_MESSAGE) for tp in assignments: if tp.topic not in self.subscription: raise ValueError("Assigned partition %s for non-subscribed topic." % str(tp)) + + # after rebalancing, we always reinitialize the assignment state self.assignment.clear() for tp in assignments: self._add_assigned_partition(tp) - self.needs_partition_assignment = False + self.needs_fetch_committed_offsets = True log.info("Updated partition assignment: %s", assignments) def unsubscribe(self): @@ -262,7 +261,6 @@ class SubscriptionState(object): self.subscription = None self._user_assignment.clear() self.assignment.clear() - self.needs_partition_assignment = True self.subscribed_pattern = None def group_subscription(self): diff --git a/kafka/coordinator/base.py b/kafka/coordinator/base.py index 301c06d..820fc1f 100644 --- a/kafka/coordinator/base.py +++ b/kafka/coordinator/base.py @@ -344,23 +344,25 @@ class BaseCoordinator(object): def ensure_active_group(self): """Ensure that the group is active (i.e. joined and synced)""" with self._lock: - if not self.need_rejoin(): - return - - # call on_join_prepare if needed. We set a flag to make sure that - # we do not call it a second time if the client is woken up before - # a pending rebalance completes. - if not self.rejoining: - self._on_join_prepare(self._generation.generation_id, - self._generation.member_id) - self.rejoining = True - if self._heartbeat_thread is None: self._start_heartbeat_thread() while self.need_rejoin(): self.ensure_coordinator_ready() + # call on_join_prepare if needed. We set a flag + # to make sure that we do not call it a second + # time if the client is woken up before a pending + # rebalance completes. This must be called on each + # iteration of the loop because an event requiring + # a rebalance (such as a metadata refresh which + # changes the matched subscription set) can occur + # while another rebalance is still in progress. 
+ if not self.rejoining: + self._on_join_prepare(self._generation.generation_id, + self._generation.member_id) + self.rejoining = True + # ensure that there are no pending requests to the coordinator. # This is important in particular to avoid resending a pending # JoinGroup request. diff --git a/kafka/coordinator/consumer.py b/kafka/coordinator/consumer.py index ab30883..9438a7e 100644 --- a/kafka/coordinator/consumer.py +++ b/kafka/coordinator/consumer.py @@ -84,6 +84,8 @@ class ConsumerCoordinator(BaseCoordinator): self.config[key] = configs[key] self._subscription = subscription + self._is_leader = False + self._joined_subscription = set() self._metadata_snapshot = self._build_metadata_snapshot(subscription, client.cluster) self._assignment_snapshot = None self._cluster = client.cluster @@ -132,11 +134,22 @@ class ConsumerCoordinator(BaseCoordinator): def group_protocols(self): """Returns list of preferred (protocols, metadata)""" - topics = self._subscription.subscription - assert topics is not None, 'Consumer has not subscribed to topics' + if self._subscription.subscription is None: + raise Errors.IllegalStateError('Consumer has not subscribed to topics') + # dpkp note: I really dislike this. + # why? because we are using this strange method group_protocols, + # which is seemingly innocuous, to set internal state (_joined_subscription) + # that is later used to check whether metadata has changed since we joined a group + # but there is no guarantee that this method, group_protocols, will get called + # in the correct sequence or that it will only be called when we want it to be. + # So this really should be moved elsewhere, but I don't have the energy to + # work that out right now. If you read this at some later date after the mutable + # state has bitten you... I'm sorry! It mimics the java client, and that's the + # best I've got for now. 
+ self._joined_subscription = set(self._subscription.subscription) metadata_list = [] for assignor in self.config['assignors']: - metadata = assignor.metadata(topics) + metadata = assignor.metadata(self._joined_subscription) group_protocol = (assignor.name, metadata) metadata_list.append(group_protocol) return metadata_list @@ -158,21 +171,29 @@ class ConsumerCoordinator(BaseCoordinator): # check if there are any changes to the metadata which should trigger # a rebalance - if self._subscription_metadata_changed(cluster): - - if (self.config['api_version'] >= (0, 9) - and self.config['group_id'] is not None): - - self._subscription.mark_for_reassignment() - - # If we haven't got group coordinator support, - # just assign all partitions locally - else: - self._subscription.assign_from_subscribed([ - TopicPartition(topic, partition) - for topic in self._subscription.subscription - for partition in self._metadata_snapshot[topic] - ]) + if self._subscription.partitions_auto_assigned(): + metadata_snapshot = self._build_metadata_snapshot(self._subscription, cluster) + if self._metadata_snapshot != metadata_snapshot: + self._metadata_snapshot = metadata_snapshot + + # If we haven't got group coordinator support, + # just assign all partitions locally + if self._auto_assign_all_partitions(): + self._subscription.assign_from_subscribed([ + TopicPartition(topic, partition) + for topic in self._subscription.subscription + for partition in self._metadata_snapshot[topic] + ]) + + def _auto_assign_all_partitions(self): + # For users that use "subscribe" without group support, + # we will simply assign all partitions to this consumer + if self.config['api_version'] < (0, 9): + return True + elif self.config['group_id'] is None: + return True + else: + return False def _build_metadata_snapshot(self, subscription, cluster): metadata_snapshot = {} @@ -181,16 +202,6 @@ class ConsumerCoordinator(BaseCoordinator): metadata_snapshot[topic] = set(partitions) return metadata_snapshot - def _subscription_metadata_changed(self, cluster): - if not self._subscription.partitions_auto_assigned(): - return False - - metadata_snapshot = self._build_metadata_snapshot(self._subscription, cluster) - if self._metadata_snapshot != metadata_snapshot: - self._metadata_snapshot = metadata_snapshot - return True - return False - def _lookup_assignor(self, name): for assignor in self.config['assignors']: if assignor.name == name: @@ -199,12 +210,10 @@ class ConsumerCoordinator(BaseCoordinator): def _on_join_complete(self, generation, member_id, protocol, member_assignment_bytes): - # if we were the assignor, then we need to make sure that there have - # been no metadata updates since the rebalance begin. Otherwise, we - # won't rebalance again until the next metadata change - if self._assignment_snapshot is not None and self._assignment_snapshot != self._metadata_snapshot: - self._subscription.mark_for_reassignment() - return + # only the leader is responsible for monitoring for metadata changes + # (i.e. 
partition changes) + if not self._is_leader: + self._assignment_snapshot = None assignor = self._lookup_assignor(protocol) assert assignor, 'Coordinator selected invalid assignment protocol: %s' % protocol @@ -307,6 +316,7 @@ class ConsumerCoordinator(BaseCoordinator): # keep track of the metadata used for assignment so that we can check # after rebalance completion whether anything has changed self._cluster.request_update() + self._is_leader = True self._assignment_snapshot = self._metadata_snapshot log.debug("Performing assignment for group %s using strategy %s" @@ -338,8 +348,8 @@ class ConsumerCoordinator(BaseCoordinator): " for group %s failed on_partitions_revoked", self._subscription.listener, self.group_id) - self._assignment_snapshot = None - self._subscription.mark_for_reassignment() + self._is_leader = False + self._subscription.reset_group_subscription() def need_rejoin(self): """Check whether the group should be rejoined @@ -347,9 +357,23 @@ class ConsumerCoordinator(BaseCoordinator): Returns: bool: True if consumer should rejoin group, False otherwise """ - return (self._subscription.partitions_auto_assigned() and - (super(ConsumerCoordinator, self).need_rejoin() or - self._subscription.needs_partition_assignment)) + if not self._subscription.partitions_auto_assigned(): + return False + + if self._auto_assign_all_partitions(): + return False + + # we need to rejoin if we performed the assignment and metadata has changed + if (self._assignment_snapshot is not None + and self._assignment_snapshot != self._metadata_snapshot): + return True + + # we need to join if our subscription has changed since the last join + if (self._joined_subscription is not None + and self._joined_subscription != self._subscription.subscription): + return True + + return super(ConsumerCoordinator, self).need_rejoin() def refresh_committed_offsets_if_needed(self): """Fetch committed offsets for assigned partitions."""
KAFKA-3949: Fix race condition between group rebalance and metadata update

Details in https://issues.apache.org/jira/browse/KAFKA-3949

Note that the fix refactored a fair bit of code for managing subscription state: https://github.com/apache/kafka/pull/1762

And then KIP-70 (tracked in #1242) further modified this code.

Related: #1237 / #1240 / #1242
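For readers skimming the diff, the heart of the fix is the pair of snapshot comparisons added to `need_rejoin`. The stub below is a minimal sketch of that logic with the surrounding coordinator machinery stripped away; the attribute names mirror the patch, everything else is invented for illustration.

```python
# Minimal sketch of the rejoin check added in ConsumerCoordinator.need_rejoin();
# not the real class, just the two comparisons in isolation.

class CoordinatorSketch:
    def __init__(self):
        self._assignment_snapshot = None   # metadata seen when we led an assignment
        self._metadata_snapshot = {}       # current topic -> partition-set view
        self._joined_subscription = None   # topics sent in the last JoinGroup
        self.subscription = set()          # topics the consumer wants now

    def need_rejoin(self):
        # The leader must rebalance if partition metadata changed since it
        # performed the assignment.
        if (self._assignment_snapshot is not None
                and self._assignment_snapshot != self._metadata_snapshot):
            return True
        # Any member must rejoin if its subscription changed since joining.
        if (self._joined_subscription is not None
                and self._joined_subscription != self.subscription):
            return True
        return False

c = CoordinatorSketch()
c._joined_subscription = {'foo'}
c.subscription = {'foo', 'bar'}   # e.g. a metadata refresh matched a new topic
print(c.need_rejoin())            # True -> triggers another rebalance
```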
dpkp/kafka-python
diff --git a/test/test_coordinator.py b/test/test_coordinator.py index e094b9c..7a2627e 100644 --- a/test/test_coordinator.py +++ b/test/test_coordinator.py @@ -62,7 +62,7 @@ def test_group_protocols(coordinator): # Requires a subscription try: coordinator.group_protocols() - except AssertionError: + except Errors.IllegalStateError: pass else: assert False, 'Exception not raised when expected' @@ -85,8 +85,7 @@ def test_pattern_subscription(coordinator, api_version): coordinator.config['api_version'] = api_version coordinator._subscription.subscribe(pattern='foo') assert coordinator._subscription.subscription == set([]) - assert coordinator._subscription_metadata_changed({}) is False - assert coordinator._subscription.needs_partition_assignment is False + assert coordinator._metadata_snapshot == coordinator._build_metadata_snapshot(coordinator._subscription, {}) cluster = coordinator._client.cluster cluster.update_metadata(MetadataResponse[0]( @@ -100,12 +99,10 @@ def test_pattern_subscription(coordinator, api_version): # 0.9 consumers should trigger dynamic partition assignment if api_version >= (0, 9): - assert coordinator._subscription.needs_partition_assignment is True assert coordinator._subscription.assignment == {} # earlier consumers get all partitions assigned locally else: - assert coordinator._subscription.needs_partition_assignment is False assert set(coordinator._subscription.assignment.keys()) == set([ TopicPartition('foo1', 0), TopicPartition('foo2', 0)]) @@ -195,7 +192,6 @@ def test_perform_assignment(mocker, coordinator): def test_on_join_prepare(coordinator): coordinator._subscription.subscribe(topics=['foobar']) coordinator._on_join_prepare(0, 'member-foo') - assert coordinator._subscription.needs_partition_assignment is True def test_need_rejoin(coordinator): @@ -205,13 +201,6 @@ def test_need_rejoin(coordinator): coordinator._subscription.subscribe(topics=['foobar']) assert coordinator.need_rejoin() is True - coordinator._subscription.needs_partition_assignment = False - coordinator.rejoin_needed = False - assert coordinator.need_rejoin() is False - - coordinator._subscription.needs_partition_assignment = True - assert coordinator.need_rejoin() is True - def test_refresh_committed_offsets_if_needed(mocker, coordinator): mocker.patch.object(ConsumerCoordinator, 'fetch_committed_offsets', @@ -388,7 +377,6 @@ def test_maybe_auto_commit_offsets_sync(mocker, api_version, group_id, enable, @pytest.fixture def patched_coord(mocker, coordinator): coordinator._subscription.subscribe(topics=['foobar']) - coordinator._subscription.needs_partition_assignment = False mocker.patch.object(coordinator, 'coordinator_unknown', return_value=False) coordinator.coordinator_id = 0 mocker.patch.object(coordinator, 'coordinator', return_value=0) @@ -461,47 +449,39 @@ def test_send_offset_commit_request_success(mocker, patched_coord, offsets): offsets, future, mocker.ANY, response) [email protected]('response,error,dead,reassign', [ [email protected]('response,error,dead', [ (OffsetCommitResponse[0]([('foobar', [(0, 30), (1, 30)])]), - Errors.GroupAuthorizationFailedError, False, False), + Errors.GroupAuthorizationFailedError, False), (OffsetCommitResponse[0]([('foobar', [(0, 12), (1, 12)])]), - Errors.OffsetMetadataTooLargeError, False, False), + Errors.OffsetMetadataTooLargeError, False), (OffsetCommitResponse[0]([('foobar', [(0, 28), (1, 28)])]), - Errors.InvalidCommitOffsetSizeError, False, False), + Errors.InvalidCommitOffsetSizeError, False), (OffsetCommitResponse[0]([('foobar', [(0, 
14), (1, 14)])]), - Errors.GroupLoadInProgressError, False, False), + Errors.GroupLoadInProgressError, False), (OffsetCommitResponse[0]([('foobar', [(0, 15), (1, 15)])]), - Errors.GroupCoordinatorNotAvailableError, True, False), + Errors.GroupCoordinatorNotAvailableError, True), (OffsetCommitResponse[0]([('foobar', [(0, 16), (1, 16)])]), - Errors.NotCoordinatorForGroupError, True, False), + Errors.NotCoordinatorForGroupError, True), (OffsetCommitResponse[0]([('foobar', [(0, 7), (1, 7)])]), - Errors.RequestTimedOutError, True, False), + Errors.RequestTimedOutError, True), (OffsetCommitResponse[0]([('foobar', [(0, 25), (1, 25)])]), - Errors.CommitFailedError, False, True), + Errors.CommitFailedError, False), (OffsetCommitResponse[0]([('foobar', [(0, 22), (1, 22)])]), - Errors.CommitFailedError, False, True), + Errors.CommitFailedError, False), (OffsetCommitResponse[0]([('foobar', [(0, 27), (1, 27)])]), - Errors.CommitFailedError, False, True), + Errors.CommitFailedError, False), (OffsetCommitResponse[0]([('foobar', [(0, 17), (1, 17)])]), - Errors.InvalidTopicError, False, False), + Errors.InvalidTopicError, False), (OffsetCommitResponse[0]([('foobar', [(0, 29), (1, 29)])]), - Errors.TopicAuthorizationFailedError, False, False), + Errors.TopicAuthorizationFailedError, False), ]) def test_handle_offset_commit_response(mocker, patched_coord, offsets, - response, error, dead, reassign): + response, error, dead): future = Future() patched_coord._handle_offset_commit_response(offsets, future, time.time(), response) assert isinstance(future.exception, error) assert patched_coord.coordinator_id is (None if dead else 0) - if reassign: - assert patched_coord._generation is Generation.NO_GENERATION - assert patched_coord.rejoin_needed is True - assert patched_coord.state is MemberState.UNJOINED - else: - assert patched_coord._generation is not Generation.NO_GENERATION - assert patched_coord.rejoin_needed is False - assert patched_coord.state is MemberState.STABLE @pytest.fixture @@ -570,6 +550,10 @@ def test_send_offset_fetch_request_success(patched_coord, partitions): Errors.GroupLoadInProgressError, False), (OffsetFetchResponse[0]([('foobar', [(0, 123, b'', 16), (1, 234, b'', 16)])]), Errors.NotCoordinatorForGroupError, True), + (OffsetFetchResponse[0]([('foobar', [(0, 123, b'', 25), (1, 234, b'', 25)])]), + Errors.UnknownMemberIdError, False), + (OffsetFetchResponse[0]([('foobar', [(0, 123, b'', 22), (1, 234, b'', 22)])]), + Errors.IllegalGenerationError, False), (OffsetFetchResponse[0]([('foobar', [(0, 123, b'', 29), (1, 234, b'', 29)])]), Errors.TopicAuthorizationFailedError, False), (OffsetFetchResponse[0]([('foobar', [(0, 123, b'', 0), (1, 234, b'', 0)])]), @@ -627,7 +611,7 @@ def test_ensure_active_group(mocker, coordinator): coordinator._subscription.subscribe(topics=['foobar']) mocker.patch.object(coordinator, 'coordinator_unknown', return_value=False) mocker.patch.object(coordinator, '_send_join_group_request', return_value=Future().success(True)) - mocker.patch.object(coordinator, 'need_rejoin', side_effect=[True, True, False]) + mocker.patch.object(coordinator, 'need_rejoin', side_effect=[True, False]) mocker.patch.object(coordinator, '_on_join_complete') mocker.patch.object(coordinator, '_heartbeat_thread')
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_issue_reference", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 6 }
1.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-catchlog", "pytest-sugar", "pytest-mock", "mock", "python-snappy", "lz4tools", "xxhash" ], "pre_install": [ "apt-get update", "apt-get install -y gcc libsnappy-dev" ], "python": "3.6", "reqs_path": [ "docs/requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 cramjam==2.5.0 docutils==0.18.1 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==3.0.3 -e git+https://github.com/dpkp/kafka-python.git@08a7fb7b754a754c6c64e96d4ba5c4f56cf38a5f#egg=kafka_python lz4tools==1.3.1.2 MarkupSafe==2.0.1 mock==5.2.0 packaging==21.3 pluggy==1.0.0 pockets==0.9.1 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-catchlog==1.2.2 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-sugar==0.9.6 python-snappy==0.7.3 pytz==2025.2 requests==2.27.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-rtd-theme==2.0.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-napoleon==0.7 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 termcolor==1.1.0 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 xxhash==3.2.0 zipp==3.6.0
name: kafka-python channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - cramjam==2.5.0 - docutils==0.18.1 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==3.0.3 - lz4tools==1.3.1.2 - markupsafe==2.0.1 - mock==5.2.0 - packaging==21.3 - pluggy==1.0.0 - pockets==0.9.1 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-catchlog==1.2.2 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-sugar==0.9.6 - python-snappy==0.7.3 - pytz==2025.2 - requests==2.27.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-rtd-theme==2.0.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-napoleon==0.7 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - termcolor==1.1.0 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - xxhash==3.2.0 - zipp==3.6.0 prefix: /opt/conda/envs/kafka-python
[ "test/test_coordinator.py::test_group_protocols", "test/test_coordinator.py::test_ensure_active_group" ]
[]
[ "test/test_coordinator.py::test_init", "test/test_coordinator.py::test_autocommit_enable_api_version[api_version0]", "test/test_coordinator.py::test_autocommit_enable_api_version[api_version1]", "test/test_coordinator.py::test_autocommit_enable_api_version[api_version2]", "test/test_coordinator.py::test_autocommit_enable_api_version[api_version3]", "test/test_coordinator.py::test_protocol_type", "test/test_coordinator.py::test_pattern_subscription[api_version0]", "test/test_coordinator.py::test_pattern_subscription[api_version1]", "test/test_coordinator.py::test_pattern_subscription[api_version2]", "test/test_coordinator.py::test_pattern_subscription[api_version3]", "test/test_coordinator.py::test_lookup_assignor", "test/test_coordinator.py::test_join_complete", "test/test_coordinator.py::test_subscription_listener", "test/test_coordinator.py::test_subscription_listener_failure", "test/test_coordinator.py::test_perform_assignment", "test/test_coordinator.py::test_on_join_prepare", "test/test_coordinator.py::test_need_rejoin", "test/test_coordinator.py::test_refresh_committed_offsets_if_needed", "test/test_coordinator.py::test_fetch_committed_offsets", "test/test_coordinator.py::test_close", "test/test_coordinator.py::test_commit_offsets_async", "test/test_coordinator.py::test_commit_offsets_sync", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version0-foobar-True-None-False-False-True-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version1-foobar-True-None-True-True-False-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version2-foobar-True-None-True-True-False-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version3-foobar-False-None-False-False-False-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version4-foobar-True-error4-True-True-True-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version5-foobar-True-error5-True-True-True-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version6-foobar-True-error6-True-True-True-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version7-foobar-True-error7-True-True-False-True]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version8-foobar-True-None-True-True-False-False]", "test/test_coordinator.py::test_maybe_auto_commit_offsets_sync[api_version9-None-True-None-False-False-True-False]", "test/test_coordinator.py::test_send_offset_commit_request_fail", "test/test_coordinator.py::test_send_offset_commit_request_versions[api_version0-OffsetCommitRequest_v0]", "test/test_coordinator.py::test_send_offset_commit_request_versions[api_version1-OffsetCommitRequest_v1]", "test/test_coordinator.py::test_send_offset_commit_request_versions[api_version2-OffsetCommitRequest_v2]", "test/test_coordinator.py::test_send_offset_commit_request_failure", "test/test_coordinator.py::test_send_offset_commit_request_success", "test/test_coordinator.py::test_handle_offset_commit_response[response0-GroupAuthorizationFailedError-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response1-OffsetMetadataTooLargeError-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response2-InvalidCommitOffsetSizeError-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response3-GroupLoadInProgressError-False]", 
"test/test_coordinator.py::test_handle_offset_commit_response[response4-GroupCoordinatorNotAvailableError-True]", "test/test_coordinator.py::test_handle_offset_commit_response[response5-NotCoordinatorForGroupError-True]", "test/test_coordinator.py::test_handle_offset_commit_response[response6-RequestTimedOutError-True]", "test/test_coordinator.py::test_handle_offset_commit_response[response7-CommitFailedError-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response8-CommitFailedError-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response9-CommitFailedError-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response10-InvalidTopicError-False]", "test/test_coordinator.py::test_handle_offset_commit_response[response11-TopicAuthorizationFailedError-False]", "test/test_coordinator.py::test_send_offset_fetch_request_fail", "test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version0-OffsetFetchRequest_v0]", "test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version1-OffsetFetchRequest_v1]", "test/test_coordinator.py::test_send_offset_fetch_request_versions[api_version2-OffsetFetchRequest_v1]", "test/test_coordinator.py::test_send_offset_fetch_request_failure", "test/test_coordinator.py::test_send_offset_fetch_request_success", "test/test_coordinator.py::test_handle_offset_fetch_response[response0-GroupLoadInProgressError-False]", "test/test_coordinator.py::test_handle_offset_fetch_response[response1-NotCoordinatorForGroupError-True]", "test/test_coordinator.py::test_handle_offset_fetch_response[response2-UnknownMemberIdError-False]", "test/test_coordinator.py::test_handle_offset_fetch_response[response3-IllegalGenerationError-False]", "test/test_coordinator.py::test_handle_offset_fetch_response[response4-TopicAuthorizationFailedError-False]", "test/test_coordinator.py::test_handle_offset_fetch_response[response5-None-False]", "test/test_coordinator.py::test_heartbeat", "test/test_coordinator.py::test_lookup_coordinator_failure" ]
[]
Apache License 2.0
2,108
[ "kafka/cluster.py", "kafka/consumer/group.py", "kafka/coordinator/consumer.py", "kafka/consumer/fetcher.py", "kafka/coordinator/base.py", "kafka/consumer/subscription_state.py" ]
[ "kafka/cluster.py", "kafka/consumer/group.py", "kafka/coordinator/consumer.py", "kafka/consumer/fetcher.py", "kafka/coordinator/base.py", "kafka/consumer/subscription_state.py" ]
dask__dask-3126
cceb6e2ac50a85b3f34154612dc98508432f057c
2018-02-01 20:35:59
de6c2a49d76066abb51085570816322f063fc5c5
jcrist: cc @mrocklin.

mrocklin: This looks fine to me. The similarity between the methods makes me want to find a way to put it on the Base class, although obviously there are sufficient dissimilarities with types like Array and Scalar that this probably isn't feasible.

jcrist:
> The similarity between the methods makes me want to find a way to put it on the Base class, although obviously there are sufficient dissimilarities with types like Array and Scalar that this probably isn't feasible.

Yeah, I don't think this is feasible. Simple enough to reimplement for each collection. One generic thing I did think about adding is the following function:

```python
def to_delayed(*args, **kwargs):
    """Convert multiple dask collections to delayed objects at the same time

    Parameters
    ------------
    optimize_graph : bool, optional
        If True (default), optimize all collections together before
        converting to delayed objects.
    """
    if kwargs.pop('optimize_graph', True):
        args = dask.optimize(*args)
    return tuple(a.to_delayed(optimize_graph=False) for a in args)
```

I decided against it because:

- This relies on collections having the same `to_delayed` signature
- Different collections return different things (lists of partitions, arrays of partitions, etc...). Grouping these all together felt off.

It's small enough to implement that I didn't think it was worth adding until a need arises. Can always add it later, but it's harder to remove things.
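For what it's worth, had the helper above been merged, usage might have looked like the sketch below. The helper is reproduced so the snippet stands alone, and it assumes the `optimize_graph` keyword added by this PR; the collections are arbitrary examples.

```python
import dask
import dask.array as da
import dask.bag as db

def to_delayed(*args, **kwargs):
    # The helper quoted above, reproduced so this snippet runs standalone.
    if kwargs.pop('optimize_graph', True):
        args = dask.optimize(*args)
    return tuple(a.to_delayed(optimize_graph=False) for a in args)

arr = da.ones(4, chunks=2)
bag = db.from_sequence([1, 2, 3], npartitions=3)

# Both graphs are optimized together, then wrapped without re-optimizing.
arr_parts, bag_parts = to_delayed(arr, bag)
```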
diff --git a/dask/array/core.py b/dask/array/core.py index 254232621..edbf36f8a 100644 --- a/dask/array/core.py +++ b/dask/array/core.py @@ -1833,10 +1833,14 @@ class Array(Base): memo[id(self)] = c return c - def to_delayed(self): - """ Convert Array into dask Delayed objects + def to_delayed(self, optimize_graph=True): + """Convert into an array of ``dask.delayed`` objects, one per chunk. - Returns an array of values, one value per chunk. + Parameters + ---------- + optimize_graph : bool, optional + If True [default], the graph is optimized before converting into + ``dask.delayed`` objects. See Also -------- @@ -1844,7 +1848,9 @@ class Array(Base): """ from ..delayed import Delayed keys = self.__dask_keys__() - dsk = self.__dask_optimize__(self.__dask_graph__(), keys) + dsk = self.__dask_graph__() + if optimize_graph: + dsk = self.__dask_optimize__(dsk, keys) L = ndeepmap(self.ndim, lambda k: Delayed(k, dsk), keys) return np.array(L, dtype=object) diff --git a/dask/bag/core.py b/dask/bag/core.py index 27a0c6a6e..5c002d1e0 100644 --- a/dask/bag/core.py +++ b/dask/bag/core.py @@ -339,14 +339,19 @@ class Item(Base): __int__ = __float__ = __complex__ = __bool__ = Base.compute - def to_delayed(self): - """ Convert bag item to dask.delayed. + def to_delayed(self, optimize_graph=True): + """Convert into a ``dask.delayed`` object. - Returns a single value. + Parameters + ---------- + optimize_graph : bool, optional + If True [default], the graph is optimized before converting into + ``dask.delayed`` objects. """ from dask.delayed import Delayed - dsk = self.__dask_optimize__(self.__dask_graph__(), - self.__dask_keys__()) + dsk = self.__dask_graph__() + if optimize_graph: + dsk = self.__dask_optimize__(dsk, self.__dask_keys__()) return Delayed(self.key, dsk) @@ -1260,14 +1265,24 @@ class Bag(Base): divisions = [None] * (self.npartitions + 1) return dd.DataFrame(dsk, name, meta, divisions) - def to_delayed(self): - """ Convert bag to list of dask Delayed. + def to_delayed(self, optimize_graph=True): + """Convert into a list of ``dask.delayed`` objects, one per partition. - Returns list of Delayed, one per partition. + Parameters + ---------- + optimize_graph : bool, optional + If True [default], the graph is optimized before converting into + ``dask.delayed`` objects. + + See Also + -------- + dask.bag.from_delayed """ from dask.delayed import Delayed keys = self.__dask_keys__() - dsk = self.__dask_optimize__(self.__dask_graph__(), keys) + dsk = self.__dask_graph__() + if optimize_graph: + dsk = self.__dask_optimize__(dsk, keys) return [Delayed(k, dsk) for k in keys] def repartition(self, npartitions): diff --git a/dask/dataframe/core.py b/dask/dataframe/core.py index e857330a3..f8abd5a8d 100644 --- a/dask/dataframe/core.py +++ b/dask/dataframe/core.py @@ -20,7 +20,7 @@ from .. import array as da from .. import core from ..utils import partial_by_order from .. import threaded -from ..compatibility import apply, operator_div, bind_method, PY3 +from ..compatibility import apply, operator_div, bind_method from ..context import globalmethod from ..utils import (random_state_data, pseudorandom, derived_from, funcname, memory_repr, put_lines, M, key_split, OperatorMethodMixin) @@ -179,6 +179,21 @@ class Scalar(Base, OperatorMethodMixin): def _get_binary_operator(cls, op, inv=False): return lambda self, other: _scalar_binary(op, self, other, inv=inv) + def to_delayed(self, optimize_graph=True): + """Convert into a ``dask.delayed`` object. 
+ + Parameters + ---------- + optimize_graph : bool, optional + If True [default], the graph is optimized before converting into + ``dask.delayed`` objects. + """ + from dask.delayed import Delayed + dsk = self.__dask_graph__() + if optimize_graph: + dsk = self.__dask_optimize__(dsk, self.__dask_keys__()) + return Delayed(self.key, dsk) + def _scalar_binary(op, self, other, inv=False): name = '{0}-{1}'.format(funcname(op), tokenize(self, other)) @@ -1014,9 +1029,29 @@ Dask Name: {name}, {task} tasks""".format(klass=self.__class__.__name__, from .io import to_csv return to_csv(self, filename, **kwargs) - def to_delayed(self): - """ See dd.to_delayed docstring for more information """ - return to_delayed(self) + def to_delayed(self, optimize_graph=True): + """Convert into a list of ``dask.delayed`` objects, one per partition. + + Parameters + ---------- + optimize_graph : bool, optional + If True [default], the graph is optimized before converting into + ``dask.delayed`` objects. + + Examples + -------- + >>> partitions = df.to_delayed() # doctest: +SKIP + + See Also + -------- + dask.dataframe.from_delayed + """ + from dask.delayed import Delayed + keys = self.__dask_keys__() + dsk = self.__dask_graph__() + if optimize_graph: + dsk = self.__dask_optimize__(dsk, keys) + return [Delayed(k, dsk) for k in keys] @classmethod def _get_unary_operator(cls, op): @@ -4032,19 +4067,24 @@ def maybe_shift_divisions(df, periods, freq): return df -def to_delayed(df): - """ Create Dask Delayed objects from a Dask Dataframe +def to_delayed(df, optimize_graph=True): + """Convert into a list of ``dask.delayed`` objects, one per partition. - Returns a list of delayed values, one value per partition. + Deprecated, please use the equivalent ``df.to_delayed`` method instead. - Examples + Parameters + ---------- + optimize_graph : bool, optional + If True [default], the graph is optimized before converting into + ``dask.delayed`` objects. + + See Also -------- - >>> partitions = df.to_delayed() # doctest: +SKIP + dask.dataframe.from_delayed """ - from dask.delayed import Delayed - keys = df.__dask_keys__() - dsk = df.__dask_optimize__(df.__dask_graph__(), keys) - return [Delayed(k, dsk) for k in keys] + warnings.warn("DeprecationWarning: The `dd.to_delayed` function is " + "deprecated, please use the `.to_delayed()` method instead.") + return df.to_delayed(optimize_graph=optimize_graph) @wraps(pd.to_datetime) @@ -4071,7 +4111,3 @@ def _repr_data_series(s, index): else: dtype = str(s.dtype) return pd.Series([dtype] + ['...'] * npartitions, index=index, name=s.name) - - -if PY3: - _Frame.to_delayed.__doc__ = to_delayed.__doc__ diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index dbdade336..d8759b91d 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -21,6 +21,8 @@ DataFrame - Avoid mutation in dataframe groupby tests (:pr:`3118`) `Matthew Rocklin`_ - ``read_csv``, ``read_table``, and ``read_parquet`` accept iterables of paths (:pr:`3124`) `Jim Crist`_ +- Deprecates the ``dd.to_delayed`` *function* in favor of the existing method + (:pr:`3126`) `Jim Crist`_ Bag +++ @@ -39,6 +41,8 @@ Core computing. (:pr:`3071`) `Jim Crist`_ - Rename ``dask.optimize`` module to ``dask.optimization`` (:pr:`3071`) `Jim Crist`_ - Change task ordering to do a full traversal (:pr:`3066`) `Matthew Rocklin`_ +- Adds an ``optimize_graph`` keyword to all ``to_delayed`` methods to allow + controlling whether optimizations occur on conversion. 
(:pr:`3126`) `Jim Crist`_ 0.16.1 / 2018-01-09 diff --git a/docs/source/dataframe-api.rst b/docs/source/dataframe-api.rst index 87ee05fec..f50d99619 100644 --- a/docs/source/dataframe-api.rst +++ b/docs/source/dataframe-api.rst @@ -292,7 +292,6 @@ Store DataFrames to_hdf to_records to_bag - to_delayed DataFrame Methods ~~~~~~~~~~~~~~~~~ @@ -341,7 +340,6 @@ Storage and Conversion .. autofunction:: from_bcolz .. autofunction:: from_dask_array .. autofunction:: from_delayed -.. autofunction:: to_delayed .. autofunction:: to_records .. autofunction:: to_csv .. autofunction:: to_bag diff --git a/docs/source/dataframe-create.rst b/docs/source/dataframe-create.rst index 7d429c3a2..759e4b0e6 100644 --- a/docs/source/dataframe-create.rst +++ b/docs/source/dataframe-create.rst @@ -39,7 +39,7 @@ Dask Collections: from_delayed from_dask_array dask.bag.core.Bag.to_dataframe - to_delayed + DataFrame.to_delayed to_records to_bag
to_delayed always optimizes

The `da.Array.to_delayed` call optimizes the underlying dask array's task graph before creating the dask.delayed objects. In some cases this is not desired. One option here would be to have an `optimize_graph=True` keyword argument. There might also be broader solutions?
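A small usage sketch of the keyword ultimately added by the patch above; the array and slicing are arbitrary examples.

```python
import dask.array as da

x = da.ones((4, 4), chunks=(2, 2))
y = x[1:][:, 1:]

# Default behavior: the graph is optimized (e.g. chained getitems fused)
# before each chunk is wrapped in a Delayed.
optimized = y.to_delayed()

# New keyword: keep the raw, unoptimized graph on each Delayed.
raw = y.to_delayed(optimize_graph=False)

# Either way, the computed chunk values agree.
assert (optimized[0, 0].compute() == raw[0, 0].compute()).all()
```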
dask/dask
diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py index a698df0bf..7d9e9492c 100644 --- a/dask/array/tests/test_array_core.py +++ b/dask/array/tests/test_array_core.py @@ -2370,12 +2370,20 @@ def test_to_delayed(): assert a.compute() == s -def test_to_delayed_optimizes(): +def test_to_delayed_optimize_graph(): x = da.ones((4, 4), chunks=(2, 2)) y = x[1:][1:][1:][:, 1:][:, 1:][:, 1:] + + # optimizations d = y.to_delayed().flatten().tolist()[0] assert len([k for k in d.dask if k[0].startswith('getitem')]) == 1 + # no optimizations + d2 = y.to_delayed(optimize_graph=False).flatten().tolist()[0] + assert dict(d2.dask) == dict(y.dask) + + assert (d.compute() == d2.compute()).all() + def test_cumulative(): x = da.arange(20, chunks=5) diff --git a/dask/bag/tests/test_bag.py b/dask/bag/tests/test_bag.py index d3973630e..004139fba 100644 --- a/dask/bag/tests/test_bag.py +++ b/dask/bag/tests/test_bag.py @@ -18,6 +18,7 @@ from dask.bag.core import (Bag, lazify, lazify_task, map, collect, reduceby, reify, partition, inline_singleton_lists, optimize, from_delayed) from dask.compatibility import BZ2File, GzipFile, PY2 +from dask.delayed import Delayed from dask.utils import filetexts, tmpfile, tmpdir from dask.utils_test import inc, add @@ -972,8 +973,6 @@ def test_bag_compute_forward_kwargs(): def test_to_delayed(): - from dask.delayed import Delayed - b = db.from_sequence([1, 2, 3, 4, 5, 6], npartitions=3) a, b, c = b.map(inc).to_delayed() assert all(isinstance(x, Delayed) for x in [a, b, c]) @@ -985,17 +984,24 @@ def test_to_delayed(): assert t.compute() == 21 -def test_to_delayed_optimizes(): +def test_to_delayed_optimize_graph(): b = db.from_sequence([1, 2, 3, 4, 5, 6], npartitions=1) b2 = b.map(inc).map(inc).map(inc) [d] = b2.to_delayed() text = str(dict(d.dask)) assert text.count('reify') == 1 + [d2] = b2.to_delayed(optimize_graph=False) + assert dict(d2.dask) == dict(b2.dask) + assert d.compute() == d2.compute() - d = b2.sum().to_delayed() + x = b2.sum() + d = x.to_delayed() text = str(dict(d.dask)) assert text.count('reify') == 0 + d2 = x.to_delayed(optimize_graph=False) + assert dict(d2.dask) == dict(x.dask) + assert d.compute() == d2.compute() [d] = b2.to_textfiles('foo.txt', compute=False) text = str(dict(d.dask)) diff --git a/dask/dataframe/io/tests/test_io.py b/dask/dataframe/io/tests/test_io.py index 45b034cd2..af6f1506b 100644 --- a/dask/dataframe/io/tests/test_io.py +++ b/dask/dataframe/io/tests/test_io.py @@ -528,17 +528,36 @@ def test_from_delayed_sorted(): def test_to_delayed(): df = pd.DataFrame({'x': [1, 2, 3, 4], 'y': [10, 20, 30, 40]}) ddf = dd.from_pandas(df, npartitions=2) + + # Frame a, b = ddf.to_delayed() assert isinstance(a, Delayed) assert isinstance(b, Delayed) - assert_eq(a.compute(), df.iloc[:2]) + # Scalar + x = ddf.x.sum() + dx = x.to_delayed() + assert isinstance(dx, Delayed) + assert_eq(dx.compute(), x) + -def test_to_delayed_optimizes(): +def test_to_delayed_optimize_graph(): df = pd.DataFrame({'x': list(range(20))}) ddf = dd.from_pandas(df, npartitions=20) - x = (ddf + 1).loc[:2] + ddf2 = (ddf + 1).loc[:2] - d = x.to_delayed()[0] + # Frame + d = ddf2.to_delayed()[0] assert len(d.dask) < 20 + d2 = ddf2.to_delayed(optimize_graph=False)[0] + assert sorted(d2.dask) == sorted(ddf2.dask) + assert_eq(ddf2.get_partition(0), d.compute()) + assert_eq(ddf2.get_partition(0), d2.compute()) + + # Scalar + x = ddf2.x.sum() + dx = x.to_delayed() + dx2 = x.to_delayed(optimize_graph=False) + assert len(dx.dask) < len(dx2.dask) + 
assert_eq(dx.compute(), dx2.compute())
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 6 }
0.16
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[complete]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 click==8.0.4 cloudpickle==2.2.1 -e git+https://github.com/dask/dask.git@cceb6e2ac50a85b3f34154612dc98508432f057c#egg=dask distributed==1.20.2 HeapDict==1.0.1 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work locket==1.0.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work msgpack-python==0.5.6 numpy==1.19.5 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pandas==1.1.5 partd==1.2.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==7.0.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 python-dateutil==2.9.0.post0 pytz==2025.2 six==1.17.0 sortedcontainers==2.4.0 tblib==1.7.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work toolz==0.12.0 tornado==6.1 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zict==2.1.0 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: dask channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - click==8.0.4 - cloudpickle==2.2.1 - distributed==1.20.2 - heapdict==1.0.1 - locket==1.0.0 - msgpack-python==0.5.6 - numpy==1.19.5 - pandas==1.1.5 - partd==1.2.0 - psutil==7.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - six==1.17.0 - sortedcontainers==2.4.0 - tblib==1.7.0 - toolz==0.12.0 - tornado==6.1 - zict==2.1.0 prefix: /opt/conda/envs/dask
[ "dask/array/tests/test_array_core.py::test_to_delayed_optimize_graph", "dask/bag/tests/test_bag.py::test_to_delayed_optimize_graph", "dask/dataframe/io/tests/test_io.py::test_to_delayed", "dask/dataframe/io/tests/test_io.py::test_to_delayed_optimize_graph" ]
[ "dask/array/tests/test_array_core.py::test_matmul" ]
[ "dask/array/tests/test_array_core.py::test_getem", "dask/array/tests/test_array_core.py::test_top", "dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules", "dask/array/tests/test_array_core.py::test_top_literals", "dask/array/tests/test_array_core.py::test_atop_literals", "dask/array/tests/test_array_core.py::test_concatenate3_on_scalars", "dask/array/tests/test_array_core.py::test_chunked_dot_product", "dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one", "dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions", "dask/array/tests/test_array_core.py::test_broadcast_dimensions", "dask/array/tests/test_array_core.py::test_Array", "dask/array/tests/test_array_core.py::test_uneven_chunks", "dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims", "dask/array/tests/test_array_core.py::test_keys", "dask/array/tests/test_array_core.py::test_Array_computation", "dask/array/tests/test_array_core.py::test_stack", "dask/array/tests/test_array_core.py::test_short_stack", "dask/array/tests/test_array_core.py::test_stack_scalars", "dask/array/tests/test_array_core.py::test_stack_promote_type", "dask/array/tests/test_array_core.py::test_stack_rechunk", "dask/array/tests/test_array_core.py::test_concatenate", "dask/array/tests/test_array_core.py::test_concatenate_unknown_axes", "dask/array/tests/test_array_core.py::test_concatenate_rechunk", "dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings", "dask/array/tests/test_array_core.py::test_block_simple_row_wise", "dask/array/tests/test_array_core.py::test_block_simple_column_wise", "dask/array/tests/test_array_core.py::test_block_with_1d_arrays_row_wise", "dask/array/tests/test_array_core.py::test_block_with_1d_arrays_multiple_rows", "dask/array/tests/test_array_core.py::test_block_with_1d_arrays_column_wise", "dask/array/tests/test_array_core.py::test_block_mixed_1d_and_2d", "dask/array/tests/test_array_core.py::test_block_complicated", "dask/array/tests/test_array_core.py::test_block_nested", "dask/array/tests/test_array_core.py::test_block_3d", "dask/array/tests/test_array_core.py::test_block_with_mismatched_shape", "dask/array/tests/test_array_core.py::test_block_no_lists", "dask/array/tests/test_array_core.py::test_block_invalid_nesting", "dask/array/tests/test_array_core.py::test_block_empty_lists", "dask/array/tests/test_array_core.py::test_block_tuple", "dask/array/tests/test_array_core.py::test_binops", "dask/array/tests/test_array_core.py::test_broadcast_shapes", "dask/array/tests/test_array_core.py::test_elemwise_on_scalars", "dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays", "dask/array/tests/test_array_core.py::test_elemwise_differently_chunked", "dask/array/tests/test_array_core.py::test_elemwise_dtype", "dask/array/tests/test_array_core.py::test_operators", "dask/array/tests/test_array_core.py::test_operator_dtype_promotion", "dask/array/tests/test_array_core.py::test_field_access", "dask/array/tests/test_array_core.py::test_field_access_with_shape", "dask/array/tests/test_array_core.py::test_T", "dask/array/tests/test_array_core.py::test_norm", "dask/array/tests/test_array_core.py::test_broadcast_to", "dask/array/tests/test_array_core.py::test_broadcast_to_array", "dask/array/tests/test_array_core.py::test_broadcast_to_scalar", "dask/array/tests/test_array_core.py::test_broadcast_to_chunks", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape0-v_shape0]", 
"dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape1-v_shape1]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape2-v_shape2]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape3-v_shape3]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape4-v_shape4]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape5-v_shape5]", "dask/array/tests/test_array_core.py::test_broadcast_operator[u_shape6-v_shape6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape0-new_shape0-chunks0]", "dask/array/tests/test_array_core.py::test_reshape[original_shape1-new_shape1-5]", "dask/array/tests/test_array_core.py::test_reshape[original_shape2-new_shape2-5]", "dask/array/tests/test_array_core.py::test_reshape[original_shape3-new_shape3-12]", "dask/array/tests/test_array_core.py::test_reshape[original_shape4-new_shape4-12]", "dask/array/tests/test_array_core.py::test_reshape[original_shape5-new_shape5-chunks5]", "dask/array/tests/test_array_core.py::test_reshape[original_shape6-new_shape6-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape7-new_shape7-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape8-new_shape8-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape9-new_shape9-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape10-new_shape10-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape11-new_shape11-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape12-new_shape12-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape13-new_shape13-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape14-new_shape14-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape15-new_shape15-2]", "dask/array/tests/test_array_core.py::test_reshape[original_shape16-new_shape16-chunks16]", "dask/array/tests/test_array_core.py::test_reshape[original_shape17-new_shape17-3]", "dask/array/tests/test_array_core.py::test_reshape[original_shape18-new_shape18-4]", "dask/array/tests/test_array_core.py::test_reshape[original_shape19-new_shape19-chunks19]", "dask/array/tests/test_array_core.py::test_reshape[original_shape20-new_shape20-1]", "dask/array/tests/test_array_core.py::test_reshape[original_shape21-new_shape21-1]", "dask/array/tests/test_array_core.py::test_reshape[original_shape22-new_shape22-24]", "dask/array/tests/test_array_core.py::test_reshape[original_shape23-new_shape23-6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape24-new_shape24-6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape25-new_shape25-6]", "dask/array/tests/test_array_core.py::test_reshape[original_shape26-new_shape26-chunks26]", "dask/array/tests/test_array_core.py::test_reshape[original_shape27-new_shape27-chunks27]", "dask/array/tests/test_array_core.py::test_reshape[original_shape28-new_shape28-chunks28]", "dask/array/tests/test_array_core.py::test_reshape[original_shape29-new_shape29-chunks29]", "dask/array/tests/test_array_core.py::test_reshape[original_shape30-new_shape30-chunks30]", "dask/array/tests/test_array_core.py::test_reshape[original_shape31-new_shape31-chunks31]", "dask/array/tests/test_array_core.py::test_reshape[original_shape32-new_shape32-chunks32]", "dask/array/tests/test_array_core.py::test_reshape[original_shape33-new_shape33-chunks33]", "dask/array/tests/test_array_core.py::test_reshape[original_shape34-new_shape34-chunks34]", 
"dask/array/tests/test_array_core.py::test_reshape_exceptions", "dask/array/tests/test_array_core.py::test_reshape_splat", "dask/array/tests/test_array_core.py::test_reshape_fails_for_dask_only", "dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions", "dask/array/tests/test_array_core.py::test_full", "dask/array/tests/test_array_core.py::test_map_blocks", "dask/array/tests/test_array_core.py::test_map_blocks2", "dask/array/tests/test_array_core.py::test_map_blocks_with_constants", "dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs", "dask/array/tests/test_array_core.py::test_map_blocks_with_chunks", "dask/array/tests/test_array_core.py::test_map_blocks_dtype_inference", "dask/array/tests/test_array_core.py::test_from_function_requires_block_args", "dask/array/tests/test_array_core.py::test_repr", "dask/array/tests/test_array_core.py::test_slicing_with_ellipsis", "dask/array/tests/test_array_core.py::test_slicing_with_ndarray", "dask/array/tests/test_array_core.py::test_dtype", "dask/array/tests/test_array_core.py::test_blockdims_from_blockshape", "dask/array/tests/test_array_core.py::test_coerce", "dask/array/tests/test_array_core.py::test_bool", "dask/array/tests/test_array_core.py::test_store_delayed_target", "dask/array/tests/test_array_core.py::test_store", "dask/array/tests/test_array_core.py::test_store_regions", "dask/array/tests/test_array_core.py::test_store_compute_false", "dask/array/tests/test_array_core.py::test_store_locks", "dask/array/tests/test_array_core.py::test_to_dask_dataframe", "dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions", "dask/array/tests/test_array_core.py::test_dtype_complex", "dask/array/tests/test_array_core.py::test_astype", "dask/array/tests/test_array_core.py::test_arithmetic", "dask/array/tests/test_array_core.py::test_elemwise_consistent_names", "dask/array/tests/test_array_core.py::test_optimize", "dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays", "dask/array/tests/test_array_core.py::test_getter", "dask/array/tests/test_array_core.py::test_size", "dask/array/tests/test_array_core.py::test_nbytes", "dask/array/tests/test_array_core.py::test_itemsize", "dask/array/tests/test_array_core.py::test_Array_normalizes_dtype", "dask/array/tests/test_array_core.py::test_from_array_with_lock", "dask/array/tests/test_array_core.py::test_from_array_tasks_always_call_getter", "dask/array/tests/test_array_core.py::test_from_array_no_asarray", "dask/array/tests/test_array_core.py::test_from_array_getitem", "dask/array/tests/test_array_core.py::test_from_array_minus_one", "dask/array/tests/test_array_core.py::test_asarray", "dask/array/tests/test_array_core.py::test_asanyarray", "dask/array/tests/test_array_core.py::test_from_func", "dask/array/tests/test_array_core.py::test_concatenate3_2", "dask/array/tests/test_array_core.py::test_map_blocks3", "dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks", "dask/array/tests/test_array_core.py::test_normalize_chunks", "dask/array/tests/test_array_core.py::test_raise_on_no_chunks", "dask/array/tests/test_array_core.py::test_chunks_is_immutable", "dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs", "dask/array/tests/test_array_core.py::test_long_slice", "dask/array/tests/test_array_core.py::test_ellipsis_slicing", "dask/array/tests/test_array_core.py::test_point_slicing", "dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice", "dask/array/tests/test_array_core.py::test_slice_with_floats", 
"dask/array/tests/test_array_core.py::test_slice_with_integer_types", "dask/array/tests/test_array_core.py::test_index_with_integer_types", "dask/array/tests/test_array_core.py::test_vindex_basic", "dask/array/tests/test_array_core.py::test_vindex_nd", "dask/array/tests/test_array_core.py::test_vindex_negative", "dask/array/tests/test_array_core.py::test_vindex_errors", "dask/array/tests/test_array_core.py::test_vindex_merge", "dask/array/tests/test_array_core.py::test_empty_array", "dask/array/tests/test_array_core.py::test_memmap", "dask/array/tests/test_array_core.py::test_to_npy_stack", "dask/array/tests/test_array_core.py::test_view", "dask/array/tests/test_array_core.py::test_view_fortran", "dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension", "dask/array/tests/test_array_core.py::test_broadcast_chunks", "dask/array/tests/test_array_core.py::test_chunks_error", "dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs", "dask/array/tests/test_array_core.py::test_dont_fuse_outputs", "dask/array/tests/test_array_core.py::test_dont_dealias_outputs", "dask/array/tests/test_array_core.py::test_timedelta_op", "dask/array/tests/test_array_core.py::test_to_delayed", "dask/array/tests/test_array_core.py::test_cumulative", "dask/array/tests/test_array_core.py::test_atop_names", "dask/array/tests/test_array_core.py::test_atop_new_axes", "dask/array/tests/test_array_core.py::test_atop_kwargs", "dask/array/tests/test_array_core.py::test_atop_chunks", "dask/array/tests/test_array_core.py::test_from_delayed", "dask/array/tests/test_array_core.py::test_A_property", "dask/array/tests/test_array_core.py::test_copy_mutate", "dask/array/tests/test_array_core.py::test_npartitions", "dask/array/tests/test_array_core.py::test_astype_gh1151", "dask/array/tests/test_array_core.py::test_elemwise_name", "dask/array/tests/test_array_core.py::test_map_blocks_name", "dask/array/tests/test_array_core.py::test_array_picklable", "dask/array/tests/test_array_core.py::test_from_array_raises_on_bad_chunks", "dask/array/tests/test_array_core.py::test_concatenate_axes", "dask/array/tests/test_array_core.py::test_atop_concatenate", "dask/array/tests/test_array_core.py::test_common_blockdim", "dask/array/tests/test_array_core.py::test_uneven_chunks_that_fit_neatly", "dask/array/tests/test_array_core.py::test_elemwise_uneven_chunks", "dask/array/tests/test_array_core.py::test_uneven_chunks_atop", "dask/array/tests/test_array_core.py::test_warn_bad_rechunking", "dask/array/tests/test_array_core.py::test_optimize_fuse_keys", "dask/array/tests/test_array_core.py::test_concatenate_stack_dont_warn", "dask/array/tests/test_array_core.py::test_map_blocks_delayed", "dask/array/tests/test_array_core.py::test_no_chunks", "dask/array/tests/test_array_core.py::test_no_chunks_2d", "dask/array/tests/test_array_core.py::test_no_chunks_yes_chunks", "dask/array/tests/test_array_core.py::test_raise_informative_errors_no_chunks", "dask/array/tests/test_array_core.py::test_no_chunks_slicing_2d", "dask/array/tests/test_array_core.py::test_index_array_with_array_1d", "dask/array/tests/test_array_core.py::test_index_array_with_array_2d", "dask/array/tests/test_array_core.py::test_setitem_1d", "dask/array/tests/test_array_core.py::test_setitem_2d", "dask/array/tests/test_array_core.py::test_setitem_errs", "dask/array/tests/test_array_core.py::test_zero_slice_dtypes", "dask/array/tests/test_array_core.py::test_zero_sized_array_rechunk", "dask/array/tests/test_array_core.py::test_atop_zero_shape", 
"dask/array/tests/test_array_core.py::test_atop_zero_shape_new_axes", "dask/array/tests/test_array_core.py::test_broadcast_against_zero_shape", "dask/array/tests/test_array_core.py::test_from_array_name", "dask/array/tests/test_array_core.py::test_concatenate_errs", "dask/array/tests/test_array_core.py::test_stack_errs", "dask/array/tests/test_array_core.py::test_atop_with_numpy_arrays", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-100]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-6]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-100]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-6]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-100]", "dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-6]", "dask/array/tests/test_array_core.py::test_constructor_plugin", "dask/array/tests/test_array_core.py::test_no_warnings_on_metadata", "dask/array/tests/test_array_core.py::test_delayed_array_key_hygeine", "dask/array/tests/test_array_core.py::test_empty_chunks_in_array_len", "dask/bag/tests/test_bag.py::test_Bag", "dask/bag/tests/test_bag.py::test_keys", "dask/bag/tests/test_bag.py::test_bag_map", "dask/bag/tests/test_bag.py::test_map_method", "dask/bag/tests/test_bag.py::test_starmap", "dask/bag/tests/test_bag.py::test_filter", "dask/bag/tests/test_bag.py::test_remove", "dask/bag/tests/test_bag.py::test_iter", "dask/bag/tests/test_bag.py::test_repr[str]", "dask/bag/tests/test_bag.py::test_repr[repr]", "dask/bag/tests/test_bag.py::test_pluck", "dask/bag/tests/test_bag.py::test_pluck_with_default", "dask/bag/tests/test_bag.py::test_unzip", "dask/bag/tests/test_bag.py::test_fold", "dask/bag/tests/test_bag.py::test_distinct", "dask/bag/tests/test_bag.py::test_frequencies", "dask/bag/tests/test_bag.py::test_topk", "dask/bag/tests/test_bag.py::test_topk_with_non_callable_key[1]", "dask/bag/tests/test_bag.py::test_topk_with_non_callable_key[2]", "dask/bag/tests/test_bag.py::test_topk_with_multiarg_lambda", "dask/bag/tests/test_bag.py::test_lambdas", "dask/bag/tests/test_bag.py::test_reductions", "dask/bag/tests/test_bag.py::test_reduction_names", "dask/bag/tests/test_bag.py::test_tree_reductions", "dask/bag/tests/test_bag.py::test_aggregation[1]", "dask/bag/tests/test_bag.py::test_aggregation[3]", "dask/bag/tests/test_bag.py::test_aggregation[4]", "dask/bag/tests/test_bag.py::test_non_splittable_reductions[1]", "dask/bag/tests/test_bag.py::test_non_splittable_reductions[10]", "dask/bag/tests/test_bag.py::test_std", "dask/bag/tests/test_bag.py::test_var", "dask/bag/tests/test_bag.py::test_join", "dask/bag/tests/test_bag.py::test_foldby", "dask/bag/tests/test_bag.py::test_foldby_tree_reduction", "dask/bag/tests/test_bag.py::test_map_partitions", "dask/bag/tests/test_bag.py::test_map_partitions_args_kwargs", "dask/bag/tests/test_bag.py::test_random_sample_size", "dask/bag/tests/test_bag.py::test_random_sample_prob_range", "dask/bag/tests/test_bag.py::test_random_sample_repeated_computation", "dask/bag/tests/test_bag.py::test_random_sample_different_definitions", "dask/bag/tests/test_bag.py::test_random_sample_random_state", "dask/bag/tests/test_bag.py::test_lazify_task", "dask/bag/tests/test_bag.py::test_lazify", "dask/bag/tests/test_bag.py::test_inline_singleton_lists", "dask/bag/tests/test_bag.py::test_take", "dask/bag/tests/test_bag.py::test_take_npartitions", "dask/bag/tests/test_bag.py::test_take_npartitions_warn", "dask/bag/tests/test_bag.py::test_map_is_lazy", 
"dask/bag/tests/test_bag.py::test_can_use_dict_to_make_concrete", "dask/bag/tests/test_bag.py::test_read_text", "dask/bag/tests/test_bag.py::test_read_text_large", "dask/bag/tests/test_bag.py::test_read_text_encoding", "dask/bag/tests/test_bag.py::test_read_text_large_gzip", "dask/bag/tests/test_bag.py::test_from_sequence", "dask/bag/tests/test_bag.py::test_from_long_sequence", "dask/bag/tests/test_bag.py::test_product", "dask/bag/tests/test_bag.py::test_partition_collect", "dask/bag/tests/test_bag.py::test_groupby", "dask/bag/tests/test_bag.py::test_groupby_with_indexer", "dask/bag/tests/test_bag.py::test_groupby_with_npartitions_changed", "dask/bag/tests/test_bag.py::test_concat", "dask/bag/tests/test_bag.py::test_flatten", "dask/bag/tests/test_bag.py::test_concat_after_map", "dask/bag/tests/test_bag.py::test_args", "dask/bag/tests/test_bag.py::test_to_dataframe", "dask/bag/tests/test_bag.py::test_to_textfiles[gz-GzipFile]", "dask/bag/tests/test_bag.py::test_to_textfiles[-open]", "dask/bag/tests/test_bag.py::test_to_textfiles[bz2-BZ2File]", "dask/bag/tests/test_bag.py::test_to_textfiles_name_function_preserves_order", "dask/bag/tests/test_bag.py::test_to_textfiles_name_function_warn", "dask/bag/tests/test_bag.py::test_to_textfiles_encoding", "dask/bag/tests/test_bag.py::test_to_textfiles_inputs", "dask/bag/tests/test_bag.py::test_to_textfiles_endlines", "dask/bag/tests/test_bag.py::test_string_namespace", "dask/bag/tests/test_bag.py::test_string_namespace_with_unicode", "dask/bag/tests/test_bag.py::test_str_empty_split", "dask/bag/tests/test_bag.py::test_map_with_iterator_function", "dask/bag/tests/test_bag.py::test_ensure_compute_output_is_concrete", "dask/bag/tests/test_bag.py::test_bag_class_extend", "dask/bag/tests/test_bag.py::test_gh715", "dask/bag/tests/test_bag.py::test_bag_compute_forward_kwargs", "dask/bag/tests/test_bag.py::test_to_delayed", "dask/bag/tests/test_bag.py::test_from_delayed", "dask/bag/tests/test_bag.py::test_from_delayed_iterator", "dask/bag/tests/test_bag.py::test_range", "dask/bag/tests/test_bag.py::test_zip[1]", "dask/bag/tests/test_bag.py::test_zip[7]", "dask/bag/tests/test_bag.py::test_zip[10]", "dask/bag/tests/test_bag.py::test_zip[28]", "dask/bag/tests/test_bag.py::test_repartition[1-1]", "dask/bag/tests/test_bag.py::test_repartition[1-2]", "dask/bag/tests/test_bag.py::test_repartition[1-7]", "dask/bag/tests/test_bag.py::test_repartition[1-11]", "dask/bag/tests/test_bag.py::test_repartition[1-23]", "dask/bag/tests/test_bag.py::test_repartition[2-1]", "dask/bag/tests/test_bag.py::test_repartition[2-2]", "dask/bag/tests/test_bag.py::test_repartition[2-7]", "dask/bag/tests/test_bag.py::test_repartition[2-11]", "dask/bag/tests/test_bag.py::test_repartition[2-23]", "dask/bag/tests/test_bag.py::test_repartition[5-1]", "dask/bag/tests/test_bag.py::test_repartition[5-2]", "dask/bag/tests/test_bag.py::test_repartition[5-7]", "dask/bag/tests/test_bag.py::test_repartition[5-11]", "dask/bag/tests/test_bag.py::test_repartition[5-23]", "dask/bag/tests/test_bag.py::test_repartition[12-1]", "dask/bag/tests/test_bag.py::test_repartition[12-2]", "dask/bag/tests/test_bag.py::test_repartition[12-7]", "dask/bag/tests/test_bag.py::test_repartition[12-11]", "dask/bag/tests/test_bag.py::test_repartition[12-23]", "dask/bag/tests/test_bag.py::test_repartition[23-1]", "dask/bag/tests/test_bag.py::test_repartition[23-2]", "dask/bag/tests/test_bag.py::test_repartition[23-7]", "dask/bag/tests/test_bag.py::test_repartition[23-11]", "dask/bag/tests/test_bag.py::test_repartition[23-23]", 
"dask/bag/tests/test_bag.py::test_repartition_names", "dask/bag/tests/test_bag.py::test_accumulate", "dask/bag/tests/test_bag.py::test_groupby_tasks", "dask/bag/tests/test_bag.py::test_groupby_tasks_names", "dask/bag/tests/test_bag.py::test_groupby_tasks_2[1000-20-100]", "dask/bag/tests/test_bag.py::test_groupby_tasks_2[12345-234-1042]", "dask/bag/tests/test_bag.py::test_groupby_tasks_3", "dask/bag/tests/test_bag.py::test_to_textfiles_empty_partitions", "dask/bag/tests/test_bag.py::test_reduction_empty", "dask/bag/tests/test_bag.py::test_reduction_empty_aggregate[1]", "dask/bag/tests/test_bag.py::test_reduction_empty_aggregate[2]", "dask/bag/tests/test_bag.py::test_reduction_empty_aggregate[4]", "dask/bag/tests/test_bag.py::test_reduction_with_non_comparable_objects", "dask/bag/tests/test_bag.py::test_empty", "dask/bag/tests/test_bag.py::test_bag_picklable", "dask/bag/tests/test_bag.py::test_msgpack_unicode", "dask/bag/tests/test_bag.py::test_bag_with_single_callable", "dask/bag/tests/test_bag.py::test_optimize_fuse_keys", "dask/bag/tests/test_bag.py::test_reductions_are_lazy", "dask/bag/tests/test_bag.py::test_repeated_groupby", "dask/bag/tests/test_bag.py::test_temporary_directory", "dask/bag/tests/test_bag.py::test_empty_bag", "dask/bag/tests/test_bag.py::test_bag_paths", "dask/dataframe/io/tests/test_io.py::test_meta_from_array", "dask/dataframe/io/tests/test_io.py::test_meta_from_1darray", "dask/dataframe/io/tests/test_io.py::test_meta_from_recarray", "dask/dataframe/io/tests/test_io.py::test_from_array", "dask/dataframe/io/tests/test_io.py::test_from_array_with_record_dtype", "dask/dataframe/io/tests/test_io.py::test_from_pandas_dataframe", "dask/dataframe/io/tests/test_io.py::test_from_pandas_small", "dask/dataframe/io/tests/test_io.py::test_from_pandas_series", "dask/dataframe/io/tests/test_io.py::test_from_pandas_non_sorted", "dask/dataframe/io/tests/test_io.py::test_from_pandas_single_row", "dask/dataframe/io/tests/test_io.py::test_from_pandas_with_datetime_index", "dask/dataframe/io/tests/test_io.py::test_DataFrame_from_dask_array", "dask/dataframe/io/tests/test_io.py::test_Series_from_dask_array", "dask/dataframe/io/tests/test_io.py::test_from_dask_array_compat_numpy_array", "dask/dataframe/io/tests/test_io.py::test_from_dask_array_compat_numpy_array_1d", "dask/dataframe/io/tests/test_io.py::test_from_dask_array_struct_dtype", "dask/dataframe/io/tests/test_io.py::test_from_dask_array_unknown_chunks", "dask/dataframe/io/tests/test_io.py::test_to_bag", "dask/dataframe/io/tests/test_io.py::test_to_records", "dask/dataframe/io/tests/test_io.py::test_from_delayed", "dask/dataframe/io/tests/test_io.py::test_from_delayed_sorted" ]
[]
BSD 3-Clause "New" or "Revised" License
2,109
[ "docs/source/dataframe-api.rst", "dask/bag/core.py", "dask/dataframe/core.py", "docs/source/dataframe-create.rst", "dask/array/core.py", "docs/source/changelog.rst" ]
[ "docs/source/dataframe-api.rst", "dask/bag/core.py", "dask/dataframe/core.py", "docs/source/dataframe-create.rst", "dask/array/core.py", "docs/source/changelog.rst" ]
joke2k__faker-695
80aa92145296f84122c1b43c8f3d63aef5cd6417
2018-02-02 00:11:44
d26db45eebb9dcd02eb73099bb98b660f0e03aad
doctorlard: I'm trying to get the tests passing first. The tox and Py 2.7 build tests seem to be choking on "Māori" being in a comment line, despite the magic PEP-263 comment at the top.

doctorlard: For now I have this:

```
suffixes_female = ('',)
suffixes_male = ('',)
```

Which at least passes.

doctorlard: @fcurella - I've updated this commit with data from DIA, and surnames culled from cemetery plot and electoral roll data.
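For readers unfamiliar with the "magic PEP-263 comment" mentioned above, here is a minimal sketch of what it looks like; the `te_reo_parts` tuple mirrors the one the patch below actually adds, and the failure the author describes happened despite this declaration, which is why the Māori comment was temporarily removed:

```
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

# With the coding declaration on the first line, Python 2.7 reads this
# source file as UTF-8, so non-ASCII text such as "Māori" is valid even
# inside comments. Python 3 assumes UTF-8 source encoding by default.
te_reo_parts = ('wai', 'whanga', 'puke')  # Māori placename parts
```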
diff --git a/README.rst b/README.rst index c33feef1..a32e8e0a 100644 --- a/README.rst +++ b/README.rst @@ -132,6 +132,7 @@ Included localized providers: - `en\_AU <https://faker.readthedocs.io/en/master/locales/en_AU.html>`__ - English (Australia) - `en\_CA <https://faker.readthedocs.io/en/master/locales/en_CA.html>`__ - English (Canada) - `en\_GB <https://faker.readthedocs.io/en/master/locales/en_GB.html>`__ - English (Great Britain) +- `en\_NZ <https://faker.readthedocs.io/en/master/locales/en_NZ.html>`__ - English (New Zealand) - `en\_US <https://faker.readthedocs.io/en/master/locales/en_US.html>`__ - English (United States) - `es\_ES <https://faker.readthedocs.io/en/master/locales/es_ES.html>`__ - Spanish (Spain) - `es\_MX <https://faker.readthedocs.io/en/master/locales/es_MX.html>`__ - Spanish (Mexico) diff --git a/faker/providers/address/en_NZ/__init__.py b/faker/providers/address/en_NZ/__init__.py new file mode 100644 index 00000000..40fcb7be --- /dev/null +++ b/faker/providers/address/en_NZ/__init__.py @@ -0,0 +1,254 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from ..en import Provider as AddressProvider + + +class Provider(AddressProvider): + + city_prefixes = ( + 'North', + 'East', + 'West', + 'South', + 'New', + 'Lake', + 'Port', + 'Upper', + 'Lower', + 'High', + 'Mount', + ) + + city_suffixes = ( + 'town', 'ton', 'land', 'ville', 'berg', 'burgh', + 'borough', 'bury', 'burn', 'ing', 'port', 'mouth', 'stone', 'ings' + 'mouth', 'fort', 'haven', 'leigh', 'side', 'gate', 'neath', 'side', + ' Flats', ' Hill' + ) + + building_number_formats = ('%##', '%#', '%',) + + street_suffixes = ( + # Most common: + 'Arcade', 'Arcade', 'Arcade', + 'Avenue', 'Avenue', 'Avenue', 'Avenue', + 'Avenue', 'Avenue', 'Avenue', 'Avenue', + 'Beach Road', 'Beach Road', 'Beach Road', 'Beach Road', + 'Crescent', 'Crescent', 'Crescent', 'Crescent', 'Crescent', + 'Drive', 'Drive', 'Drive', 'Drive', + 'Mews', 'Mews', 'Mews', + 'Place', 'Place', 'Place', 'Place', + 'Range Road', 'Range Road', + 'Road', 'Road', 'Road', 'Road', 'Road', 'Road', 'Road', 'Road', 'Road', + 'Street', 'Street', 'Street', 'Street', 'Street', 'Street', 'Street', + 'Street', 'Street', 'Street', 'Street', 'Street', 'Street', 'Street', + 'Street', 'Street', 'Street', 'Street', 'Street', 'Street', 'Street', + 'Terrace', 'Terrace', 'Terrace', + 'Way', 'Way', 'Way', + + # Other: + 'Access', 'Alley', 'Alleyway', 'Amble', 'Anchorage', 'Approach', + 'Broadway', 'Bypass', 'Causeway', 'Centre', + 'Circle', 'Circuit', 'Close', 'Concourse', 'Copse', 'Corner', 'Court', + 'Cove', + 'Crest', 'Cross', 'Crossing', + 'Cutting', + 'Esplanade', + 'Flats', + 'Gardens', 'Grove', 'Heights', 'Highway', + 'Lane', 'Line', 'Keys', + 'Parade', 'Park', 'Pass', + 'Plaza', + 'Point', 'Quay', + 'Reserve', + 'Ridge', + 'Rise', + 'Square', + 'Track', 'Trail', + 'View', + ) + + # Māori nouns commonly present in placenames. + te_reo_parts = ( + 'ara', + 'awa', + 'horo', + 'kawa', + 'koro', + 'kowhai', + 'manawa', + 'mata', + 'maunga', + 'moko', + 'motu', + 'ngauru', + 'pa' + 'papa', + 'po', + 'puke', + 'rangi', + 'rohe', + 'rongo', + 'roto', + 'tahi', + 'tai', + 'tangi', + 'tau', + 'tere', + 'tipu', + 'wai', + 'waka', + 'whaka', + 'whanga', + 'whare', + 'weka', + ) + + # Māori endings (usually adjectives) commonly present in placenames. 
+ te_reo_endings = ( + 'hanga', + 'hope', + 'iti', + 'iti', + 'kiwi', + 'makau', + 'nui', + 'nui', + 'nui', + 'nuku', + 'roa', + 'rua', + 'tanga', + 'tapu', + 'toa', + 'whenua', + 'whero', + 'whitu', + ) + + postcode_formats = ( + # as per https://en.wikipedia.org/wiki/Postcodes_in_New_Zealand + # Northland + '0%##', + # Auckland + '1###', + '20##', + '21##', + '22##', + '23##', + '24##', + '25##', + '26##', + # Central North Island + '3###', + '4###', + # Lower North Island + '50##', + '51##', + '52##', + '53##', + '55##', + '57##', + '58##', + # Wellington + '60##', + '61##', + '62##', + '64##', + '69##', + # Upper South Island + '7###', + # Christchurch + '80##', + '81##', + '82##', + '84##', + '85##', + '86##', + '88##', + '89##', + # Southland + '90##', + '92##', + '93##', + '94##', + '95##', + '96##', + '97##', + '98##', + ) + + city_formats = ( + '{{first_name}}{{city_suffix}}', + '{{last_name}}{{city_suffix}}', + '{{last_name}}{{city_suffix}}', + '{{last_name}}{{city_suffix}}', + '{{last_name}}{{city_suffix}}', + '{{last_name}}{{city_suffix}}', + '{{city_prefix}} {{last_name}}{{city_suffix}}', + '{{te_reo_first}}{{te_reo_ending}}', + '{{te_reo_first}}{{te_reo_ending}}', + '{{te_reo_first}}{{te_reo_ending}}', + '{{te_reo_first}}{{te_reo_ending}}', + '{{te_reo_first}}{{te_reo_part}}{{te_reo_ending}}', + '{{te_reo_first}}{{te_reo_part}}{{te_reo_ending}}', + ) + + street_name_formats = ( + '{{first_name}} {{street_suffix}}', + '{{last_name}} {{street_suffix}}', + '{{last_name}} {{street_suffix}}', + '{{last_name}} {{street_suffix}}', + '{{last_name}}-{{last_name}} {{street_suffix}}', + '{{te_reo_first}}{{te_reo_ending}} {{street_suffix}}', + '{{te_reo_first}}{{te_reo_ending}} {{street_suffix}}', + '{{te_reo_first}}{{te_reo_part}}{{te_reo_ending}} {{street_suffix}}', + ) + + street_address_formats = ( + '{{building_number}} {{street_name}}', + '{{building_number}} {{street_name}}', + '{{building_number}} {{street_name}}', + '{{building_number}} {{street_name}}\nRD {{rd_number}}', + '{{secondary_address}}\n{{building_number}} {{street_name}}', + 'PO Box {{building_number}}', + ) + + address_formats = ( + "{{street_address}}\n{{city}} {{postcode}}", + ) + + secondary_address_formats = ( + 'Apt. %##', + 'Flat %#', + 'Suite %##', + 'Unit %#', + 'Level %' + ) + + def state(self): + # New Zealand does not have states. + return '' + + def te_reo_part(self): + return self.random_element(self.te_reo_parts) + + def te_reo_first(self): + return self.random_element(self.te_reo_parts).capitalize() + + def te_reo_ending(self): + return self.random_element(self.te_reo_parts + self.te_reo_endings) + + def city_prefix(self): + return self.random_element(self.city_prefixes) + + def city_suffix(self): + return self.random_element(self.city_suffixes) + + def rd_number(self): + return self.random_element([str(i) for i in range(1, 11)]) + + def secondary_address(self): + return self.numerify( + self.random_element( + self.secondary_address_formats)) diff --git a/faker/providers/automotive/en_NZ/__init__.py b/faker/providers/automotive/en_NZ/__init__.py new file mode 100644 index 00000000..ef69fd23 --- /dev/null +++ b/faker/providers/automotive/en_NZ/__init__.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from .. 
import Provider as AutomotiveProvider + + +class Provider(AutomotiveProvider): + # See https://en.wikipedia.org/wiki/Vehicle_registration_plates_of_New_Zealand + license_formats = ( + # Old plates + '??%##', + '??%###', + '??%###', + # Three letters since 2002 + 'A??%##', + 'B??%##', + 'C??%##', + 'D??%##', + 'E??%##', + 'F??%##', + 'G??%##', + 'H??%##', + 'J??%##', + 'K??%##', + 'L??%##', + 'M??%##', + # After 2018 + 'N??%##', + ) diff --git a/faker/providers/internet/en_NZ/__init__.py b/faker/providers/internet/en_NZ/__init__.py new file mode 100644 index 00000000..bc08316d --- /dev/null +++ b/faker/providers/internet/en_NZ/__init__.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from .. import Provider as InternetProvider + + +class Provider(InternetProvider): + + free_email_domains = ( + 'gmail.com', + 'yahoo.com', + 'hotmail.com', + 'inspire.net.nz', + 'xtra.co.nz', + ) + + tlds = ( + 'nz', + 'co.nz', + 'org.nz', + 'kiwi', + 'kiwi.nz', + 'geek.nz', + 'net.nz', + 'school.nz', + 'ac.nz', + 'maori.nz' + ) diff --git a/faker/providers/person/en_NZ/__init__.py b/faker/providers/person/en_NZ/__init__.py new file mode 100644 index 00000000..2c545f1d --- /dev/null +++ b/faker/providers/person/en_NZ/__init__.py @@ -0,0 +1,1417 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from collections import OrderedDict + +from .. import Provider as PersonProvider + + +class Provider(PersonProvider): + formats = ( + '{{first_name_male}} {{last_name}}', + '{{first_name_male}} {{last_name}}', + '{{first_name_male}} {{last_name}}', + '{{first_name_male}} {{last_name}}', + '{{first_name_male}} {{last_name}}-{{last_name}}', + '{{first_name_female}} {{last_name}}', + '{{first_name_female}} {{last_name}}', + '{{first_name_female}} {{last_name}}', + '{{first_name_female}} {{last_name}}', + '{{first_name_female}} {{last_name}}-{{last_name}}', + ) + + # Names compiled from the following sources: + # + # https://www.dia.govt.nz/diawebsite.nsf/wpg_URL/Services-Births-Deaths-and-Marriages-Most-Popular-Male-and-Female-First-Names + + first_names_male = OrderedDict(( + ("Aaron", 9912), + ("Adam", 7639), + ("Adrian", 2420), + ("Aidan", 1521), + ("Aiden", 782), + ("Alan", 5689), + ("Alex", 2819), + ("Alexander", 7783), + ("Alistair", 429), + ("Allan", 3148), + ("Allen", 51), + ("Andre", 127), + ("Andrew", 25593), + ("Angus", 1680), + ("Anthony", 12549), + ("Antony", 1594), + ("Archer", 381), + ("Archie", 774), + ("Arlo", 584), + ("Arthur", 630), + ("Asher", 319), + ("Ashley", 861), + ("Ashton", 1236), + ("Austin", 688), + ("Bailey", 1304), + ("Barry", 3624), + ("Beau", 491), + ("Beauden", 125), + ("Ben", 2427), + ("Benjamin", 15497), + ("Bernard", 657), + ("Bevan", 634), + ("Blair", 2863), + ("Blake", 3500), + ("Bodhi", 70), + ("Brad", 450), + ("Bradley", 3910), + ("Brandon", 1000), + ("Braxton", 741), + ("Brayden", 317), + ("Brendan", 2010), + ("Brendon", 3163), + ("Brent", 5564), + ("Brett", 4598), + ("Brian", 6247), + ("Brodie", 216), + ("Brooklyn", 406), + ("Bruce", 6079), + ("Bryan", 1435), + ("Caleb", 5374), + ("Callum", 2364), + ("Cameron", 7756), + ("Campbell", 422), + ("Carl", 3304), + ("Carlos", 122), + ("Carter", 1308), + ("Charles", 3933), + ("Charlie", 2367), + ("Chase", 174), + ("Christian", 1138), + ("Christopher", 23459), + ("Clayton", 59), + ("Clinton", 1004), + ("Cody", 2482), + ("Cohen", 99), + ("Cole", 648), + ("Colin", 3980), + ("Connor", 4632), + ("Conor", 54), + ("Cooper", 2113), + ("Corey", 1656), + ("Cory", 129), + ("Craig", 12702), + 
("Cruz", 52), + ("Damian", 1084), + ("Damon", 211), + ("Daniel", 23515), + ("Darren", 3143), + ("Darrin", 217), + ("Darryl", 1517), + ("Darryn", 260), + ("Daryl", 421), + ("David", 36792), + ("Dean", 6096), + ("Declan", 108), + ("Denis", 66), + ("Dennis", 1129), + ("Derek", 1307), + ("Desmond", 224), + ("Dillon", 63), + ("Dion", 1283), + ("Dominic", 801), + ("Donald", 2405), + ("Douglas", 2587), + ("Duncan", 471), + ("Dwayne", 57), + ("Dylan", 6564), + ("Edward", 4864), + ("Eli", 961), + ("Elijah", 2137), + ("Elliot", 54), + ("Eric", 808), + ("Ethan", 6578), + ("Ezra", 309), + ("Felix", 769), + ("Finn", 2084), + ("Fletcher", 447), + ("Flynn", 1577), + ("Francis", 420), + ("Frank", 46), + ("Fraser", 51), + ("Frederick", 49), + ("Gabriel", 739), + ("Gareth", 2087), + ("Garry", 1840), + ("Gary", 5520), + ("Gavin", 3197), + ("Geoffrey", 4439), + ("George", 7320), + ("Gerald", 104), + ("Gerard", 614), + ("Glen", 2709), + ("Glenn", 3983), + ("Gordon", 1444), + ("Graeme", 4705), + ("Graham", 3746), + ("Grant", 8355), + ("Grayson", 259), + ("Gregory", 7916), + ("Hamish", 5758), + ("Harley", 403), + ("Harrison", 2800), + ("Harry", 2454), + ("Harvey", 192), + ("Hayden", 5209), + ("Henry", 3111), + ("Hudson", 281), + ("Hugh", 101), + ("Hugo", 543), + ("Hunter", 3044), + ("Ian", 7592), + ("Isaac", 4208), + ("Isaiah", 349), + ("Israel", 52), + ("Ivan", 236), + ("Jack", 9468), + ("Jackson", 3088), + ("Jacob", 8612), + ("Jake", 2421), + ("Jakob", 46), + ("James", 27224), + ("Jamie", 5064), + ("Jared", 2840), + ("Jarrod", 773), + ("Jason", 14737), + ("Jasper", 246), + ("Jaxon", 623), + ("Jayden", 4541), + ("Jeffrey", 2826), + ("Jeremy", 4775), + ("Jesse", 3965), + ("Joel", 2932), + ("John", 26867), + ("Jonathan", 7957), + ("Jonathon", 349), + ("Jordan", 6499), + ("Joseph", 10061), + ("Josh", 56), + ("Joshua", 17109), + ("Josiah", 52), + ("Julian", 232), + ("Justin", 3882), + ("Kaleb", 492), + ("Kane", 1236), + ("Karl", 3822), + ("Kayden", 123), + ("Keanu", 54), + ("Keegan", 351), + ("Keith", 2175), + ("Kelly", 58), + ("Kelvin", 1262), + ("Kenneth", 3240), + ("Kerry", 2404), + ("Kevin", 9358), + ("Kieran", 1525), + ("Kim", 125), + ("Kingston", 692), + ("Kurt", 515), + ("Kyle", 2568), + ("Lachlan", 2965), + ("Lance", 2958), + ("Lawrence", 226), + ("Lee", 872), + ("Leo", 1872), + ("Leon", 967), + ("Leonard", 102), + ("Leslie", 1126), + ("Levi", 2986), + ("Lewis", 324), + ("Liam", 8629), + ("Lincoln", 857), + ("Lindsay", 883), + ("Lloyd", 46), + ("Logan", 5063), + ("Louis", 863), + ("Luca", 1318), + ("Lucas", 3329), + ("Luka", 119), + ("Lukas", 70), + ("Luke", 8296), + ("Malcolm", 2398), + ("Marcus", 1129), + ("Mark", 23154), + ("Martin", 4260), + ("Mason", 2613), + ("Mathew", 3107), + ("Matthew", 23181), + ("Maurice", 385), + ("Max", 3988), + ("Maxwell", 172), + ("Mervyn", 162), + ("Micah", 52), + ("Michael", 40099), + ("Micheal", 49), + ("Mitchell", 2730), + ("Morgan", 58), + ("Murray", 4843), + ("Nate", 48), + ("Nathan", 8920), + ("Nathaniel", 329), + ("Neil", 3392), + ("Neville", 1268), + ("Nicholas", 13132), + ("Nigel", 4435), + ("Nikau", 53), + ("Nixon", 219), + ("Noah", 3511), + ("Noel", 778), + ("Norman", 221), + ("Oliver", 6515), + ("Oscar", 1987), + ("Owen", 484), + ("Patrick", 6219), + ("Paul", 22959), + ("Peter", 23996), + ("Philip", 7036), + ("Phillip", 5977), + ("Phoenix", 882), + ("Quentin", 67), + ("Quinn", 742), + ("Raymond", 4404), + ("Regan", 1182), + ("Reuben", 1678), + ("Rex", 561), + ("Rhys", 967), + ("Richard", 17664), + ("Ricky", 806), + ("Riley", 2771), + ("Robert", 19791), + 
("Robin", 1431), + ("Rodney", 1936), + ("Roger", 2612), + ("Roman", 429), + ("Ronald", 1769), + ("Rory", 220), + ("Ross", 4823), + ("Roy", 101), + ("Russell", 2863), + ("Ryan", 9965), + ("Ryder", 727), + ("Sam", 2347), + ("Samuel", 15565), + ("Scott", 9481), + ("Sean", 5201), + ("Sebastian", 1031), + ("Seth", 780), + ("Shane", 10213), + ("Shannon", 1082), + ("Shaun", 4397), + ("Shayne", 296), + ("Simon", 9846), + ("Sione", 165), + ("Spencer", 52), + ("Stefan", 52), + ("Stephen", 18603), + ("Steven", 11007), + ("Stewart", 499), + ("Stuart", 4662), + ("Taine", 204), + ("Taylor", 1356), + ("Terence", 1154), + ("Terry", 860), + ("Theo", 311), + ("Theodore", 429), + ("Thomas", 15382), + ("Timothy", 10924), + ("Toby", 1490), + ("Todd", 1264), + ("Tom", 47), + ("Tony", 5670), + ("Travis", 65), + ("Trent", 524), + ("Trevor", 3194), + ("Tristan", 111), + ("Troy", 2423), + ("Tyler", 3765), + ("Tyrone", 231), + ("Tyson", 531), + ("Vaughan", 322), + ("Vincent", 907), + ("Walter", 57), + ("Warren", 3223), + ("Warwick", 295), + ("Wayne", 8542), + ("William", 18322), + ("Wyatt", 58), + ("Xavier", 1879), + ("Zac", 111), + ("Zachary", 2569), + ("Zane", 761), + ("Zion", 217), + + ("Anaru", 735), + ("Ari", 984), + ("Ariki", 1178), + ("Hemi", 1360), + ("Hoani", 574), + ("Ihaia", 476), + ("Kahu", 700), + ("Kahurangi", 939), + ("Kauri", 1613), + ("Manaaki", 574), + ("Manaia", 1434), + ("Manawa", 536), + ("Matiu", 455), + ("Mikaere", 1413), + ("Nikau", 1942), + ("Niko", 972), + ("Nikora", 1766), + ("Rawiri", 1553), + ("Tai", 793), + ("Tama", 1257), + ("Tamati", 1766), + ("Tane", 1698), + ("Tangaroa", 605), + ("Te Ariki", 1423), + ("Te Koha", 537), + ("Tiare", 476), + ("Wiremu", 1923), + )) + + first_names_female = OrderedDict(( + ("Aaliyah", 1042), + ("Abbey", 40), + ("Abby", 503), + ("Abigail", 2017), + ("Addison", 538), + ("Adrienne", 625), + ("Aimee", 2315), + ("Alana", 1194), + ("Aleisha", 102), + ("Alexandra", 2689), + ("Alexis", 789), + ("Alice", 3252), + ("Alicia", 683), + ("Alison", 3444), + ("Alyssa", 1032), + ("Amaia", 45), + ("Amanda", 7667), + ("Amber", 3661), + ("Amelia", 4060), + ("Amy", 7061), + ("Anahera", 140), + ("Andrea", 5003), + ("Angel", 695), + ("Angela", 9634), + ("Angelina", 43), + ("Anika", 46), + ("Anita", 1526), + ("Ann", 1834), + ("Anna", 9371), + ("Annabelle", 457), + ("Anne", 3879), + ("Annette", 2348), + ("April", 49), + ("Arabella", 42), + ("Aria", 1025), + ("Ariana", 473), + ("Aroha", 50), + ("Ashlee", 464), + ("Ashleigh", 3158), + ("Ashley", 2477), + ("Aurora", 251), + ("Ava", 2487), + ("Ayla", 612), + ("Bailey", 150), + ("Barbara", 3531), + ("Belinda", 1254), + ("Bella", 1238), + ("Beverley", 996), + ("Billie", 45), + ("Brenda", 2451), + ("Briana", 49), + ("Brianna", 740), + ("Bridget", 1611), + ("Britney", 64), + ("Brittany", 1239), + ("Bronwyn", 2406), + ("Brooke", 3634), + ("Brooklyn", 782), + ("Caitlin", 3370), + ("Caitlyn", 454), + ("Carla", 323), + ("Carmen", 233), + ("Carol", 3626), + ("Caroline", 2530), + ("Carolyn", 3212), + ("Casey", 1097), + ("Cassandra", 489), + ("Catherine", 7765), + ("Chantelle", 55), + ("Charlie", 215), + ("Charlotte", 7759), + ("Chelsea", 1943), + ("Cherie", 1064), + ("Cheryl", 1781), + ("Cheyenne", 345), + ("Chloe", 4582), + ("Christina", 2675), + ("Christine", 10604), + ("Cindy", 65), + ("Claire", 3174), + ("Clara", 41), + ("Clare", 55), + ("Claudia", 804), + ("Colleen", 1367), + ("Courtney", 2941), + ("Crystal", 828), + ("Daisy", 197), + ("Danielle", 4151), + ("Dawn", 62), + ("Debbie", 1389), + ("Deborah", 8819), + ("Debra", 3094), + 
("Denise", 3577), + ("Destiny", 190), + ("Diana", 977), + ("Diane", 3952), + ("Dianne", 2314), + ("Donna", 7054), + ("Dorothy", 303), + ("Eden", 1578), + ("Eilish", 52), + ("Elaine", 381), + ("Eleanor", 155), + ("Elise", 48), + ("Elizabeth", 11869), + ("Ella", 5301), + ("Ellen", 124), + ("Ellie", 443), + ("Elsie", 97), + ("Emilia", 145), + ("Emily", 7766), + ("Emma", 13245), + ("Erin", 1624), + ("Esther", 88), + ("Eva", 1637), + ("Evelyn", 634), + ("Evie", 419), + ("Faith", 735), + ("Fiona", 6039), + ("Florence", 291), + ("Frances", 1212), + ("Frankie", 195), + ("Freya", 218), + ("Gabriella", 94), + ("Gabrielle", 808), + ("Gail", 1253), + ("Gaylene", 82), + ("Gemma", 2120), + ("Georgia", 5613), + ("Georgina", 786), + ("Gillian", 1388), + ("Gina", 301), + ("Glenda", 859), + ("Glenys", 410), + ("Gloria", 127), + ("Grace", 6036), + ("Haley", 173), + ("Hannah", 9082), + ("Harmony", 300), + ("Harper", 1186), + ("Harriet", 210), + ("Hayley", 4951), + ("Hazel", 814), + ("Heather", 4351), + ("Heidi", 353), + ("Helen", 7775), + ("Holly", 4402), + ("Hope", 142), + ("Imogen", 293), + ("Indi", 42), + ("Indie", 494), + ("Irene", 166), + ("Isabel", 499), + ("Isabella", 4257), + ("Isabelle", 1182), + ("Isla", 2246), + ("Isobel", 85), + ("Ivy", 577), + ("Jacqueline", 5559), + ("Jade", 3234), + ("Jaime", 61), + ("Jamie", 1066), + ("Jan", 1587), + ("Jane", 4932), + ("Janet", 2253), + ("Janette", 69), + ("Janice", 1881), + ("Janine", 2641), + ("Jasmine", 3786), + ("Jean", 64), + ("Jeanette", 900), + ("Jemma", 200), + ("Jenna", 1162), + ("Jennifer", 9991), + ("Jessica", 12989), + ("Jessie", 1123), + ("Jill", 455), + ("Jillian", 1571), + ("Joan", 199), + ("Joanna", 2716), + ("Joanne", 9329), + ("Jocelyn", 557), + ("Jodi", 56), + ("Jodie", 359), + ("Jolene", 313), + ("Jordan", 797), + ("Jorja", 456), + ("Josephine", 570), + ("Joy", 487), + ("Judith", 4677), + ("Julia", 2092), + ("Julie", 8289), + ("Justine", 1127), + ("Kaitlin", 45), + ("Kaitlyn", 358), + ("Karen", 13524), + ("Karla", 62), + ("Karyn", 429), + ("Kate", 5782), + ("Katelyn", 294), + ("Katherine", 3912), + ("Kathleen", 2503), + ("Kathryn", 5104), + ("Katie", 3455), + ("Katrina", 3184), + ("Kay", 1205), + ("Kaye", 227), + ("Kayla", 2806), + ("Keira", 759), + ("Kellie", 66), + ("Kelly", 6137), + ("Kelsey", 718), + ("Kerry", 1917), + ("Khloe", 98), + ("Kim", 5667), + ("Kimberley", 1578), + ("Kiri", 130), + ("Kirsten", 1183), + ("Kirsty", 2083), + ("Kristy", 172), + ("Krystal", 650), + ("Kyla", 41), + ("Kylie", 3692), + ("Laura", 4669), + ("Lauren", 3275), + ("Layla", 536), + ("Leah", 1894), + ("Leanne", 3478), + ("Leonie", 52), + ("Lesley", 1453), + ("Libby", 48), + ("Lilly", 813), + ("Lily", 3546), + ("Linda", 6288), + ("Lisa", 11891), + ("Lois", 278), + ("Lola", 343), + ("Lorraine", 1675), + ("Louise", 4580), + ("Lucia", 235), + ("Lucy", 4938), + ("Luna", 53), + ("Lydia", 335), + ("Lynda", 1972), + ("Lynette", 3666), + ("Lynley", 228), + ("Lynn", 53), + ("Lynne", 1025), + ("Lynnette", 120), + ("MacKenzie", 67), + ("Mackenzie", 1039), + ("Maddison", 1846), + ("Madeleine", 780), + ("Madeline", 184), + ("Madison", 3128), + ("Maia", 1937), + ("Manaia", 204), + ("Maree", 2270), + ("Margaret", 5517), + ("Maria", 5541), + ("Marian", 60), + ("Marie", 2582), + ("Marilyn", 546), + ("Marion", 370), + ("Mary", 5891), + ("Matilda", 570), + ("Maureen", 1099), + ("Maya", 432), + ("Megan", 5869), + ("Melanie", 4476), + ("Melissa", 6898), + ("Mia", 2627), + ("Michaela", 687), + ("Michele", 1082), + ("Michelle", 12961), + ("Mikaela", 48), + ("Mikayla", 1492), + 
("Mila", 1139), + ("Millie", 711), + ("Molly", 1590), + ("Monica", 56), + ("Monique", 1859), + ("Morgan", 646), + ("Mya", 352), + ("Nadine", 126), + ("Naomi", 421), + ("Natalie", 4112), + ("Natasha", 5533), + ("Nevaeh", 673), + ("Ngaire", 116), + ("Niamh", 49), + ("Nicola", 10395), + ("Nicole", 6011), + ("Nikita", 1263), + ("Nikki", 57), + ("Nina", 379), + ("Olive", 525), + ("Olivia", 8816), + ("Paige", 3719), + ("Pamela", 2677), + ("Paris", 551), + ("Patricia", 5007), + ("Paula", 3667), + ("Pauline", 2404), + ("Payton", 44), + ("Penelope", 1213), + ("Peyton", 621), + ("Philippa", 1359), + ("Phoebe", 1380), + ("Piper", 580), + ("Pippa", 416), + ("Poppy", 842), + ("Quinn", 213), + ("Rachael", 3210), + ("Rachel", 9769), + ("Rachelle", 64), + ("Raewyn", 3039), + ("Rebecca", 11608), + ("Rebekah", 1255), + ("Renee", 3387), + ("Rhonda", 131), + ("Riley", 676), + ("Robyn", 5598), + ("Rochelle", 2086), + ("Rose", 1384), + ("Rosemary", 1918), + ("Ruby", 4332), + ("Ruth", 1616), + ("Sadie", 151), + ("Sally", 2445), + ("Samantha", 7549), + ("Sandra", 7429), + ("Sara", 1121), + ("Sarah", 19901), + ("Sasha", 44), + ("Savannah", 443), + ("Scarlett", 1045), + ("Shakira", 52), + ("Shania", 338), + ("Shannon", 2446), + ("Sharlene", 220), + ("Sharon", 7243), + ("Shelley", 2569), + ("Sheree", 169), + ("Sheryl", 1688), + ("Shirley", 1673), + ("Shona", 1210), + ("Sienna", 1358), + ("Sinead", 53), + ("Skye", 97), + ("Skyla", 105), + ("Skylar", 41), + ("Sofia", 630), + ("Sonia", 246), + ("Sonya", 632), + ("Sophia", 2595), + ("Sophie", 7868), + ("Stacey", 3037), + ("Stella", 1323), + ("Stephanie", 5794), + ("Summer", 1477), + ("Susan", 12686), + ("Suzanne", 4705), + ("Tamara", 312), + ("Tania", 6879), + ("Tanya", 1595), + ("Tara", 503), + ("Tayla", 1823), + ("Taylor", 1499), + ("Tegan", 318), + ("Teresa", 2294), + ("Tessa", 1439), + ("Thea", 279), + ("Tiana", 388), + ("Tina", 2124), + ("Toni", 2572), + ("Tori", 50), + ("Tracey", 6914), + ("Tracy", 3999), + ("Trinity", 401), + ("Tyla", 98), + ("Valerie", 394), + ("Vanessa", 3941), + ("Vicki", 3171), + ("Vicky", 198), + ("Victoria", 4823), + ("Violet", 506), + ("Virginia", 54), + ("Vivienne", 802), + ("Wendy", 6832), + ("Whitney", 50), + ("Willow", 743), + ("Yvonne", 1822), + ("Zara", 1292), + ("Zoe", 3973), + ("Zoey", 165), + + ("Amaia", 667), + ("Ana", 730), + ("Anahera", 1760), + ("Anika", 1432), + ("Aria", 1960), + ("Ariana", 1729), + ("Aroha", 1796), + ("Ataahua", 876), + ("Awhina", 583), + ("Hana", 536), + ("Hinewai", 536), + ("Huia", 528), + ("Kahurangi", 730), + ("Kaia", 1576), + ("Kora", 878), + ("Mahi", 556), + ("Maia", 1960), + ("Manaia", 912), + ("Maraea", 703), + ("Mareikura", 948), + ("Mereana", 637), + ("Miriama", 614), + ("Nia", 667), + ("Ria", 703), + ("Terina", 528), + ("Tia", 1695), + ("Tiare", 671), + ("Tui", 1251), + ("Waimarie", 671), + ("Wikitoria", 583), + )) + + first_names = first_names_male.copy() + first_names.update(first_names_female) + + # New Zealand surnames compiled (and cleaned up) from the following sources: + # + # NZ Cemetery plot data: + # https://catalogue.data.govt.nz/dataset?q=cemetery+plots + + last_names = OrderedDict(( + ("Smith", 948), + ("Anderson", 394), + ("Jones", 386), + ("Taylor", 364), + ("Brown", 350), + ("Williams", 337), + ("Thompson", 295), + ("Scott", 266), + ("Harris", 253), + ("Mitchell", 217), + ("Thomas", 214), + ("Campbell", 193), + ("Jackson", 191), + ("Stewart", 188), + ("Martin", 186), + ("Turner", 174), + ("Moore", 173), + ("Simpson", 171), + ("Hart", 166), + ("Bell", 163), + ("Evans", 161), + 
("Hansen", 160), + ("Gray", 156), + ("Henderson", 155), + ("Edwards", 153), + ("McDonald", 152), + ("Davis", 150), + ("Ward", 150), + ("Cameron", 149), + ("Wood", 149), + ("MacDonald", 148), + ("Reid", 140), + ("Cook", 138), + ("Bailey", 137), + ("Adams", 136), + ("Mason", 136), + ("Baker", 135), + ("Green", 134), + ("Jensen", 134), + ("Parker", 132), + ("Neal", 131), + ("Russell", 131), + ("Carter", 128), + ("Allen", 127), + ("Roberts", 127), + ("Knight", 126), + ("Morgan", 126), + ("Murphy", 126), + ("Miller", 124), + ("Morris", 124), + ("McKay", 122), + ("Morrison", 121), + ("Wallace", 121), + ("Stevens", 119), + ("Johnston", 113), + ("Jenkins", 111), + ("Lewis", 110), + ("Davies", 109), + ("Oliver", 109), + ("Ryan", 109), + ("Marshall", 108), + ("Webb", 108), + ("Patchett", 107), + ("Hughes", 106), + ("Graham", 104), + ("Wells", 104), + ("Harrison", 103), + ("Larsen", 103), + ("Matthews", 103), + ("Phillips", 102), + ("Clarke", 100), + ("Gibson", 99), + ("Lucas", 99), + ("Price", 97), + ("O'Sullivan", 96), + ("Barnes", 94), + ("Gardiner", 92), + ("Richards", 91), + ("Boyce", 90), + ("Duncan", 89), + ("Fisher", 89), + ("Gill", 89), + ("O'Brien", 89), + ("Gordon", 88), + ("Olsen", 88), + ("Powell", 86), + ("Black", 85), + ("Kennedy", 85), + ("Dixon", 84), + ("Jamieson", 84), + ("O'Connor", 84), + ("Sinclair", 84), + ("Perry", 83), + ("Williamson", 83), + ("Day", 82), + ("Pedersen", 81), + ("Currie", 80), + ("Grant", 80), + ("Rush", 80), + ("McEwen", 79), + ("Wilton", 79), + ("Kelly", 78), + ("Nicholson", 77), + ("Coleman", 76), + ("Davidson", 76), + ("Gardner", 76), + ("Saunders", 76), + ("Rogers", 75), + ("Bryant", 74), + ("Ferguson", 74), + ("Ford", 73), + ("Fowler", 73), + ("McLean", 73), + ("Holland", 72), + ("Lloyd", 72), + ("Page", 72), + ("Francis", 71), + ("Smart", 71), + ("Weston", 71), + ("Chapman", 70), + ("Crawford", 70), + ("Shaw", 70), + ("Sullivan", 70), + ("Webster", 70), + ("Millar", 69), + ("Burton", 68), + ("Fuller", 68), + ("Hamilton", 68), + ("West", 68), + ("Burns", 67), + ("Cox", 67), + ("Cresswell", 67), + ("Holdaway", 67), + ("Hodson", 66), + ("Kerr", 66), + ("Brooks", 64), + ("Fletcher", 64), + ("McCallum", 64), + ("Allan", 63), + ("Buchanan", 63), + ("Carr", 63), + ("Lee", 63), + ("Pickering", 63), + ("Pope", 63), + ("Rowe", 63), + ("Woolley", 63), + ("McLeod", 62), + ("Barnett", 61), + ("Berry", 61), + ("Lane", 61), + ("Tapp", 61), + ("Bartlett", 60), + ("Elliott", 60), + ("Pearson", 60), + ("Wilkinson", 60), + ("Atkinson", 59), + ("Butler", 59), + ("Douglas", 59), + ("Pratt", 59), + ("Cole", 58), + ("Hayward", 58), + ("Little", 58), + ("Newman", 58), + ("Simmons", 58), + ("Barrett", 57), + ("Cooksley", 57), + ("Freeman", 57), + ("Higgins", 57), + ("Hope", 57), + ("McGregor", 57), + ("McMillan", 57), + ("Rose", 57), + ("Sutton", 57), + ("Wong", 57), + ("Harper", 56), + ("Osborne", 56), + ("Stevenson", 56), + ("Bird", 55), + ("Boyd", 55), + ("Dick", 55), + ("Field", 55), + ("Greer", 55), + ("Greig", 55), + ("Nielsen", 55), + ("Reynolds", 55), + ("Forrest", 54), + ("Bradley", 53), + ("Gibbons", 53), + ("Howard", 53), + ("MacKenzie", 53), + ("Nelson", 53), + ("Todd", 53), + ("Waters", 53), + ("Ball", 52), + ("Davey", 52), + ("Holmes", 52), + ("Rodgers", 52), + ("Stratford", 52), + ("Griffiths", 51), + ("Small", 51), + ("Watt", 51), + ("Andrew", 50), + ("Bishop", 50), + ("Dunn", 50), + ("Goodwin", 50), + ("Gore", 50), + ("Healy", 50), + ("May", 50), + ("Munro", 50), + ("Parsons", 50), + ("Poole", 50), + ("Watts", 50), + ("Hills", 49), + ("Peters", 49), + 
("Vercoe", 49), + ("Armstrong", 48), + ("Bright", 48), + ("Burgess", 48), + ("Collis", 48), + ("O'Neill", 48), + ("Spencer", 48), + ("Ritchie", 47), + ("Alexander", 46), + ("Curtis", 46), + ("Freeth", 46), + ("Nicol", 46), + ("Robson", 46), + ("Satherley", 46), + ("Stuart", 46), + ("Waugh", 46), + ("Woods", 46), + ("Coley", 45), + ("Fitzgerald", 45), + ("Fleming", 45), + ("Herd", 45), + ("Morton", 45), + ("Beattie", 44), + ("Clifford", 44), + ("Costello", 44), + ("Dawson", 44), + ("Donaldson", 44), + ("Fox", 44), + ("Hay", 44), + ("Jellyman", 44), + ("Joe", 44), + ("Johansen", 44), + ("Knowles", 44), + ("Lawson", 44), + ("O'Donnell", 44), + ("Patterson", 44), + ("Payne", 44), + ("Read", 44), + ("Casey", 43), + ("Chandler", 43), + ("Donald", 43), + ("Gilchrist", 43), + ("Hyde", 43), + ("McIntosh", 43), + ("Paton", 43), + ("Robb", 43), + ("Rutherford", 43), + ("Pike", 42), + ("Dillon", 41), + ("Drummond", 41), + ("Hickey", 41), + ("Hooper", 41), + ("Jordan", 41), + ("Judd", 41), + ("Kenny", 41), + ("Low", 41), + ("Marfell", 41), + ("Newton", 41), + ("O'Leary", 41), + ("Tucker", 41), + ("Carson", 40), + ("Dean", 40), + ("Dickson", 40), + ("George", 40), + ("Ham", 40), + ("McCarthy", 40), + ("McIntyre", 40), + ("Moran", 40), + ("O'Connell", 40), + ("Parkes", 40), + ("Short", 40), + ("Barr", 39), + ("Baxter", 39), + ("Dalton", 39), + ("Forbes", 39), + ("Hawkins", 39), + ("Ireland", 39), + ("Miles", 39), + ("Nash", 39), + ("Owen", 39), + ("Perano", 39), + ("Sowman", 39), + ("Whyte", 39), + ("Bush", 38), + ("Drake", 38), + ("Eden", 38), + ("Giles", 38), + ("Hoare", 38), + ("Hubbard", 38), + ("Hudson", 38), + ("MacKay", 38), + ("McKinnon", 38), + ("Mears", 38), + ("Prentice", 38), + ("Schwass", 38), + ("Simonsen", 38), + ("Walton", 38), + ("Wheeler", 38), + ("Wratt", 38), + ("Avery", 37), + ("Barker", 37), + ("Blake", 37), + ("Conway", 37), + ("Holloway", 37), + ("Horton", 37), + ("Manning", 37), + ("Nolan", 37), + ("Pritchard", 37), + ("Bishell", 36), + ("Blair", 36), + ("Christiansen", 36), + ("Fulton", 36), + ("Gibbs", 36), + ("Griffin", 36), + ("Hook", 36), + ("McGill", 36), + ("Mercer", 36), + ("Middleton", 36), + ("Rayner", 36), + ("Stone", 36), + ("Terry", 36), + ("Walsh", 36), + ("Craig", 35), + ("Craven", 35), + ("Ellery", 35), + ("Findlay", 35), + ("Maxwell", 35), + ("North", 35), + ("Reardon", 35), + ("Tait", 35), + ("Baldwin", 34), + ("Butcher", 34), + ("Caldwell", 34), + ("Doyle", 34), + ("Eaton", 34), + ("Flood", 34), + ("Gifford", 34), + ("Guy", 34), + ("Jennings", 34), + ("Leslie", 34), + ("McMahon", 34), + ("McNabb", 34), + ("Paterson", 34), + ("Porter", 34), + ("Reeves", 34), + ("Seymour", 34), + ("Trask", 34), + ("Warren", 34), + ("Watkins", 34), + ("Wills", 34), + ("Best", 33), + ("Bull", 33), + ("Dawick", 33), + ("Dobson", 33), + ("Gledhill", 33), + ("Hardy", 33), + ("Hayes", 33), + ("Kendall", 33), + ("McCormick", 33), + ("McPherson", 33), + ("Pollard", 33), + ("Rasmussen", 33), + ("Shailer", 33), + ("Shepherd", 33), + ("Sheridan", 33), + ("Simmonds", 33), + ("Steele", 33), + ("Booth", 32), + ("Edmonds", 32), + ("Gunn", 32), + ("Hood", 32), + ("Humphrey", 32), + ("Hutchinson", 32), + ("Laurenson", 32), + ("Long", 32), + ("Lowe", 32), + ("Manson", 32), + ("McGrath", 32), + ("McKenna", 32), + ("Muir", 32), + ("O'Keefe", 32), + ("Potter", 32), + ("Searle", 32), + ("Stubbs", 32), + ("Wall", 32), + ("Wallis", 32), + ("Browne", 31), + ("Carroll", 31), + ("Cunningham", 31), + ("Foley", 31), + ("Franklin", 31), + ("Furness", 31), + ("Gilbert", 31), + ("Hopkins", 31), + ("Jefferies", 
31), + ("Johnstone", 31), + ("Linton", 31), + ("Mann", 31), + ("Norton", 31), + ("Rees", 31), + ("Rowlands", 31), + ("Sanders", 31), + ("Bond", 30), + ("Chambers", 30), + ("Cragg", 30), + ("Davison", 30), + ("Gee", 30), + ("Gleeson", 30), + ("Gullery", 30), + ("Hadfield", 30), + ("Haines", 30), + ("Hepburn", 30), + ("Howell", 30), + ("Jeffries", 30), + ("Lamb", 30), + ("Law", 30), + ("MacPherson", 30), + ("McIsaac", 30), + ("Millard", 30), + ("Paul", 30), + ("Pearce", 30), + ("Prouse", 30), + ("Ramsay", 30), + ("Rowland", 30), + ("Spelman", 30), + ("Waghorn", 30), + ("Willis", 30), + ("Zimmerman", 30), + ("Aitken", 29), + ("Booker", 29), + ("Bruce", 29), + ("Burrell", 29), + ("Burt", 29), + ("Funnell", 29), + ("Gilmore", 29), + ("Guthrie", 29), + ("Hewitt", 29), + ("Hogg", 29), + ("Lammas", 29), + ("Lang", 29), + ("Lyons", 29), + ("McDowall", 29), + ("Neilson", 29), + ("Norman", 29), + ("Reed", 29), + ("Rickard", 29), + ("Stokes", 29), + ("Stratton", 29), + ("Strawbridge", 29), + ("York", 29), + ("Alve", 28), + ("Baldick", 28), + ("Banks", 28), + ("Beard", 28), + ("Bowden", 28), + ("Boyle", 28), + ("Carpenter", 28), + ("Connolly", 28), + ("Cooke", 28), + ("Craw", 28), + ("Cumming", 28), + ("Drew", 28), + ("Fairhall", 28), + ("Gillespie", 28), + ("Gillies", 28), + ("Healey", 28), + ("Horn", 28), + ("Ingram", 28), + ("Knox", 28), + ("Lancaster", 28), + ("Landon-Lane", 28), + ("Marsh", 28), + ("Mortimer", 28), + ("Riley", 28), + ("Sixtus", 28), + ("Turnbull", 28), + ("Warner", 28), + ("Aldridge", 27), + ("Allerby", 27), + ("Arnold", 27), + ("Blackwell", 27), + ("Blick", 27), + ("Boon", 27), + ("Bowater", 27), + ("Broughan", 27), + ("Davenport", 27), + ("Foote", 27), + ("Forsyth", 27), + ("Laing", 27), + ("Mayo", 27), + ("McFarlane", 27), + ("McMurray", 27), + ("Monk", 27), + ("Orr", 27), + ("Procter", 27), + ("Shannon", 27), + ("Southee", 27), + ("Stace", 27), + ("Waller", 27), + ("Webby", 27), + ("Arnott", 26), + ("Baird", 26), + ("Bary", 26), + ("Bassett", 26), + ("Buckley", 26), + ("Burke", 26), + ("Claridge", 26), + ("Clunies-Ross", 26), + ("Croad", 26), + ("Dyer", 26), + ("Ewart", 26), + ("Faulkner", 26), + ("Fenton", 26), + ("Gibb", 26), + ("Huddleston", 26), + ("Jarvis", 26), + ("Kay", 26), + ("Kemp", 26), + ("McLachlan", 26), + ("Middlemiss", 26), + ("Moody", 26), + ("Mudgway", 26), + ("Nicholas", 26), + ("Reader", 26), + ("Robert", 26), + ("Steer", 26), + ("Thornton", 26), + ("Toms", 26), + ("Twidle", 26), + ("Vincent", 26), + ("Way", 26), + ("Whittaker", 26), + ("Batchelar", 25), + ("Boniface", 25), + ("Botham", 25), + ("Buick", 25), + ("Burnett", 25), + ("Ching", 25), + ("Christie", 25), + ("Corlett", 25), + ("Coutts", 25), + ("Eglinton", 25), + ("Enright", 25), + ("Foot", 25), + ("Frost", 25), + ("Gaskin", 25), + ("Hanson", 25), + ("Hardie", 25), + ("Henry", 25), + ("Hoskins", 25), + ("Lambert", 25), + ("Learmonth", 25), + ("Logan", 25), + ("Matheson", 25), + ("McManaway", 25), + ("Meads", 25), + ("Meredith", 25), + ("Montgomery", 25), + ("Murdoch", 25), + ("Orchard", 25), + ("Perrin", 25), + ("Peterson", 25), + ("Priest", 25), + ("Rossiter", 25), + ("Shand", 25), + ("Skinner", 25), + ("Soper", 25), + ("Street", 25), + ("Tanner", 25), + ("Aberhart", 24), + ("Berkahn", 24), + ("Burr", 24), + ("Cairns", 24), + ("Corbett", 24), + ("Dalziel", 24), + ("Doherty", 24), + ("Esson", 24), + ("Farland", 24), + ("Godfrey", 24), + ("Guard", 24), + ("Hume", 24), + ("Irving", 24), + ("Jacques", 24), + ("Kirk", 24), + ("Love", 24), + ("Lyon", 24), + )) diff --git 
a/faker/providers/phone_number/en_NZ/__init__.py b/faker/providers/phone_number/en_NZ/__init__.py new file mode 100644 index 00000000..fe69a81c --- /dev/null +++ b/faker/providers/phone_number/en_NZ/__init__.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from .. import Provider as PhoneNumberProvider + + +class Provider(PhoneNumberProvider): + formats = ( + # Local calls + '%## ####', + '%##-####', + '%######', + # National & Mobile dialing + '0{{area_code}} %## ####', + '0{{area_code}} %##-####', + '0{{area_code}}-%##-####', + '0{{area_code}} %######', + # Optional parenthesis + '(0{{area_code}}) %## ####', + '(0{{area_code}}) %##-####', + '(0{{area_code}}) %######', + # International drops the 0 + '+64 {{area_code}} %## ####', + '+64 {{area_code}} %##-####', + '+64 {{area_code}} %######', + '+64-{{area_code}}-%##-####', + '+64{{area_code}}%######', + ) + + area_codes = [ + # Mobiles + '20', + '21', + '22', + '27', + '29', + + '3', # South Island + '4', # Wellington + '6', # Lower North Island + '7', # Central North Island + '9', # Auckland + ] + + def area_code(self): + return self.numerify(self.random_element(self.area_codes)) + + def phone_number(self): + pattern = self.random_element(self.formats) + return self.numerify(self.generator.parse(pattern))
Locale provider for en_NZ

Should be a simple job to clone the en_AU provider(s), and adjust. Might be nice to add plausible-looking Māori placenames too.
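As a quick sense of the end goal, a hedged usage sketch of the requested locale: the `Faker('en_NZ')` constructor call matches what the test patch below uses, and the commented sample outputs are illustrative only, since draws vary per seed:

```
from faker import Faker

fake = Faker('en_NZ')          # select the New Zealand English locale

print(fake.name())             # e.g. a localized first name + surname pairing
print(fake.address())          # street address, city and 4-digit postcode
print(fake.phone_number())     # local, national or +64 international format
```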
joke2k/faker
diff --git a/tests/providers/test_address.py b/tests/providers/test_address.py
index 585e0869..752b294e 100644
--- a/tests/providers/test_address.py
+++ b/tests/providers/test_address.py
@@ -224,6 +224,23 @@ class TestEnAU(unittest.TestCase):
         self.assertTrue(state_abbr.isupper())
 
 
+class TestEnNZ(unittest.TestCase):
+    """ Tests addresses in the en_NZ locale """
+
+    def setUp(self):
+        self.factory = Faker('en_NZ')
+
+    def test_state(self):
+        # No states in New Zealand
+        state = self.factory.state()
+        assert state == ''
+
+    def test_postcode(self):
+        for _ in range(100):
+            postcode = self.factory.postcode()
+            assert re.match("\d{4}", postcode)
+
+
 class TestEnCA(unittest.TestCase):
     """ Tests addresses in en_CA locale """
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_added_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 1 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": null, "python": "3.9", "reqs_path": [ "tests/requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0
dnspython==2.7.0
email-validator==1.0.3
exceptiongroup==1.2.2
-e git+https://github.com/joke2k/faker.git@80aa92145296f84122c1b43c8f3d63aef5cd6417#egg=Faker
idna==3.10
iniconfig==2.1.0
mock==2.0.0
packaging==24.2
pbr==6.1.1
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
six==1.17.0
text-unidecode==1.2
tomli==2.2.1
UkPostcodeParser==1.1.2
name: faker
channels:
  - defaults
  - https://repo.anaconda.com/pkgs/main
  - https://repo.anaconda.com/pkgs/r
  - conda-forge
dependencies:
  - _libgcc_mutex=0.1=main
  - _openmp_mutex=5.1=1_gnu
  - ca-certificates=2025.2.25=h06a4308_0
  - ld_impl_linux-64=2.40=h12ee557_0
  - libffi=3.4.4=h6a678d5_1
  - libgcc-ng=11.2.0=h1234567_1
  - libgomp=11.2.0=h1234567_1
  - libstdcxx-ng=11.2.0=h1234567_1
  - ncurses=6.4=h6a678d5_0
  - openssl=3.0.16=h5eee18b_0
  - pip=25.0=py39h06a4308_0
  - python=3.9.21=he870216_1
  - readline=8.2=h5eee18b_0
  - setuptools=75.8.0=py39h06a4308_0
  - sqlite=3.45.3=h5eee18b_0
  - tk=8.6.14=h39e8969_0
  - tzdata=2025a=h04d1e81_0
  - wheel=0.45.1=py39h06a4308_0
  - xz=5.6.4=h5eee18b_1
  - zlib=1.2.13=h5eee18b_1
  - pip:
      - coverage==7.8.0
      - dnspython==2.7.0
      - email-validator==1.0.3
      - exceptiongroup==1.2.2
      - idna==3.10
      - iniconfig==2.1.0
      - mock==2.0.0
      - packaging==24.2
      - pbr==6.1.1
      - pluggy==1.5.0
      - pytest==8.3.5
      - pytest-cov==6.0.0
      - python-dateutil==2.9.0.post0
      - six==1.17.0
      - text-unidecode==1.2
      - tomli==2.2.1
      - ukpostcodeparser==1.1.2
prefix: /opt/conda/envs/faker
[ "tests/providers/test_address.py::TestEnNZ::test_postcode", "tests/providers/test_address.py::TestEnNZ::test_state" ]
[]
[ "tests/providers/test_address.py::TestBaseProvider::test_alpha_2_country_codes", "tests/providers/test_address.py::TestBaseProvider::test_alpha_2_country_codes_as_default", "tests/providers/test_address.py::TestBaseProvider::test_alpha_3_country_codes", "tests/providers/test_address.py::TestBaseProvider::test_bad_country_code_representation", "tests/providers/test_address.py::TestAr_AA::test_alpha_2_country_codes", "tests/providers/test_address.py::TestAr_AA::test_alpha_2_country_codes_as_default", "tests/providers/test_address.py::TestAr_AA::test_alpha_3_country_codes", "tests/providers/test_address.py::TestAr_AA::test_bad_country_code_representation", "tests/providers/test_address.py::TestDeAT::test_city", "tests/providers/test_address.py::TestDeAT::test_country", "tests/providers/test_address.py::TestDeAT::test_latitude", "tests/providers/test_address.py::TestDeAT::test_longitude", "tests/providers/test_address.py::TestDeAT::test_postcode", "tests/providers/test_address.py::TestDeAT::test_state", "tests/providers/test_address.py::TestDeAT::test_street_suffix_long", "tests/providers/test_address.py::TestDeAT::test_street_suffix_short", "tests/providers/test_address.py::TestDeDE::test_city", "tests/providers/test_address.py::TestDeDE::test_country", "tests/providers/test_address.py::TestDeDE::test_state", "tests/providers/test_address.py::TestDeDE::test_street_suffix_long", "tests/providers/test_address.py::TestDeDE::test_street_suffix_short", "tests/providers/test_address.py::TestFiFI::test_city", "tests/providers/test_address.py::TestFiFI::test_street_suffix", "tests/providers/test_address.py::TestElGR::test_city", "tests/providers/test_address.py::TestElGR::test_latlng", "tests/providers/test_address.py::TestElGR::test_line_address", "tests/providers/test_address.py::TestElGR::test_region", "tests/providers/test_address.py::TestEnAU::test_city_prefix", "tests/providers/test_address.py::TestEnAU::test_postcode", "tests/providers/test_address.py::TestEnAU::test_state", "tests/providers/test_address.py::TestEnAU::test_state_abbr", "tests/providers/test_address.py::TestEnCA::test_city_prefix", "tests/providers/test_address.py::TestEnCA::test_postal_code_letter", "tests/providers/test_address.py::TestEnCA::test_postalcode", "tests/providers/test_address.py::TestEnCA::test_postcode", "tests/providers/test_address.py::TestEnCA::test_province", "tests/providers/test_address.py::TestEnCA::test_province_abbr", "tests/providers/test_address.py::TestEnCA::test_secondary_address", "tests/providers/test_address.py::TestEnGB::test_postcode", "tests/providers/test_address.py::TestEnUS::test_city_prefix", "tests/providers/test_address.py::TestEnUS::test_military_apo", "tests/providers/test_address.py::TestEnUS::test_military_dpo", "tests/providers/test_address.py::TestEnUS::test_military_ship", "tests/providers/test_address.py::TestEnUS::test_military_state", "tests/providers/test_address.py::TestEnUS::test_postcode", "tests/providers/test_address.py::TestEnUS::test_state", "tests/providers/test_address.py::TestEnUS::test_state_abbr", "tests/providers/test_address.py::TestEnUS::test_zipcode", "tests/providers/test_address.py::TestEnUS::test_zipcode_plus4", "tests/providers/test_address.py::TestHuHU::test_address", "tests/providers/test_address.py::TestHuHU::test_postcode_first_digit", "tests/providers/test_address.py::TestHuHU::test_street_address", "tests/providers/test_address.py::TestHuHU::test_street_address_with_county", "tests/providers/test_address.py::TestJaJP::test_address", 
"tests/providers/test_address.py::TestKoKR::test_address", "tests/providers/test_address.py::TestNeNP::test_address", "tests/providers/test_address.py::TestNoNO::test_address", "tests/providers/test_address.py::TestNoNO::test_city_suffix", "tests/providers/test_address.py::TestNoNO::test_postcode", "tests/providers/test_address.py::TestNoNO::test_street_suffix", "tests/providers/test_address.py::TestZhTW::test_address", "tests/providers/test_address.py::TestZhCN::test_address", "tests/providers/test_address.py::TestPtBr::test_address", "tests/providers/test_address.py::TestPtPT::test_distrito", "tests/providers/test_address.py::TestPtPT::test_freguesia" ]
[]
MIT License
2,111
[ "README.rst", "faker/providers/internet/en_NZ/__init__.py", "faker/providers/person/en_NZ/__init__.py", "faker/providers/automotive/en_NZ/__init__.py", "faker/providers/address/en_NZ/__init__.py", "faker/providers/phone_number/en_NZ/__init__.py" ]
[ "README.rst", "faker/providers/internet/en_NZ/__init__.py", "faker/providers/person/en_NZ/__init__.py", "faker/providers/automotive/en_NZ/__init__.py", "faker/providers/address/en_NZ/__init__.py", "faker/providers/phone_number/en_NZ/__init__.py" ]
MechanicalSoup__MechanicalSoup-192
7171e2b79ba0fbe02a7245066cb5536ddb2fe94e
2018-02-03 12:30:11
7171e2b79ba0fbe02a7245066cb5536ddb2fe94e
moy: Looks good to me.
diff --git a/mechanicalsoup/stateful_browser.py b/mechanicalsoup/stateful_browser.py index a233f6d..b96dc1b 100644 --- a/mechanicalsoup/stateful_browser.py +++ b/mechanicalsoup/stateful_browser.py @@ -177,7 +177,8 @@ class StatefulBrowser(Browser): def select_form(self, selector="form", nr=0): """Select a form in the current page. - :param selector: CSS selector to identify the form to select. + :param selector: CSS selector or a bs4.element.Tag object to identify + the form to select. If not specified, ``selector`` defaults to "form", which is useful if, e.g., there is only one form on the page. For ``selector`` syntax, see the `.select() method in BeautifulSoup @@ -191,8 +192,17 @@ class StatefulBrowser(Browser): :return: The selected form as a soup object. It can also be retrieved later with :func:`get_current_form`. """ - # nr is a 0-based index for consistency with mechanize - found_forms = self.get_current_page().select(selector, limit=nr + 1) + if isinstance(selector, bs4.element.Tag): + if selector.name == "form": + self.__state.form = Form(selector) + return self.get_current_form() + else: + raise LinkNotFoundError() + else: + # nr is a 0-based index for consistency with mechanize + found_forms = self.get_current_page().select(selector, + limit=nr + 1) + if len(found_forms) != nr + 1: if self.__debug: print('select_form failed for', selector)
Allow passing a Tag object to StatefulBrowser.select_form

Hi, I've recently had to select a form based on its children, and the only way I found to do that is:

```py
buttons = browser.get_current_page().select("form > button[name=apply]")
browser._StatefulBrowser__current_form = mechanicalsoup.Form(buttons[0].parent)
browser.submit_selected()
```

Could you consider adding something like `if isinstance(selector, bs4.element.Tag): …` to `select_form`?
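A short sketch of the call pattern this patch enables, mirroring the added test (the HTML and form ids are made up for illustration):

```python
import mechanicalsoup
from bs4 import BeautifulSoup

html = '<form id="a"></form><form id="b"></form>'
soup = BeautifulSoup(html, "lxml")

browser = mechanicalsoup.StatefulBrowser()
browser.open_fake_page(html)

# With the patch, a bs4.element.Tag can be passed directly instead of a
# CSS selector string, so a form can be picked via its children/attributes.
form = browser.select_form(soup.find("form", {"id": "b"}))
assert form.form["id"] == "b"
```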
MechanicalSoup/MechanicalSoup
diff --git a/tests/test_stateful_browser.py b/tests/test_stateful_browser.py index 942ea0b..fba91c9 100644 --- a/tests/test_stateful_browser.py +++ b/tests/test_stateful_browser.py @@ -353,6 +353,18 @@ def test_select_form_nr(): browser.select_form(nr=3) +def test_select_form_tag_object(): + """Test tag object as selector parameter type""" + forms = """<form id="a"></form><form id="b"></form><p></p>""" + soup = BeautifulSoup(forms, "lxml") + with mechanicalsoup.StatefulBrowser() as browser: + browser.open_fake_page(forms) + form = browser.select_form(soup.find("form", {"id": "b"})) + assert form.form['id'] == "b" + with pytest.raises(mechanicalsoup.LinkNotFoundError): + browser.select_form(soup.find("p")) + + def test_referer_follow_link(httpbin): browser = mechanicalsoup.StatefulBrowser() browser.open(httpbin.url)
{ "commit_name": "merge_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
0.9
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-flake8", "pytest-mock" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt", "tests/requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 beautifulsoup4==4.12.3 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 flake8==5.0.4 idna==3.10 importlib-metadata==4.2.0 iniconfig==1.1.1 lxml==5.3.1 mccabe==0.7.0 -e git+https://github.com/MechanicalSoup/MechanicalSoup.git@7171e2b79ba0fbe02a7245066cb5536ddb2fe94e#egg=MechanicalSoup packaging==21.3 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-flake8==1.1.1 pytest-mock==3.6.1 requests==2.27.1 requests-mock==1.12.1 six==1.17.0 soupsieve==2.3.2.post1 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: MechanicalSoup channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - beautifulsoup4==4.12.3 - charset-normalizer==2.0.12 - coverage==6.2 - flake8==5.0.4 - idna==3.10 - importlib-metadata==4.2.0 - iniconfig==1.1.1 - lxml==5.3.1 - mccabe==0.7.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-flake8==1.1.1 - pytest-mock==3.6.1 - requests==2.27.1 - requests-mock==1.12.1 - six==1.17.0 - soupsieve==2.3.2.post1 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/MechanicalSoup
[ "tests/test_stateful_browser.py::test_select_form_tag_object" ]
[ "tests/test_stateful_browser.py::flake-8::FLAKE8", "tests/test_stateful_browser.py::test_submit_online", "tests/test_stateful_browser.py::test_form_noaction", "tests/test_stateful_browser.py::test_referer_follow_link", "tests/test_stateful_browser.py::test_download_link", "tests/test_stateful_browser.py::test_download_link_nofile", "tests/test_stateful_browser.py::test_download_link_to_existing_file" ]
[ "tests/test_stateful_browser.py::test_request_forward", "tests/test_stateful_browser.py::test_no_404", "tests/test_stateful_browser.py::test_404", "tests/test_stateful_browser.py::test_user_agent", "tests/test_stateful_browser.py::test_open_relative", "tests/test_stateful_browser.py::test_links", "tests/test_stateful_browser.py::test_submit_btnName[input]", "tests/test_stateful_browser.py::test_submit_btnName[button]", "tests/test_stateful_browser.py::test_get_set_debug", "tests/test_stateful_browser.py::test_list_links", "tests/test_stateful_browser.py::test_launch_browser", "tests/test_stateful_browser.py::test_find_link", "tests/test_stateful_browser.py::test_verbose", "tests/test_stateful_browser.py::test_new_control", "tests/test_stateful_browser.py::test_form_noname", "tests/test_stateful_browser.py::test_form_multiple", "tests/test_stateful_browser.py::test_upload_file", "tests/test_stateful_browser.py::test_with", "tests/test_stateful_browser.py::test_select_form_nr", "tests/test_stateful_browser.py::test_referer_submit", "tests/test_stateful_browser.py::test_referer_submit_headers", "tests/test_stateful_browser.py::test_link_arg_text", "tests/test_stateful_browser.py::test_link_arg_regex", "tests/test_stateful_browser.py::test_link_arg_multiregex", "tests/test_stateful_browser.py::test_download_link_404", "tests/test_stateful_browser.py::test_download_link_referer", "tests/test_stateful_browser.py::test_refresh_open", "tests/test_stateful_browser.py::test_refresh_follow_link", "tests/test_stateful_browser.py::test_refresh_form_not_retained", "tests/test_stateful_browser.py::test_refresh_error" ]
[]
MIT License
2,115
[ "mechanicalsoup/stateful_browser.py" ]
[ "mechanicalsoup/stateful_browser.py" ]
tornadoweb__tornado-2270
15e350aa27493a98bf797d112af304b00c1d63c3
2018-02-03 23:07:50
03f13800e854a6fc9e6efa2168e694d9599348bd
diff --git a/tornado/web.py b/tornado/web.py index 5fa3abd8..7bf5415d 100644 --- a/tornado/web.py +++ b/tornado/web.py @@ -2499,8 +2499,9 @@ class StaticFileHandler(RequestHandler): .. versionadded:: 3.1 """ - if self.check_etag_header(): - return True + # If client sent If-None-Match, use it, ignore If-Modified-Since + if self.request.headers.get('If-None-Match'): + return self.check_etag_header() # Check the If-Modified-Since, and don't send the result if the # content has not been modified
StaticFileHandler returns 304 despite etag mismatch

I am not sure if this is an actual issue in the code or with my deployment, but it caused me some head-scratching. I am using Tornado as an API webserver for an embedded system. It also serves a small web application using the StaticFileHandler. To fix issues with caching browsers, I subclassed the StaticFileHandler to send Cache-Control headers like so:

    class NoCacheStaticFileHandler(tornado.web.StaticFileHandler):
        def set_extra_headers(self, path):
            self.set_header('Cache-control', 'no-cache, must-revalidate, max-age=0')

The problem I am seeing is that, despite the browser sending a different Etag than the server calculates, the server still sends a 304. I dug a bit into the code and found the following:

    def should_return_304(self):
        """Returns True if the headers indicate that we should return 304.

        .. versionadded:: 3.1
        """
        if self.check_etag_header():
            return True

        # Check the If-Modified-Since, and don't send the result if the
        # content has not been modified
        ims_value = self.request.headers.get("If-Modified-Since")
        if ims_value is not None:
            date_tuple = email.utils.parsedate(ims_value)
            if date_tuple is not None:
                if_since = datetime.datetime(*date_tuple[:6])
                if if_since >= self.modified:
                    return True
        return False

Essentially, this seems to return 304 if either the Etag headers match or the If-Modified-Since date the client sent is later than the file on the server. Is this how it should be? The browser sends a different hash, the server knows that the versions do not match but still sends a 304 because the date is newer? It feels like the server should not send a 304 when one property does not match.

The issue might be specific to the system here since the dates there are not very reliable. But I had it several times now that the data on the server was updated but it kept sending 304 despite an Etag mismatch because of the If-Modified-Since headers.
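A quick client-side check of the fixed precedence, mirroring the regression test added below (the URL is hypothetical — any Tornado app serving `/static/robots.txt` would do):

```python
import requests

BASE = "http://localhost:8888"  # hypothetical server running the app

r1 = requests.get(BASE + "/static/robots.txt")
r2 = requests.get(
    BASE + "/static/robots.txt",
    headers={
        "If-None-Match": '"MISMATCH"',                     # ETag does NOT match
        "If-Modified-Since": r1.headers["Last-Modified"],  # date says unchanged
    },
)

# With the patch, If-None-Match wins and the stale date check is skipped:
assert r2.status_code == 200  # was 304 before the fix
```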
tornadoweb/tornado
diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py index 646b2f79..39347deb 100644 --- a/tornado/test/web_test.py +++ b/tornado/test/web_test.py @@ -1081,6 +1081,13 @@ class StaticFileTest(WebTestCase): 'If-None-Match': response1.headers['Etag']}) self.assertEqual(response2.code, 304) + def test_static_304_etag_modified_bug(self): + response1 = self.get_and_head("/static/robots.txt") + response2 = self.get_and_head("/static/robots.txt", headers={ + 'If-None-Match': '"MISMATCH"', + 'If-Modified-Since': response1.headers['Last-Modified']}) + self.assertEqual(response2.code, 200) + def test_static_if_modified_since_pre_epoch(self): # On windows, the functions that work with time_t do not accept # negative values, and at least one client (processing.js) seems
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 1 }
4.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 toml @ file:///tmp/build/80754af9/toml_1616166611790/work -e git+https://github.com/tornadoweb/tornado.git@15e350aa27493a98bf797d112af304b00c1d63c3#egg=tornado typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: tornado channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 prefix: /opt/conda/envs/tornado
[ "tornado/test/web_test.py::StaticFileTest::test_static_304_etag_modified_bug" ]
[]
[ "tornado/test/web_test.py::SecureCookieV1Test::test_arbitrary_bytes", "tornado/test/web_test.py::SecureCookieV1Test::test_cookie_tampering_future_timestamp", "tornado/test/web_test.py::SecureCookieV1Test::test_round_trip", "tornado/test/web_test.py::SecureCookieV2Test::test_key_version_increment_version", "tornado/test/web_test.py::SecureCookieV2Test::test_key_version_invalidate_version", "tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip", "tornado/test/web_test.py::SecureCookieV2Test::test_key_version_roundtrip_differing_version", "tornado/test/web_test.py::SecureCookieV2Test::test_round_trip", "tornado/test/web_test.py::CookieTest::test_cookie_special_char", "tornado/test/web_test.py::CookieTest::test_get_cookie", "tornado/test/web_test.py::CookieTest::test_set_cookie", "tornado/test/web_test.py::CookieTest::test_set_cookie_domain", "tornado/test/web_test.py::CookieTest::test_set_cookie_expires_days", "tornado/test/web_test.py::CookieTest::test_set_cookie_false_flags", "tornado/test/web_test.py::CookieTest::test_set_cookie_max_age", "tornado/test/web_test.py::CookieTest::test_set_cookie_overwrite", "tornado/test/web_test.py::AuthRedirectTest::test_absolute_auth_redirect", "tornado/test/web_test.py::AuthRedirectTest::test_relative_auth_redirect", "tornado/test/web_test.py::ConnectionCloseTest::test_connection_close", "tornado/test/web_test.py::RequestEncodingTest::test_error", "tornado/test/web_test.py::RequestEncodingTest::test_group_encoding", "tornado/test/web_test.py::RequestEncodingTest::test_group_question_mark", "tornado/test/web_test.py::RequestEncodingTest::test_slashes", "tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument", "tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_invalid_unicode", "tornado/test/web_test.py::WSGISafeWebTest::test_decode_argument_plus", "tornado/test/web_test.py::WSGISafeWebTest::test_get_argument", "tornado/test/web_test.py::WSGISafeWebTest::test_get_body_arguments", "tornado/test/web_test.py::WSGISafeWebTest::test_get_query_arguments", "tornado/test/web_test.py::WSGISafeWebTest::test_header_injection", "tornado/test/web_test.py::WSGISafeWebTest::test_multi_header", "tornado/test/web_test.py::WSGISafeWebTest::test_no_gzip", "tornado/test/web_test.py::WSGISafeWebTest::test_optional_path", "tornado/test/web_test.py::WSGISafeWebTest::test_redirect", "tornado/test/web_test.py::WSGISafeWebTest::test_reverse_url", "tornado/test/web_test.py::WSGISafeWebTest::test_types", "tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_resources", "tornado/test/web_test.py::WSGISafeWebTest::test_uimodule_unescaped", "tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect", "tornado/test/web_test.py::WSGISafeWebTest::test_web_redirect_double_slash", "tornado/test/web_test.py::NonWSGIWebTests::test_empty_flush", "tornado/test/web_test.py::NonWSGIWebTests::test_flow_control", "tornado/test/web_test.py::ErrorResponseTest::test_default", "tornado/test/web_test.py::ErrorResponseTest::test_failed_write_error", "tornado/test/web_test.py::ErrorResponseTest::test_write_error", "tornado/test/web_test.py::StaticFileTest::test_absolute_static_url", "tornado/test/web_test.py::StaticFileTest::test_absolute_version_exclusion", "tornado/test/web_test.py::StaticFileTest::test_include_host_override", "tornado/test/web_test.py::StaticFileTest::test_path_traversal_protection", "tornado/test/web_test.py::StaticFileTest::test_relative_version_exclusion", "tornado/test/web_test.py::StaticFileTest::test_root_static_path", 
"tornado/test/web_test.py::StaticFileTest::test_static_304_if_modified_since", "tornado/test/web_test.py::StaticFileTest::test_static_304_if_none_match", "tornado/test/web_test.py::StaticFileTest::test_static_404", "tornado/test/web_test.py::StaticFileTest::test_static_compressed_files", "tornado/test/web_test.py::StaticFileTest::test_static_etag", "tornado/test/web_test.py::StaticFileTest::test_static_files", "tornado/test/web_test.py::StaticFileTest::test_static_head", "tornado/test/web_test.py::StaticFileTest::test_static_head_range", "tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_pre_epoch", "tornado/test/web_test.py::StaticFileTest::test_static_if_modified_since_time_zone", "tornado/test/web_test.py::StaticFileTest::test_static_invalid_range", "tornado/test/web_test.py::StaticFileTest::test_static_range_if_none_match", "tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_invalid_start", "tornado/test/web_test.py::StaticFileTest::test_static_unsatisfiable_range_zero_suffix", "tornado/test/web_test.py::StaticFileTest::test_static_url", "tornado/test/web_test.py::StaticFileTest::test_static_with_range", "tornado/test/web_test.py::StaticFileTest::test_static_with_range_end_edge", "tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_file", "tornado/test/web_test.py::StaticFileTest::test_static_with_range_full_past_end", "tornado/test/web_test.py::StaticFileTest::test_static_with_range_neg_end", "tornado/test/web_test.py::StaticFileTest::test_static_with_range_partial_past_end", "tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_filename", "tornado/test/web_test.py::StaticDefaultFilenameTest::test_static_default_redirect", "tornado/test/web_test.py::StaticFileWithPathTest::test_serve", "tornado/test/web_test.py::CustomStaticFileTest::test_serve", "tornado/test/web_test.py::CustomStaticFileTest::test_static_url", "tornado/test/web_test.py::HostMatchingTest::test_host_matching", "tornado/test/web_test.py::DefaultHostMatchingTest::test_default_host_matching", "tornado/test/web_test.py::NamedURLSpecGroupsTest::test_named_urlspec_groups", "tornado/test/web_test.py::ClearHeaderTest::test_clear_header", "tornado/test/web_test.py::Header204Test::test_204_headers", "tornado/test/web_test.py::Header304Test::test_304_headers", "tornado/test/web_test.py::StatusReasonTest::test_status", "tornado/test/web_test.py::DateHeaderTest::test_date_header", "tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str", "tornado/test/web_test.py::RaiseWithReasonTest::test_httperror_str_from_httputil", "tornado/test/web_test.py::RaiseWithReasonTest::test_raise_with_reason", "tornado/test/web_test.py::ErrorHandlerXSRFTest::test_404_xsrf", "tornado/test/web_test.py::ErrorHandlerXSRFTest::test_error_xsrf", "tornado/test/web_test.py::GzipTestCase::test_gzip", "tornado/test/web_test.py::GzipTestCase::test_gzip_not_requested", "tornado/test/web_test.py::GzipTestCase::test_gzip_static", "tornado/test/web_test.py::GzipTestCase::test_vary_already_present", "tornado/test/web_test.py::GzipTestCase::test_vary_already_present_multiple", "tornado/test/web_test.py::PathArgsInPrepareTest::test_kw", "tornado/test/web_test.py::PathArgsInPrepareTest::test_pos", "tornado/test/web_test.py::ClearAllCookiesTest::test_clear_all_cookies", "tornado/test/web_test.py::ExceptionHandlerTest::test_http_error", "tornado/test/web_test.py::ExceptionHandlerTest::test_known_error", "tornado/test/web_test.py::ExceptionHandlerTest::test_unknown_error", 
"tornado/test/web_test.py::BuggyLoggingTest::test_buggy_log_exception", "tornado/test/web_test.py::UIMethodUIModuleTest::test_ui_method", "tornado/test/web_test.py::GetArgumentErrorTest::test_catch_error", "tornado/test/web_test.py::MultipleExceptionTest::test_multi_exception", "tornado/test/web_test.py::SetLazyPropertiesTest::test_set_properties", "tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_is_lazy", "tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_from_ui_module_works", "tornado/test/web_test.py::GetCurrentUserTest::test_get_current_user_works", "tornado/test/web_test.py::UnimplementedHTTPMethodsTest::test_unimplemented_standard_methods", "tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_other", "tornado/test/web_test.py::UnimplementedNonStandardMethodsTest::test_unimplemented_patch", "tornado/test/web_test.py::AllHTTPMethodsTest::test_standard_methods", "tornado/test/web_test.py::PatchMethodTest::test_other", "tornado/test/web_test.py::PatchMethodTest::test_patch", "tornado/test/web_test.py::FinishInPrepareTest::test_finish_in_prepare", "tornado/test/web_test.py::Default404Test::test_404", "tornado/test/web_test.py::Custom404Test::test_404", "tornado/test/web_test.py::DefaultHandlerArgumentsTest::test_403", "tornado/test/web_test.py::HandlerByNameTest::test_handler_by_name", "tornado/test/web_test.py::StreamingRequestBodyTest::test_close_during_upload", "tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return", "tornado/test/web_test.py::StreamingRequestBodyTest::test_early_return_with_data", "tornado/test/web_test.py::StreamingRequestBodyTest::test_streaming_body", "tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_chunked_body", "tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_compressed_body", "tornado/test/web_test.py::DecoratedStreamingRequestFlowControlTest::test_flow_control_fixed_body", "tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_chunked_body", "tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_compressed_body", "tornado/test/web_test.py::NativeStreamingRequestFlowControlTest::test_flow_control_fixed_body", "tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_high", "tornado/test/web_test.py::IncorrectContentLengthTest::test_content_length_too_low", "tornado/test/web_test.py::ClientCloseTest::test_client_close", "tornado/test/web_test.py::SignedValueTest::test_expired", "tornado/test/web_test.py::SignedValueTest::test_key_version_retrieval", "tornado/test/web_test.py::SignedValueTest::test_key_versioning_invalid_key", "tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_default_key", "tornado/test/web_test.py::SignedValueTest::test_key_versioning_read_write_non_default_key", "tornado/test/web_test.py::SignedValueTest::test_known_values", "tornado/test/web_test.py::SignedValueTest::test_name_swap", "tornado/test/web_test.py::SignedValueTest::test_non_ascii", "tornado/test/web_test.py::SignedValueTest::test_payload_tampering", "tornado/test/web_test.py::SignedValueTest::test_signature_tampering", "tornado/test/web_test.py::XSRFTest::test_cross_user", "tornado/test/web_test.py::XSRFTest::test_distinct_tokens", "tornado/test/web_test.py::XSRFTest::test_refresh_token", "tornado/test/web_test.py::XSRFTest::test_versioning", 
"tornado/test/web_test.py::XSRFTest::test_xsrf_fail_argument_invalid_format", "tornado/test/web_test.py::XSRFTest::test_xsrf_fail_body_no_cookie", "tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_invalid_format", "tornado/test/web_test.py::XSRFTest::test_xsrf_fail_cookie_no_body", "tornado/test/web_test.py::XSRFTest::test_xsrf_fail_no_token", "tornado/test/web_test.py::XSRFTest::test_xsrf_success_header", "tornado/test/web_test.py::XSRFTest::test_xsrf_success_non_hex_token", "tornado/test/web_test.py::XSRFTest::test_xsrf_success_post_body", "tornado/test/web_test.py::XSRFTest::test_xsrf_success_query_string", "tornado/test/web_test.py::XSRFTest::test_xsrf_success_short_token", "tornado/test/web_test.py::XSRFCookieKwargsTest::test_xsrf_httponly", "tornado/test/web_test.py::FinishExceptionTest::test_finish_exception", "tornado/test/web_test.py::DecoratorTest::test_addslash", "tornado/test/web_test.py::DecoratorTest::test_removeslash", "tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_match", "tornado/test/web_test.py::CacheTest::test_multiple_strong_etag_not_match", "tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_match", "tornado/test/web_test.py::CacheTest::test_multiple_weak_etag_not_match", "tornado/test/web_test.py::CacheTest::test_strong_etag_match", "tornado/test/web_test.py::CacheTest::test_strong_etag_not_match", "tornado/test/web_test.py::CacheTest::test_weak_etag_match", "tornado/test/web_test.py::CacheTest::test_weak_etag_not_match", "tornado/test/web_test.py::CacheTest::test_wildcard_etag", "tornado/test/web_test.py::RequestSummaryTest::test_missing_remote_ip", "tornado/test/web_test.py::HTTPErrorTest::test_copy", "tornado/test/web_test.py::ApplicationTest::test_listen", "tornado/test/web_test.py::URLSpecReverseTest::test_non_reversible", "tornado/test/web_test.py::URLSpecReverseTest::test_reverse", "tornado/test/web_test.py::URLSpecReverseTest::test_reverse_arguments", "tornado/test/web_test.py::RedirectHandlerTest::test_basic_redirect", "tornado/test/web_test.py::RedirectHandlerTest::test_redirect_pattern", "tornado/test/web_test.py::RedirectHandlerTest::test_redirect_with_appending_argument", "tornado/test/web_test.py::RedirectHandlerTest::test_redirect_with_argument" ]
[]
Apache License 2.0
2,116
[ "tornado/web.py" ]
[ "tornado/web.py" ]
burnash__gspread-502
c65b3893d02d8e3896cfb7dd2d20eb2f0bdcdeb4
2018-02-03 23:44:43
20f113c9b49081f768ac689aa3475ad3301d7af2
danthelion: @burnash What do you think about this implementation? Details are in the referenced issue.

burnash: @danthelion thank you for the issue and the PR. As I said in the referenced issue I agree. I'm sorry for the delay in responding. Since you submitted the PR the code of gspread has significantly changed. If you have time, could you rebase your changes to resolve the conflicts? Also please don't forget to add a test to cover this fix.

danthelion: Sure, I'll get on it as soon as I can. 👍
diff --git a/gspread/models.py b/gspread/models.py index db592c7..30b36ea 100644 --- a/gspread/models.py +++ b/gspread/models.py @@ -545,11 +545,11 @@ class Worksheet(object): except KeyError: return [] - def get_all_records(self, empty2zero=False, head=1, default_blank=""): - """Returns a list of dictionaries, all of them having the contents - of the spreadsheet with the head row as keys and each of these - dictionaries holding the contents of subsequent rows of cells - as values. + def get_all_records(self, empty2zero=False, head=1, default_blank="", allow_underscores_in_numeric_literals=False): + """Returns a list of dictionaries, all of them having: + - the contents of the spreadsheet's with the head row as keys, + And each of these dictionaries holding + - the contents of subsequent rows of cells as values. Cell values are numericised (strings that can be read as ints or floats are converted). @@ -564,13 +564,17 @@ class Worksheet(object): converted to something else except empty string or zero. :type default_blank: str + :param allow_underscores_in_numeric_literals: (optional) Allow underscores + in numeric literals, + as introduced in PEP 515 + :type allow_underscores_in_numeric_literals: bool """ idx = head - 1 data = self.get_all_values() keys = data[idx] - values = [numericise_all(row, empty2zero, default_blank) + values = [numericise_all(row, empty2zero, default_blank, allow_underscores_in_numeric_literals) for row in data[idx + 1:]] return [dict(zip(keys, row)) for row in values] diff --git a/gspread/utils.py b/gspread/utils.py index 0854611..a966820 100644 --- a/gspread/utils.py +++ b/gspread/utils.py @@ -29,7 +29,7 @@ def finditem(func, seq): return next((item for item in seq if func(item))) -def numericise(value, empty2zero=False, default_blank=""): +def numericise(value, empty2zero=False, default_blank="", allow_underscores_in_numeric_literals=False): """Returns a value that depends on the input string: - Float if input can be converted to Float - Integer if input can be converted to integer @@ -42,6 +42,10 @@ def numericise(value, empty2zero=False, default_blank=""): 'faa' >>> numericise("3") 3 + >>> numericise("3_2", allow_underscores_in_numeric_literals=False) + '3_2' + >>> numericise("3_2", allow_underscores_in_numeric_literals=True) + '32' >>> numericise("3.1") 3.1 >>> numericise("", empty2zero=True) @@ -58,6 +62,8 @@ def numericise(value, empty2zero=False, default_blank=""): >>> """ if value is not None: + if "_" in value and not allow_underscores_in_numeric_literals: + return value try: value = int(value) except ValueError: @@ -73,9 +79,9 @@ def numericise(value, empty2zero=False, default_blank=""): return value -def numericise_all(input, empty2zero=False, default_blank=""): +def numericise_all(input, empty2zero=False, default_blank="", allow_underscores_in_numeric_literals=False): """Returns a list of numericised values from strings""" - return [numericise(s, empty2zero, default_blank) for s in input] + return [numericise(s, empty2zero, default_blank, allow_underscores_in_numeric_literals) for s in input] def rowcol_to_a1(row, col):
Ambiguity in the `numericise` function depending on Python version.

https://github.com/burnash/gspread/blob/217b43073e1abe9ceb1d65d2d0719d56f0a14642/gspread/utils.py#L58

I have run into some issues when fetching data from spreadsheets using the `get_all_records()` method on Python 3.6+. If a worksheet has data in the format `12_34`, which should be interpreted as a string (or as numeric only if _explicitly_ desired), the `numericise` function will convert it to an `int` because of https://docs.python.org/3/whatsnew/3.6.html#whatsnew36-pep515, introduced in Python 3.6; using Python 3.5.4 it parses it as the string `12_34`, as expected. I don't think following PEP 515 should be the default behaviour in this case.

Example, Python 3.6:

```
>>> from gspread.utils import numericise
>>> numericise('18_29')
1829
```

Python 3.5.4:

```
>>> from gspread.utils import numericise
>>> numericise('18_29')
'18_29'
```
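A small sketch of the opt-in behaviour the patch introduces, with values taken from the patch's own doctests and the added unit test:

```python
from gspread.utils import numericise

# Default: underscores keep the value a string, on any Python version.
assert numericise("18_29") == "18_29"

# Explicit opt-in restores PEP 515 parsing for callers who want it
# (holds on Python 3.6+, where underscore literals parse as ints).
assert numericise("18_29", allow_underscores_in_numeric_literals=True) == 1829
```

The same flag is threaded through `Worksheet.get_all_records(...)`, so users of the high-level API can opt in there as well.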
burnash/gspread
diff --git a/tests/test.py b/tests/test.py index 431c0f7..ee42246 100644 --- a/tests/test.py +++ b/tests/test.py @@ -163,6 +163,20 @@ class UtilsTest(unittest.TestCase): gid = 'ogsrar0' self.assertEqual(utils.wid_to_gid(gid), '1015761654') + def test_numericise(self): + self.assertEqual(utils.numericise('faa'), 'faa') + self.assertEqual(utils.numericise('3'), 3) + self.assertEqual(utils.numericise('3_2'), '3_2') + self.assertEqual(utils.numericise('3_2', allow_underscores_in_numeric_literals=False), '3_2') + self.assertEqual(utils.numericise('3_2', allow_underscores_in_numeric_literals=True), 32) + self.assertEqual(utils.numericise('3.1'), 3.1) + self.assertEqual(utils.numericise('', empty2zero=True), 0) + self.assertEqual(utils.numericise('', empty2zero=False), '') + self.assertEqual(utils.numericise('', default_blank=None), None) + self.assertEqual(utils.numericise('', default_blank='foo'), 'foo') + self.assertEqual(utils.numericise(''), '') + self.assertEqual(utils.numericise(None), None) + class GspreadTest(BetamaxGspreadTest): def _sequence_generator(self):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 2 }
3.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "requests[security]", "oauth2client", "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": [ "test-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
betamax==0.8.1 -e git+https://github.com/burnash/betamax-json-body-serializer.git@0945268b69272cf90c55fdfd962f1801295ff30b#egg=betamax_json_body_serializer certifi==2025.1.31 charset-normalizer==3.4.1 exceptiongroup==1.2.2 -e git+https://github.com/burnash/gspread.git@c65b3893d02d8e3896cfb7dd2d20eb2f0bdcdeb4#egg=gspread httplib2==0.22.0 idna==3.10 iniconfig==2.1.0 nose==1.3.7 oauth2client==4.1.3 packaging==24.2 pluggy==1.5.0 pyasn1==0.6.1 pyasn1_modules==0.4.2 pyparsing==3.2.3 pytest==8.3.5 requests==2.32.3 rsa==4.9 six==1.17.0 tomli==2.2.1 urllib3==2.3.0
name: gspread channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - betamax==0.8.1 - certifi==2025.1.31 - charset-normalizer==3.4.1 - exceptiongroup==1.2.2 - httplib2==0.22.0 - idna==3.10 - iniconfig==2.1.0 - nose==1.3.7 - oauth2client==4.1.3 - packaging==24.2 - pluggy==1.5.0 - pyasn1==0.6.1 - pyasn1-modules==0.4.2 - pyparsing==3.2.3 - pytest==8.3.5 - requests==2.32.3 - rsa==4.9 - six==1.17.0 - tomli==2.2.1 - urllib3==2.3.0 prefix: /opt/conda/envs/gspread
[ "tests/test.py::UtilsTest::test_numericise" ]
[]
[ "tests/test.py::UtilsTest::test_a1_to_rowcol", "tests/test.py::UtilsTest::test_addr_converters", "tests/test.py::UtilsTest::test_extract_id_from_url", "tests/test.py::UtilsTest::test_get_gid", "tests/test.py::UtilsTest::test_no_extract_id_from_url", "tests/test.py::UtilsTest::test_rowcol_to_a1", "tests/test.py::ClientTest::test_create", "tests/test.py::ClientTest::test_import_csv", "tests/test.py::ClientTest::test_no_found_exeption", "tests/test.py::ClientTest::test_openall", "tests/test.py::SpreadsheetTest::test_add_del_worksheet", "tests/test.py::SpreadsheetTest::test_get_worksheet", "tests/test.py::SpreadsheetTest::test_properties", "tests/test.py::SpreadsheetTest::test_sheet1", "tests/test.py::SpreadsheetTest::test_worksheet", "tests/test.py::SpreadsheetTest::test_worksheet_iteration", "tests/test.py::WorksheetTest::test_acell", "tests/test.py::WorksheetTest::test_append_row", "tests/test.py::WorksheetTest::test_cell", "tests/test.py::WorksheetTest::test_clear", "tests/test.py::WorksheetTest::test_delete_row", "tests/test.py::WorksheetTest::test_find", "tests/test.py::WorksheetTest::test_findall", "tests/test.py::WorksheetTest::test_get_all_records", "tests/test.py::WorksheetTest::test_get_all_records_different_header", "tests/test.py::WorksheetTest::test_get_all_values", "tests/test.py::WorksheetTest::test_insert_row", "tests/test.py::WorksheetTest::test_range", "tests/test.py::WorksheetTest::test_resize", "tests/test.py::WorksheetTest::test_update_acell", "tests/test.py::WorksheetTest::test_update_cell", "tests/test.py::WorksheetTest::test_update_cell_multiline", "tests/test.py::WorksheetTest::test_update_cell_unicode", "tests/test.py::WorksheetTest::test_update_cells", "tests/test.py::WorksheetTest::test_update_cells_noncontiguous", "tests/test.py::WorksheetTest::test_update_cells_unicode", "tests/test.py::CellTest::test_numeric_value", "tests/test.py::CellTest::test_properties" ]
[]
MIT License
2,117
[ "gspread/models.py", "gspread/utils.py" ]
[ "gspread/models.py", "gspread/utils.py" ]
conan-io__conan-2432
72c0bd22475858d115cb0f28893a4192362c7129
2018-02-05 17:37:37
c8ee776992121b27d2dcb54be835b501326254bc
diff --git a/conans/client/build/autotools_environment.py b/conans/client/build/autotools_environment.py index 6671d87ad..2d99729ba 100644 --- a/conans/client/build/autotools_environment.py +++ b/conans/client/build/autotools_environment.py @@ -184,7 +184,10 @@ class AutoToolsBuildEnvironment(object): @property def _sysroot_flag(self): - return "--sysroot=%s" % self._adjust_path(self._deps_cpp_info.sysroot) if self._deps_cpp_info.sysroot else None + if self._compiler == 'Visual Studio': + return None + else: + return "--sysroot=%s" % self._adjust_path(self._deps_cpp_info.sysroot) if self._deps_cpp_info.sysroot else None def _configure_link_flags(self): """Not the -L""" @@ -198,7 +201,7 @@ class AutoToolsBuildEnvironment(object): def _configure_flags(self): ret = copy.copy(self._deps_cpp_info.cflags) ret.append(self._architecture_flag) - if self._build_type == "Debug": + if self._build_type == "Debug" and str(self._compiler) in ['gcc', 'clang', 'apple-clang', 'sun-cc']: ret.append("-g") # default debug information elif self._build_type == "Release" and self._compiler == "gcc": # Remove all symbol table and relocation information from the executable. @@ -226,7 +229,9 @@ class AutoToolsBuildEnvironment(object): @property def _architecture_flag(self): - return architecture_dict.get(self._arch, "") + if str(self._compiler) in ['gcc', 'clang', 'apple-clang', 'sun-cc']: + return architecture_dict.get(self._arch, "") + return "" def _get_vars(self): def append(*args): @@ -239,15 +244,21 @@ class AutoToolsBuildEnvironment(object): ret.append(arg) return ret - lib_paths = ['-L%s' % self._adjust_path(x.replace("\\", "/")) for x in self.library_paths] - include_paths = ['-I%s' % self._adjust_path(x.replace("\\", "/")) for x in self.include_paths] + if self._compiler == 'Visual Studio': + lib_paths = ['/LIBPATH:%s' % x.replace("/", "\\") for x in self.library_paths] + include_paths = ['-I%s' % x.replace("/", "\\") for x in self.include_paths] + libs = [lib for lib in self.libs] + else: + lib_paths = ['-L%s' % self._adjust_path(x.replace("\\", "/")) for x in self.library_paths] + include_paths = ['-I%s' % self._adjust_path(x.replace("\\", "/")) for x in self.include_paths] + libs = ['-l%s' % lib for lib in self.libs] ld_flags = append(self.link_flags, lib_paths) + cpp_flags = append(include_paths, ["-D%s" % x for x in self.defines]) - libs = ['-l%s' % lib for lib in self.libs] tmp_compilation_flags = copy.copy(self.flags) - if self.fpic: + if self.fpic and not self._compiler == 'Visual Studio': tmp_compilation_flags.append("-fPIC") cxx_flags = append(tmp_compilation_flags, self.cxx_flags) diff --git a/conans/client/build/cmake.py b/conans/client/build/cmake.py index 86ac0abe4..264fb76fb 100644 --- a/conans/client/build/cmake.py +++ b/conans/client/build/cmake.py @@ -25,7 +25,7 @@ def _get_env_cmake_system_name(): class CMake(object): def __init__(self, conanfile, generator=None, cmake_system_name=True, - parallel=True, build_type=None, toolset=None): + parallel=True, build_type=None, toolset=None, make_program=None): """ :param settings_or_conanfile: Conanfile instance (or settings for retro compatibility) :param generator: Generator name to use or none to autodetect @@ -65,6 +65,15 @@ class CMake(object): # Call the setter to warn and update the definitions if needed self.build_type = build_type + make_program = os.getenv("CONAN_MAKE_PROGRAM") or make_program + if make_program: + if not tools.which(make_program): + self._conanfile.output.warn("The specified make program '%s' cannot be found" + "and 
will be ignored" % make_program) + else: + self._conanfile.output.info("Using '%s' as CMAKE_MAKE_PROGRAM" % make_program) + self.definitions["CMAKE_MAKE_PROGRAM"] = make_program + @property def build_folder(self): return self.build_dir @@ -318,10 +327,11 @@ class CMake(object): def configure(self, args=None, defs=None, source_dir=None, build_dir=None, source_folder=None, build_folder=None, cache_build_folder=None): + + # TODO: Deprecate source_dir and build_dir in favor of xxx_folder args = args or [] defs = defs or {} - source_dir, self.build_dir = self._get_dirs(source_folder, build_folder, source_dir, build_dir, cache_build_folder) diff --git a/conans/client/cmd/profile.py b/conans/client/cmd/profile.py index 4c3ec76aa..ca0a8430f 100644 --- a/conans/client/cmd/profile.py +++ b/conans/client/cmd/profile.py @@ -44,7 +44,11 @@ def cmd_profile_create(profile_name, cache_profiles_path, output, detect=False): contents = profile.dumps() save(profile_path, contents) - output.info("Empty profile created: %s" % profile_path) + + if detect: + output.info("Profile created with detected settings: %s" % profile_path) + else: + output.info("Empty profile created: %s" % profile_path) return profile_path diff --git a/conans/client/conf/__init__.py b/conans/client/conf/__init__.py index 0206c410a..552d975ad 100644 --- a/conans/client/conf/__init__.py +++ b/conans/client/conf/__init__.py @@ -53,7 +53,7 @@ compiler: version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9", "5", "5.1", "5.2", "5.3", "5.4", "6", "6.1", "6.2", "6.3", "6.4", - "7", "7.1", "7.2"] + "7", "7.1", "7.2", "7.3"] libcxx: [libstdc++, libstdc++11] threads: [None, posix, win32] # Windows MinGW exception: [None, dwarf2, sjlj, seh] # Windows MinGW diff --git a/conans/client/conf/detect.py b/conans/client/conf/detect.py index 0a065dd02..286a87807 100644 --- a/conans/client/conf/detect.py +++ b/conans/client/conf/detect.py @@ -3,6 +3,7 @@ import platform import re from subprocess import Popen, PIPE, STDOUT +from conans.client.output import Color from conans.model.version import Version from conans.tools import vs_installation_path @@ -186,6 +187,23 @@ def _detect_compiler_version(result, output): result.append(("compiler.libcxx", "libc++")) elif compiler == "gcc": result.append(("compiler.libcxx", "libstdc++")) + if Version(version) >= Version("5.1"): + + msg = """ +Conan detected a GCC version > 5 but has adjusted the 'compiler.libcxx' setting to +'libstdc++' for backwards compatibility. +Your compiler is likely using the new CXX11 ABI by default (libstdc++11). 
+ +If you want Conan to use the new ABI, edit the default profile at: + + ~/.conan/profiles/default + +adjusting 'compiler.libcxx=libstdc++11' +""" + output.writeln("\n************************* WARNING: GCC OLD ABI COMPATIBILITY " + "***********************\n %s\n************************************" + "************************************************\n\n\n" % msg, + Color.BRIGHT_RED) elif compiler == "cc": if platform.system() == "SunOS": result.append(("compiler.libstdcxx", "libstdcxx4")) diff --git a/conans/client/generators/scons.py b/conans/client/generators/scons.py index d6cf2f4a2..fae561b10 100644 --- a/conans/client/generators/scons.py +++ b/conans/client/generators/scons.py @@ -18,6 +18,7 @@ class SConsGenerator(Generator): ' "CCFLAGS" : {info.cflags},\n' ' "SHLINKFLAGS" : {info.sharedlinkflags},\n' ' "LINKFLAGS" : {info.exelinkflags},\n' + ' "VERSION" : "{info.version}",\n' ' }},\n') sections = [] diff --git a/conans/client/proxy.py b/conans/client/proxy.py index d2319d18b..19f114f3e 100644 --- a/conans/client/proxy.py +++ b/conans/client/proxy.py @@ -205,16 +205,20 @@ class ConanProxy(object): if self._remote_name: output.info("Not found, retrieving from server '%s' " % self._remote_name) - remote = self._registry.remote(self._remote_name) - return _retrieve_from_remote(remote) + ref_remote = self._registry.remote(self._remote_name) else: ref_remote = self._registry.get_ref(conan_reference) if ref_remote: output.info("Retrieving from predefined remote '%s'" % ref_remote.name) + + if ref_remote: + try: return _retrieve_from_remote(ref_remote) - else: - output.info("Not found in local cache, looking in remotes...") + except NotFoundException: + raise NotFoundException("%s was not found in remote '%s'" % (str(conan_reference), + ref_remote.name)) + output.info("Not found in local cache, looking in remotes...") remotes = self._registry.remotes for remote in remotes: logger.debug("Trying with remote %s" % remote.name) diff --git a/conans/client/tools/win.py b/conans/client/tools/win.py index 4627807bd..99ea22167 100644 --- a/conans/client/tools/win.py +++ b/conans/client/tools/win.py @@ -1,4 +1,5 @@ import glob +import json import os import platform import re @@ -68,19 +69,13 @@ def vs_installation_path(version): if version not in vs_installation_path._cached: vs_path = None - program_files = os.environ.get("ProgramFiles(x86)", os.environ.get("ProgramFiles")) - if program_files: - vswhere_path = os.path.join(program_files, "Microsoft Visual Studio", "Installer", - "vswhere.exe") - if os.path.isfile(vswhere_path): - version_range = "[%d.0, %d.0)" % (int(version), int(version) + 1) - try: - output = subprocess.check_output([vswhere_path, "-version", version_range, - "-legacy", "-property", "installationPath"]) - vs_path = output.decode().strip() - _global_output.info("vswhere detected VS %s in %s" % (version, vs_path)) - except (ValueError, subprocess.CalledProcessError, UnicodeDecodeError) as e: - _global_output.error("vswhere error: %s" % str(e)) + legacy_products = vswhere(legacy=True) + all_products = vswhere(products=["*"]) + products = legacy_products + all_products + + for product in products: + if product["installationVersion"].startswith(("%d." 
% int(version))): + vs_path = product["installationPath"] # Remember to cache result vs_installation_path._cached[version] = vs_path @@ -88,6 +83,78 @@ def vs_installation_path(version): return vs_installation_path._cached[version] +def vswhere(all_=False, prerelease=False, products=None, requires=None, version="", latest=False, + legacy=False, property_="", nologo=True): + + # 'version' option only works if Visual Studio 2017 is installed: + # https://github.com/Microsoft/vswhere/issues/91 + + products = list() if products is None else products + requires = list() if requires is None else requires + + if legacy and (products or requires): + raise ConanException("The 'legacy' parameter cannot be specified with either the " + "'products' or 'requires' parameter") + + program_files = os.environ.get("ProgramFiles(x86)", os.environ.get("ProgramFiles")) + + vswhere_path = "" + + if program_files: + vswhere_path = os.path.join(program_files, "Microsoft Visual Studio", "Installer", + "vswhere.exe") + if not os.path.isfile(vswhere_path): + raise ConanException("Cannot locate 'vswhere'") + else: + raise ConanException("Cannot locate 'Program Files'/'Program Files (x86)' directory") + + arguments = list() + arguments.append(vswhere_path) + + # Output json format + arguments.append("-format") + arguments.append("json") + + if all_: + arguments.append("-all") + + if prerelease: + arguments.append("-prerelease") + + if products: + arguments.append("-products") + arguments.extend(products) + + if requires: + arguments.append("-requires") + arguments.extend(requires) + + if len(version) is not 0: + arguments.append("-version") + arguments.append(version) + + if latest: + arguments.append("-latest") + + if legacy: + arguments.append("-legacy") + + if len(property_) is not 0: + arguments.append("-property") + arguments.append(property_) + + if nologo: + arguments.append("-nologo") + + try: + output = subprocess.check_output(arguments) + vswhere_out = output.decode().strip() + except (ValueError, subprocess.CalledProcessError, UnicodeDecodeError) as e: + raise ConanException("vswhere error: %s" % str(e)) + + return json.loads(vswhere_out) + + def find_windows_10_sdk(): """finds valid Windows 10 SDK version which can be passed to vcvarsall.bat (vcvars_command)""" # uses the same method as VCVarsQueryRegistry.bat diff --git a/conans/tools.py b/conans/tools.py index 45d360092..3289293a8 100644 --- a/conans/tools.py +++ b/conans/tools.py @@ -5,6 +5,8 @@ import requests from conans.client.tools import * from conans.client.output import ConanOutput # noinspection PyUnresolvedReferences +from conans.util.env_reader import get_env +# noinspection PyUnresolvedReferences from conans.util.files import (_generic_algorithm_sum, load, save, sha256sum, sha1sum, md5sum, md5, touch, relative_dirs, rmdir, mkdir) diff --git a/conans/util/env_reader.py b/conans/util/env_reader.py index 8628843c8..cd6f77bb8 100644 --- a/conans/util/env_reader.py +++ b/conans/util/env_reader.py @@ -9,8 +9,11 @@ import os -def get_env(env_key, default=None, environment=os.environ): +def get_env(env_key, default=None, environment=None): """Get the env variable associated with env_key""" + if environment is None: + environment = os.environ + env_var = environment.get(env_key, default) if env_var != default: if isinstance(default, str): diff --git a/setup.py b/setup.py index 66e70aad8..f35ae708b 100644 --- a/setup.py +++ b/setup.py @@ -25,6 +25,7 @@ def get_requires(filename): requirements.append(line) return requirements + project_requirements = 
get_requires("conans/requirements.txt") if platform.system() == "Darwin": project_requirements.extend(get_requires("conans/requirements_osx.txt")) @@ -53,7 +54,7 @@ setup( # Versions should comply with PEP440. For a discussion on single-sourcing # the version across setup.py and the project code, see # https://packaging.python.org/en/latest/single_source_version.html - version=load_version(), # + ".rc1", + version=load_version(), # + ".rc1", description='Conan C/C++ package manager', # long_description="An open source, decentralized package manager, to automate building and sharing of packages", @@ -71,12 +72,14 @@ setup( # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Topic :: Software Development :: Build Tools', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6' ], # What does your project relate to?
CONAN_RUN_TESTS: make `tools.get_env` available in recipes, so a recipe can check an environment variable to decide whether to run unit tests
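A minimal recipe sketch of the requested usage, matching the `tools.get_env` call exercised by the test below (the `ctest` command is a made-up placeholder):

```python
from conans import ConanFile, tools


class PkgConan(ConanFile):
    name = "pkg"
    version = "0.1"

    def build(self):
        # CONAN_RUN_TESTS="1" in the environment flips this to True;
        # unset (or "0"/"") leaves the default False.
        if tools.get_env("CONAN_RUN_TESTS", default=False):
            self.run("ctest")  # hypothetical test runner invocation
```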
conan-io/conan
diff --git a/conans/test/build_helpers/autotools_configure_test.py b/conans/test/build_helpers/autotools_configure_test.py index a78d8fd30..d4ecf2233 100644 --- a/conans/test/build_helpers/autotools_configure_test.py +++ b/conans/test/build_helpers/autotools_configure_test.py @@ -44,6 +44,24 @@ class AutoToolsConfigureTest(unittest.TestCase): self.assertEquals(runner.command_called, "make %s -j%s" % (things, cpu_count())) def test_variables(self): + # Visual Studio + settings = MockSettings({"build_type": "Release", + "arch": "x86", + "compiler": "Visual Studio", + "compiler.version": "14", + "compiler.runtime": "MD"}) + conanfile = MockConanfile(settings) + self._set_deps_info(conanfile) + + be = AutoToolsBuildEnvironment(conanfile) + expected = {'CFLAGS': 'a_c_flag', + 'CPPFLAGS': '-Ipath\\includes -Iother\\include\\path -Donedefinition -Dtwodefinition', + 'CXXFLAGS': 'a_c_flag a_cpp_flag', + 'LDFLAGS': 'shared_link_flag exe_link_flag /LIBPATH:one\\lib\\path', + 'LIBS': 'onelib twolib'} + + self.assertEquals(be.vars, expected) + # GCC 32 settings = MockSettings({"build_type": "Release", "arch": "x86", diff --git a/conans/test/build_helpers/cmake_test.py b/conans/test/build_helpers/cmake_test.py index e76bb4ca1..4c418f83c 100644 --- a/conans/test/build_helpers/cmake_test.py +++ b/conans/test/build_helpers/cmake_test.py @@ -1,5 +1,6 @@ import os import shutil +import stat import sys import unittest import platform @@ -33,6 +34,33 @@ class CMakeTest(unittest.TestCase): cmake = CMake(conan_file) self.assertIn('-G "My CMake Generator"', cmake.command_line) + def cmake_make_program_test(self): + settings = Settings.loads(default_settings_yml) + settings.os = "Linux" + settings.compiler = "gcc" + settings.compiler.version = "6.3" + settings.arch = "x86" + settings.build_type = "Release" + conan_file = ConanFileMock() + conan_file.settings = settings + conan_file.source_folder = os.path.join(self.tempdir, "my_cache_source_folder") + conan_file.build_folder = os.path.join(self.tempdir, "my_cache_build_folder") + + # Existing make + make_path = os.path.join(self.tempdir, "make") + save(make_path, "") + st = os.stat(make_path) + os.chmod(make_path, st.st_mode | stat.S_IEXEC) + with tools.environment_append({"CONAN_MAKE_PROGRAM": make_path}): + cmake = CMake(conan_file) + self.assertEquals(cmake.definitions["CMAKE_MAKE_PROGRAM"], make_path) + + # Not existing make + with tools.environment_append({"CONAN_MAKE_PROGRAM": "fake_path/make"}): + cmake = CMake(conan_file) + self.assertNotIn("CMAKE_MAKE_PROGRAM", cmake.definitions) + self.assertIn("The specified make program 'fake_path/make' cannot be found", conan_file.output) + def folders_test(self): def quote_var(var): return "'%s'" % var if platform.system() != "Windows" else var diff --git a/conans/test/command/install_test.py b/conans/test/command/install_test.py index 3284fe5f3..9cc8437e9 100644 --- a/conans/test/command/install_test.py +++ b/conans/test/command/install_test.py @@ -2,14 +2,14 @@ import unittest import platform import os -from conans.test.utils.tools import TestClient +from conans.test.utils.tools import TestClient, TestServer from conans.model.ref import ConanFileReference, PackageReference from conans.paths import CONANFILE, CONANINFO from conans.model.info import ConanInfo from conans.test.utils.cpp_test_files import cpp_hello_conan_files from conans.paths import CONANFILE_TXT from conans.client.conf.detect import detected_os -from conans.util.files import load, mkdir +from conans.util.files import load, mkdir, rmdir class 
InstallTest(unittest.TestCase): @@ -359,7 +359,7 @@ class TestConan(ConanFile): client.run("install . --build=missing -s os=Windows -s os_build=Windows --install-folder=win_dir") self.assertIn("Hello/0.1@lasote/stable from local cache\n", - client.out) # Test "from local cache" output message + client.out) # Test "from local cache" output message client.run("install . --build=missing -s os=Macos -s os_build=Macos --install-folder=os_dir") conaninfo = load(os.path.join(client.current_folder, "win_dir/conaninfo.txt")) self.assertIn("os=Windows", conaninfo) @@ -428,3 +428,28 @@ class TestConan(ConanFile): ignore_error=True) self.assertTrue(error) self.assertIn("Conanfile not found", client.out) + + def install_broken_reference_test(self): + client = TestClient(servers={"default": TestServer()}, + users={"default": [("lasote", "mypass")]}) + conanfile = """from conans import ConanFile +class Pkg(ConanFile): + pass +""" + client.save({"conanfile.py": conanfile}) + client.run("export . Hello/0.1@lasote/stable") + client.run("remote add_ref Hello/0.1@lasote/stable default") + conan_reference = ConanFileReference.loads("Hello/0.1@lasote/stable") + rmdir(os.path.join(client.client_cache.conan(conan_reference))) + + error = client.run("install Hello/0.1@lasote/stable", ignore_error=True) + self.assertTrue(error) + self.assertIn("ERROR: Hello/0.1@lasote/stable was not found in remote 'default'", + client.out) + + # If it was associated, it has to be desasociated + client.run("remote remove_ref Hello/0.1@lasote/stable") + error = client.run("install Hello/0.1@lasote/stable", ignore_error=True) + self.assertTrue(error) + self.assertIn("ERROR: Unable to find 'Hello/0.1@lasote/stable' in remotes", + client.out) diff --git a/conans/test/generators/scons_test.py b/conans/test/generators/scons_test.py index 7684c11d8..44ecdebaa 100644 --- a/conans/test/generators/scons_test.py +++ b/conans/test/generators/scons_test.py @@ -12,10 +12,12 @@ class SConsGeneratorTest(unittest.TestCase): ref = ConanFileReference.loads("MyPkg/0.1@lasote/stables") cpp_info = CppInfo("") cpp_info.defines = ["MYDEFINE1"] + cpp_info.version = "0.1" conanfile.deps_cpp_info.update(cpp_info, ref.name) - ref = ConanFileReference.loads("MyPkg2/0.1@lasote/stables") + ref = ConanFileReference.loads("MyPkg2/3.2.3@lasote/stables") cpp_info = CppInfo("") cpp_info.defines = ["MYDEFINE2"] + cpp_info.version = "3.2.3" conanfile.deps_cpp_info.update(cpp_info, ref.name) generator = SConsGenerator(conanfile) content = generator.content @@ -23,3 +25,6 @@ class SConsGeneratorTest(unittest.TestCase): self.assertIn(" \"CPPDEFINES\" : [\'MYDEFINE2\', \'MYDEFINE1\'],", scons_lines) self.assertIn(" \"CPPDEFINES\" : [\'MYDEFINE1\'],", scons_lines) self.assertIn(" \"CPPDEFINES\" : [\'MYDEFINE2\'],", scons_lines) + self.assertIn(" \"VERSION\" : \"None\",", scons_lines) + self.assertIn(" \"VERSION\" : \"0.1\",", scons_lines) + self.assertIn(" \"VERSION\" : \"3.2.3\",", scons_lines) diff --git a/conans/test/util/tools_test.py b/conans/test/util/tools_test.py index c66818d0f..7c690dc47 100644 --- a/conans/test/util/tools_test.py +++ b/conans/test/util/tools_test.py @@ -71,6 +71,85 @@ class ToolsTest(unittest.TestCase): with tools.environment_append({"CONAN_CPU_COUNT": "34"}): self.assertEquals(tools.cpu_count(), 34) + def get_env_unit_test(self): + """ + Unit tests tools.get_env + """ + # Test default + self.assertIsNone( + tools.get_env("NOT_DEFINED", environment={}), + None + ) + # Test defined default + self.assertEqual( + tools.get_env("NOT_DEFINED_KEY", 
default="random_default", environment={}), + "random_default" + ) + # Test return defined string + self.assertEqual( + tools.get_env("FROM_STR", default="", environment={"FROM_STR": "test_string_value"}), + "test_string_value" + ) + # Test boolean conversion + self.assertEqual( + tools.get_env("BOOL_FROM_STR", default=False, environment={"BOOL_FROM_STR": "1"}), + True + ) + self.assertEqual( + tools.get_env("BOOL_FROM_STR", default=True, environment={"BOOL_FROM_STR": "0"}), + False + ) + self.assertEqual( + tools.get_env("BOOL_FROM_STR", default=False, environment={"BOOL_FROM_STR": "True"}), + True + ) + self.assertEqual( + tools.get_env("BOOL_FROM_STR", default=True, environment={"BOOL_FROM_STR": ""}), + False + ) + # Test int conversion + self.assertEqual( + tools.get_env("TO_INT", default=2, environment={"TO_INT": "1"}), + 1 + ) + # Test float conversion + self.assertEqual( + tools.get_env("TO_FLOAT", default=2.0, environment={"TO_FLOAT": "1"}), + 1.0 + ), + # Test list conversion + self.assertEqual( + tools.get_env("TO_LIST", default=[], environment={"TO_LIST": "1,2,3"}), + ["1", "2", "3"] + ) + self.assertEqual( + tools.get_env("TO_LIST_NOT_TRIMMED", default=[], environment={"TO_LIST_NOT_TRIMMED": " 1 , 2 , 3 "}), + [" 1 ", " 2 ", " 3 "] + ) + + def test_get_env_in_conanfile(self): + """ + Test get_env is available and working in conanfile + """ + client = TestClient() + + conanfile = """from conans import ConanFile, tools + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + + def build(self): + run_tests = tools.get_env("CONAN_RUN_TESTS", default=False) + print("test_get_env_in_conafile CONAN_RUN_TESTS=%r" % run_tests) + assert(run_tests == True) + """ + client.save({"conanfile.py": conanfile}) + + with tools.environment_append({"CONAN_RUN_TESTS": "1"}): + client.run("install .") + client.run("build .") + def test_global_tools_overrided(self): client = TestClient()
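The `tools.get_env` tests in the conan test_patch above pin down a type-driven conversion: the type of the declared default decides how the raw environment string is interpreted. Below is a hedged reconstruction of that behavior inferred purely from the assertions, not copied from conan's actual implementation; the truthy spellings accepted for booleans are an assumption beyond what the tests exercise.

```python
import os

def get_env(name, default=None, environment=None):
    # Minimal sketch of the behavior the tests assert: missing keys return
    # the default; present keys are converted based on the default's type.
    env = os.environ if environment is None else environment
    if name not in env:
        return default
    raw = env[name]
    if isinstance(default, bool):  # check bool before int: bool subclasses int
        return raw in ('1', 'true', 'True', 'yes')  # assumed truthy spellings
    if isinstance(default, int):
        return int(raw)
    if isinstance(default, float):
        return float(raw)
    if isinstance(default, list):
        return raw.split(',')  # items are deliberately not trimmed, per the tests
    return raw

assert get_env('B', default=True, environment={'B': ''}) is False
assert get_env('L', default=[], environment={'L': '1,2,3'}) == ['1', '2', '3']
```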
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 11 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc pkg-config" ], "python": "3.6", "reqs_path": [ "conans/requirements.txt", "conans/requirements_dev.txt", "conans/requirements_server.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==1.6.6 attrs==22.2.0 beautifulsoup4==4.12.3 bottle==0.12.25 certifi==2021.5.30 charset-normalizer==2.0.12 codecov==2.1.13 colorama==0.3.9 -e git+https://github.com/conan-io/conan.git@72c0bd22475858d115cb0f28893a4192362c7129#egg=conan coverage==4.2 distro==1.1.0 fasteners==0.19 future==0.16.0 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 isort==5.10.1 lazy-object-proxy==1.7.1 mccabe==0.7.0 mock==1.3.0 node-semver==0.2.0 nose==1.3.7 nose-parameterized==0.5.0 packaging==21.3 patch==1.16 pbr==6.1.1 pluggy==1.0.0 pluginbase==0.7 py==1.11.0 Pygments==2.14.0 PyJWT==1.7.1 pylint==1.8.4 pyparsing==3.1.4 pytest==7.0.1 PyYAML==3.12 requests==2.27.1 six==1.17.0 soupsieve==2.3.2.post1 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 waitress==2.0.0 WebOb==1.8.9 WebTest==2.0.35 wrapt==1.16.0 zipp==3.6.0
name: conan channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==1.6.6 - attrs==22.2.0 - beautifulsoup4==4.12.3 - bottle==0.12.25 - charset-normalizer==2.0.12 - codecov==2.1.13 - colorama==0.3.9 - coverage==4.2 - distro==1.1.0 - fasteners==0.19 - future==0.16.0 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isort==5.10.1 - lazy-object-proxy==1.7.1 - mccabe==0.7.0 - mock==1.3.0 - node-semver==0.2.0 - nose==1.3.7 - nose-parameterized==0.5.0 - packaging==21.3 - patch==1.16 - pbr==6.1.1 - pluggy==1.0.0 - pluginbase==0.7 - py==1.11.0 - pygments==2.14.0 - pyjwt==1.7.1 - pylint==1.8.4 - pyparsing==3.1.4 - pytest==7.0.1 - pyyaml==3.12 - requests==2.27.1 - six==1.17.0 - soupsieve==2.3.2.post1 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - waitress==2.0.0 - webob==1.8.9 - webtest==2.0.35 - wrapt==1.16.0 - zipp==3.6.0 prefix: /opt/conda/envs/conan
[ "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_variables" ]
[ "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_pkg_config_paths", "conans/test/util/tools_test.py::ToolsTest::test_get_env_in_conanfile", "conans/test/util/tools_test.py::ToolsTest::test_global_tools_overrided" ]
[ "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_make_targets", "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_mocked_methods", "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_previous_env", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_clean_sh_path", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_cores_ancient_visual", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_deprecated_behaviour", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_run_tests", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_shared", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_sysroot", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_verbose", "conans/test/util/tools_test.py::ReplaceInFileTest::test_replace_in_file", "conans/test/util/tools_test.py::ToolsTest::test_environment_nested" ]
[]
MIT License
2,119
[ "conans/client/generators/scons.py", "conans/client/tools/win.py", "setup.py", "conans/client/build/cmake.py", "conans/client/proxy.py", "conans/tools.py", "conans/client/cmd/profile.py", "conans/client/build/autotools_environment.py", "conans/client/conf/__init__.py", "conans/util/env_reader.py", "conans/client/conf/detect.py" ]
[ "conans/client/generators/scons.py", "conans/client/tools/win.py", "setup.py", "conans/client/build/cmake.py", "conans/client/proxy.py", "conans/tools.py", "conans/client/cmd/profile.py", "conans/client/build/autotools_environment.py", "conans/client/conf/__init__.py", "conans/util/env_reader.py", "conans/client/conf/detect.py" ]
CORE-GATECH-GROUP__serpent-tools-85
d46f0e5a22b6ac257b7ce5f83222eba0f26c3895
2018-02-05 18:59:30
1d475a4dd8982532d097286468b2d616345c8ab8
diff --git a/serpentTools/__init__.py b/serpentTools/__init__.py index 8933bae..6343059 100644 --- a/serpentTools/__init__.py +++ b/serpentTools/__init__.py @@ -4,16 +4,6 @@ ROOT_DIR = os.path.dirname(__file__) from serpentTools.parsers import read from serpentTools import messages - -# List TODOS/feature requests here for now -# Compatibility -# TODO: Test compatibility with earlier numpy releases -# Usage/scripting -# TODO: Update rc with dictionary -# TODO: Update rc with yaml file into dictionary -# TODO: Capture materials with underscores for depletion -# TODO: Find a way to capture some or all of log messages for testing - from ._version import get_versions __version__ = get_versions()['version'] del get_versions diff --git a/serpentTools/messages.py b/serpentTools/messages.py index 8321264..643f994 100644 --- a/serpentTools/messages.py +++ b/serpentTools/messages.py @@ -58,7 +58,7 @@ def info(message): def warning(message): - """Log a warning that something that could go wrong or should be avoided.""" + """Log a warning that something could go wrong or should be avoided.""" __logger__.warning('%s', message) diff --git a/serpentTools/objects/__init__.py b/serpentTools/objects/__init__.py index 11bf9ec..fc528bc 100644 --- a/serpentTools/objects/__init__.py +++ b/serpentTools/objects/__init__.py @@ -1,44 +1,24 @@ """Objects used to support the parsing.""" -class SupportingObject(object): - """ - Base supporting object. - - Parameters - ---------- - container: Some parser from serpentTools.parsers - Container that created this object - - """ - - def __init__(self, container): - self.filePath = container.filePath - - def __str__(self): - return '<{} from {}>'.format(self.__class__.__name__, self.filePath) - - @staticmethod - def _convertVariableName(variable): - """Converta a SERPENT variable to camelCase.""" - lowerSplits = [item.lower() for item in variable.split('_')] - if len(lowerSplits) == 1: - return lowerSplits[0] - else: - return lowerSplits[0] + ''.join([item.capitalize() - for item in lowerSplits[1:]]) - - -class NamedObject(SupportingObject): +class NamedObject(object): """Class for named objects like materials and detectors.""" - def __init__(self, container, name): - SupportingObject.__init__(self, container) + def __init__(self, name): self.name = name def __str__(self): - return '<{} {} from {}>'.format(self.__class__.__name__, - self.name, self.filePath) + return '<{} {}>'.format(self.__class__.__name__, self.name) + + +def convertVariableName(variable): + """Convert a SERPENT variable to camelCase""" + lowerSplits = [item.lower() for item in variable.split('_')] + if len(lowerSplits) == 1: + return lowerSplits[0] + else: + return lowerSplits[0] + ''.join([item.capitalize() + for item in lowerSplits[1:]]) def splitItems(items): diff --git a/serpentTools/objects/containers.py b/serpentTools/objects/containers.py index afe5756..c8ed16b 100644 --- a/serpentTools/objects/containers.py +++ b/serpentTools/objects/containers.py @@ -10,12 +10,12 @@ Contents """ from collections import OrderedDict -from numpy import array, arange, unique, log, divide, ones_like - from matplotlib import pyplot +from numpy import array, arange, unique, log, divide, ones_like + from serpentTools.plot import cartMeshPlot -from serpentTools.objects import SupportingObject, NamedObject +from serpentTools.objects import NamedObject, convertVariableName from serpentTools.messages import warning, SerpentToolsException, debug DET_COLS = ('value', 'energy', 'universe', 'cell', 'material', 'lattice', @@ 
-23,15 +23,12 @@ DET_COLS = ('value', 'energy', 'universe', 'cell', 'material', 'lattice', """Name of the columns of the data""" -class HomogUniv(SupportingObject): +class HomogUniv(NamedObject): """ Class for storing homogenized universe specifications and retrieving them Parameters ---------- - container: serpentTools.objects.readers.BaseReader or - serpentTools.objects.containers.BranchContainer - Object to which this universe is attached name: str name of the universe bu: float @@ -63,9 +60,8 @@ class HomogUniv(SupportingObject): Other values that do not not conform to inf/b1 dictionaries """ - def __init__(self, container, name, bu, step, day): - SupportingObject.__init__(self, container) - self.name = name + def __init__(self, name, bu, step, day): + NamedObject.__init__(self, name) self.bu = bu self.step = step self.day = day @@ -81,6 +77,7 @@ class HomogUniv(SupportingObject): Sets the value of the variable and, optionally, the associate s.d. .. warning:: + This method will overwrite data for variables that already exist Parameters @@ -101,10 +98,10 @@ class HomogUniv(SupportingObject): """ # 1. Check the input type - variableName = SupportingObject._convertVariableName(variableName) + variableName = convertVariableName(variableName) if not isinstance(uncertainty, bool): - raise TypeError('The variable uncertainty has type %s.\n ...' - 'It should be boolean.', type(uncertainty)) + raise TypeError('The variable uncertainty has type {}, ' + 'should be boolean.'.format(type(uncertainty))) # 2. Pointer to the proper dictionary setter = self._lookup(variableName, uncertainty) # 3. Check if variable is already present. Then set the variable. @@ -186,8 +183,6 @@ class Detector(NamedObject): Parameters ---------- - parser: :py:class:`~serpentTools.parsers.detector.DetectorReader` - Detector reader that created this detector name: str Name of this detector @@ -207,8 +202,8 @@ class Detector(NamedObject): Collection of unique indexes for each requested bin """ - def __init__(self, parser, name): - NamedObject.__init__(self, parser, name) + def __init__(self, name): + NamedObject.__init__(self, name) self.bins = None self.tallies = None self.errors = None @@ -223,9 +218,6 @@ class Detector(NamedObject): return self.bins.shape[0] return 0 - def __str__(self): - return 'Detector {} from {}'.format(self.name, self.filePath) - def addTallyData(self, bins): """Add tally data to this detector""" self.__reshaped = False @@ -312,13 +304,13 @@ class Detector(NamedObject): 'Slicing requires detector to be reshaped') if data not in self._map: raise KeyError( - 'Slicing function only works with the following data arguments:' - '\n{}'.format(', '.join(self._map.keys()))) + 'Data argument {} not in allowed options' + '\n{}'.format(', '.join(data, self._map.keys()))) work = self._map[data] if work is None: raise SerpentToolsException( '{} data for detector {} is None. Cannot perform slicing' - .format(data, self.name)) + .format(data, self.name)) return work[self._getSlices(fixed)] def _getSlices(self, fixed): @@ -397,7 +389,7 @@ class Detector(NamedObject): if not len(slicedTallies.shape) == 1: raise SerpentToolsException( 'Sliced data must be one-dimensional for spectrum plot, not {}' - .format(slicedTallies.shape) + .format(slicedTallies.shape) ) if normalize: lethBins = log( @@ -609,7 +601,7 @@ class Detector(NamedObject): return ax -class BranchContainer(SupportingObject): +class BranchContainer(object): """ Class that stores data for a single branch. 
@@ -621,8 +613,8 @@ class BranchContainer(SupportingObject): Parameters ---------- - parser: serpentTools.objects.readers.BaseReader - Parser that read the file that created this object + filePath: str + Path to input file from which this container was connected branchID: int Index for the run for this branch branchNames: tuple @@ -641,8 +633,8 @@ class BranchContainer(SupportingObject): ``(universeID, burnup, burnIndex)`` """ - def __init__(self, parser, branchID, branchNames, stateData): - SupportingObject.__init__(self, parser) + def __init__(self, filePath, branchID, branchNames, stateData): + self.filePath = filePath self.branchID = branchID self.stateData = stateData self.universes = {} @@ -696,7 +688,7 @@ class BranchContainer(SupportingObject): ------- newUniv: serpentTools.objects.containers.HomogUniv """ - newUniv = HomogUniv(self, univID, burnup, burnIndex, burnDays) + newUniv = HomogUniv(univID, burnup, burnIndex, burnDays) key = tuple( [univID, burnup, burnIndex] + ([burnDays] if burnDays else [])) if key in self.__keys: @@ -747,16 +739,3 @@ class BranchContainer(SupportingObject): raise KeyError( 'Could not find a universe that matched requested universe {} and ' '{} {}'.format(univID, searchName, searchValue)) - - -if __name__ == '__main__': - import os - from matplotlib import pyplot - - from serpentTools import ROOT_DIR, read - - bwrF = os.path.join(ROOT_DIR, '..', 'examples', 'bwr_det0.m') - bwr = read(bwrF) - s = bwr.detectors['spectrum'] - s.meshPlot('e', 'reaction', xscale='log') - pyplot.show() diff --git a/serpentTools/objects/materials.py b/serpentTools/objects/materials.py index f5706d1..77abf43 100644 --- a/serpentTools/objects/materials.py +++ b/serpentTools/objects/materials.py @@ -4,7 +4,7 @@ import numpy from matplotlib import pyplot from serpentTools import messages -from serpentTools.objects import NamedObject +from serpentTools.objects import NamedObject, convertVariableName class DepletedMaterial(NamedObject): @@ -26,7 +26,7 @@ class DepletedMaterial(NamedObject): names: numpy.array or None Names of isotopes days: numpy.array or None - Days overwhich the material was depleted + Days over which the material was depleted adens: numpy.array or None Atomic density over time for each nuclide mdens: numpy.array or None @@ -37,14 +37,15 @@ class DepletedMaterial(NamedObject): """ def __init__(self, parser, name): - NamedObject.__init__(self, parser, name) + NamedObject.__init__(self, name) self.data = {} self.zai = parser.metadata.get('zai', None) self.names = parser.metadata.get('names', None) self.days = parser.metadata.get('days', None) - self.__burnup__ = None - self.__adens__ = None - self.__mdens__ = None + self.filePath = parser.filePath + self.__burnup = None + self.__adens = None + self.__mdens = None def __getitem__(self, item): if item not in self.data: @@ -57,27 +58,27 @@ class DepletedMaterial(NamedObject): if 'burnup' not in self.data: raise AttributeError('Burnup for material {} has not been loaded' .format(self.name)) - if self.__burnup__ is None: - self.__burnup__ = self.data['burnup'] - return self.__burnup__ + if self.__burnup is None: + self.__burnup = self.data['burnup'] + return self.__burnup @property def adens(self): if 'adens' not in self.data: raise AttributeError('Atomic densities for material {} have not ' 'been loaded'.format(self.name)) - if self.__adens__ is None: - self.__adens__ = self.data['adens'] - return self.__adens__ + if self.__adens is None: + self.__adens = self.data['adens'] + return self.__adens @property def 
mdens(self): if 'mdens' not in self.data: raise AttributeError('Mass densities for material {} has not been ' 'loaded'.format(self.name)) - if self.__mdens__ is None: - self.__mdens__ = self.data['mdens'] - return self.__mdens__ + if self.__mdens is None: + self.__mdens = self.data['mdens'] + return self.__mdens def addData(self, variable, rawData): """ @@ -90,7 +91,7 @@ class DepletedMaterial(NamedObject): rawData: list List of strings corresponding to the raw data from the file """ - newName = self._convertVariableName(variable) + newName = convertVariableName(variable) messages.debug('Adding {} data to {}'.format(newName, self.name)) if isinstance(rawData, str): scratch = [float(item) for item in rawData.split()] @@ -148,8 +149,8 @@ class DepletedMaterial(NamedObject): if timePoints is not None: timeCheck = self._checkTimePoints(xUnits, timePoints) if any(timeCheck): - raise KeyError('The following times were not present in file {}' - '\n{}'.format(self.filePath, + raise KeyError('The following times were not present in file' + '{}\n{}'.format(self.filePath, ', '.join(timeCheck))) if names and self.names is None: raise AttributeError( diff --git a/serpentTools/parsers/__init__.py b/serpentTools/parsers/__init__.py index 978afc9..fb980bf 100644 --- a/serpentTools/parsers/__init__.py +++ b/serpentTools/parsers/__init__.py @@ -70,7 +70,8 @@ def inferReader(filePath): for reg, reader in six.iteritems(REGEXES): match = re.match(reg, filePath) if match and match.group() == filePath: - info('Inferred reader for {}: {}'.format(filePath, reader.__name__)) + info('Inferred reader for {}: {}' + .format(filePath, reader.__name__)) return reader raise SerpentToolsException( 'Failed to infer filetype and thus accurate reader from' @@ -140,7 +141,8 @@ def read(filePath, reader='infer'): 'Reader type {} not supported'.format(reader) ) else: - assert callable(reader), 'Reader {} is not callable'.format(str(reader)) + assert callable(reader), ( + 'Reader {} is not callable'.format(str(reader))) loader = reader returnedFromLoader = loader(filePath) returnedFromLoader.read() @@ -202,7 +204,8 @@ def depmtx(fileP): if not nMatch: raise SerpentToolsException(failMsg + line) - n0Storage, line, numIso = _parseIsoBlock(f, {}, nMatch, line, nDensRegex) + n0Storage, line, numIso = _parseIsoBlock(f, {}, nMatch, line, + nDensRegex) debug('Found {} isotopes for file {}'.format(numIso, fileP)) n0 = empty((numIso, 1), dtype=longfloat) for indx, v in six.iteritems(n0Storage): diff --git a/serpentTools/parsers/branching.py b/serpentTools/parsers/branching.py index 3daadf8..3d92a86 100644 --- a/serpentTools/parsers/branching.py +++ b/serpentTools/parsers/branching.py @@ -74,7 +74,8 @@ class BranchingReader(XSReader): if branchNames not in self.branches: branchState = self._processBranchStateData() self.branches[branchNames] = ( - BranchContainer(self, coefIndx, branchNames, branchState)) + BranchContainer(self.filePath, coefIndx, branchNames, + branchState)) else: self._advance() return self.branches[branchNames], int(totUniv) @@ -98,7 +99,8 @@ class BranchingReader(XSReader): unvID, numVariables = [int(xx) for xx in self._advance()] univ = branch.addUniverse(unvID, burnup, burnupIndex) for step in range(numVariables): - splitList = self._advance(possibleEndOfFile=step == numVariables-1) + splitList = self._advance( + possibleEndOfFile=step == numVariables - 1) varName = splitList[0] varValues = [float(xx) for xx in splitList[2:]] if self._checkAddVariable(varName): diff --git a/serpentTools/parsers/detector.py 
b/serpentTools/parsers/detector.py index 9b08305..a2570aa 100644 --- a/serpentTools/parsers/detector.py +++ b/serpentTools/parsers/detector.py @@ -1,6 +1,5 @@ """Parser responsible for reading the ``*det<n>.m`` files""" -import six import numpy from serpentTools.engines import KeywordParser @@ -60,7 +59,7 @@ class DetectorReader(BaseReader): data[indx] = [float(xx) for xx in line.split()] if detName not in self.detectors: # new detector, this data is the tallies - detector = Detector(self, detName) + detector = Detector(detName) detector.addTallyData(data) self.detectors[detName] = detector messages.debug('Adding detector {}'.format(detName)) @@ -70,17 +69,3 @@ class DetectorReader(BaseReader): detector.grids[binType] = data messages.debug('Added bin data {} to detector {}' .format(binType, detName)) - - -if __name__ == '__main__': - from os.path import join - from matplotlib import pyplot - import serpentTools - - det = DetectorReader(join(serpentTools.ROOT_DIR, 'tests', 'ref_det0.m')) - det.read() - xy = det.detectors['xyFissionCapt'] - xy.plot(fixed={'reaction': 1, 'ymesh': 2}) - pyplot.show() - xy.plot(fixed={'reaction': 1, 'ymesh': 2}, sigma=1) - pyplot.show() diff --git a/serpentTools/settings.py b/serpentTools/settings.py index c49c1dc..788f674 100644 --- a/serpentTools/settings.py +++ b/serpentTools/settings.py @@ -158,7 +158,7 @@ class DefaultSettingsLoader(dict): for name, value in defaultSettings.items(): if 'options' in value: options = (value['default'] if value['options'] == 'default' - else value['options']) + else value['options']) else: options = None settingsOptions = {'name': name, @@ -278,8 +278,9 @@ class UserSettingsLoader(dict): dictionary """ settings = {} - settingsPreffix = ([settingsPreffix] if isinstance(settingsPreffix, str) - else settingsPreffix) + settingsPreffix = ( + [settingsPreffix] if isinstance(settingsPreffix, str) + else settingsPreffix) for setting, value in self.items(): settingPath = setting.split('.') if settingPath[0] in settingsPreffix: @@ -346,11 +347,11 @@ class UserSettingsLoader(dict): """ messages.debug('Attempting to read from {}'.format(filePath)) with open(filePath) as yFile: - l = yaml.safe_load(yFile) + loaded = yaml.safe_load(yFile) messages.info('Loading settings onto object with strict:{}' .format(strict)) - for key, value in six.iteritems(l): + for key, value in six.iteritems(loaded): if isinstance(value, dict): self.__recursiveLoad(value, strict, key) else:
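One self-contained piece of the refactor above is `convertVariableName`, promoted from a static method on the removed `SupportingObject` class to a module-level function. Its body below is copied from the diff; the example variable names are illustrative, chosen only to show the camelCase conversion.

```python
def convertVariableName(variable):
    """Convert a SERPENT variable to camelCase."""
    lowerSplits = [item.lower() for item in variable.split('_')]
    if len(lowerSplits) == 1:
        return lowerSplits[0]
    else:
        return lowerSplits[0] + ''.join([item.capitalize()
                                         for item in lowerSplits[1:]])

assert convertVariableName('INF_FLX') == 'infFlx'
assert convertVariableName('BURNUP') == 'burnup'
```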
Clean up the code

There are some debug code blocks (`if __name__ == '__main__'`) in some of the scripts; they should be removed. Test scripts, e.g. unit tests, still need those blocks to run the tests.

- [x] `detector.py`
- [x] todos in the root `__init__.py`
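For illustration, this is the kind of driver block the issue targets. The sketch is adapted from the `detector.py` block deleted in the patch above; the explicit `DetectorReader` import is added here so the snippet stands alone, and actually running it would require a local SERPENT detector output plus matplotlib.

```python
# Debug-only driver of the sort removed by the patch: acceptable in a test
# module, but out of place at the bottom of a library module like detector.py.
if __name__ == '__main__':
    from os.path import join

    from matplotlib import pyplot

    import serpentTools
    from serpentTools.parsers.detector import DetectorReader

    det = DetectorReader(join(serpentTools.ROOT_DIR, 'tests', 'ref_det0.m'))
    det.read()
    xy = det.detectors['xyFissionCapt']
    xy.plot(fixed={'reaction': 1, 'ymesh': 2})
    pyplot.show()
```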
CORE-GATECH-GROUP/serpent-tools
diff --git a/serpentTools/tests/test_container.py b/serpentTools/tests/test_container.py index bba2a3a..fbed4dc 100644 --- a/serpentTools/tests/test_container.py +++ b/serpentTools/tests/test_container.py @@ -10,7 +10,7 @@ class HomogenizedUniverseTester(unittest.TestCase): @classmethod def setUpClass(cls): - cls.univ = containers.HomogUniv(DepletionReader(None), 'dummy', 0, 0, 0) + cls.univ = containers.HomogUniv('dummy', 0, 0, 0) cls.Exp = {} cls.Unc = {} # Data definition
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 9 }
0.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "numpy>=1.11.1 matplotlib>=1.5.0 pyyaml>=3.08 scipy", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 cycler @ file:///tmp/build/80754af9/cycler_1637851556182/work importlib-metadata==4.8.3 iniconfig==1.1.1 kiwisolver @ file:///tmp/build/80754af9/kiwisolver_1612282412546/work matplotlib @ file:///tmp/build/80754af9/matplotlib-suite_1613407855456/work numpy @ file:///tmp/build/80754af9/numpy_and_numpy_base_1603483703303/work olefile @ file:///Users/ktietz/demo/mc3/conda-bld/olefile_1629805411829/work packaging==21.3 Pillow @ file:///tmp/build/80754af9/pillow_1625670622947/work pluggy==1.0.0 py==1.11.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==7.0.1 python-dateutil @ file:///tmp/build/80754af9/python-dateutil_1626374649649/work PyYAML==5.4.1 scipy @ file:///tmp/build/80754af9/scipy_1597686635649/work -e git+https://github.com/CORE-GATECH-GROUP/serpent-tools.git@d46f0e5a22b6ac257b7ce5f83222eba0f26c3895#egg=serpentTools six @ file:///tmp/build/80754af9/six_1644875935023/work tomli==1.2.3 tornado @ file:///tmp/build/80754af9/tornado_1606942266872/work typing_extensions==4.1.1 zipp==3.6.0
name: serpent-tools channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - blas=1.0=openblas - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - cycler=0.11.0=pyhd3eb1b0_0 - dbus=1.13.18=hb2f20db_0 - expat=2.6.4=h6a678d5_0 - fontconfig=2.14.1=h52c9d5c_1 - freetype=2.12.1=h4a9f257_0 - giflib=5.2.2=h5eee18b_0 - glib=2.69.1=h4ff587b_1 - gst-plugins-base=1.14.1=h6a678d5_1 - gstreamer=1.14.1=h5eee18b_1 - icu=58.2=he6710b0_3 - jpeg=9e=h5eee18b_3 - kiwisolver=1.3.1=py36h2531618_0 - lcms2=2.16=hb9589c4_0 - ld_impl_linux-64=2.40=h12ee557_0 - lerc=4.0.0=h6a678d5_0 - libdeflate=1.22=h5eee18b_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgfortran-ng=7.5.0=ha8ba4b0_17 - libgfortran4=7.5.0=ha8ba4b0_17 - libgomp=11.2.0=h1234567_1 - libopenblas=0.3.18=hf726d26_0 - libpng=1.6.39=h5eee18b_0 - libstdcxx-ng=11.2.0=h1234567_1 - libtiff=4.5.1=hffd6297_1 - libuuid=1.41.5=h5eee18b_0 - libwebp=1.2.4=h11a3e52_1 - libwebp-base=1.2.4=h5eee18b_1 - libxcb=1.15=h7f8727e_0 - libxml2=2.9.14=h74e7548_0 - lz4-c=1.9.4=h6a678d5_1 - matplotlib=3.3.4=py36h06a4308_0 - matplotlib-base=3.3.4=py36h62a2d02_0 - ncurses=6.4=h6a678d5_0 - numpy=1.19.2=py36h6163131_0 - numpy-base=1.19.2=py36h75fe3a5_0 - olefile=0.46=pyhd3eb1b0_0 - openssl=1.1.1w=h7f8727e_0 - pcre=8.45=h295c915_0 - pillow=8.3.1=py36h5aabda8_0 - pip=21.2.2=py36h06a4308_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pyqt=5.9.2=py36h05f1152_2 - python=3.6.13=h12debd9_1 - python-dateutil=2.8.2=pyhd3eb1b0_0 - pyyaml=5.4.1=py36h27cfd23_1 - qt=5.9.7=h5867ecd_1 - readline=8.2=h5eee18b_0 - scipy=1.5.2=py36habc2bb6_0 - setuptools=58.0.4=py36h06a4308_0 - sip=4.19.8=py36hf484d3e_0 - six=1.16.0=pyhd3eb1b0_1 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tornado=6.1=py36h27cfd23_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - yaml=0.2.5=h7b6447c_0 - zlib=1.2.13=h5eee18b_1 - zstd=1.5.6=hc292b87_0 - pip: - attrs==22.2.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pytest==7.0.1 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/serpent-tools
[ "serpentTools/tests/test_container.py::HomogenizedUniverseTester::test_getB1Exp", "serpentTools/tests/test_container.py::HomogenizedUniverseTester::test_getB1Unc", "serpentTools/tests/test_container.py::HomogenizedUniverseTester::test_getInfExp", "serpentTools/tests/test_container.py::HomogenizedUniverseTester::test_getInfUnc", "serpentTools/tests/test_container.py::HomogenizedUniverseTester::test_getMeta" ]
[]
[]
[]
MIT License
2,120
[ "serpentTools/parsers/branching.py", "serpentTools/parsers/detector.py", "serpentTools/settings.py", "serpentTools/messages.py", "serpentTools/objects/materials.py", "serpentTools/parsers/__init__.py", "serpentTools/objects/__init__.py", "serpentTools/objects/containers.py", "serpentTools/__init__.py" ]
[ "serpentTools/parsers/branching.py", "serpentTools/parsers/detector.py", "serpentTools/settings.py", "serpentTools/messages.py", "serpentTools/objects/materials.py", "serpentTools/parsers/__init__.py", "serpentTools/objects/__init__.py", "serpentTools/objects/containers.py", "serpentTools/__init__.py" ]
pika__pika-948
3d3b95d31b67dfeaf5ef43650c162e25169336e6
2018-02-05 21:54:53
7b6d7983db021ae4b84d08ea9cee4b8f960ada43
codecov[bot]:

# [Codecov](https://codecov.io/gh/pika/pika/pull/948?src=pr&el=h1) Report

> Merging [#948](https://codecov.io/gh/pika/pika/pull/948?src=pr&el=desc) into [master](https://codecov.io/gh/pika/pika/commit/1b760184da68bd551bb49a0a22c02178568cc77a?src=pr&el=desc) will **increase** coverage by `0.02%`.
> The diff coverage is `100%`.

```diff
@@            Coverage Diff             @@
##           master     #948      +/-   ##
==========================================
+ Coverage   82.48%   82.51%   +0.02%
==========================================
  Files          20       20
  Lines        3665     3671       +6
  Branches      544      545       +1
==========================================
+ Hits         3023     3029       +6
  Misses        497      497
  Partials      145      145
```

| [Impacted Files](https://codecov.io/gh/pika/pika/pull/948?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [pika/channel.py](https://codecov.io/gh/pika/pika/pull/948/diff?src=pr&el=tree#diff-cGlrYS9jaGFubmVsLnB5) | `94.11% <100%> (+0.09%)` | :arrow_up: |

------

[Continue to review full report at Codecov](https://codecov.io/gh/pika/pika/pull/948?src=pr&el=continue). Last update [1b76018...c0d0b3a](https://codecov.io/gh/pika/pika/pull/948?src=pr&el=lastupdated).

lukebakken: @vitaly-krugl - this should be all set. I will address #951 next.
diff --git a/pika/adapters/base_connection.py b/pika/adapters/base_connection.py index ffeeadc..cecd3b7 100644 --- a/pika/adapters/base_connection.py +++ b/pika/adapters/base_connection.py @@ -219,7 +219,7 @@ class BaseConnection(connection.Connection): error = 'Connection to %s:%s failed: %s' % (sock_addr_tuple[4][0], sock_addr_tuple[4][1], error) - LOGGER.warning(error) + LOGGER.error(error) return error # Handle SSL Connection Negotiation diff --git a/pika/adapters/blocking_connection.py b/pika/adapters/blocking_connection.py index ea9b6c1..d021e94 100644 --- a/pika/adapters/blocking_connection.py +++ b/pika/adapters/blocking_connection.py @@ -705,7 +705,8 @@ class BlockingConnection(object): with self._acquire_event_dispatch() as dispatch_acquired: # Check if we can actually process pending events common_terminator = lambda: bool(dispatch_acquired and - (self._channels_pending_dispatch or self._ready_events)) + (self._channels_pending_dispatch or + self._ready_events)) if time_limit is None: self._flush_output(common_terminator) else: @@ -954,7 +955,7 @@ class ReturnedMessage(object): class _ConsumerInfo(object): """Information about an active consumer""" - __slots__ = ('consumer_tag', 'no_ack', 'callback', + __slots__ = ('consumer_tag', 'no_ack', 'on_message_callback', 'alternate_event_sink', 'state') # Consumer states @@ -963,16 +964,16 @@ class _ConsumerInfo(object): TEARING_DOWN = 3 CANCELLED_BY_BROKER = 4 - def __init__(self, consumer_tag, no_ack, callback=None, + def __init__(self, consumer_tag, no_ack, on_message_callback=None, alternate_event_sink=None): """ NOTE: exactly one of callback/alternate_event_sink musts be non-None. :param str consumer_tag: :param bool no_ack: the no-ack value for the consumer - :param callable callback: The function for dispatching messages to + :param callable on_message_callback: The function for dispatching messages to user, having the signature: - callback(channel, method, properties, body) + on_message_callback(channel, method, properties, body) channel: BlockingChannel method: spec.Basic.Deliver properties: spec.BasicProperties @@ -983,12 +984,12 @@ class _ConsumerInfo(object): `_pending_events` container. Signature: alternate_event_sink(evt) """ - assert (callback is None) != (alternate_event_sink is None), ( - 'exactly one of callback/alternate_event_sink must be non-None', - callback, alternate_event_sink) + assert (on_message_callback is None) != (alternate_event_sink is None), ( + 'exactly one of on_message_callback/alternate_event_sink must be non-None', + on_message_callback, alternate_event_sink) self.consumer_tag = consumer_tag self.no_ack = no_ack - self.callback = callback + self.on_message_callback = on_message_callback self.alternate_event_sink = alternate_event_sink self.state = self.SETTING_UP @@ -1316,7 +1317,7 @@ class BlockingChannel(object): self._cleanup() method = method_frame.method raise exceptions.ChannelClosed(method.reply_code, - method.reply_text) + method.reply_text) def _on_consumer_cancelled_by_broker(self, method_frame): """Called by impl when broker cancels consumer via Basic.Cancel. 
@@ -1402,8 +1403,8 @@ class BlockingChannel(object): if type(evt) is _ConsumerDeliveryEvt: consumer_info = self._consumer_infos[evt.method.consumer_tag] - consumer_info.callback(self, evt.method, - evt.properties, evt.body) + consumer_info.on_message_callback(self, evt.method, + evt.properties, evt.body) elif type(evt) is _ConsumerCancellationEvt: del self._consumer_infos[evt.method_frame.method.consumer_tag] @@ -1500,7 +1501,7 @@ class BlockingChannel(object): def basic_consume(self, queue, - callback, + on_message_callback, no_ack=False, exclusive=False, consumer_tag=None, @@ -1520,9 +1521,9 @@ class BlockingChannel(object): :param queue: The queue from which to consume :type queue: str or unicode - :param callable callback: Required function for dispatching messages + :param callable on_message_callback: Required function for dispatching messages to user, having the signature: - callback(channel, method, properties, body) + on_message_callback(channel, method, properties, body) channel: BlockingChannel method: spec.Basic.Deliver properties: spec.BasicProperties @@ -1541,17 +1542,17 @@ class BlockingChannel(object): consumer_tag is already present. """ - if not callable(callback): - raise ValueError('callback callback must be callable; got %r' - % callback) + if not callable(on_message_callback): + raise ValueError('callback on_message_callback must be callable; got %r' + % on_message_callback) return self._basic_consume_impl( queue=queue, + on_message_callback=on_message_callback, no_ack=no_ack, exclusive=exclusive, consumer_tag=consumer_tag, - arguments=arguments, - callback=callback) + arguments=arguments) def _basic_consume_impl(self, queue, @@ -1559,12 +1560,12 @@ class BlockingChannel(object): exclusive, consumer_tag, arguments=None, - callback=None, + on_message_callback=None, alternate_event_sink=None): """The low-level implementation used by `basic_consume` and `consume`. See `basic_consume` docstring for more info. - NOTE: exactly one of callback/alternate_event_sink musts be + NOTE: exactly one of on_message_callback/alternate_event_sink musts be non-None. This method has one additional parameter alternate_event_sink over the @@ -1580,10 +1581,10 @@ class BlockingChannel(object): consumer_tag is already present. 
""" - if (callback is None) == (alternate_event_sink is None): + if (on_message_callback is None) == (alternate_event_sink is None): raise ValueError( - ('exactly one of callback/alternate_event_sink must ' - 'be non-None', callback, alternate_event_sink)) + ('exactly one of on_message_callback/alternate_event_sink must ' + 'be non-None', on_message_callback, alternate_event_sink)) if not consumer_tag: # Need a consumer tag to register consumer info before sending @@ -1599,13 +1600,13 @@ class BlockingChannel(object): self._consumer_infos[consumer_tag] = _ConsumerInfo( consumer_tag, no_ack=no_ack, - callback=callback, + on_message_callback=on_message_callback, alternate_event_sink=alternate_event_sink) try: with self._basic_consume_ok_result as ok_result: tag = self._impl.basic_consume( - callback=self._on_consumer_message_delivery, + on_message_callback=self._on_consumer_message_delivery, queue=queue, no_ack=no_ack, exclusive=exclusive, diff --git a/pika/channel.py b/pika/channel.py index 2ec16f6..5299d15 100644 --- a/pika/channel.py +++ b/pika/channel.py @@ -262,11 +262,12 @@ class Channel(object): def basic_consume(self, queue, - callback, + on_message_callback, no_ack=False, exclusive=False, consumer_tag=None, - arguments=None): + arguments=None, + callback=None): """Sends the AMQP 0-9-1 command Basic.Consume to the broker and binds messages for the consumer_tag to the consumer callback. If you do not pass in a consumer_tag, one will be automatically generated for you. Returns @@ -280,8 +281,8 @@ class Channel(object): :param queue: The queue to consume from. Use the empty string to specify the most recent server-named queue for this channel. :type queue: str or unicode - :param callable callback: The function to call when consuming - with the signature callback(channel, method, properties, body), where + :param callable on_message_callback: The function to call when consuming + with the signature on_message_callback(channel, method, properties, body), where channel: pika.Channel method: pika.spec.Basic.Deliver properties: pika.spec.BasicProperties @@ -292,12 +293,15 @@ class Channel(object): :param consumer_tag: Specify your own consumer tag :type consumer_tag: str or unicode :param dict arguments: Custom key/value pair arguments for the consumer + :param callable callback: callback(pika.frame.Method) for method + Basic.ConsumeOk. :rtype: str :raises ValueError: """ - self._require_callback(callback) + self._require_callback(on_message_callback) self._validate_channel() + self._validate_rpc_completion_callback(callback) # If a consumer tag was not passed, create one if not consumer_tag: @@ -309,14 +313,17 @@ class Channel(object): if no_ack: self._consumers_with_noack.add(consumer_tag) - self._consumers[consumer_tag] = callback + self._consumers[consumer_tag] = on_message_callback + + rpc_callback = self._on_eventok if callback is None else callback + self._rpc(spec.Basic.Consume(queue=queue, consumer_tag=consumer_tag, no_ack=no_ack, exclusive=exclusive, arguments=arguments or dict()), - self._on_eventok, [(spec.Basic.ConsumeOk, - {'consumer_tag': consumer_tag})]) + rpc_callback, [(spec.Basic.ConsumeOk, + {'consumer_tag': consumer_tag})]) return consumer_tag @@ -442,16 +449,16 @@ class Channel(object): falls into other prefetch limits). May be set to zero, meaning "no specific limit", although other prefetch limits may still - apply. The prefetch-size is ignored if the - no-ack option is set. + apply. The prefetch-size is ignored by + consumers who have enabled the no-ack option. 
:param int prefetch_count: Specifies a prefetch window in terms of whole messages. This field may be used in combination with the prefetch-size field; a message will only be sent in advance if both prefetch windows (and those at the channel and connection level) allow it. The - prefetch-count is ignored if the no-ack - option is set. + prefetch-count is ignored by consumers who + have enabled the no-ack option. :param bool all_channels: Should the QoS apply to all channels :param callable callback: The callback to call for Basic.QosOk response :raises ValueError: @@ -459,6 +466,8 @@ class Channel(object): """ self._validate_channel() self._validate_rpc_completion_callback(callback) + self._validate_zero_or_greater('prefetch_size', prefetch_size) + self._validate_zero_or_greater('prefetch_count', prefetch_count) return self._rpc(spec.Basic.Qos(prefetch_size, prefetch_count, all_channels), callback, [spec.Basic.QosOk]) @@ -1411,6 +1420,11 @@ class Channel(object): raise TypeError( 'Completion callback must be callable if not None') + def _validate_zero_or_greater(self, name, value): + if int(value) < 0: + errmsg = '{} must be >= 0, but got {}'.format(name, value) + raise ValueError(errmsg) + class ContentFrameAssembler(object): """Handle content related frames, building a message and return the message
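As a hedged usage sketch of the rename introduced above: on the async `Channel`, the per-message handler is now passed as `on_message_callback`, while the optional `callback` parameter is reserved for the broker's `Basic.ConsumeOk` confirmation. The queue name and the handler names here are illustrative, not taken from pika's examples.

```python
def start_consumer(channel):
    """Register a consumer using the post-rename basic_consume signature."""
    def on_message(ch, method, properties, body):
        ch.basic_ack(method.delivery_tag)  # per-message handler

    def on_consume_ok(frame):
        # completion callback invoked with the Basic.ConsumeOk method frame
        print('consumer registered:', frame.method.consumer_tag)

    return channel.basic_consume('my-queue',  # assumed queue name
                                 on_message_callback=on_message,
                                 callback=on_consume_ok)
```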
BlockingChannel.basic_qos hanging on non-integer parameters

Under Python 3.6.4 and Pika 0.11.2, the `BlockingChannel.basic_qos` method hangs when setting its `prefetch_size` or `prefetch_count` parameter to a non-integer value.

Input:

```
$ python3 <<EOF
import pika

with pika.BlockingConnection() as connection:
    channel = connection.channel()
    channel.basic_qos(prefetch_size="hello")
EOF
```
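The fix in the patch above is to reject bad arguments before the `Basic.Qos` frame is built. Below is a minimal standalone sketch of that check; in the diff it is the method `Channel._validate_zero_or_greater`, and `self` is dropped here only so the snippet runs on its own.

```python
def _validate_zero_or_greater(name, value):
    # int() raises ValueError for non-numeric strings such as "hello", so the
    # caller gets an immediate error instead of a hung channel; the comparison
    # additionally rejects negative values.
    if int(value) < 0:
        raise ValueError('{} must be >= 0, but got {}'.format(name, value))

_validate_zero_or_greater('prefetch_count', 0)       # ok
_validate_zero_or_greater('prefetch_size', 'hello')  # raises ValueError
```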
pika/pika
diff --git a/tests/acceptance/async_adapter_tests.py b/tests/acceptance/async_adapter_tests.py index 48be9a6..81a8c3c 100644 --- a/tests/acceptance/async_adapter_tests.py +++ b/tests/acceptance/async_adapter_tests.py @@ -340,7 +340,6 @@ class TestZ_PublishAndConsume(BoundQueueTestCase, AsyncAdapters): # pylint: dis self.channel.basic_cancel(self.ctag, callback=self.on_cancelled) - class TestZ_PublishAndConsumeBig(BoundQueueTestCase, AsyncAdapters): # pylint: disable=C0103 DESCRIPTION = "Publish a big message and consume it" diff --git a/tests/acceptance/blocking_adapter_test.py b/tests/acceptance/blocking_adapter_test.py index 6f7120b..7628f89 100644 --- a/tests/acceptance/blocking_adapter_test.py +++ b/tests/acceptance/blocking_adapter_test.py @@ -1768,14 +1768,12 @@ class TestBasicCancelPurgesPendingConsumerCancellationEvt(BlockingTestCaseBase): ch.publish('', routing_key=q_name, body='via-publish', mandatory=True) - # Create a consumer + # Create a consumer. Not passing a 'callback' to test client-generated + # consumer tags rx_messages = [] consumer_tag = ch.basic_consume( q_name, - lambda *args: rx_messages.append(args), - no_ack=False, - exclusive=False, - arguments=None) + lambda *args: rx_messages.append(args)) # Wait for the published message to arrive, but don't consume it while not ch._pending_events: @@ -1848,7 +1846,8 @@ class TestBasicPublishWithoutPubacks(BlockingTestCaseBase): queue=q_name, expected_count=2) - # Create a consumer + # Create a consumer. Not passing a 'callback' to test client-generated + # consumer tags rx_messages = [] consumer_tag = ch.basic_consume( q_name, diff --git a/tests/unit/channel_tests.py b/tests/unit/channel_tests.py index eba7b70..67e0325 100644 --- a/tests/unit/channel_tests.py +++ b/tests/unit/channel_tests.py @@ -1,6 +1,5 @@ """ Tests for pika.channel.Channel - """ import collections import logging @@ -12,6 +11,9 @@ import mock from pika import channel, connection, exceptions, frame, spec +# Disable protected-access, missing-docstring, and invalid-name, +# too-many-public-methods, too-many-lines +# pylint: disable=W0212,C0111,C0103,R0904,C0302 class ConnectionTemplate(connection.Connection): """Template for using as mock spec_set for the pika Connection class. It @@ -243,67 +245,88 @@ class ChannelTests(unittest.TestCase): def test_basic_consume_channel_closed(self): mock_callback = mock.Mock() + mock_on_msg_callback = mock.Mock() self.assertRaises(exceptions.ChannelClosed, self.obj.basic_consume, - 'test-queue', mock_callback) + 'test-queue', mock_on_msg_callback, + callback=mock_callback) @mock.patch('pika.channel.Channel._validate_channel') - def test_basic_consume_calls_validate(self, validate): + @mock.patch('pika.channel.Channel._require_callback') + def test_basic_consume_calls_validate(self, require, validate): self.obj._set_state(self.obj.OPEN) mock_callback = mock.Mock() - self.obj.basic_consume('test-queue', mock_callback) + mock_on_msg_callback = mock.Mock() + self.obj.basic_consume('test-queue', mock_on_msg_callback, + callback=mock_callback) + require.assert_called_once_with(mock_on_msg_callback) validate.assert_called_once() - def test_basic_consume_consumer_tag(self): + def test_basic_consume_consumer_tag_no_completion_callback(self): + self.obj._set_state(self.obj.OPEN) + expectation = 'ctag1.' 
+ mock_on_msg_callback = mock.Mock() + consumer_tag = self.obj.basic_consume('test-queue', + mock_on_msg_callback)[:6] + self.assertEqual(consumer_tag, expectation) + + def test_basic_consume_consumer_tag_with_completion_callback(self): self.obj._set_state(self.obj.OPEN) expectation = 'ctag1.' mock_callback = mock.Mock() - self.assertEqual( - self.obj.basic_consume('test-queue', mock_callback)[:6], - expectation) + mock_on_msg_callback = mock.Mock() + consumer_tag = self.obj.basic_consume('test-queue', + mock_on_msg_callback, + callback=mock_callback)[:6] + self.assertEqual(consumer_tag, expectation) def test_basic_consume_consumer_tag_cancelled_full(self): self.obj._set_state(self.obj.OPEN) expectation = 'ctag1.' - mock_callback = mock.Mock() + mock_on_msg_callback = mock.Mock() for ctag in ['ctag1.%i' % ii for ii in range(11)]: self.obj._cancelled.add(ctag) self.assertEqual( - self.obj.basic_consume('test-queue', mock_callback)[:6], + self.obj.basic_consume('test-queue', mock_on_msg_callback)[:6], expectation) def test_basic_consume_consumer_tag_in_consumers(self): self.obj._set_state(self.obj.OPEN) consumer_tag = 'ctag1.0' + mock_on_msg_callback = mock.Mock() mock_callback = mock.Mock() self.obj.basic_consume( - 'test-queue', mock_callback, consumer_tag=consumer_tag) + 'test-queue', mock_on_msg_callback, + consumer_tag=consumer_tag, callback=mock_callback) self.assertIn(consumer_tag, self.obj._consumers) def test_basic_consume_duplicate_consumer_tag_raises(self): self.obj._set_state(self.obj.OPEN) consumer_tag = 'ctag1.0' + mock_on_msg_callback = mock.Mock() mock_callback = mock.Mock() self.obj._consumers[consumer_tag] = logging.debug self.assertRaises(exceptions.DuplicateConsumerTag, self.obj.basic_consume, 'test-queue', - mock_callback, False, False, consumer_tag) + mock_on_msg_callback, False, False, + consumer_tag, None, mock_callback) def test_basic_consume_consumers_callback_value(self): self.obj._set_state(self.obj.OPEN) consumer_tag = 'ctag1.0' - mock_callback = mock.Mock() + mock_on_msg_callback = mock.Mock() self.obj.basic_consume( - 'test-queue', mock_callback, consumer_tag=consumer_tag) - self.assertEqual(self.obj._consumers[consumer_tag], mock_callback) + 'test-queue', mock_on_msg_callback, consumer_tag=consumer_tag) + self.assertEqual(self.obj._consumers[consumer_tag], mock_on_msg_callback) @mock.patch('pika.spec.Basic.Consume') @mock.patch('pika.channel.Channel._rpc') - def test_basic_consume_consumers_rpc_called(self, rpc, _unused): + def test_basic_consume_consumers_rpc_with_no_completion_callback(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) consumer_tag = 'ctag1.0' - mock_callback = mock.Mock() + mock_on_msg_callback = mock.Mock() self.obj.basic_consume( - 'test-queue', mock_callback, consumer_tag=consumer_tag) + 'test-queue', mock_on_msg_callback, + consumer_tag=consumer_tag) expectation = spec.Basic.Consume( queue='test-queue', consumer_tag=consumer_tag, @@ -314,6 +337,26 @@ class ChannelTests(unittest.TestCase): 'consumer_tag': consumer_tag })]) + @mock.patch('pika.spec.Basic.Consume') + @mock.patch('pika.channel.Channel._rpc') + def test_basic_consume_consumers_rpc_with_completion_callback(self, rpc, _unused): + self.obj._set_state(self.obj.OPEN) + consumer_tag = 'ctag1.0' + mock_on_msg_callback = mock.Mock() + mock_callback = mock.Mock() + self.obj.basic_consume( + 'test-queue', mock_on_msg_callback, + consumer_tag=consumer_tag, callback=mock_callback) + expectation = spec.Basic.Consume( + queue='test-queue', + consumer_tag=consumer_tag, + no_ack=False, 
+ exclusive=False) + rpc.assert_called_once_with(expectation, mock_callback, + [(spec.Basic.ConsumeOk, { + 'consumer_tag': consumer_tag + })]) + @mock.patch('pika.channel.Channel._require_callback') def test_basic_get_calls_require_callback(self, require): self.obj._set_state(self.obj.OPEN) @@ -392,6 +435,26 @@ class ChannelTests(unittest.TestCase): self.assertRaises(exceptions.ChannelClosed, self.obj.basic_qos, 0, False, True) + def test_basic_qos_invalid_prefetch_size_raises_error(self): + self.obj._set_state(self.obj.OPEN) + with self.assertRaises(ValueError) as ex: + self.obj.basic_qos('foo', 123) + self.assertEqual("invalid literal for int() with base 10: 'foo'", + ex.exception.args[0]) + with self.assertRaises(ValueError) as ex: + self.obj.basic_qos(-1, 123) + self.assertIn('prefetch_size', ex.exception.args[0]) + + def test_basic_qos_invalid_prefetch_count_raises_error(self): + self.obj._set_state(self.obj.OPEN) + with self.assertRaises(ValueError) as ex: + self.obj.basic_qos(123, 'foo') + self.assertEqual("invalid literal for int() with base 10: 'foo'", + ex.exception.args[0]) + with self.assertRaises(ValueError) as ex: + self.obj.basic_qos(123, -1) + self.assertIn('prefetch_count', ex.exception.args[0]) + @mock.patch('pika.spec.Basic.Qos') @mock.patch('pika.channel.Channel._rpc') def test_basic_qos_rpc_request(self, rpc, _unused): @@ -514,12 +577,13 @@ class ChannelTests(unittest.TestCase): def test_confirm_delivery_with_bad_callback_raises_value_error(self): self.assertRaises(ValueError, - self.obj.confirm_delivery, 'bad-callback') + self.obj.confirm_delivery, + 'bad-callback') def test_confirm_delivery_raises_channel_closed(self): cb = mock.Mock() self.assertRaises(exceptions.ChannelClosed, - self.obj.confirm_delivery, cb) + self.obj.confirm_delivery, cb) def test_confirm_delivery_raises_method_not_implemented_for_confirms(self): self.obj._set_state(self.obj.OPEN) @@ -653,7 +717,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_exchange_bind_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.exchange_bind('foo', 'bar', 'baz', callback=None) rpc.assert_called_once_with( spec.Exchange.Bind(0, 'foo', 'bar', 'baz'), None, []) @@ -683,7 +746,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_exchange_declare_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.exchange_declare('foo', callback=None) rpc.assert_called_once_with( spec.Exchange.Declare(0, 'foo'), None, []) @@ -711,7 +773,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_exchange_delete_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.exchange_delete('foo', callback=None) rpc.assert_called_once_with( spec.Exchange.Delete(0, 'foo'), None, []) @@ -824,7 +885,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_queue_bind_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.queue_bind('foo', 'bar', 'baz', callback=None) rpc.assert_called_once_with( spec.Queue.Bind(0, 'foo', 'bar', 'baz'), None, []) @@ -857,7 +917,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_queue_declare_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = 
mock.Mock() self.obj.queue_declare('foo', callback=None) rpc.assert_called_once_with( spec.Queue.Declare(0, 'foo'), None, []) @@ -884,7 +943,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_queue_delete_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.queue_delete('foo', callback=None) rpc.assert_called_once_with( spec.Queue.Delete(0, 'foo'), None, []) @@ -911,7 +969,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_queue_purge_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.queue_purge('foo', callback=None) rpc.assert_called_once_with( spec.Queue.Purge(0, 'foo'), None, []) @@ -1461,4 +1518,5 @@ class ChannelTests(unittest.TestCase): self): self.obj._set_state(self.obj.OPEN) self.assertRaises(TypeError, - self.obj._validate_rpc_completion_callback, 'foo') + self.obj._validate_rpc_completion_callback, + 'foo')
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 1 }, "num_modified_files": 3 }
0.11
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": [ "test-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 codecov==2.1.13 coverage==7.8.0 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 mock==5.2.0 nose==1.3.7 packaging==24.2 -e git+https://github.com/pika/pika.git@3d3b95d31b67dfeaf5ef43650c162e25169336e6#egg=pika pluggy==1.5.0 pytest==8.3.5 requests==2.32.3 tomli==2.2.1 tornado==6.4.2 Twisted==15.3.0 urllib3==2.3.0 zope.interface==7.2
name: pika channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - codecov==2.1.13 - coverage==7.8.0 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - mock==5.2.0 - nose==1.3.7 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - requests==2.32.3 - tomli==2.2.1 - tornado==6.4.2 - twisted==15.3.0 - urllib3==2.3.0 - zope-interface==7.2 prefix: /opt/conda/envs/pika
[ "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_calls_validate", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_in_consumers", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_with_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_rpc_with_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_duplicate_consumer_tag_raises", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_invalid_prefetch_count_raises_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_invalid_prefetch_size_raises_error" ]
[ "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestMultiCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesOriginalException::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesSystemException::test", "tests/acceptance/blocking_adapter_test.py::TestLostConnectionResultsInIsClosedConnectionAndChannel::test", "tests/acceptance/blocking_adapter_test.py::TestInvalidExchangeTypeRaisesConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnectionWithChannelAndConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestSuddenBrokerDisconnectBeforeChannel::test", "tests/acceptance/blocking_adapter_test.py::TestNoAccessToFileDescriptorAfterConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionStart::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionTune::test", "tests/acceptance/blocking_adapter_test.py::TestProcessDataEvents::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionRegisterForBlockAndUnblock::test", "tests/acceptance/blocking_adapter_test.py::TestBlockedConnectionTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestAddTimeoutRemoveTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestRemoveTimeoutFromTimeoutCallback::test", "tests/acceptance/blocking_adapter_test.py::TestSleep::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionProperties::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseChannel::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeBindAndUnbind::test", "tests/acceptance/blocking_adapter_test.py::TestQueueDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestPassiveQueueDeclareOfUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestQueueBindAndUnbindAndPurge::test", "tests/acceptance/blocking_adapter_test.py::TestBasicGet::test", "tests/acceptance/blocking_adapter_test.py::TestBasicReject::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRejectNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNack::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackMultiple::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRecoverWithRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestTxCommit::test", "tests/acceptance/blocking_adapter_test.py::TestTxRollback::test", "tests/acceptance/blocking_adapter_test.py::TestBasicConsumeFromUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestPublishAndBasicPublishWithPubacksUnroutable::test", "tests/acceptance/blocking_adapter_test.py::TestConfirmDeliveryAfterUnroutableMessage::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessagesReturnedInNonPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessageReturnedInPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishDeliveredWhenPendingUnroutable::test", 
"tests/acceptance/blocking_adapter_test.py::TestPublishAndConsumeWithPubacksAndQosOfOne::test", "tests/acceptance/blocking_adapter_test.py::TestTwoBasicConsumersOnSameChannel::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelPurgesPendingConsumerCancellationEvt::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishWithoutPubacks::test", "tests/acceptance/blocking_adapter_test.py::TestPublishFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestStopConsumingFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseChannelFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseConnectionFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubAckPublishAndConsumeHugeMessage::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubackPublishAndConsumeManyMessages::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithNonAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestUnackedMessageAutoRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestNoAckMessageNotRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeInactivityTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestChannelFlow::test" ]
[ "tests/acceptance/blocking_adapter_test.py::TestConnectWithDownedBroker::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionProtocol::test", "tests/unit/channel_tests.py::ChannelTests::test_add_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_callback_multiple_replies", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_cancel_empty_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_get_empty_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_close_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_flow_added", "tests/unit/channel_tests.py::ChannelTests::test_add_on_cancel_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_close_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_flow_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_return_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_ack_calls_send_method", "tests/unit/channel_tests.py::ChannelTests::test_basic_ack_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_asynch", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_asynch_with_user_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_calls_validate", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_synch", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_synch_no_user_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_then_close", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_unknown_consumer_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_cancelled_full", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_no_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_callback_value", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_rpc_with_no_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_calls_require_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_send_method_called", "tests/unit/channel_tests.py::ChannelTests::test_basic_nack_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_nack_send_method_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_publish_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_publish_send_method_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_recover_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_recover_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_int_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_long_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_int_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_long_tag", "tests/unit/channel_tests.py::ChannelTests::test_channel_open_add_callbacks_called", 
"tests/unit/channel_tests.py::ChannelTests::test_cleanup", "tests/unit/channel_tests.py::ChannelTests::test_close_basic_cancel_called", "tests/unit/channel_tests.py::ChannelTests::test_close_in_closed_state_raises_channel_error_and_stays_closed", "tests/unit/channel_tests.py::ChannelTests::test_close_in_closing_state_raises_already_closing", "tests/unit/channel_tests.py::ChannelTests::test_close_in_open_state_transitions_to_closing", "tests/unit/channel_tests.py::ChannelTests::test_close_in_opening_state", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_async", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_ack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_nack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_without_nowait_selectok", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_yes_basic_ack_callback", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_yes_basic_nack_callback", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_no_callback_callback_call_count", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_confirms", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_nack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_with_bad_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_consumer_tags", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_flow_off_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_flow_on_flowok_callback", "tests/unit/channel_tests.py::ChannelTests::test_flow_on_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_flow_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_flow_raises_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_deliver_called", 
"tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_get_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_return_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_method_returns_none", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_header_frame", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_method_frame", "tests/unit/channel_tests.py::ChannelTests::test_has_content_false", "tests/unit/channel_tests.py::ChannelTests::test_has_content_true", "tests/unit/channel_tests.py::ChannelTests::test_immediate_called_logger_warning", "tests/unit/channel_tests.py::ChannelTests::test_init_blocked", "tests/unit/channel_tests.py::ChannelTests::test_init_blocking", "tests/unit/channel_tests.py::ChannelTests::test_init_callbacks", "tests/unit/channel_tests.py::ChannelTests::test_init_cancelled", "tests/unit/channel_tests.py::ChannelTests::test_init_channel_number", "tests/unit/channel_tests.py::ChannelTests::test_init_connection", "tests/unit/channel_tests.py::ChannelTests::test_init_consumers", "tests/unit/channel_tests.py::ChannelTests::test_init_content_frame_assembler", "tests/unit/channel_tests.py::ChannelTests::test_init_flow", "tests/unit/channel_tests.py::ChannelTests::test_init_has_on_flow_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_invalid_channel_number", "tests/unit/channel_tests.py::ChannelTests::test_init_on_flowok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_on_getok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_on_openok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_state", "tests/unit/channel_tests.py::ChannelTests::test_is_closed_false", "tests/unit/channel_tests.py::ChannelTests::test_is_closed_true", "tests/unit/channel_tests.py::ChannelTests::test_is_closing_false", "tests/unit/channel_tests.py::ChannelTests::test_is_closing_true", "tests/unit/channel_tests.py::ChannelTests::test_on_cancel_not_appended_cancelled", "tests/unit/channel_tests.py::ChannelTests::test_on_cancel_removed_consumer", "tests/unit/channel_tests.py::ChannelTests::test_on_cancelok_removed_consumer", "tests/unit/channel_tests.py::ChannelTests::test_on_close_in_closing_state", "tests/unit/channel_tests.py::ChannelTests::test_on_close_in_open_state", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_closed_state_is_suppressed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_closing_state_transitions_to_closed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_open_state_transitions_to_closed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_opening_state_transitions_to_closed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_warning", "tests/unit/channel_tests.py::ChannelTests::test_on_closeok", "tests/unit/channel_tests.py::ChannelTests::test_on_closeok_following_close_from_broker", "tests/unit/channel_tests.py::ChannelTests::test_on_confirm_selectok", "tests/unit/channel_tests.py::ChannelTests::test_on_deliver_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_eventok", "tests/unit/channel_tests.py::ChannelTests::test_on_flow", "tests/unit/channel_tests.py::ChannelTests::test_on_flow_with_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok_callback_reset", 
"tests/unit/channel_tests.py::ChannelTests::test_on_flowok_calls_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_getempty", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_reset", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_no_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_openok_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_openok_no_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_synchronous_complete", "tests/unit/channel_tests.py::ChannelTests::test_onreturn", "tests/unit/channel_tests.py::ChannelTests::test_onreturn_warning", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_repr", "tests/unit/channel_tests.py::ChannelTests::test_rpc_adds_callback", "tests/unit/channel_tests.py::ChannelTests::test_rpc_enters_blocking_and_adds_on_synchronous_complete", "tests/unit/channel_tests.py::ChannelTests::test_rpc_not_blocking_and_no_on_synchronous_complete_when_no_replies", "tests/unit/channel_tests.py::ChannelTests::test_rpc_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_type_error_with_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_value_error_with_unacceptable_replies", "tests/unit/channel_tests.py::ChannelTests::test_rpc_while_blocking_appends_blocked_collection", "tests/unit/channel_tests.py::ChannelTests::test_send_method", "tests/unit/channel_tests.py::ChannelTests::test_set_state", "tests/unit/channel_tests.py::ChannelTests::test_tx_commit_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_tx_commit_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_tx_rollback_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_tx_select_rpc_request", 
"tests/unit/channel_tests.py::ChannelTests::test_validate_callback_raises_value_error_not_callable", "tests/unit/channel_tests.py::ChannelTests::test_validate_channel_raises_channel_closed" ]
[]
BSD 3-Clause "New" or "Revised" License
2,121
[ "pika/adapters/blocking_connection.py", "pika/adapters/base_connection.py", "pika/channel.py" ]
[ "pika/adapters/blocking_connection.py", "pika/adapters/base_connection.py", "pika/channel.py" ]
dpkp__kafka-python-1367
618c5051493693c1305aa9f08e8a0583d5fcf0e3
2018-02-05 22:03:47
618c5051493693c1305aa9f08e8a0583d5fcf0e3
diff --git a/kafka/consumer/fetcher.py b/kafka/consumer/fetcher.py
index f9fcb37..c9bbb97 100644
--- a/kafka/consumer/fetcher.py
+++ b/kafka/consumer/fetcher.py
@@ -835,12 +835,21 @@ class Fetcher(six.Iterator):
 
         return parsed_records
 
-    class PartitionRecords(six.Iterator):
+    class PartitionRecords(object):
         def __init__(self, fetch_offset, tp, messages):
             self.fetch_offset = fetch_offset
             self.topic_partition = tp
             self.messages = messages
-            self.message_idx = 0
+            # When fetching an offset that is in the middle of a
+            # compressed batch, we will get all messages in the batch.
+            # But we want to start 'take' at the fetch_offset
+            for i, msg in enumerate(messages):
+                if msg.offset == fetch_offset:
+                    self.message_idx = i
+                    break
+            else:
+                self.message_idx = 0
+                self.messages = None
 
         # For truthiness evaluation we need to define __len__ or __nonzero__
         def __len__(self):
Seek method returning incorrect messages on compressed topic when using max_poll_records

While using the `seek` method of `kafka.consumer.group.KafkaConsumer` for a given partition and offset, we are seeing inconsistent behavior in the messages returned by the subsequent `poll` call. The issue is easily reproducible on the topic in question (compacted).

Part of the workflow:

```
from kafka.consumer.group import KafkaConsumer

topic_partition = TopicPartition(topic, 0)
consumer = KafkaConsumer(*consumer_config)
consumer.assign([topic_partition])

start_offset = 100  # Example value: highwatermark - 10
consumer.seek(partition=topic_partition, offset=start_offset)

messages = consumer.poll(timeout_ms=1000, max_records=1)[topic_partition]
message = messages[0]
print('Offset found:', message.offset, 'Expected offset:', start_offset)
```

Sample output:

```
$ Offset found: 80  Expected offset: 100
```

Observations:

* If the iterator interface is used instead of the poll interface, the issue no longer exists. My guess is that somewhere while polling for messages, the fetched offsets are not updated or the fetched messages are not skipped. The iterator method does not use the fetched_records API, which is why it works fine.
* At times poll does return the correct messages (especially when the given offset is closer to the high watermark).

Please let me know if any other details are required.
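The merged patch above addresses this by scanning the decompressed batch for the seeked offset before any records are handed out. A minimal sketch of that core idea, using a hypothetical `Record` tuple rather than kafka-python's real classes:

```python
from collections import namedtuple

# Hypothetical stand-in for a fetched record; only `offset` matters here.
Record = namedtuple('Record', ['offset', 'value'])

def start_index(messages, fetch_offset):
    """Find where consumption should start inside a compressed batch.

    Broker-side compression returns whole batches, so records *before*
    the seeked offset must be skipped instead of being returned to the
    caller (the bug reported above).
    """
    for i, msg in enumerate(messages):
        if msg.offset == fetch_offset:
            return i
    return None  # offset absent: serve nothing, like `self.messages = None`

batch = [Record(offset, 'value-%d' % offset) for offset in range(120, 130)]
idx = start_index(batch, 123)
assert batch[idx].offset == 123  # poll() now starts at the seeked offset
```

This mirrors the `for`/`else` loop the patch adds to `PartitionRecords.__init__`.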
dpkp/kafka-python
diff --git a/test/test_fetcher.py b/test/test_fetcher.py
index 429071a..4547222 100644
--- a/test/test_fetcher.py
+++ b/test/test_fetcher.py
@@ -498,3 +498,43 @@ def test__parse_fetched_data__out_of_range(fetcher, topic, mocker):
     partition_record = fetcher._parse_fetched_data(completed_fetch)
     assert partition_record is None
     assert fetcher._subscriptions.assignment[tp].awaiting_reset is True
+
+
+def test_partition_records_offset():
+    """Test that compressed messagesets are handle correctly
+    when fetch offset is in the middle of the message list
+    """
+    batch_start = 120
+    batch_end = 130
+    fetch_offset = 123
+    tp = TopicPartition('foo', 0)
+    messages = [ConsumerRecord(tp.topic, tp.partition, i,
+                               None, None, 'key', 'value', 'checksum', 0, 0)
+                for i in range(batch_start, batch_end)]
+    records = Fetcher.PartitionRecords(fetch_offset, None, messages)
+    assert len(records) > 0
+    msgs = records.take(1)
+    assert msgs[0].offset == 123
+    assert records.fetch_offset == 124
+    msgs = records.take(2)
+    assert len(msgs) == 2
+    assert len(records) > 0
+    records.discard()
+    assert len(records) == 0
+
+
+def test_partition_records_empty():
+    records = Fetcher.PartitionRecords(0, None, [])
+    assert len(records) == 0
+
+
+def test_partition_records_no_fetch_offset():
+    batch_start = 0
+    batch_end = 100
+    fetch_offset = 123
+    tp = TopicPartition('foo', 0)
+    messages = [ConsumerRecord(tp.topic, tp.partition, i,
+                               None, None, 'key', 'value', 'checksum', 0, 0)
+                for i in range(batch_start, batch_end)]
+    records = Fetcher.PartitionRecords(fetch_offset, None, messages)
+    assert len(records) == 0
{ "commit_name": "merge_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
1.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-catchlog", "pytest-sugar", "pytest-mock", "mock", "python-snappy", "lz4", "xxhash" ], "pre_install": [ "apt-get update", "apt-get install -y libsnappy-dev" ], "python": "3.6", "reqs_path": [ "docs/requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 cramjam==2.5.0 docutils==0.18.1 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==3.0.3 -e git+https://github.com/dpkp/kafka-python.git@618c5051493693c1305aa9f08e8a0583d5fcf0e3#egg=kafka_python lz4==3.1.10 MarkupSafe==2.0.1 mock==5.2.0 packaging==21.3 pluggy==1.0.0 pockets==0.9.1 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-catchlog==1.2.2 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-sugar==0.9.6 python-snappy==0.7.3 pytz==2025.2 requests==2.27.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-rtd-theme==2.0.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-napoleon==0.7 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 termcolor==1.1.0 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 xxhash==3.2.0 zipp==3.6.0
name: kafka-python channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - cramjam==2.5.0 - docutils==0.18.1 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==3.0.3 - lz4==3.1.10 - markupsafe==2.0.1 - mock==5.2.0 - packaging==21.3 - pluggy==1.0.0 - pockets==0.9.1 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-catchlog==1.2.2 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-sugar==0.9.6 - python-snappy==0.7.3 - pytz==2025.2 - requests==2.27.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-rtd-theme==2.0.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-napoleon==0.7 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - termcolor==1.1.0 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - xxhash==3.2.0 - zipp==3.6.0 prefix: /opt/conda/envs/kafka-python
[ "test/test_fetcher.py::test_partition_records_offset", "test/test_fetcher.py::test_partition_records_no_fetch_offset" ]
[]
[ "test/test_fetcher.py::test_send_fetches", "test/test_fetcher.py::test_create_fetch_requests[api_version0-3]", "test/test_fetcher.py::test_create_fetch_requests[api_version1-2]", "test/test_fetcher.py::test_create_fetch_requests[api_version2-1]", "test/test_fetcher.py::test_create_fetch_requests[api_version3-0]", "test/test_fetcher.py::test_update_fetch_positions", "test/test_fetcher.py::test__reset_offset", "test/test_fetcher.py::test__send_offset_requests", "test/test_fetcher.py::test__send_offset_requests_multiple_nodes", "test/test_fetcher.py::test__handle_offset_response", "test/test_fetcher.py::test_fetched_records", "test/test_fetcher.py::test__handle_fetch_response[fetch_request0-fetch_response0-1]", "test/test_fetcher.py::test__handle_fetch_response[fetch_request1-fetch_response1-2]", "test/test_fetcher.py::test__handle_fetch_response[fetch_request2-fetch_response2-1]", "test/test_fetcher.py::test__handle_fetch_response[fetch_request3-fetch_response3-1]", "test/test_fetcher.py::test__handle_fetch_response[fetch_request4-fetch_response4-1]", "test/test_fetcher.py::test__handle_fetch_response[fetch_request5-fetch_response5-1]", "test/test_fetcher.py::test__unpack_message_set", "test/test_fetcher.py::test__message_generator", "test/test_fetcher.py::test__parse_fetched_data", "test/test_fetcher.py::test__parse_fetched_data__paused", "test/test_fetcher.py::test__parse_fetched_data__stale_offset", "test/test_fetcher.py::test__parse_fetched_data__not_leader", "test/test_fetcher.py::test__parse_fetched_data__unknown_tp", "test/test_fetcher.py::test__parse_fetched_data__out_of_range", "test/test_fetcher.py::test_partition_records_empty" ]
[]
Apache License 2.0
2,122
[ "kafka/consumer/fetcher.py" ]
[ "kafka/consumer/fetcher.py" ]
projectfluent__python-fluent-46
01be516c3381ffb80798b4817964a5c5dff52d8a
2018-02-06 15:24:48
01be516c3381ffb80798b4817964a5c5dff52d8a
stasm: @zbraniecki This doesn't change the way you've been using `fluent.migrate` so far. Are you OK with these changes?
diff --git a/fluent/migrate/__init__.py b/fluent/migrate/__init__.py index 3582468..93c8ab9 100644 --- a/fluent/migrate/__init__.py +++ b/fluent/migrate/__init__.py @@ -1,13 +1,5 @@ # coding=utf8 -from .context import MergeContext # noqa: F401 -from .errors import ( # noqa: F401 - MigrationError, NotSupportedError, UnreadableReferenceError -) from .transforms import ( # noqa: F401 - Source, COPY, REPLACE_IN_TEXT, REPLACE, PLURALS, CONCAT -) -from .helpers import ( # noqa: F401 - EXTERNAL_ARGUMENT, MESSAGE_REFERENCE + CONCAT, COPY, PLURALS, REPLACE, REPLACE_IN_TEXT ) -from .changesets import convert_blame_to_changesets # noqa: F401 diff --git a/fluent/migrate/context.py b/fluent/migrate/context.py index 7d6a8a4..b1fc94c 100644 --- a/fluent/migrate/context.py +++ b/fluent/migrate/context.py @@ -285,7 +285,15 @@ class MergeContext(object): The input data must be configured earlier using the `add_*` methods. if given, `changeset` must be a set of (path, key) tuples describing - which legacy translations are to be merged. + which legacy translations are to be merged. If `changeset` is None, + all legacy translations will be allowed to be migrated in a single + changeset. + + The inner `in_changeset` function is used to determine if a message + should be migrated for the given changeset. It compares the legacy + dependencies of the transform defined for the message with legacy + translations available in the changeset. If all dependencies are + present, the message will be migrated. Given `changeset`, return a dict whose keys are resource paths and values are `FTL.Resource` instances. The values will also be used to @@ -306,10 +314,18 @@ class MergeContext(object): transforms = self.transforms.get(path, []) def in_changeset(ident): - """Check if entity should be merged. + """Check if a message should be migrated. + + A message will be migrated only if all of its dependencies + are present in the currently processed changeset. + + If a transform defined for this message points to a missing + legacy translation, this message will not be merged. The + missing legacy dependency won't be present in the changeset. - If at least one dependency of the entity is in the current - set of changeset, merge it. + This also means that partially translated messages (e.g. + constructed from two legacy strings out of which only one is + avaiable) will never be migrated. """ message_deps = self.dependencies.get((path, ident), None) @@ -324,9 +340,11 @@ class MergeContext(object): if len(message_deps) == 0: return True - # If the intersection of the dependencies and the current - # changeset is non-empty, merge this message. - return message_deps & changeset + # Make sure all the dependencies are present in the current + # changeset. Partial migrations are not currently supported. + # See https://bugzilla.mozilla.org/show_bug.cgi?id=1321271 + available_deps = message_deps & changeset + return message_deps == available_deps # Merge legacy translations with the existing ones using the # reference as a template. 
diff --git a/fluent/migrate/transforms.py b/fluent/migrate/transforms.py index b9390ac..1d4202e 100644 --- a/fluent/migrate/transforms.py +++ b/fluent/migrate/transforms.py @@ -277,43 +277,16 @@ class PLURALS(Source): class CONCAT(Transform): - """Concatenate elements of many patterns.""" + """Create a new Pattern from Patterns, PatternElements and Expressions.""" - def __init__(self, *patterns): - self.patterns = list(patterns) + def __init__(self, *elements, **kwargs): + # We want to support both passing elements as *elements in the + # migration specs and as elements=[]. The latter is used by + # FTL.BaseNode.traverse when it recreates the traversed node using its + # attributes as kwargs. + self.elements = list(kwargs.get('elements', elements)) def __call__(self, ctx): - elements = self.flatten_elements(self.patterns) + elements = self.flatten_elements(self.elements) elements = self.prune_text_elements(elements) return FTL.Pattern(elements) - - def traverse(self, fun): - def visit(value): - if isinstance(value, FTL.BaseNode): - return value.traverse(fun) - if isinstance(value, list): - return fun(map(visit, value)) - else: - return fun(value) - - node = self.__class__( - *[ - visit(value) for value in self.patterns - ] - ) - - return fun(node) - - def to_json(self): - def to_json(value): - if isinstance(value, FTL.BaseNode): - return value.to_json() - else: - return value - - return { - 'type': self.__class__.__name__, - 'patterns': [ - to_json(value) for value in self.patterns - ] - } diff --git a/fluent/syntax/ast.py b/fluent/syntax/ast.py index ff8b0ee..faccf08 100644 --- a/fluent/syntax/ast.py +++ b/fluent/syntax/ast.py @@ -66,12 +66,10 @@ class BaseNode(object): else: return fun(value) + # Use all attributes found on the node as kwargs to the constructor. 
+ kwargs = vars(self).items() node = self.__class__( - **{ - name: visit(value) - for name, value in vars(self).items() - } - ) + **{name: visit(value) for name, value in kwargs}) return fun(node) diff --git a/tools/migrate/examples/about_dialog.py b/tools/migrate/examples/about_dialog.py index e45def1..7afd7d9 100644 --- a/tools/migrate/examples/about_dialog.py +++ b/tools/migrate/examples/about_dialog.py @@ -1,9 +1,8 @@ # coding=utf8 import fluent.syntax.ast as FTL -from fluent.migrate import ( - CONCAT, EXTERNAL_ARGUMENT, MESSAGE_REFERENCE, COPY, REPLACE -) +from fluent.migrate import CONCAT, COPY, REPLACE +from fluent.migrate.helpers import EXTERNAL_ARGUMENT, MESSAGE_REFERENCE def migrate(ctx): diff --git a/tools/migrate/examples/about_downloads.py b/tools/migrate/examples/about_downloads.py index edfc823..a07d9b0 100644 --- a/tools/migrate/examples/about_downloads.py +++ b/tools/migrate/examples/about_downloads.py @@ -1,7 +1,8 @@ # coding=utf8 import fluent.syntax.ast as FTL -from fluent.migrate import EXTERNAL_ARGUMENT, COPY, PLURALS, REPLACE_IN_TEXT +from fluent.migrate import COPY, PLURALS, REPLACE_IN_TEXT +from fluent.migrate.helpers import EXTERNAL_ARGUMENT def migrate(ctx): diff --git a/tools/migrate/examples/bug_1291693.py b/tools/migrate/examples/bug_1291693.py index b05327b..6fd27c4 100644 --- a/tools/migrate/examples/bug_1291693.py +++ b/tools/migrate/examples/bug_1291693.py @@ -1,7 +1,8 @@ # coding=utf8 import fluent.syntax.ast as FTL -from fluent.migrate import MESSAGE_REFERENCE, COPY, REPLACE +from fluent.migrate import COPY, REPLACE +from fluent.migrate.helpers import MESSAGE_REFERENCE def migrate(ctx): diff --git a/tools/migrate/migrate-l10n.py b/tools/migrate/migrate-l10n.py index 8070d5f..b57d022 100755 --- a/tools/migrate/migrate-l10n.py +++ b/tools/migrate/migrate-l10n.py @@ -11,9 +11,9 @@ import importlib import hglib from hglib.util import b -from fluent.migrate import ( - MergeContext, MigrationError, convert_blame_to_changesets -) +from fluent.migrate.context import MergeContext +from fluent.migrate.errors import MigrationError +from fluent.migrate.changesets import convert_blame_to_changesets from blame import Blame
Remove MESSAGE_REFERENCE and EXTERNAL_ARGUMENT helpers

The [`helpers.py`](https://github.com/projectfluent/python-fluent/blob/b40264348efe0dbad0565678533c24c21a2c4b0e/fluent/migrate/helpers.py) file currently defines two pseudo-AST nodes which can be used in migration specs: `MESSAGE_REFERENCE` and `EXTERNAL_ARGUMENT`. I'd like to discuss the general approach regarding such helpers. On one hand, they allow writing more concise AST in migrations. On the other, they introduce abstractions which need to be learned besides the AST itself in order to write and maintain migrations. This in turn results in situations where it's tempting to randomly try different combinations of the helpers to see if they fix an issue with the migration :)

The AST being the source of truth, I think there's value in sticking to it exclusively, even at the cost of increased verbosity. @flodolo, @Pike, @zbraniecki - What do you think?

I already see @zbraniecki not use the helpers in [bug 1424682](https://bugzilla.mozilla.org/show_bug.cgi?id=1424682) in some cases, like here:

```py
FTL.Message(
    id = FTL.Identifier('category-general'),
    attributes = [
        FTL.Attribute(
            FTL.Identifier('tooltiptext'),
            FTL.Pattern(
                elements = [
                    FTL.Placeable(
                        # This could read:
                        # expression=MESSAGE_REFERENCE('pane-general-title')
                        expression = FTL.MessageReference(
                            id = FTL.Identifier(
                                'pane-general-title'
                            )
                        )
                    )
                ]
            )
        )
    ]
)
```

This is also related to what the API of `REPLACE` and `CONCAT` should be. Should we only accept `Patterns` and `PatternElements` (i.e. `TextElements` and `Placeables`)? This would make the API closer to the AST. Or should we accept `Expressions` as well? Compare:

```py
# REPLACE converts the MessageReference expression returned by the helper
# to a Placeable.
FTL.Message(
    id = FTL.Identifier('help-button-label'),
    value = REPLACE(
        'browser/chrome/browser/preferences/preferences.dtd',
        'helpButton2.label',
        {
            '&brandShortName;': MESSAGE_REFERENCE('-brand-short-name')
        }
    )
)
```

```py
# REPLACE requires an explicit Placeable wrapping the MessageReference
# expression.
FTL.Message(
    id = FTL.Identifier('help-button-label'),
    value = REPLACE(
        'browser/chrome/browser/preferences/preferences.dtd',
        'helpButton2.label',
        {
            '&brandShortName;': FTL.Placeable(
                MESSAGE_REFERENCE('-brand-short-name')
            )
        }
    )
)
```

```py
# REPLACE requires an explicit Placeable and an explicit MessageReference.
FTL.Message(
    id = FTL.Identifier('help-button-label'),
    value = REPLACE(
        'browser/chrome/browser/preferences/preferences.dtd',
        'helpButton2.label',
        {
            '&brandShortName;': FTL.Placeable(
                FTL.MessageReference(
                    FTL.Identifier('-brand-short-name')
                )
            )
        }
    )
)
```
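For readers without `helpers.py` at hand: judging from the commented-out line in the first example, the two helpers are thin constructors over raw AST nodes. A plausible sketch of their definitions (inferred from this issue, not copied verbatim from the file):

```py
import fluent.syntax.ast as FTL

def MESSAGE_REFERENCE(name):
    # Equivalent to the expanded form shown in the example above.
    return FTL.MessageReference(id=FTL.Identifier(name))

def EXTERNAL_ARGUMENT(name):
    # Returns a bare Expression; per the discussion, REPLACE/CONCAT are
    # then responsible for wrapping it in a Placeable.
    return FTL.ExternalArgument(id=FTL.Identifier(name))
```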
projectfluent/python-fluent
diff --git a/tests/migrate/test_concat.py b/tests/migrate/test_concat.py index 9cba145..6d2f35e 100644 --- a/tests/migrate/test_concat.py +++ b/tests/migrate/test_concat.py @@ -157,7 +157,7 @@ class TestConcatInterpolate(MockContext): <!ENTITY channel.description.end " channel."> ''') - def test_concat_replace(self): + def test_concat_placeable(self): msg = FTL.Message( FTL.Identifier('channel-desc'), value=CONCAT( @@ -174,6 +174,23 @@ class TestConcatInterpolate(MockContext): ''') ) + def test_concat_expression(self): + msg = FTL.Message( + FTL.Identifier('channel-desc'), + value=CONCAT( + COPY('test.properties', 'channel.description.start'), + EXTERNAL_ARGUMENT('channelname'), + COPY('test.properties', 'channel.description.end'), + ) + ) + + self.assertEqual( + evaluate(self, msg).to_json(), + ftl_message_to_json(''' + channel-desc = You are on the { $channelname } channel. + ''') + ) + @unittest.skipUnless(DTDParser, 'compare-locales required') class TestConcatReplace(MockContext): diff --git a/tests/migrate/test_context.py b/tests/migrate/test_context.py index 81d55af..56f0f50 100644 --- a/tests/migrate/test_context.py +++ b/tests/migrate/test_context.py @@ -16,7 +16,7 @@ from fluent.migrate.errors import ( EmptyLocalizationError, NotSupportedError, UnreadableReferenceError) from fluent.migrate.util import ftl, ftl_resource_to_json, to_json from fluent.migrate.context import MergeContext -from fluent.migrate.transforms import COPY +from fluent.migrate.transforms import CONCAT, COPY def here(*parts): @@ -251,7 +251,7 @@ class TestIncompleteReference(unittest.TestCase): @unittest.skipUnless(compare_locales, 'compare-locales requried') -class TestEmptyLocalization(unittest.TestCase): +class TestMissingLocalizationFiles(unittest.TestCase): def setUp(self): # Silence all logging. logging.disable(logging.CRITICAL) @@ -266,7 +266,40 @@ class TestEmptyLocalization(unittest.TestCase): # Resume logging. logging.disable(logging.NOTSET) - def test_all_localization_missing(self): + def test_missing_file(self): + self.ctx.add_transforms('aboutDownloads.ftl', 'aboutDownloads.ftl', [ + FTL.Message( + id=FTL.Identifier('title'), + value=COPY( + 'aboutDownloads.dtd', + 'aboutDownloads.title' + ) + ), + FTL.Message( + id=FTL.Identifier('header'), + value=COPY( + 'missing.dtd', + 'missing' + ) + ), + ]) + + expected = { + 'aboutDownloads.ftl': ftl_resource_to_json(''' + # This Source Code Form is subject to the terms of the Mozilla Public + # License, v. 2.0. If a copy of the MPL was not distributed with this + # file, You can obtain one at http://mozilla.org/MPL/2.0/. + + title = Pobrane pliki + ''') + } + + self.assertDictEqual( + to_json(self.ctx.merge_changeset()), + expected + ) + + def test_all_files_missing(self): pattern = ('No localization files were found') with self.assertRaisesRegexp(EmptyLocalizationError, pattern): self.ctx.add_transforms('existing.ftl', 'existing.ftl', [ @@ -281,7 +314,9 @@ class TestEmptyLocalization(unittest.TestCase): @unittest.skipUnless(compare_locales, 'compare-locales requried') -class TestIncompleteLocalization(unittest.TestCase): +class TestMissingLocalizationStrings(unittest.TestCase): + maxDiff = None + def setUp(self): # Silence all logging. logging.disable(logging.CRITICAL) @@ -296,20 +331,116 @@ class TestIncompleteLocalization(unittest.TestCase): # Resume logging. 
logging.disable(logging.NOTSET) - def test_missing_localization_file(self): - self.ctx.add_transforms('existing.ftl', 'existing.ftl', [ + def test_missing_string_in_simple_value(self): + self.ctx.add_transforms('aboutDownloads.ftl', 'aboutDownloads.ftl', [ FTL.Message( - id=FTL.Identifier('foo'), + id=FTL.Identifier('title'), value=COPY( - 'existing.dtd', - 'foo' + 'aboutDownloads.dtd', + 'missing' ) ), + ]) + + self.assertDictEqual( + to_json(self.ctx.merge_changeset()), + {} + ) + + def test_missing_string_in_only_variant(self): + self.ctx.add_transforms('aboutDownloads.ftl', 'aboutDownloads.ftl', [ FTL.Message( - id=FTL.Identifier('bar'), - value=COPY( - 'missing.dtd', - 'bar' + id=FTL.Identifier('title'), + value=CONCAT( + FTL.SelectExpression( + expression=FTL.CallExpression( + callee=FTL.Identifier('PLATFORM') + ), + variants=[ + FTL.Variant( + key=FTL.VariantName('other'), + default=True, + value=COPY( + 'aboutDownloads.dtd', + 'missing' + ) + ), + ] + ), + ) + ), + ]) + + self.assertDictEqual( + to_json(self.ctx.merge_changeset()), + {} + ) + + def test_missing_string_in_all_variants(self): + self.ctx.add_transforms('aboutDownloads.ftl', 'aboutDownloads.ftl', [ + FTL.Message( + id=FTL.Identifier('title'), + value=CONCAT( + FTL.SelectExpression( + expression=FTL.CallExpression( + callee=FTL.Identifier('PLATFORM') + ), + variants=[ + FTL.Variant( + key=FTL.VariantName('windows'), + default=False, + value=COPY( + 'aboutDownloads.dtd', + 'missing.windows' + ) + ), + FTL.Variant( + key=FTL.VariantName('other'), + default=True, + value=COPY( + 'aboutDownloads.dtd', + 'missing.other' + ) + ), + ] + ), + ) + ), + ]) + + self.assertDictEqual( + to_json(self.ctx.merge_changeset()), + {} + ) + + def test_missing_string_in_one_of_variants(self): + self.ctx.add_transforms('aboutDownloads.ftl', 'aboutDownloads.ftl', [ + FTL.Message( + id=FTL.Identifier('title'), + value=CONCAT( + FTL.SelectExpression( + expression=FTL.CallExpression( + callee=FTL.Identifier('PLATFORM') + ), + variants=[ + FTL.Variant( + key=FTL.VariantName('windows'), + default=False, + value=COPY( + 'aboutDownloads.dtd', + 'aboutDownloads.title' + ) + ), + FTL.Variant( + key=FTL.VariantName('other'), + default=True, + value=COPY( + 'aboutDownloads.dtd', + 'missing.other' + ) + ), + ] + ), ) ), ]) @@ -319,6 +450,83 @@ class TestIncompleteLocalization(unittest.TestCase): {} ) + def test_missing_string_in_only_attribute(self): + self.ctx.add_transforms('aboutDownloads.ftl', 'aboutDownloads.ftl', [ + FTL.Message( + id=FTL.Identifier('title'), + attributes=[ + FTL.Attribute( + FTL.Identifier('one'), + COPY( + 'aboutDownloads.dtd', + 'missing' + ) + ), + ] + ), + ]) + + self.assertDictEqual( + to_json(self.ctx.merge_changeset()), + {} + ) + + def test_missing_string_in_all_attributes(self): + self.ctx.add_transforms('aboutDownloads.ftl', 'aboutDownloads.ftl', [ + FTL.Message( + id=FTL.Identifier('title'), + attributes=[ + FTL.Attribute( + FTL.Identifier('one'), + COPY( + 'aboutDownloads.dtd', + 'missing.one' + ) + ), + FTL.Attribute( + FTL.Identifier('two'), + COPY( + 'aboutDownloads.dtd', + 'missing.two' + ) + ), + ] + ), + ]) + + self.assertDictEqual( + to_json(self.ctx.merge_changeset()), + {} + ) + + def test_missing_string_in_one_of_attributes(self): + self.ctx.add_transforms('aboutDownloads.ftl', 'aboutDownloads.ftl', [ + FTL.Message( + id=FTL.Identifier('title'), + attributes=[ + FTL.Attribute( + FTL.Identifier('title'), + COPY( + 'aboutDownloads.dtd', + 'aboutDownloads.title' + ) + ), + FTL.Attribute( + 
FTL.Identifier('missing'), + COPY( + 'aboutDownloads.dtd', + 'missing' + ) + ), + ] + ), + ]) + + self.assertDictEqual( + to_json(self.ctx.merge_changeset()), + {} + ) + @unittest.skipUnless(compare_locales, 'compare-locales requried') class TestExistingTarget(unittest.TestCase): @@ -360,7 +568,6 @@ class TestExistingTarget(unittest.TestCase): ''') } - self.maxDiff = None self.assertDictEqual( to_json(self.ctx.merge_changeset()), expected @@ -399,7 +606,6 @@ class TestExistingTarget(unittest.TestCase): ''') } - self.maxDiff = None self.assertDictEqual( to_json(self.ctx.merge_changeset()), expected diff --git a/tests/migrate/test_context_real_examples.py b/tests/migrate/test_context_real_examples.py index 482dbc8..1586826 100644 --- a/tests/migrate/test_context_real_examples.py +++ b/tests/migrate/test_context_real_examples.py @@ -281,6 +281,8 @@ class TestMergeAboutDownloads(unittest.TestCase): @unittest.skipUnless(compare_locales, 'compare-locales requried') class TestMergeAboutDialog(unittest.TestCase): + maxDiff = None + def setUp(self): self.ctx = MergeContext( lang='pl', @@ -363,6 +365,8 @@ class TestMergeAboutDialog(unittest.TestCase): def test_merge_context_some_messages(self): changeset = { ('aboutDialog.dtd', 'update.failed.start'), + ('aboutDialog.dtd', 'update.failed.linkText'), + ('aboutDialog.dtd', 'update.failed.end'), } expected = { @@ -379,3 +383,14 @@ class TestMergeAboutDialog(unittest.TestCase): to_json(self.ctx.merge_changeset(changeset)), expected ) + + def test_merge_context_too_few_messages(self): + changeset = { + ('aboutDialog.dtd', 'update.failed.start'), + ('aboutDialog.dtd', 'update.failed.linkText'), + } + + self.assertDictEqual( + to_json(self.ctx.merge_changeset(changeset)), + {} + ) diff --git a/tests/migrate/test_merge.py b/tests/migrate/test_merge.py index a85fead..9178326 100644 --- a/tests/migrate/test_merge.py +++ b/tests/migrate/test_merge.py @@ -19,12 +19,17 @@ class MockContext(unittest.TestCase): def get_source(self, path, key): # Ignore path (test.properties) and get translations from # self.ab_cd_legacy defined in setUp. - return self.ab_cd_legacy.get(key, None).val + translation = self.ab_cd_legacy.get(key, None) + + if translation is not None: + return translation.val @unittest.skipUnless(PropertiesParser and DTDParser, 'compare-locales required') class TestMergeMessages(MockContext): + maxDiff = None + def setUp(self): self.en_us_ftl = parse(FluentParser, ftl(''' title = Downloads diff --git a/tests/migrate/test_util.py b/tests/migrate/test_util.py index bc739fa..32ea42d 100644 --- a/tests/migrate/test_util.py +++ b/tests/migrate/test_util.py @@ -27,11 +27,11 @@ class TestTraverse(unittest.TestCase): result = node.traverse(lambda x: x) self.assertEqual( - result.value.patterns[0].key, + result.value.elements[0].key, 'key1' ) self.assertEqual( - result.value.patterns[1].key, + result.value.elements[1].key, 'key2' )
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 8 }
0.6
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 -e git+https://github.com/projectfluent/python-fluent.git@01be516c3381ffb80798b4817964a5c5dff52d8a#egg=fluent importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: python-fluent channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 prefix: /opt/conda/envs/python-fluent
[ "tests/migrate/test_util.py::TestTraverse::test_copy_concat" ]
[]
[ "tests/migrate/test_context.py::TestIncompleteReference::test_missing_reference_file", "tests/migrate/test_context.py::TestNotSupportedError::test_add_ftl", "tests/migrate/test_util.py::TestReduce::test_copy_concat", "tests/migrate/test_util.py::TestReduce::test_copy_traits", "tests/migrate/test_util.py::TestReduce::test_copy_value" ]
[]
Apache License 2.0
2,123
[ "fluent/migrate/transforms.py", "tools/migrate/examples/about_dialog.py", "fluent/migrate/__init__.py", "tools/migrate/examples/about_downloads.py", "fluent/migrate/context.py", "tools/migrate/examples/bug_1291693.py", "fluent/syntax/ast.py", "tools/migrate/migrate-l10n.py" ]
[ "fluent/migrate/transforms.py", "tools/migrate/examples/about_dialog.py", "fluent/migrate/__init__.py", "tools/migrate/examples/about_downloads.py", "fluent/migrate/context.py", "tools/migrate/examples/bug_1291693.py", "fluent/syntax/ast.py", "tools/migrate/migrate-l10n.py" ]
OpenNMT__OpenNMT-tf-62
c46f9b0ce71e23faa0202832ac0cad179bb3bb42
2018-02-06 19:46:27
c46f9b0ce71e23faa0202832ac0cad179bb3bb42
diff --git a/opennmt/decoders/self_attention_decoder.py b/opennmt/decoders/self_attention_decoder.py
index fe1b3a6e..1e40d5d6 100644
--- a/opennmt/decoders/self_attention_decoder.py
+++ b/opennmt/decoders/self_attention_decoder.py
@@ -4,7 +4,7 @@ import tensorflow as tf
 import opennmt.layers.transformer as transformer
 
 from opennmt.decoders.decoder import Decoder, get_embedding_fn
-from opennmt.layers.position import PositionEmbedder
+from opennmt.layers.position import SinusoidalPositionEncoder
 from opennmt.utils.beam_search import beam_search
 
 
@@ -21,7 +21,7 @@ class SelfAttentionDecoder(Decoder):
                dropout=0.1,
                attention_dropout=0.1,
                relu_dropout=0.1,
-               position_encoder=PositionEmbedder()):
+               position_encoder=SinusoidalPositionEncoder()):
     """Initializes the parameters of the decoder.
 
     Args:
diff --git a/opennmt/encoders/self_attention_encoder.py b/opennmt/encoders/self_attention_encoder.py
index 69d299cc..3a942a76 100644
--- a/opennmt/encoders/self_attention_encoder.py
+++ b/opennmt/encoders/self_attention_encoder.py
@@ -4,7 +4,7 @@ import tensorflow as tf
 import opennmt.layers.transformer as transformer
 
 from opennmt.encoders.encoder import Encoder
-from opennmt.layers.position import PositionEmbedder
+from opennmt.layers.position import SinusoidalPositionEncoder
 
 
 class SelfAttentionEncoder(Encoder):
@@ -20,7 +20,7 @@ class SelfAttentionEncoder(Encoder):
               dropout=0.1,
               attention_dropout=0.1,
               relu_dropout=0.1,
-              position_encoder=PositionEmbedder()):
+              position_encoder=SinusoidalPositionEncoder()):
    """Initializes the parameters of the encoder.

    Args:
diff --git a/opennmt/layers/__init__.py b/opennmt/layers/__init__.py
index cdf13693..79a97413 100644
--- a/opennmt/layers/__init__.py
+++ b/opennmt/layers/__init__.py
@@ -10,3 +10,4 @@ from opennmt.layers.bridge import ZeroBridge
 from opennmt.layers.bridge import DenseBridge
 
 from opennmt.layers.position import PositionEmbedder
+from opennmt.layers.position import SinusoidalPositionEncoder
diff --git a/opennmt/layers/position.py b/opennmt/layers/position.py
index 358c5007..f344f225 100644
--- a/opennmt/layers/position.py
+++ b/opennmt/layers/position.py
@@ -1,5 +1,6 @@
 """Define position encoder classes."""
 
+import math
 import abc
 
 import six
@@ -96,10 +97,10 @@
     input_dim = inputs.get_shape().as_list()[-1]
 
     position = tf.tile([position], [batch_size])
+    position = tf.expand_dims(position, 1)
 
     with tf.variable_scope("position_encoding"):
       position_encoding = self.encode(position, input_dim, dtype=inputs.dtype)
-      position_encoding = tf.expand_dims(position_encoding, 1)
       return self.reducer.reduce([inputs, position_encoding])
 
   @abc.abstractmethod
@@ -157,3 +158,20 @@ class PositionEmbedder(PositionEncoder):
     embeddings = tf.get_variable(
         "w_embs", shape=[self.maximum_position + 1, depth], dtype=dtype)
     return tf.nn.embedding_lookup(embeddings, positions)
+
+
+class SinusoidalPositionEncoder(PositionEncoder):
+  """Encodes positions with sine waves as described in
+  https://arxiv.org/abs/1706.03762.
+  """
+
+  def encode(self, positions, depth, dtype=tf.float32):
+    batch_size = tf.shape(positions)[0]
+    positions = tf.cast(positions, dtype)
+
+    log_timescale_increment = math.log(10000) / (depth / 2 - 1)
+    inv_timescales = tf.exp(tf.range(depth / 2, dtype=dtype) * -log_timescale_increment)
+    inv_timescales = tf.reshape(tf.tile(inv_timescales, [batch_size]), [batch_size, -1])
+    scaled_time = tf.expand_dims(positions, -1) * tf.expand_dims(inv_timescales, 1)
+
+    return tf.concat([tf.sin(scaled_time), tf.cos(scaled_time)], axis=2)
diff --git a/opennmt/models/transformer.py b/opennmt/models/transformer.py
index f095998e..32bdc812 100644
--- a/opennmt/models/transformer.py
+++ b/opennmt/models/transformer.py
@@ -5,7 +5,7 @@ import tensorflow as tf
 from opennmt.models.sequence_to_sequence import SequenceToSequence
 from opennmt.encoders.self_attention_encoder import SelfAttentionEncoder
 from opennmt.decoders.self_attention_decoder import SelfAttentionDecoder
-from opennmt.layers.position import PositionEmbedder
+from opennmt.layers.position import SinusoidalPositionEncoder
 
 
 class Transformer(SequenceToSequence):
@@ -23,7 +23,7 @@ class Transformer(SequenceToSequence):
                dropout=0.1,
                attention_dropout=0.1,
                relu_dropout=0.1,
-               position_encoder=PositionEmbedder(),
+               position_encoder=SinusoidalPositionEncoder(),
                name="transformer"):
     """Initializes a Transformer model.
 
Positional encoding with sinusoids Implement a new `opennmt.layers.PositionEncoder` with sinusoids as described in "Attention Is All You Need" (https://arxiv.org/abs/1706.03762).
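The issue above asks for the sinusoidal scheme from "Attention Is All You Need"; the patch implements it in TensorFlow. For orientation, here is a minimal NumPy sketch of the same formula, matching the timescale math in the patch. The function name and the choice of NumPy (rather than the project's TensorFlow code) are illustrative assumptions, not OpenNMT-tf API.

```python
import numpy as np

def sinusoidal_encoding(positions, depth):
    # Sketch (assumed helper, not OpenNMT-tf API): encode integer positions
    # with sine/cosine waves as in "Attention Is All You Need".
    positions = np.asarray(positions, dtype=np.float64)            # (length,)
    # Geometric progression of inverse timescales from 1 down to 1/10000.
    log_timescale_increment = np.log(10000.0) / (depth / 2 - 1)
    inv_timescales = np.exp(np.arange(depth // 2) * -log_timescale_increment)
    scaled_time = positions[:, None] * inv_timescales[None, :]    # (length, depth/2)
    # First half sines, second half cosines, concatenated to width `depth`.
    return np.concatenate([np.sin(scaled_time), np.cos(scaled_time)], axis=1)

print(sinusoidal_encoding(range(6), depth=10).shape)  # (6, 10)
```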
OpenNMT/OpenNMT-tf
diff --git a/opennmt/tests/position_test.py b/opennmt/tests/position_test.py index e71ec8c3..7cfb6e45 100644 --- a/opennmt/tests/position_test.py +++ b/opennmt/tests/position_test.py @@ -19,6 +19,15 @@ class PositionTest(tf.test.TestCase): positions = sess.run(positions) self.assertAllEqual([[1, 2, 3, 4, 0, 0, 0], [1, 2, 3, 4, 5, 6, 0]], positions) + def testSinusoidalPositionEncoder(self): + encoder = position.SinusoidalPositionEncoder() + positions = position.make_positions([4, 6]) + depth = 10 + encoding = encoder.encode(positions, depth) + with self.test_session() as sess: + encoding = sess.run(encoding) + self.assertAllEqual([2, 6, depth], encoding.shape) + if __name__ == "__main__": tf.test.main()
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 5 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[TensorFlow]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "nose2", "pytest" ], "pre_install": null, "python": "3.5", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work bleach==1.5.0 certifi==2021.5.30 dataclasses==0.8 enum34==1.1.10 html5lib==0.9999999 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Markdown==3.3.7 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work nose2==0.13.0 numpy==1.19.5 -e git+https://github.com/OpenNMT/OpenNMT-tf.git@c46f9b0ce71e23faa0202832ac0cad179bb3bb42#egg=OpenNMT_tf packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work protobuf==3.19.6 py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 PyYAML==6.0.1 six==1.17.0 tensorflow==1.4.0 tensorflow-tensorboard==0.4.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work Werkzeug==2.0.3 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: OpenNMT-tf channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - bleach==1.5.0 - dataclasses==0.8 - enum34==1.1.10 - html5lib==0.9999999 - markdown==3.3.7 - nose2==0.13.0 - numpy==1.19.5 - protobuf==3.19.6 - pyyaml==6.0.1 - six==1.17.0 - tensorflow==1.4.0 - tensorflow-tensorboard==0.4.0 - werkzeug==2.0.3 prefix: /opt/conda/envs/OpenNMT-tf
[ "opennmt/tests/position_test.py::PositionTest::testSinusoidalPositionEncoder" ]
[]
[ "opennmt/tests/position_test.py::PositionTest::testPositionBuilder", "opennmt/tests/position_test.py::PositionTest::testPositionBuilderWithMaxLen", "opennmt/tests/position_test.py::PositionTest::test_session" ]
[]
MIT License
2,124
[ "opennmt/layers/__init__.py", "opennmt/encoders/self_attention_encoder.py", "opennmt/layers/position.py", "opennmt/decoders/self_attention_decoder.py", "opennmt/models/transformer.py" ]
[ "opennmt/layers/__init__.py", "opennmt/encoders/self_attention_encoder.py", "opennmt/layers/position.py", "opennmt/decoders/self_attention_decoder.py", "opennmt/models/transformer.py" ]
redkyn__assigner-118
2c0c0b34ce80e776eeaf18e8028a0821513131b1
2018-02-07 02:57:06
2c0c0b34ce80e776eeaf18e8028a0821513131b1
diff --git a/CHANGELOG.md b/CHANGELOG.md index acbcc3b..7159c48 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,7 @@ ## Devel +- Print informative message if `git` is not installed + ## 1.1.1 - Fixed `get` failing to clone new repos with error `Remote branch ['master'] not found in upstream origin` diff --git a/assigner/__init__.py b/assigner/__init__.py index 1099245..b19db23 100755 --- a/assigner/__init__.py +++ b/assigner/__init__.py @@ -6,6 +6,7 @@ import sys from colorlog import ColoredFormatter from requests.exceptions import HTTPError +from git.cmd import GitCommandNotFound from assigner.baserepo import StudentRepo from assigner.config import config_context @@ -186,6 +187,8 @@ def main(args=sys.argv[1:]): raise e if isinstance(e, KeyError): logger.error("%s is missing", e) + elif isinstance(e, GitCommandNotFound): + logger.error("git is not installed!") else: logger.error(str(e)) raise SystemExit(1) from e
Assigner doesn't provide a helpful message if git is not installed ...instead, assigner yells at you in an unsettling manner! I will provide an example later today.
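The patch above boils down to one extra `except` branch around the subcommand dispatch. A standalone sketch of the same pattern follows; the wrapper function and logger setup are illustrative, and only the `GitCommandNotFound` import mirrors the actual change.

```python
import logging
import sys

from git.cmd import GitCommandNotFound  # raised by GitPython when git is missing

logger = logging.getLogger("assigner")

def dispatch(run, args):
    # Sketch: run a subcommand, translating GitPython's missing-git error
    # into a readable one-line message instead of a raw traceback.
    try:
        run(args)
    except GitCommandNotFound:
        logger.error("git is not installed!")
        sys.exit(1)
```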
redkyn/assigner
diff --git a/assigner/tests/assigner_test.py b/assigner/tests/assigner_test.py index 2811a70..771e70e 100644 --- a/assigner/tests/assigner_test.py +++ b/assigner/tests/assigner_test.py @@ -4,6 +4,8 @@ from unittest.mock import patch from assigner import main, make_parser, subcommands from assigner.tests.utils import AssignerTestCase +from git.cmd import GitCommandNotFound + class MakeParserTestCase(AssignerTestCase): def setUp(self): @@ -134,6 +136,22 @@ class MainTestCase(AssignerTestCase): "%s is missing", self.mock_args.run.side_effect ) + @patch("assigner.logger", autospec=True) + def test_main_logs_gitcommandnotfound_with_catch(self, mock_logger): + """ + main should log a GitCommandNotFound with "git is not installed!" when raised. + """ + self.mock_args.tracebacks = False + self.mock_args.run.side_effect = GitCommandNotFound() + try: + main([]) + except SystemExit: + pass + + mock_logger.error.assert_called_once_with( + "git is not installed!" + ) + def test_main_sets_verbosity(self): """ main should set verosity and level from args.
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 2 }
1.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
-e git+https://github.com/redkyn/assigner.git@2c0c0b34ce80e776eeaf18e8028a0821513131b1#egg=assigner astroid==1.6.6 attrs==22.2.0 certifi==2021.5.30 colorlog==2.6.0 gitdb==0.6.4 GitPython==1.0.1 importlib-metadata==4.8.3 iniconfig==1.1.1 isort==5.10.1 jsonschema==2.5.1 lazy-object-proxy==1.7.1 mccabe==0.3.1 nose==1.3.7 nose-parameterized==0.6.0 packaging==21.3 pep8==1.5.7 pluggy==1.0.0 progressbar2==3.10.1 PTable==0.9.2 py==1.11.0 pyapi-gitlab==7.8.5 pyflakes==1.0.0 pylint==1.8.1 pyparsing==3.1.4 pytest==7.0.1 python-utils==2.2.0 PyYAML==3.11 requests==2.9.1 setuptools-scm==1.15.7 six==1.10.0 smmap==0.9.0 tomli==1.2.3 tqdm==4.19.5 typing_extensions==4.1.1 wrapt==1.10.11 zipp==3.6.0
name: assigner channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==1.6.6 - attrs==22.2.0 - colorlog==2.6.0 - gitdb==0.6.4 - gitpython==1.0.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isort==5.10.1 - jsonschema==2.5.1 - lazy-object-proxy==1.7.1 - mccabe==0.3.1 - nose==1.3.7 - nose-parameterized==0.6.0 - packaging==21.3 - pep8==1.5.7 - pluggy==1.0.0 - progressbar2==3.10.1 - ptable==0.9.2 - py==1.11.0 - pyapi-gitlab==7.8.5 - pyflakes==1.0.0 - pylint==1.8.1 - pyparsing==3.1.4 - pytest==7.0.1 - python-utils==2.2.0 - pyyaml==3.11 - requests==2.9.1 - setuptools-scm==1.15.7 - six==1.10.0 - smmap==0.9.0 - tomli==1.2.3 - tqdm==4.19.5 - typing-extensions==4.1.1 - wheel==0.24.0 - wrapt==1.10.11 - zipp==3.6.0 prefix: /opt/conda/envs/assigner
[ "assigner/tests/assigner_test.py::MainTestCase::test_main_logs_gitcommandnotfound_with_catch" ]
[]
[ "assigner/tests/assigner_test.py::MakeParserTestCase::test_add_default_help", "assigner/tests/assigner_test.py::MakeParserTestCase::test_adds_all_subcommands", "assigner/tests/assigner_test.py::MakeParserTestCase::test_creates_argument_parser", "assigner/tests/assigner_test.py::MainTestCase::test_calls_args_run", "assigner/tests/assigner_test.py::MainTestCase::test_calls_make_parser", "assigner/tests/assigner_test.py::MainTestCase::test_main_catches_exceptions", "assigner/tests/assigner_test.py::MainTestCase::test_main_logs_exceptions", "assigner/tests/assigner_test.py::MainTestCase::test_main_logs_keyerror_with_catch", "assigner/tests/assigner_test.py::MainTestCase::test_main_raises_exceptions_with_traceback", "assigner/tests/assigner_test.py::MainTestCase::test_main_sets_verbosity" ]
[]
MIT License
2,126
[ "assigner/__init__.py", "CHANGELOG.md" ]
[ "assigner/__init__.py", "CHANGELOG.md" ]
chaostoolkit__chaostoolkit-lib-31
3656c8ec9185318d16f6ee7e0815bd9da52cad44
2018-02-07 13:11:15
3656c8ec9185318d16f6ee7e0815bd9da52cad44
diff --git a/CHANGELOG.md b/CHANGELOG.md index 0af4aa2..c9591c2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [Unreleased]: https://github.com/chaostoolkit/chaostoolkit-lib/compare/0.13.1...HEAD +### Changed + +- Do not fail the discovery when an extension is missing the `__all__` + attribute [#28][28] + +[28]: https://github.com/chaostoolkit/chaostoolkit-lib/issues/28 + ## [0.13.1][] - 2018-01-30 [0.13.1]: https://github.com/chaostoolkit/chaostoolkit-lib/compare/0.13.0...0.13.1 diff --git a/chaoslib/discovery/discover.py b/chaoslib/discovery/discover.py index 73fe118..20ad862 100644 --- a/chaoslib/discovery/discover.py +++ b/chaoslib/discovery/discover.py @@ -10,6 +10,7 @@ from logzero import logger from chaoslib import __version__ from chaoslib.discovery.package import get_discover_function, install,\ load_package +from chaoslib.exceptions import DiscoveryFailed from chaoslib.types import Discovery, DiscoveredActivities @@ -88,11 +89,18 @@ def discover_activities(extension_mod_name: str, mod = importlib.import_module(extension_mod_name) except ImportError: raise DiscoveryFailed( - "could not import Python module '{m}'".format( + "could not import extension module '{m}'".format( m=extension_mod_name)) activities = [] - exported = getattr(mod, "__all__") + try: + exported = getattr(mod, "__all__") + except AttributeError as e: + logger.warn("'{m}' does not expose the __all__ attribute. " + "It is required to determine what functions are actually " + "exported as activities.".format(m=extension_mod_name)) + return activities + funcs = inspect.getmembers(mod, inspect.isfunction) for (name, func) in funcs: if exported and name not in exported:
Do not fail on discovery of modules which don't export __all__ Right now the discovery mechanism expects modules to have a `__all__` attribute. Do not fail when it is missing.
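In isolation, the defensive lookup the issue asks for is just `getattr` with a default. A minimal sketch follows; the function name is illustrative, and the empty-list fallback mirrors the patch above.

```python
import importlib
import inspect

def discover_exported_functions(extension_mod_name):
    # Sketch: enumerate a module's exported functions without assuming
    # that the module defines __all__.
    mod = importlib.import_module(extension_mod_name)
    exported = getattr(mod, "__all__", None)  # None when __all__ is absent
    if exported is None:
        return []  # as in the patch: warn and skip rather than raise
    funcs = inspect.getmembers(mod, inspect.isfunction)
    return [(name, func) for name, func in funcs if name in exported]
```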
chaostoolkit/chaostoolkit-lib
diff --git a/tests/fixtures/fakeext.py b/tests/fixtures/fakeext.py new file mode 100644 index 0000000..ed72c69 --- /dev/null +++ b/tests/fixtures/fakeext.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- + +__all__ = ["many_args", "no_args_docstring", "no_args", "one_arg", + "one_untyped_arg", "one_arg_with_default", + "one_untyped_arg_with_default"] + + +def no_args_docstring(): + pass + + +def no_args(): + """ + No arguments. + """ + pass + + +def one_arg(message: str): + """ + One typed argument. + """ + pass + + +def one_arg_with_default(message: str="hello"): + """ + One typed argument with a default value. + """ + pass + + +def one_untyped_arg(message): + """ + One untyped argument. + """ + pass + + +def one_untyped_arg_with_default(message = "hello"): + """ + One untyped argument with a default value. + """ + pass + + +def many_args(message: str, colour: str="blue"): + """ + Many arguments. + """ + pass diff --git a/tests/fixtures/keepempty.py b/tests/fixtures/keepempty.py new file mode 100644 index 0000000..db5dc66 --- /dev/null +++ b/tests/fixtures/keepempty.py @@ -0,0 +1,5 @@ +# just keep this as-is + + +def not_an_activity(): + print("boom") diff --git a/tests/test_discover.py b/tests/test_discover.py index 216d379..13bf346 100644 --- a/tests/test_discover.py +++ b/tests/test_discover.py @@ -5,5 +5,98 @@ import pytest from chaoslib.exceptions import DiscoveryFailed from chaoslib.discovery import discover, initialize_discovery_result +from chaoslib.discovery.discover import discover_activities from chaoslib.types import Discovery, DiscoveredActivities, \ DiscoveredSystemInfo + + +def test_fail_discovery_when_module_cannot_be_loaded(): + with pytest.raises(DiscoveryFailed) as exc: + discover_activities("fixtures.burp", "probe") + assert "could not import extension module" in str(exc) + + +def test_do_not_fail_when_extension_mod_has_not_all(): + activities = discover_activities("fixtures.keepempty", "probe") + assert len(activities) == 0 + + +def test_discover_all_activities(): + mod = "fixtures.fakeext" + activities = discover_activities(mod, "probe") + assert len(activities) == 7 + + activity = activities[0] + assert activity["name"] == "many_args" + assert activity["type"] == "probe" + assert activity["mod"] == mod + assert activity["doc"] == "Many arguments." + assert activity["arguments"] == [ + { + "name": "message" + }, + { + "name": "colour", + "default": "blue" + } + ] + + activity = activities[1] + assert activity["name"] == "no_args" + assert activity["type"] == "probe" + assert activity["mod"] == mod + assert activity["doc"] == "No arguments." + assert activity["arguments"] == [] + + activity = activities[2] + assert activity["name"] == "no_args_docstring" + assert activity["type"] == "probe" + assert activity["mod"] == mod + assert activity["doc"] == None + assert activity["arguments"] == [] + + activity = activities[3] + assert activity["name"] == "one_arg" + assert activity["type"] == "probe" + assert activity["mod"] == mod + assert activity["doc"] == "One typed argument." + assert activity["arguments"] == [ + { + "name": "message" + } + ] + + activity = activities[4] + assert activity["name"] == "one_arg_with_default" + assert activity["type"] == "probe" + assert activity["mod"] == mod + assert activity["doc"] == "One typed argument with a default value." 
+ assert activity["arguments"] == [ + { + "name": "message", + "default": "hello" + } + ] + + activity = activities[5] + assert activity["name"] == "one_untyped_arg" + assert activity["type"] == "probe" + assert activity["mod"] == mod + assert activity["doc"] == "One untyped argument." + assert activity["arguments"] == [ + { + "name": "message" + } + ] + + activity = activities[6] + assert activity["name"] == "one_untyped_arg_with_default" + assert activity["type"] == "probe" + assert activity["mod"] == mod + assert activity["doc"] == "One untyped argument with a default value." + assert activity["arguments"] == [ + { + "name": "message", + "default": "hello" + } + ]
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 2 }
0.13
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[vault]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": null, "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt", "requirements-dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 -e git+https://github.com/chaostoolkit/chaostoolkit-lib.git@3656c8ec9185318d16f6ee7e0815bd9da52cad44#egg=chaostoolkit_lib charset-normalizer==2.0.12 coverage==6.2 hvac==0.11.2 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 logzero==1.7.0 packaging==21.3 pluggy==1.0.0 ply==3.4 py==1.11.0 pycodestyle==2.10.0 pyhcl==0.2.3 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-sugar==0.9.6 requests==2.27.1 requests-mock==1.12.1 six==1.17.0 termcolor==1.1.0 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: chaostoolkit-lib channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - charset-normalizer==2.0.12 - coverage==6.2 - hvac==0.11.2 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - logzero==1.7.0 - packaging==21.3 - pluggy==1.0.0 - ply==3.4 - py==1.11.0 - pycodestyle==2.10.0 - pyhcl==0.2.3 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-sugar==0.9.6 - requests==2.27.1 - requests-mock==1.12.1 - six==1.17.0 - termcolor==1.1.0 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/chaostoolkit-lib
[ "tests/test_discover.py::test_fail_discovery_when_module_cannot_be_loaded", "tests/test_discover.py::test_do_not_fail_when_extension_mod_has_not_all" ]
[]
[ "tests/test_discover.py::test_discover_all_activities" ]
[]
Apache License 2.0
2,128
[ "CHANGELOG.md", "chaoslib/discovery/discover.py" ]
[ "CHANGELOG.md", "chaoslib/discovery/discover.py" ]
nipy__nipype-2429
168dfee8862e3a635d8fd6f0f4469ad0950e55b9
2018-02-07 15:08:46
704b97dee7848283692bac38f04541c5af2a87b5
diff --git a/nipype/interfaces/mrtrix3/preprocess.py b/nipype/interfaces/mrtrix3/preprocess.py index ca5996bea..740513194 100644 --- a/nipype/interfaces/mrtrix3/preprocess.py +++ b/nipype/interfaces/mrtrix3/preprocess.py @@ -27,7 +27,7 @@ class ResponseSDInputSpec(MRTrix3BaseInputSpec): 'tournier', 'tax', argstr='%s', - position=-6, + position=1, mandatory=True, desc='response estimation algorithm (multi-tissue)') in_file = File( @@ -74,7 +74,7 @@ class ResponseSD(MRTrix3Base): >>> resp.inputs.algorithm = 'tournier' >>> resp.inputs.grad_fsl = ('bvecs', 'bvals') >>> resp.cmdline # doctest: +ELLIPSIS - 'dwi2response -fslgrad bvecs bvals tournier dwi.mif wm.txt' + 'dwi2response tournier -fslgrad bvecs bvals dwi.mif wm.txt' >>> resp.run() # doctest: +SKIP """
MRtrix3 `dwi2response` - bad algorithm argument position ### Summary The MRtrix3 `dwi2response` CL wrapper generates the following runtime error: ```shell dwi2response: mrinfo: [ERROR] no diffusion encoding information found in image "<DWI_FILE>" dwi2response: [ERROR] Script requires diffusion gradient table: either in image header, or using -grad / -fslgrad option ``` It turns out that the command generated by `nipype` does not respect (my version of) the MRtrix3 CL format. ### Actual behavior Generated command (not runnable): ```shell dwi2response -fslgrad <BVEC_FILE> <BVAL_FILE> -mask <MASK_FILE> tournier <WM_FILE> ``` ### Expected behavior Runnable command: ```shell dwi2response tournier -fslgrad <BVEC_FILE> <BVAL_FILE> -mask <MASK_FILE> <WM_FILE> ``` ### Environment - `MRtrix 3.0_RC2-117-gf098f097 dwi2response bin version: 3.0_RC2-117-gf098f097` - `Python 2.7.12` - `nipype v1.0.0` ### Quick and dirty solution I'm really not sure how clean it is, but it worked for me; in the `ResponseSDInputSpec` class, I changed `position=-6` to `position=1` in the `algorithm` trait.
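For readers unfamiliar with nipype's argument rendering: traits with non-negative `position` values are placed first in ascending order, negative positions count from the end, and unpositioned flags like `-fslgrad` land in between. A toy model of that ordering rule (an assumption about the behavior, not nipype's actual code) shows why `position=1` fixes the command; its output matches the before/after doctests in the patch above.

```python
def render(command, positioned, flags):
    # Toy model: non-negative positions first in ascending order, then
    # unpositioned flags, then negative positions in ascending order
    # (i.e. counting back from the end of the command line).
    first = [a for p, a in sorted(x for x in positioned if x[0] >= 0)]
    last = [a for p, a in sorted(x for x in positioned if x[0] < 0)]
    return " ".join([command] + first + flags + last)

# position=-6 put the algorithm after the flags; position=1 puts it first:
print(render("dwi2response", [(-6, "tournier"), (-2, "dwi.mif"), (-1, "wm.txt")],
             ["-fslgrad bvecs bvals"]))
print(render("dwi2response", [(1, "tournier"), (-2, "dwi.mif"), (-1, "wm.txt")],
             ["-fslgrad bvecs bvals"]))
```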
nipy/nipype
diff --git a/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py b/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py index 29a89f097..01104d2d2 100644 --- a/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py +++ b/nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py @@ -8,7 +8,7 @@ def test_ResponseSD_inputs(): algorithm=dict( argstr='%s', mandatory=True, - position=-6, + position=1, ), args=dict(argstr='%s', ), bval_scale=dict(argstr='-bvalue_scaling %s', ),
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 click==8.0.4 configparser==5.2.0 decorator==4.4.2 funcsigs==1.0.2 future==1.0.0 importlib-metadata==4.8.3 iniconfig==1.1.1 isodate==0.6.1 lxml==5.3.1 mock==5.2.0 networkx==2.5.1 nibabel==3.2.2 -e git+https://github.com/nipy/nipype.git@168dfee8862e3a635d8fd6f0f4469ad0950e55b9#egg=nipype numpy==1.19.5 packaging==21.3 pluggy==1.0.0 prov==1.5.0 py==1.11.0 pydot==1.4.2 pydotplus==2.0.2 pyparsing==3.1.4 pytest==7.0.1 python-dateutil==2.9.0.post0 rdflib==5.0.0 scipy==1.5.4 simplejson==3.20.1 six==1.17.0 tomli==1.2.3 traits==6.4.1 typing_extensions==4.1.1 zipp==3.6.0
name: nipype channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - click==8.0.4 - configparser==5.2.0 - decorator==4.4.2 - funcsigs==1.0.2 - future==1.0.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isodate==0.6.1 - lxml==5.3.1 - mock==5.2.0 - networkx==2.5.1 - nibabel==3.2.2 - numpy==1.19.5 - packaging==21.3 - pluggy==1.0.0 - prov==1.5.0 - py==1.11.0 - pydot==1.4.2 - pydotplus==2.0.2 - pyparsing==3.1.4 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - rdflib==5.0.0 - scipy==1.5.4 - simplejson==3.20.1 - six==1.17.0 - tomli==1.2.3 - traits==6.4.1 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/nipype
[ "nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py::test_ResponseSD_inputs" ]
[]
[ "nipype/interfaces/mrtrix3/tests/test_auto_ResponseSD.py::test_ResponseSD_outputs" ]
[]
Apache License 2.0
2,129
[ "nipype/interfaces/mrtrix3/preprocess.py" ]
[ "nipype/interfaces/mrtrix3/preprocess.py" ]
pgmpy__pgmpy-973
b85e9ca3b20296925dd4b5686356031dc1f80fec
2018-02-07 20:59:04
b85e9ca3b20296925dd4b5686356031dc1f80fec
codecov[bot]: # [Codecov](https://codecov.io/gh/pgmpy/pgmpy/pull/973?src=pr&el=h1) Report > Merging [#973](https://codecov.io/gh/pgmpy/pgmpy/pull/973?src=pr&el=desc) into [dev](https://codecov.io/gh/pgmpy/pgmpy/commit/b85e9ca3b20296925dd4b5686356031dc1f80fec?src=pr&el=desc) will **not change** coverage. > The diff coverage is `92.85%`. [![Impacted file tree graph](https://codecov.io/gh/pgmpy/pgmpy/pull/973/graphs/tree.svg?src=pr&token=UaJMCdHaEF&width=650&height=150)](https://codecov.io/gh/pgmpy/pgmpy/pull/973?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## dev #973 +/- ## ======================================= Coverage 94.73% 94.73% ======================================= Files 114 114 Lines 11216 11216 ======================================= Hits 10625 10625 Misses 591 591 ``` | [Impacted Files](https://codecov.io/gh/pgmpy/pgmpy/pull/973?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [pgmpy/models/DynamicBayesianNetwork.py](https://codecov.io/gh/pgmpy/pgmpy/pull/973/diff?src=pr&el=tree#diff-cGdtcHkvbW9kZWxzL0R5bmFtaWNCYXllc2lhbk5ldHdvcmsucHk=) | `91.8% <ø> (ø)` | :arrow_up: | | [pgmpy/tests/test\_models/test\_BayesianModel.py](https://codecov.io/gh/pgmpy/pgmpy/pull/973/diff?src=pr&el=tree#diff-cGdtcHkvdGVzdHMvdGVzdF9tb2RlbHMvdGVzdF9CYXllc2lhbk1vZGVsLnB5) | `100% <100%> (ø)` | :arrow_up: | | [...py/tests/test\_estimators/test\_BayesianEstimator.py](https://codecov.io/gh/pgmpy/pgmpy/pull/973/diff?src=pr&el=tree#diff-cGdtcHkvdGVzdHMvdGVzdF9lc3RpbWF0b3JzL3Rlc3RfQmF5ZXNpYW5Fc3RpbWF0b3IucHk=) | `100% <100%> (ø)` | :arrow_up: | | [pgmpy/estimators/BayesianEstimator.py](https://codecov.io/gh/pgmpy/pgmpy/pull/973/diff?src=pr&el=tree#diff-cGdtcHkvZXN0aW1hdG9ycy9CYXllc2lhbkVzdGltYXRvci5weQ==) | `91.89% <87.5%> (+2.7%)` | :arrow_up: | | [pgmpy/models/BayesianModel.py](https://codecov.io/gh/pgmpy/pgmpy/pull/973/diff?src=pr&el=tree#diff-cGdtcHkvbW9kZWxzL0JheWVzaWFuTW9kZWwucHk=) | `95.63% <0%> (-0.4%)` | :arrow_down: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/pgmpy/pgmpy/pull/973?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/pgmpy/pgmpy/pull/973?src=pr&el=footer). Last update [b85e9ca...c3f11cd](https://codecov.io/gh/pgmpy/pgmpy/pull/973?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). ankurankan: @khalibartan @yashu-seth Reviews please. Urgently if possible.
diff --git a/pgmpy/estimators/BayesianEstimator.py b/pgmpy/estimators/BayesianEstimator.py index 01689a50..4326ef35 100644 --- a/pgmpy/estimators/BayesianEstimator.py +++ b/pgmpy/estimators/BayesianEstimator.py @@ -27,8 +27,9 @@ class BayesianEstimator(ParameterEstimator): prior_type: 'dirichlet', 'BDeu', or 'K2' string indicting which type of prior to use for the model parameters. - If 'prior_type' is 'dirichlet', the following must be provided: - 'pseudo_counts' = dirichlet hyperparameters; a dict containing, for each variable, a list - with a "virtual" count for each variable state, that is added to the state counts. + 'pseudo_counts' = dirichlet hyperparameters; a dict containing, for each variable, a 2-D + array of the shape (node_card, product of parents_card) with a "virtual" count for each + variable state in the CPD, that is added to the state counts. (lexicographic ordering of states assumed) - If 'prior_type' is 'BDeu', then an 'equivalent_sample_size' must be specified instead of 'pseudo_counts'. This is equivalent to @@ -65,7 +66,7 @@ class BayesianEstimator(ParameterEstimator): for node in self.model.nodes(): _equivalent_sample_size = equivalent_sample_size[node] if isinstance(equivalent_sample_size, dict) else \ equivalent_sample_size - _pseudo_counts = pseudo_counts[node] if isinstance(pseudo_counts, dict) else pseudo_counts + _pseudo_counts = pseudo_counts[node] if pseudo_counts else None cpd = self.estimate_cpd(node, prior_type=prior_type, @@ -87,8 +88,9 @@ class BayesianEstimator(ParameterEstimator): prior_type: 'dirichlet', 'BDeu', 'K2', string indicting which type of prior to use for the model parameters. - If 'prior_type' is 'dirichlet', the following must be provided: - 'pseudo_counts' = dirichlet hyperparameters; a list or dict - with a "virtual" count for each variable state. + 'pseudo_counts' = dirichlet hyperparameters; 2-D array of shape + (node_card, product of parents_card) with a "virtual" count for + each variable state in the CPD. The virtual counts are added to the actual state counts found in the data. 
(if a list is provided, a lexicographic ordering of states is assumed) - If 'prior_type' is 'BDeu', then an 'equivalent_sample_size' @@ -126,22 +128,23 @@ class BayesianEstimator(ParameterEstimator): node_cardinality = len(self.state_names[node]) parents = sorted(self.model.get_parents(node)) parents_cardinalities = [len(self.state_names[parent]) for parent in parents] + cpd_shape = (node_cardinality, np.prod(parents_cardinalities, dtype=int)) if prior_type == 'K2': - pseudo_counts = [1] * node_cardinality + pseudo_counts = np.ones(cpd_shape, dtype=int) elif prior_type == 'BDeu': alpha = float(equivalent_sample_size) / (node_cardinality * np.prod(parents_cardinalities)) - pseudo_counts = [alpha] * node_cardinality + pseudo_counts = np.ones(cpd_shape, dtype=float) * alpha elif prior_type == 'dirichlet': - if not len(pseudo_counts) == node_cardinality: - raise ValueError("'pseudo_counts' should have length {0}".format(node_cardinality)) - if isinstance(pseudo_counts, dict): - pseudo_counts = sorted(pseudo_counts.values()) + pseudo_counts = np.array(pseudo_counts) + if pseudo_counts.shape != cpd_shape: + raise ValueError("The shape of pseudo_counts must be: {shape}".format( + shape=str(cpd_shape))) else: raise ValueError("'prior_type' not specified") state_counts = self.state_counts(node) - bayesian_counts = (state_counts.T + pseudo_counts).T + bayesian_counts = state_counts + pseudo_counts cpd = TabularCPD(node, node_cardinality, np.array(bayesian_counts), evidence=parents, diff --git a/pgmpy/models/DynamicBayesianNetwork.py b/pgmpy/models/DynamicBayesianNetwork.py index a6ee9513..566dfba6 100644 --- a/pgmpy/models/DynamicBayesianNetwork.py +++ b/pgmpy/models/DynamicBayesianNetwork.py @@ -486,7 +486,7 @@ class DynamicBayesianNetwork(DirectedGraph): >>> student.add_nodes_from(['D', 'G', 'I', 'S', 'L']) >>> student.add_edges_from([(('D', 0),('G', 0)),(('I', 0),('G', 0)),(('D', 0),('D', 1)),(('I', 0),('I', 1))]) >>> grade_cpd = TabularCPD(('G', 0), 3, [[0.3, 0.05, 0.9, 0.5], - ... [0.4, 0.25, 0.8, 0.03], + ... [0.4, 0.25, 0.08, 0.3], ... [0.3, 0.7, 0.02, 0.2]], ... evidence=[('I', 0),('D', 0)], ... evidence_card=[2, 2])
Issue with dirichlet prior for Bayesian learning The issue is with Bayesian Estimator. http://pgmpy.org/estimators.html#bayesian-estimator The prior type can be one of the following: ‘dirichlet’, ‘BDeu’, ‘K2’ My issue is with the dirichlet, and the parameter: "pseudo_counts". It seems like pgmpy only allows for ONE set of pseudo counts-(a0...ak) where k is the range of states for the target variable (the variable in the rows of the cpd). However this is NOT the accepted definition for the dirichlet pseudo counts-there should be a different set of pseudo counts (a0...ak) for every different combination of the target variable's parents (for every column in the cpd). For example-in the example you wrote in the documentation: You only allow one set of pseudo counts (1,2) for each and every one of the columns (combinations of C's parents) ╒══════╤══════╤══════╤══════╤════════════════════╕ │ A │ A(0) │ A(0) │ A(1) │ A(1) │ ├──────┼──────┼──────┼──────┼────────────────────┤ │ B │ B(0) │ B(1) │ B(0) │ B(1) │ ├──────┼──────┼──────┼──────┼────────────────────┤ │ C(0) │ 0.25 │ 0.25 │ 0.5 │ 0.3333333333333333 │ ├──────┼──────┼──────┼──────┼────────────────────┤ │ C(1) │ 0.75 │ 0.75 │ 0.5 │ 0.6666666666666666 │ ╘══════╧══════╧══════╧══════╧════════════════════╛ (1,2) (1,2) (1,2) (1,2) But this is not the definition of dirichlet. The dirichlet definition allows a DIFFERENT set of hyper parameters (pseudo counts) for every column-like this ╒══════╤══════╤══════╤══════╤════════════════════╕ │ A │ A(0) │ A(0) │ A(1) │ A(1) │ ├──────┼──────┼──────┼──────┼────────────────────┤ │ B │ B(0) │ B(1) │ B(0) │ B(1) │ ├──────┼──────┼──────┼──────┼────────────────────┤ │ C(0) │ 0.25 │ 0.25 │ 0.5 │ 0.3333333333333333 │ ├──────┼──────┼──────┼──────┼────────────────────┤ │ C(1) │ 0.75 │ 0.75 │ 0.5 │ 0.6666666666666666 │ ╘══════╧══════╧══════╧══════╧════════════════════╛ (1,5) (1,2) (7,9) (8,3) Of course, having the option to put a different set of hyper parameters for different columns (different combinations of the target variable's parents) is a necessity because in reality you have different priors for different values of the target variable's parents. For example, in the project I am working on one of the cpds is for a variable called "party_identification" (it has 3 states- republican, democrat, and none), and the parents are variables like age, gender, state etc. Obviously, there are different priors for P(party_identification-republican | age<30 and gender=female and state=Northeast) and for P(party_identification-republican | 50>age>30 and gender=female and state=MIDWEST), and so for those two columns I need 6 different sets of hyper parameters (pseudo counts): (a_republican_age<30_gender=female,state=northeast), (a_democrat_age<30_gender=female,state=northeast), (a_none_age<30_gender=female,state=northeast) (a_republican_50>age>30_gender=female,state=midwest), (a_democrat_50>age>30_gender=female,state=midwest), (a_none_50>age>30_gender=female,state=midwest) but pgmpy-or at least it seems like-only allows 3 sets of hyper parameters (pseudo counts): (a_republican), (a_democrat), (a_none) In general, if I have 3 states for the "age" parent, 2 states for the gender variable and 3 states for the state variable, I will need 3*2*3 (parents states) * 3 (target variable states) = 54 pseudo counts, or in different words, 3*2*3 = 18 triplets (a_rep, a_dem, a_none) of pseudo counts, one for each column. However pgmpy only allows 3 pseudo counts, or the same triplet (a_rep_a_dem_a_none) for each and every one of the columns.
In general, no matter how big my cpd is, and how many columns (target variable's parents combinations) I have, pgmpy will only allow the number of hyperparameters to be the number of rows (states of the target variable). But it should enable the number of hyperparameters to be the number of rows (number of states of the target variable) * the number of columns (parents combinations). Or, in other words, a different set of pseudo counts (target_variable_state_1......target_variable_state_k) for EACH AND EVERY ONE of the columns. As I said, both from the documentation and the code it seems like pgmpy only allows one set of pseudo counts-a fixed set of pseudo counts-for all the columns.
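The merged fix (see the patch above) addresses exactly this: `pseudo_counts` becomes a 2-D array of shape `(node_card, prod(parents_card))`, added to the observed state counts column by column. A minimal NumPy sketch of that per-column smoothing follows; the function name is illustrative.

```python
import numpy as np

def dirichlet_cpd(state_counts, pseudo_counts):
    # Sketch: per-column Dirichlet smoothing of a CPD. Both arrays have
    # shape (node_card, n_parent_configs): one column per combination
    # of parent states, so every column can carry its own prior.
    state_counts = np.asarray(state_counts, dtype=float)
    pseudo_counts = np.asarray(pseudo_counts, dtype=float)
    if pseudo_counts.shape != state_counts.shape:
        raise ValueError("pseudo_counts must have shape %s"
                         % (state_counts.shape,))
    counts = state_counts + pseudo_counts
    return counts / counts.sum(axis=0, keepdims=True)  # normalize columns

# Two target states, two parent configurations, a different prior per column:
print(dirichlet_cpd([[2, 0], [1, 3]], [[1, 5], [1, 2]]))
```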
pgmpy/pgmpy
diff --git a/pgmpy/tests/test_estimators/test_BayesianEstimator.py b/pgmpy/tests/test_estimators/test_BayesianEstimator.py index 86709d5f..fc85a46f 100644 --- a/pgmpy/tests/test_estimators/test_BayesianEstimator.py +++ b/pgmpy/tests/test_estimators/test_BayesianEstimator.py @@ -22,19 +22,21 @@ class TestBayesianEstimator(unittest.TestCase): self.est3 = BayesianEstimator(self.m1, self.d2) def test_estimate_cpd_dirichlet(self): - cpd_A = self.est1.estimate_cpd('A', prior_type="dirichlet", pseudo_counts=[0, 1]) + cpd_A = self.est1.estimate_cpd('A', prior_type="dirichlet", pseudo_counts=[[0], [1]]) self.assertEqual(cpd_A, TabularCPD('A', 2, [[0.5], [0.5]])) - cpd_B = self.est1.estimate_cpd('B', prior_type="dirichlet", pseudo_counts=[9, 3]) + cpd_B = self.est1.estimate_cpd('B', prior_type="dirichlet", pseudo_counts=[[9], [3]]) self.assertEqual(cpd_B, TabularCPD('B', 2, [[11.0/15], [4.0/15]])) - cpd_C = self.est1.estimate_cpd('C', prior_type="dirichlet", pseudo_counts=[0.4, 0.6]) + cpd_C = self.est1.estimate_cpd('C', prior_type="dirichlet", pseudo_counts=[[0.4, 0.4, 0.4, 0.4], + [0.6, 0.6, 0.6, 0.6]]) self.assertEqual(cpd_C, TabularCPD('C', 2, [[0.2, 0.2, 0.7, 0.4], [0.8, 0.8, 0.3, 0.6]], evidence=['A', 'B'], evidence_card=[2, 2])) def test_estimate_cpd_improper_prior(self): - cpd_C = self.est1.estimate_cpd('C', prior_type="dirichlet", pseudo_counts=[0, 0]) + cpd_C = self.est1.estimate_cpd('C', prior_type="dirichlet", pseudo_counts=[[0, 0, 0, 0], + [0, 0, 0, 0]]) cpd_C_correct = (TabularCPD('C', 2, [[0.0, 0.0, 1.0, np.NaN], [1.0, 1.0, 0.0, np.NaN]], evidence=['A', 'B'], evidence_card=[2, 2], @@ -64,7 +66,8 @@ class TestBayesianEstimator(unittest.TestCase): self.assertSetEqual(set(self.est3.get_parameters()), cpds) def test_get_parameters2(self): - pseudo_counts = {'A': [1, 2, 3], 'B': [4, 5], 'C': [6, 7]} + pseudo_counts = {'A': [[1], [2], [3]], 'B': [[4], [5]], 'C': [[6, 6, 6, 6, 6, 6], + [7, 7, 7, 7, 7, 7]]} cpds = set([self.est3.estimate_cpd('A', prior_type="dirichlet", pseudo_counts=pseudo_counts['A']), self.est3.estimate_cpd('B', prior_type="dirichlet", pseudo_counts=pseudo_counts['B']), self.est3.estimate_cpd('C', prior_type="dirichlet", pseudo_counts=pseudo_counts['C'])]) diff --git a/pgmpy/tests/test_models/test_BayesianModel.py b/pgmpy/tests/test_models/test_BayesianModel.py index c2daf5db..dbc4d480 100644 --- a/pgmpy/tests/test_models/test_BayesianModel.py +++ b/pgmpy/tests/test_models/test_BayesianModel.py @@ -431,7 +431,11 @@ class TestBayesianModelFitPredict(unittest.TestCase): def test_bayesian_fit(self): print(isinstance(BayesianEstimator, BaseEstimator)) print(isinstance(MaximumLikelihoodEstimator, BaseEstimator)) - self.model2.fit(self.data1, estimator=BayesianEstimator, prior_type="dirichlet", pseudo_counts=[9, 3]) + self.model2.fit(self.data1, estimator=BayesianEstimator, prior_type="dirichlet", + pseudo_counts={'A': [[9], [3]], + 'B': [[9], [3]], + 'C': [[9, 9, 9, 9], + [3, 3, 3, 3]]}) self.assertEqual(self.model2.get_cpds('B'), TabularCPD('B', 2, [[11.0 / 15], [4.0 / 15]])) def test_fit_missing_data(self):
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 1 }, "num_modified_files": 2 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "mock", "nose", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y build-essential" ], "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 decorator==5.1.1 importlib-metadata==4.8.3 iniconfig==1.1.1 mock==5.2.0 networkx==1.11 nose==1.3.7 numpy==1.11.3 packaging==21.3 pandas==0.19.2 -e git+https://github.com/pgmpy/pgmpy.git@b85e9ca3b20296925dd4b5686356031dc1f80fec#egg=pgmpy pluggy==1.0.0 py==1.11.0 pyparsing==2.2.0 pytest==7.0.1 python-dateutil==2.9.0.post0 pytz==2025.2 scipy==0.18.1 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 wrapt==1.10.8 zipp==3.6.0
name: pgmpy channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - decorator==5.1.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - mock==5.2.0 - networkx==1.11 - nose==1.3.7 - numpy==1.11.3 - packaging==21.3 - pandas==0.19.2 - pluggy==1.0.0 - py==1.11.0 - pyparsing==2.2.0 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - pytz==2025.2 - scipy==0.18.1 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - wrapt==1.10.8 - zipp==3.6.0 prefix: /opt/conda/envs/pgmpy
[ "pgmpy/tests/test_estimators/test_BayesianEstimator.py::TestBayesianEstimator::test_estimate_cpd_dirichlet", "pgmpy/tests/test_estimators/test_BayesianEstimator.py::TestBayesianEstimator::test_estimate_cpd_improper_prior", "pgmpy/tests/test_estimators/test_BayesianEstimator.py::TestBayesianEstimator::test_get_parameters2", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelFitPredict::test_bayesian_fit" ]
[]
[ "pgmpy/tests/test_estimators/test_BayesianEstimator.py::TestBayesianEstimator::test_estimate_cpd_shortcuts", "pgmpy/tests/test_estimators/test_BayesianEstimator.py::TestBayesianEstimator::test_get_parameters", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_edge_nonstring", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_edge_result_cycle", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_edge_selfloop", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_edge_string", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_edges_from_nonstring", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_edges_from_result_cycle", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_edges_from_self_loop", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_edges_from_string", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_node_nonstring", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_node_string", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_nodes_from_non_string", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_add_nodes_from_string", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_class_init_with_data_nonstring", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_class_init_with_data_string", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_class_init_without_data", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_update_node_parents", "pgmpy/tests/test_models/test_BayesianModel.py::TestBaseModelCreation::test_update_node_parents_bm_constructor", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_copy", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_get_ancestors_of_failure", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_get_ancestors_of_success", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_get_cardinality", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_get_cardinality_with_node", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_get_immoralities", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_get_independencies", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_is_iequivalent", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_is_imap", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_local_independencies", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_markov_blanet", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_moral_graph", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_moral_graph_with_edge_present_over_parents", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_remove_node", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelMethods::test_remove_nodes_from", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_active_trail_nodes", 
"pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_active_trail_nodes_args", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_add_multiple_cpds", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_add_single_cpd", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_check_model", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_check_model1", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_check_model2", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_get_cpds", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_get_cpds1", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_is_active_trail", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_is_active_trail_args", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelCPD::test_is_active_trail_triplets", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelFitPredict::test_connected_predict", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelFitPredict::test_connected_predict_probability", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelFitPredict::test_disconnected_fit", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelFitPredict::test_fit_missing_data", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelFitPredict::test_predict", "pgmpy/tests/test_models/test_BayesianModel.py::TestBayesianModelFitPredict::test_predict_probability_errors", "pgmpy/tests/test_models/test_BayesianModel.py::TestDirectedGraphCPDOperations::test_add_multiple_cpds", "pgmpy/tests/test_models/test_BayesianModel.py::TestDirectedGraphCPDOperations::test_add_single_cpd", "pgmpy/tests/test_models/test_BayesianModel.py::TestDirectedGraphCPDOperations::test_get_values_for_node", "pgmpy/tests/test_models/test_BayesianModel.py::TestDirectedGraphCPDOperations::test_get_values_raises_error", "pgmpy/tests/test_models/test_BayesianModel.py::TestDirectedGraphCPDOperations::test_remove_multiple_cpds", "pgmpy/tests/test_models/test_BayesianModel.py::TestDirectedGraphCPDOperations::test_remove_multiple_cpds_string", "pgmpy/tests/test_models/test_BayesianModel.py::TestDirectedGraphCPDOperations::test_remove_single_cpd", "pgmpy/tests/test_models/test_BayesianModel.py::TestDirectedGraphCPDOperations::test_remove_single_cpd_string" ]
[]
MIT License
2,130
[ "pgmpy/estimators/BayesianEstimator.py", "pgmpy/models/DynamicBayesianNetwork.py" ]
[ "pgmpy/estimators/BayesianEstimator.py", "pgmpy/models/DynamicBayesianNetwork.py" ]
dropbox__pyannotate-64
b02080a3b340f5b5aa464007510e211ecd0529a3
2018-02-07 22:58:52
40edbfeed62a78cd683cd3eb56a7412ae40dd124
diff --git a/pyannotate_runtime/collect_types.py b/pyannotate_runtime/collect_types.py index ca88866..72d5bbf 100644 --- a/pyannotate_runtime/collect_types.py +++ b/pyannotate_runtime/collect_types.py @@ -41,11 +41,13 @@ from typing import ( Any, Callable, Dict, + Iterable, Iterator, List, NamedTuple, Optional, Set, + Sized, Tuple, TypeVar, Union, @@ -54,6 +56,8 @@ from contextlib import contextmanager # pylint: disable=invalid-name +CO_GENERATOR = inspect.CO_GENERATOR # type: ignore + def _my_hash(arg_list): # type: (List[Any]) -> int @@ -84,8 +88,30 @@ class TypeWasIncomparable(object): pass +class FakeIterator(Iterable[Any], Sized): + """ + Container for iterator values. + + Note that FakeIterator([a, b, c]) is akin to list([a, b, c]); this + is turned into IteratorType by resolve_type(). + """ + + def __init__(self, values): + # type: (List[Any]) -> None + self.values = values + + def __iter__(self): + # type: () -> Iterator[Any] + for v in self.values: + yield v + + def __len__(self): + # type: () -> int + return len(self.values) + + _NONE_TYPE = type(None) -InternalType = Union['DictType', 'ListType', 'TupleType', 'SetType', 'type'] +InternalType = Union['DictType', 'ListType', 'TupleType', 'SetType', 'IteratorType', 'type'] class DictType(object): @@ -188,6 +214,39 @@ class SetType(object): return not self.__eq__(other) +class IteratorType(object): + """ + Internal representation of Iterator type. + """ + + def __init__(self, val_type): + # type: (TentativeType) -> None + self.val_type = val_type + + def __repr__(self): + # type: () -> str + if repr(self.val_type) == 'None': + # We didn't see any values, so we don't know what's inside + return 'Iterator' + else: + return 'Iterator[%s]' % (repr(self.val_type)) + + def __hash__(self): + # type: () -> int + return hash(self.val_type) if self.val_type else 0 + + def __eq__(self, other): + # type: (object) -> bool + if not isinstance(other, IteratorType): + return False + + return self.val_type == other.val_type + + def __ne__(self, other): + # type: (object) -> bool + return not self.__eq__(other) + + class TupleType(object): """ Internal representation of Tuple type. @@ -279,6 +338,9 @@ class TentativeType(object): elif isinstance(type, ListType): if EMPTY_LIST_TYPE in self.types_hashable: self.types_hashable.remove(EMPTY_LIST_TYPE) + elif isinstance(type, IteratorType): + if EMPTY_ITERATOR_TYPE in self.types_hashable: + self.types_hashable.remove(EMPTY_ITERATOR_TYPE) elif isinstance(type, DictType): if EMPTY_DICT_TYPE in self.types_hashable: self.types_hashable.remove(EMPTY_DICT_TYPE) @@ -350,7 +412,7 @@ def name_from_type(type_): """ Helper function to get PEP-484 compatible string representation of our internal types. 
""" - if isinstance(type_, (DictType, ListType, TupleType, SetType)): + if isinstance(type_, (DictType, ListType, TupleType, SetType, IteratorType)): return repr(type_) else: if type_.__name__ != 'NoneType': @@ -369,6 +431,7 @@ def name_from_type(type_): EMPTY_DICT_TYPE = DictType(TentativeType(), TentativeType()) EMPTY_LIST_TYPE = ListType(TentativeType()) EMPTY_SET_TYPE = SetType(TentativeType()) +EMPTY_ITERATOR_TYPE = IteratorType(TentativeType()) # TODO: Make this faster @@ -450,6 +513,16 @@ def resolve_type(arg): for sample_item in sample: tentative_type.add(resolve_type(sample_item)) return SetType(tentative_type) + elif arg_type == FakeIterator: + assert isinstance(arg, FakeIterator) # this line helps mypy figure out types + sample = [] + iterator = iter(arg) + for i in range(0, min(4, len(arg))): + sample.append(next(iterator)) + tentative_type = TentativeType() + for sample_item in sample: + tentative_type.add(resolve_type(sample_item)) + return IteratorType(tentative_type) elif arg_type == tuple: assert isinstance(arg, tuple) # this line helps mypy figure out types sample = list(arg[:min(10, len(arg))]) @@ -715,8 +788,10 @@ _filter_filename = default_filter_filename # type: Callable[[Optional[str]], Op if sys.version_info[0] == 2: RETURN_VALUE_OPCODE = chr(opcode.opmap['RETURN_VALUE']) + YIELD_VALUE_OPCODE = chr(opcode.opmap['YIELD_VALUE']) else: RETURN_VALUE_OPCODE = opcode.opmap['RETURN_VALUE'] + YIELD_VALUE_OPCODE = opcode.opmap['YIELD_VALUE'] def _trace_dispatch(frame, event, arg): @@ -777,14 +852,29 @@ def _trace_dispatch(frame, event, arg): resolved_types = prep_args(arg_info) _task_queue.put(KeyAndTypes(function_key, resolved_types)) elif event == 'return': - # This event is also triggered if a function raises an exception. + # This event is also triggered if a function yields or raises an exception. # We can tell the difference by looking at the bytecode. # (We don't get here for C functions so the bytecode always exists.) - # TODO: Also recognize YIELD_VALUE opcode. last_opcode = code.co_code[frame.f_lasti] - if last_opcode != RETURN_VALUE_OPCODE: - arg = NoReturnType() - _task_queue.put(KeyAndReturn(function_key, resolve_type(arg))) + if last_opcode == RETURN_VALUE_OPCODE: + if code.co_flags & CO_GENERATOR: + # Return from a generator. + t = resolve_type(FakeIterator([])) + else: + t = resolve_type(arg) + elif last_opcode == YIELD_VALUE_OPCODE: + # Yield from a generator. + # TODO: Unify generators -- currently each YIELD is turned into + # a separate call, so a function yielding ints and strs will be + # typed as Union[Iterator[int], Iterator[str]] -- this should be + # Iterator[Union[int, str]]. + t = resolve_type(FakeIterator([arg])) + else: + # This branch is also taken when returning from a generator. + # TODO: returning non-trivial values from generators, per PEP 380; + # and async def / await stuff. + t = NoReturnType + _task_queue.put(KeyAndReturn(function_key, t)) else: sampling_counters[key] = None # We're not interested in this function.
Detect yield and return opcodes There are some [hacks in MonkeyType](https://github.com/Instagram/MonkeyType/blob/0119311745449560e30ef554ba449a99c1b6679d/monkeytype/tracing.py#L226) that detect yield and return opcodes -- the former to generate Generator/Iterator return annotations, the latter to distinguish between return and exceptions.
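The patch above implements exactly that opcode check. Reduced to a standalone Python 3 sketch it looks like the following; this is accurate for the CPython versions the patch targets, while newer interpreters specialize bytecode, so `co_code[f_lasti]` may need care there.

```python
import opcode
import sys

RETURN_VALUE = opcode.opmap['RETURN_VALUE']
YIELD_VALUE = opcode.opmap['YIELD_VALUE']

def tracer(frame, event, arg):
    # Sketch: a generator's yield also fires a 'return' trace event, so
    # inspect the opcode at f_lasti to tell yield/return/exception apart.
    if event == 'return':
        last_op = frame.f_code.co_code[frame.f_lasti]
        if last_op == YIELD_VALUE:
            print(frame.f_code.co_name, 'yielded', arg)
        elif last_op == RETURN_VALUE:
            print(frame.f_code.co_name, 'returned', arg)
        else:
            print(frame.f_code.co_name, 'exited via exception')
    return tracer

def gen():
    yield 1

sys.settrace(tracer)
list(gen())
sys.settrace(None)
```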
dropbox/pyannotate
diff --git a/pyannotate_runtime/tests/test_collect_types.py b/pyannotate_runtime/tests/test_collect_types.py index 06ca1b2..9d98085 100644 --- a/pyannotate_runtime/tests/test_collect_types.py +++ b/pyannotate_runtime/tests/test_collect_types.py @@ -594,6 +594,43 @@ class TestCollectTypes(TestBaseClass): self.assert_type_comments('func_with_unknown_module_types', ['(C) -> C']) + def test_yield_basic(self): + # type: () -> None + def gen(n, a): + for i in range(n): + yield a + + with self.collecting_types(): + list(gen(10, 'x')) + + self.assert_type_comments('gen', ['(int, str) -> Iterator[str]']) + + def test_yield_various(self): + # type: () -> None + def gen(n, a, b): + for i in range(n): + yield a + yield b + + with self.collecting_types(): + list(gen(10, 'x', 1)) + list(gen(0, 0, 0)) + + # TODO: This should really return Iterator[Union[int, str]] + self.assert_type_comments('gen', ['(int, str, int) -> Iterator[int]', + '(int, str, int) -> Iterator[str]']) + + def test_yield_empty(self): + # type: () -> None + def gen(): + if False: + yield + + with self.collecting_types(): + list(gen()) + + self.assert_type_comments('gen', ['() -> Iterator']) + def foo(arg): # type: (Any) -> Any
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 importlib-metadata==4.8.3 iniconfig==1.1.1 mypy-extensions==1.0.0 packaging==21.3 pluggy==1.0.0 py==1.11.0 -e git+https://github.com/dropbox/pyannotate.git@b02080a3b340f5b5aa464007510e211ecd0529a3#egg=pyannotate pyparsing==3.1.4 pytest==7.0.1 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: pyannotate channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - mypy-extensions==1.0.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/pyannotate
[ "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_yield_basic", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_yield_empty", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_yield_various" ]
[]
[ "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_callee_star_args", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_caller_star_args", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_default_args", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_fully_qualified_type_name_with_sub_package", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_ignoring_c_calls", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_keyword_args", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_many_signatures", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_no_crash_on_nested_dict_comps", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_no_return", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_only_return", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_recursive_function", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_recursive_function_2", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_run_a_bunch_of_tests", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_skip_lambda", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_star_star_args", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_two_signatures", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_type_collection_on_another_thread", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_type_collection_on_main_thread", "pyannotate_runtime/tests/test_collect_types.py::TestCollectTypes::test_unknown_module_types", "pyannotate_runtime/tests/test_collect_types.py::TestInitWithFilter::test_init_with_filter", "pyannotate_runtime/tests/test_collect_types.py::TestInitWithFilter::test_init_with_none_filter" ]
[]
Apache License 2.0
2,131
[ "pyannotate_runtime/collect_types.py" ]
[ "pyannotate_runtime/collect_types.py" ]
smarkets__marge-bot-80
600eaf337d143656bb44d710331119ea57c7a3e6
2018-02-08 11:30:02
9986daf294673ad58a06c7ca19125bc20c144c96
jcpetruzza: @glensc is it possible for you to try marge on your setup with the version of this branch?
diff --git a/marge/git.py b/marge/git.py index fa7c68a..2400001 100644 --- a/marge/git.py +++ b/marge/git.py @@ -84,7 +84,7 @@ class Repo(namedtuple('Repo', 'remote_url local_path ssh_key_file timeout')): def _fuse_branch(self, strategy, branch, target_branch, source_repo_url=None): assert source_repo_url or branch != target_branch, branch - self.git('fetch', 'origin') + self.git('fetch', '--prune', 'origin') if source_repo_url: # "upsert" remote 'source' and fetch it try: @@ -92,7 +92,7 @@ class Repo(namedtuple('Repo', 'remote_url local_path ssh_key_file timeout')): except GitError: pass self.git('remote', 'add', 'source', source_repo_url) - self.git('fetch', 'source') + self.git('fetch', '--prune', 'source') self.git('checkout', '-B', branch, 'source/' + branch, '--') else: self.git('checkout', '-B', branch, 'origin/' + branch, '--') diff --git a/marge/job.py b/marge/job.py index 82d80f2..b1022b5 100644 --- a/marge/job.py +++ b/marge/job.py @@ -37,7 +37,7 @@ class MergeJob(object): return state = merge_request.state - if state not in ('opened', 'reopened'): + if state not in ('opened', 'reopened', 'locked'): if state in ('merged', 'closed'): log.info('The merge request is already %s!', state) else: @@ -236,7 +236,7 @@ class MergeJob(object): return # success! if merge_request.state == 'closed': raise CannotMerge('someone closed the merge request while merging!') - assert merge_request.state in ('opened', 'reopened'), merge_request.state + assert merge_request.state in ('opened', 'reopened', 'locked'), merge_request.state log.info('Giving %s more secs for !%s to be merged...', waiting_time_in_secs, merge_request.iid) time.sleep(waiting_time_in_secs)
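A hedged note on the other half of this patch: adding `--prune` to every fetch deletes local remote-tracking refs whose upstream branch is gone, so a long-lived marge checkout cannot rebase against a stale `origin/<branch>`. The call shape, mirroring `Repo.git` (the path is the test suite's placeholder, not a real repository):

```python
import subprocess

# Drop remote-tracking refs for branches deleted on the remote, then fetch.
subprocess.check_call(
    ['git', '-C', '/tmp/local/path', 'fetch', '--prune', 'origin'])
```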
unexpected merge state locked ``` marge-bot_1 | 2018-02-05 12:39:47,026 INFO Commit id to merge 'bb052510c9a6357fe733b793f4c1f5b38a32e89e' (into: 'aa5d3b11e3768d34f74bde8de7095a629d76cba1') marge-bot_1 | 2018-02-05 12:39:53,476 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:40:03,757 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:40:14,265 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:40:24,748 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:40:35,059 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:40:45,633 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:40:55,966 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:41:06,263 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:41:16,685 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:41:27,039 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:41:37,310 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:41:47,509 INFO Giving 10 more secs for !125 to be merged... marge-bot_1 | 2018-02-05 12:41:57,879 ERROR Unexpected Exception marge-bot_1 | Traceback (most recent call last): marge-bot_1 | File "/nix/store/v4xzqxg6yvhvdqk6w453pig8v7r69v1c-python3.6-marge-0.5.1/lib/python3.6/site-packages/marge/job.py", line 55, in execute marge-bot_1 | self.rebase_and_accept(approvals) marge-bot_1 | File "/nix/store/v4xzqxg6yvhvdqk6w453pig8v7r69v1c-python3.6-marge-0.5.1/lib/python3.6/site-packages/marge/job.py", line 198, in rebase_and_accept marge-bot_1 | self.wait_for_branch_to_be_merged() marge-bot_1 | File "/nix/store/v4xzqxg6yvhvdqk6w453pig8v7r69v1c-python3.6-marge-0.5.1/lib/python3.6/site-packages/marge/job.py", line 238, in wait_for_branch_to_be_merged marge-bot_1 | assert merge_request.state in ('opened', 'reopened'), merge_request.state marge-bot_1 | AssertionError: locked marge-bot_1 | Traceback (most recent call last): marge-bot_1 | File "/nix/store/v4xzqxg6yvhvdqk6w453pig8v7r69v1c-python3.6-marge-0.5.1/bin/.marge.app-wrapped", line 4, in <module> marge-bot_1 | marge.app.main() marge-bot_1 | File "/nix/store/v4xzqxg6yvhvdqk6w453pig8v7r69v1c-python3.6-marge-0.5.1/lib/python3.6/site-packages/marge/app.py", line 221, in main marge-bot_1 | marge_bot.start() marge-bot_1 | File "/nix/store/v4xzqxg6yvhvdqk6w453pig8v7r69v1c-python3.6-marge-0.5.1/lib/python3.6/site-packages/marge/bot.py", line 39, in start marge-bot_1 | self._run(repo_manager) marge-bot_1 | File "/nix/store/v4xzqxg6yvhvdqk6w453pig8v7r69v1c-python3.6-marge-0.5.1/lib/python3.6/site-packages/marge/bot.py", line 108, in _run marge-bot_1 | merge_job.execute() marge-bot_1 | File "/nix/store/v4xzqxg6yvhvdqk6w453pig8v7r69v1c-python3.6-marge-0.5.1/lib/python3.6/site-packages/marge/job.py", line 55, in execute marge-bot_1 | self.rebase_and_accept(approvals) marge-bot_1 | File "/nix/store/v4xzqxg6yvhvdqk6w453pig8v7r69v1c-python3.6-marge-0.5.1/lib/python3.6/site-packages/marge/job.py", line 198, in rebase_and_accept marge-bot_1 | self.wait_for_branch_to_be_merged() marge-bot_1 | File "/nix/store/v4xzqxg6yvhvdqk6w453pig8v7r69v1c-python3.6-marge-0.5.1/lib/python3.6/site-packages/marge/job.py", line 238, in wait_for_branch_to_be_merged marge-bot_1 | assert merge_request.state in ('opened', 'reopened'), merge_request.state marge-bot_1 | AssertionError: locked ``` here's how it looked at 
the GitLab web UI: ![image](https://user-images.githubusercontent.com/199095/35805166-7aa99aa6-0a83-11e8-8b5b-7ffdd88c4c50.png)
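The traceback above is the assertion in `wait_for_branch_to_be_merged` firing while GitLab holds the merge request in its transient `locked` state mid-merge. A minimal sketch of the fixed polling logic (not marge-bot's actual code; `fetch_state` is a hypothetical callable returning the MR state string):

```python
import time

IN_PROGRESS_STATES = ('opened', 'reopened', 'locked')  # 'locked' is transient

def wait_until_merged(fetch_state, timeout=120, poll=10):
    deadline = time.time() + timeout
    while time.time() < deadline:
        state = fetch_state()
        if state == 'merged':
            return
        if state == 'closed':
            raise RuntimeError('someone closed the merge request while merging!')
        # Before the fix this assert tripped on 'locked'.
        assert state in IN_PROGRESS_STATES, state
        time.sleep(poll)
    raise RuntimeError('merge request did not merge within %ss' % timeout)
```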
smarkets/marge-bot
diff --git a/tests/test_git.py b/tests/test_git.py index 4569d03..77e5f31 100644 --- a/tests/test_git.py +++ b/tests/test_git.py @@ -38,7 +38,7 @@ class TestRepo(object): self.repo.rebase('feature_branch', 'master_of_the_universe') assert get_calls(mocked_run) == [ - 'git -C /tmp/local/path fetch origin', + 'git -C /tmp/local/path fetch --prune origin', 'git -C /tmp/local/path checkout -B feature_branch origin/feature_branch --', 'git -C /tmp/local/path rebase origin/master_of_the_universe', 'git -C /tmp/local/path rev-parse HEAD' @@ -48,7 +48,7 @@ class TestRepo(object): self.repo.merge('feature_branch', 'master_of_the_universe') assert get_calls(mocked_run) == [ - 'git -C /tmp/local/path fetch origin', + 'git -C /tmp/local/path fetch --prune origin', 'git -C /tmp/local/path checkout -B feature_branch origin/feature_branch --', 'git -C /tmp/local/path merge origin/master_of_the_universe', 'git -C /tmp/local/path rev-parse HEAD'
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_git_commit_hash", "has_media", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 2 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==3.3.9 certifi==2025.1.31 charset-normalizer==3.4.1 ConfigArgParse==1.7 coverage==7.8.0 dateparser==1.2.1 dill==0.3.9 exceptiongroup==1.2.2 humanize==4.12.2 idna==3.10 iniconfig==2.1.0 isort==6.0.1 -e git+https://github.com/smarkets/marge-bot.git@600eaf337d143656bb44d710331119ea57c7a3e6#egg=marge maya==0.6.1 mccabe==0.7.0 packaging==24.2 pendulum==3.0.0 platformdirs==4.3.7 pluggy==1.5.0 pylint==3.3.6 pytest==8.3.5 pytest-cov==6.0.0 pytest-pylint==0.21.0 pytest-runner==6.0.1 python-dateutil==2.9.0.post0 pytz==2025.2 PyYAML==6.0.2 regex==2024.11.6 requests==2.32.3 six==1.17.0 snaptime==0.2.4 time-machine==2.16.0 tomli==2.2.1 tomlkit==0.13.2 typing_extensions==4.13.0 tzdata==2025.2 tzlocal==5.3.1 urllib3==2.3.0
name: marge-bot channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==3.3.9 - certifi==2025.1.31 - charset-normalizer==3.4.1 - configargparse==1.7 - coverage==7.8.0 - dateparser==1.2.1 - dill==0.3.9 - exceptiongroup==1.2.2 - humanize==4.12.2 - idna==3.10 - iniconfig==2.1.0 - isort==6.0.1 - maya==0.6.1 - mccabe==0.7.0 - packaging==24.2 - pendulum==3.0.0 - platformdirs==4.3.7 - pluggy==1.5.0 - pylint==3.3.6 - pytest==8.3.5 - pytest-cov==6.0.0 - pytest-pylint==0.21.0 - pytest-runner==6.0.1 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyyaml==6.0.2 - regex==2024.11.6 - requests==2.32.3 - six==1.17.0 - snaptime==0.2.4 - time-machine==2.16.0 - tomli==2.2.1 - tomlkit==0.13.2 - typing-extensions==4.13.0 - tzdata==2025.2 - tzlocal==5.3.1 - urllib3==2.3.0 prefix: /opt/conda/envs/marge-bot
[ "tests/test_git.py::TestRepo::test_rebase_success", "tests/test_git.py::TestRepo::test_merge_success" ]
[]
[ "tests/test_git.py::PYLINT", "tests/test_git.py::TestRepo::test_clone", "tests/test_git.py::TestRepo::test_config_user_info", "tests/test_git.py::TestRepo::test_reviewer_tagging_success", "tests/test_git.py::TestRepo::test_reviewer_tagging_failure", "tests/test_git.py::TestRepo::test_rebase_same_branch", "tests/test_git.py::TestRepo::test_merge_same_branch", "tests/test_git.py::TestRepo::test_remove_branch", "tests/test_git.py::TestRepo::test_remove_master_branch_fails", "tests/test_git.py::TestRepo::test_push_force", "tests/test_git.py::TestRepo::test_push_force_fails_on_dirty", "tests/test_git.py::TestRepo::test_push_force_fails_on_untracked", "tests/test_git.py::TestRepo::test_get_commit_hash", "tests/test_git.py::TestRepo::test_passes_ssh_key", "tests/test_git.py::test_filter", "tests/test_git.py::test_filter_fails_on_empty_commit_messages", "tests/test_git.py::test_filter_fails_on_commit_messages_that_are_empty_apart_from_trailers", "tests/test_git.py::test_filter_treats_the_first_commit_line_not_as_a_trailer_unless_it_matches_the_trailer_name_passed_in" ]
[]
BSD 3-Clause "New" or "Revised" License
2,132
[ "marge/git.py", "marge/job.py" ]
[ "marge/git.py", "marge/job.py" ]
adamjstewart__fiscalyear-4
f3bf1e0ea82ab659802f7ee576e9d16758fc2b1a
2018-02-08 16:22:40
a59cde7a881a85c5a65e523623e23668c2cb991c
diff --git a/.travis.yml b/.travis.yml index 416d131..f829e95 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,15 +1,13 @@ language: python -dist: xenial - python: + - 2.6 - 2.7 + - 3.3 - 3.4 - 3.5 - 3.6 - - 3.7 - - 3.8-dev -script: pytest --cov=fiscalyear +script: py.test --cov=fiscalyear after_success: codecov diff --git a/LICENSE b/LICENSE index f2c31c2..247c0b8 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2017-2019 Adam J. Stewart +Copyright (c) 2017 Adam J. Stewart Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.rst b/README.rst index b7d0874..2fe25dc 100644 --- a/README.rst +++ b/README.rst @@ -26,13 +26,18 @@ The ``FiscalYear`` class provides an object for storing information about the st .. code-block:: python - >>> from fiscalyear import * >>> a = FiscalYear(2017) >>> a.start FiscalDateTime(2016, 10, 1, 0, 0) >>> a.end FiscalDateTime(2017, 9, 30, 23, 59, 59) +You can also get the current ``FiscalYear`` with: + +.. code-block:: python + + >>> FiscalYear.current() + FiscalYear(2018) FiscalQuarter ------------- @@ -61,6 +66,12 @@ These objects represent the standalone ``FiscalQuarter`` class. >>> b in a True +You can also get the current ``FiscalQuarter`` with: + +.. code-block:: python + + >>> FiscalQuarter.current() + FiscalQuarter(2018, 2) FiscalDateTime -------------- diff --git a/docs/basic_usage.rst b/docs/basic_usage.rst index 93a741c..a8730d7 100644 --- a/docs/basic_usage.rst +++ b/docs/basic_usage.rst @@ -17,6 +17,14 @@ The ``FiscalYear`` class provides an object for storing information about the st >>> a.end FiscalDateTime(2017, 9, 30, 23, 59, 59) +You can also get the current ``FiscalYear`` with: + +.. code-block:: python + + >>> from fiscalyear import * + >>> FiscalYear.current() + FiscalYear(2018) + FiscalQuarter ------------- @@ -45,6 +53,14 @@ These objects represent the standalone ``FiscalQuarter`` class. >>> b in a True +You can also get the current ``FiscalQuarter`` with: + +.. code-block:: python + + >>> from fiscalyear import * + >>> FiscalQuarter.current() + FiscalQuarter(2018, 2) + FiscalDateTime -------------- diff --git a/fiscalyear.py b/fiscalyear.py index c6f8ddc..6802471 100644 --- a/fiscalyear.py +++ b/fiscalyear.py @@ -191,6 +191,16 @@ class FiscalYear(object): self._fiscal_year = fiscal_year return self + @classmethod + def current(cls): + """Alternative constructor. Returns the current FiscalYear. + + :returns: A newly constructed FiscalYear object + :rtype: FiscalYear + """ + today = FiscalDate.today() + return cls(today.fiscal_year) + def __repr__(self): """Convert to formal string, for repr(). @@ -371,6 +381,16 @@ class FiscalQuarter(object): self._quarter = quarter return self + @classmethod + def current(cls): + """Alternative constructor. Returns the current FiscalQuarter. + + :returns: A newly constructed FiscalQuarter object + :rtype: FiscalQuarter + """ + today = FiscalDate.today() + return cls(today.fiscal_year, today.quarter) + def __repr__(self): """Convert to formal string, for repr(). 
diff --git a/requirements.txt b/requirements.txt index fe65f2d..228f95a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,7 @@ codecov pytest pytest-cov +pytest-mock pytest-runner sphinx sphinx_rtd_theme diff --git a/setup.cfg b/setup.cfg index d928416..4bca33d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -4,6 +4,3 @@ test=pytest [build_sphinx] source-dir=docs build-dir=docs/_build - -[metadata] -license-file = LICENSE diff --git a/setup.py b/setup.py index 05153b5..e8225cd 100755 --- a/setup.py +++ b/setup.py @@ -42,8 +42,6 @@ setuptools.setup( 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', # Type of package 'Topic :: Office/Business :: Financial :: Accounting', @@ -52,5 +50,5 @@ setuptools.setup( keywords=['fiscal year', 'fiscal quarter', 'calendar', 'datetime'], py_modules=[fiscalyear.__name__], setup_requires=['pytest-runner'], - tests_require=['pytest'], + tests_require=['pytest', 'pytest-mock'], )
Syntactic sugar for getting the current Fiscal{Year,Quarter} I think it would be nice if there was an easy way to get the current `FiscalYear`/`FiscalQuarter`. This could easily be achieved by using a couple of `classmethods`. E.g.: ``` diff $ git diff diff --git a/fiscalyear.py b/fiscalyear.py index c6f8ddc..f7213d7 100644 --- a/fiscalyear.py +++ b/fiscalyear.py @@ -191,6 +191,11 @@ class FiscalYear(object): self._fiscal_year = fiscal_year return self + @classmethod + def current(cls): + today = datetime.date.today() + return cls(today.year) + def __repr__(self): """Convert to formal string, for repr(). @@ -371,6 +376,11 @@ class FiscalQuarter(object): self._quarter = quarter return self + @classmethod + def current(cls): + today = datetime.date.today() + return cls(today.year, (today.month // 3) + 1) + def __repr__(self): """Convert to formal string, for repr(). ``` This way you could write: ``` python In [3]: FiscalYear.current() Out[3]: FiscalYear(2018) In [4]: FiscalQuarter.current() Out[4]: FiscalQuarter(2018, 1) ``` If you want, I can try to make a pull request.
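One nuance worth calling out: the merged patch builds `current()` on `FiscalDate.today()` rather than the issue's `datetime.date.today()` arithmetic, so the result honors the configured fiscal calendar. A small check against the U.S. federal defaults (assumes `fiscalyear` is installed with its default settings):

```python
import fiscalyear

date = fiscalyear.FiscalDate(2017, 11, 15)  # mid-November 2017
assert date.fiscal_year == 2018             # FY2018 began 2017-10-01
assert (date.month // 3) + 1 == 4           # naive calendar-quarter math says Q4
assert date.quarter == 1                    # but it is fiscal Q1
```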
adamjstewart/fiscalyear
diff --git a/docs/testing.rst b/docs/testing.rst index 7a0c8c6..b9a21b0 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -1,8 +1,13 @@ Testing ======= -``fiscalyear`` comes with a full test-suite called ``test_fiscalyear``. To run the test-suite, you will need to install the `pytest <https://docs.pytest.org/en/latest/>`_ package. +``fiscalyear`` comes with a full test-suite called ``test_fiscalyear``. To run the test-suite, you +will need to install the following packages: +* `pytest <https://docs.pytest.org/en/latest/>`_ +* `pytest-cov <https://docs.pytest.org/en/latest/>`_ +* `pytest-mock <https://docs.pytest.org/en/latest/>`_ +* `pytest-runner <https://docs.pytest.org/en/latest/>`_ Running tests ------------- diff --git a/test_fiscalyear.py b/test_fiscalyear.py index 690835f..6a7a22c 100644 --- a/test_fiscalyear.py +++ b/test_fiscalyear.py @@ -125,6 +125,12 @@ class TestFiscalYear: def test_basic(self, a): assert a.fiscal_year == 2016 + def test_current(self, mocker): + mock_today = mocker.patch.object(fiscalyear.FiscalDate, 'today') + mock_today.return_value = fiscalyear.FiscalDate(2016, 10, 1) + current = fiscalyear.FiscalYear.current() + assert current == fiscalyear.FiscalYear(2017) + def test_repr(self, a): assert repr(a) == 'FiscalYear(2016)' @@ -268,6 +274,12 @@ class TestFiscalQuarter: assert a.fiscal_year == 2016 assert a.quarter == 4 + def test_current(self, mocker): + mock_today = mocker.patch.object(fiscalyear.FiscalDate, 'today') + mock_today.return_value = fiscalyear.FiscalDate(2016, 10, 1) + current = fiscalyear.FiscalQuarter.current() + assert current == fiscalyear.FiscalQuarter(2017, 1) + def test_repr(self, a): assert repr(a) == 'FiscalQuarter(2016, 4)'
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 8 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "numpy>=1.16.0", "pandas>=1.0.0", "pytest-mock" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
exceptiongroup==1.2.2 -e git+https://github.com/adamjstewart/fiscalyear.git@f3bf1e0ea82ab659802f7ee576e9d16758fc2b1a#egg=fiscalyear iniconfig==2.1.0 numpy==2.0.2 packaging==24.2 pandas==2.2.3 pluggy==1.5.0 pytest==8.3.5 pytest-mock==3.14.0 python-dateutil==2.9.0.post0 pytz==2025.2 six==1.17.0 tomli==2.2.1 tzdata==2025.2
name: fiscalyear channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - iniconfig==2.1.0 - numpy==2.0.2 - packaging==24.2 - pandas==2.2.3 - pluggy==1.5.0 - pytest==8.3.5 - pytest-mock==3.14.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - six==1.17.0 - tomli==2.2.1 - tzdata==2025.2 prefix: /opt/conda/envs/fiscalyear
[ "test_fiscalyear.py::TestFiscalYear::test_current", "test_fiscalyear.py::TestFiscalQuarter::test_current" ]
[]
[ "test_fiscalyear.py::TestFiscalCalendar::test_start_year", "test_fiscalyear.py::TestFiscalCalendar::test_start_month", "test_fiscalyear.py::TestFiscalCalendar::test_start_day", "test_fiscalyear.py::TestFiscalCalendar::test_complex", "test_fiscalyear.py::TestFiscalCalendar::test_nested", "test_fiscalyear.py::TestFiscalCalendar::test_wrong_type", "test_fiscalyear.py::TestFiscalCalendar::test_out_of_range", "test_fiscalyear.py::TestFiscalYear::test_basic", "test_fiscalyear.py::TestFiscalYear::test_repr", "test_fiscalyear.py::TestFiscalYear::test_str", "test_fiscalyear.py::TestFiscalYear::test_from_string", "test_fiscalyear.py::TestFiscalYear::test_wrong_type", "test_fiscalyear.py::TestFiscalYear::test_out_of_range", "test_fiscalyear.py::TestFiscalYear::test_prev_fiscal_year", "test_fiscalyear.py::TestFiscalYear::test_next_fiscal_year", "test_fiscalyear.py::TestFiscalYear::test_start", "test_fiscalyear.py::TestFiscalYear::test_end", "test_fiscalyear.py::TestFiscalYear::test_q1", "test_fiscalyear.py::TestFiscalYear::test_q2", "test_fiscalyear.py::TestFiscalYear::test_q3", "test_fiscalyear.py::TestFiscalYear::test_q4", "test_fiscalyear.py::TestFiscalYear::test_contains", "test_fiscalyear.py::TestFiscalYear::test_less_than", "test_fiscalyear.py::TestFiscalYear::test_less_than_equals", "test_fiscalyear.py::TestFiscalYear::test_equals", "test_fiscalyear.py::TestFiscalYear::test_not_equals", "test_fiscalyear.py::TestFiscalYear::test_greater_than", "test_fiscalyear.py::TestFiscalYear::test_greater_than_equals", "test_fiscalyear.py::TestFiscalQuarter::test_basic", "test_fiscalyear.py::TestFiscalQuarter::test_repr", "test_fiscalyear.py::TestFiscalQuarter::test_str", "test_fiscalyear.py::TestFiscalQuarter::test_from_string", "test_fiscalyear.py::TestFiscalQuarter::test_wrong_type", "test_fiscalyear.py::TestFiscalQuarter::test_out_of_range", "test_fiscalyear.py::TestFiscalQuarter::test_prev_quarter", "test_fiscalyear.py::TestFiscalQuarter::test_next_quarter", "test_fiscalyear.py::TestFiscalQuarter::test_start", "test_fiscalyear.py::TestFiscalQuarter::test_end", "test_fiscalyear.py::TestFiscalQuarter::test_bad_start_year", "test_fiscalyear.py::TestFiscalQuarter::test_q1_start", "test_fiscalyear.py::TestFiscalQuarter::test_q1_end", "test_fiscalyear.py::TestFiscalQuarter::test_q2_start", "test_fiscalyear.py::TestFiscalQuarter::test_q2_end", "test_fiscalyear.py::TestFiscalQuarter::test_q3_start", "test_fiscalyear.py::TestFiscalQuarter::test_q3_end", "test_fiscalyear.py::TestFiscalQuarter::test_q4_start", "test_fiscalyear.py::TestFiscalQuarter::test_q4_end", "test_fiscalyear.py::TestFiscalQuarter::test_contains", "test_fiscalyear.py::TestFiscalQuarter::test_less_than", "test_fiscalyear.py::TestFiscalQuarter::test_less_than_equals", "test_fiscalyear.py::TestFiscalQuarter::test_equals", "test_fiscalyear.py::TestFiscalQuarter::test_not_equals", "test_fiscalyear.py::TestFiscalQuarter::test_greater_than", "test_fiscalyear.py::TestFiscalQuarter::test_greater_than_equals", "test_fiscalyear.py::TestFiscalDate::test_basic", "test_fiscalyear.py::TestFiscalDate::test_fiscal_year", "test_fiscalyear.py::TestFiscalDate::test_prev_fiscal_year", "test_fiscalyear.py::TestFiscalDate::test_next_fiscal_year", "test_fiscalyear.py::TestFiscalDate::test_prev_quarter", "test_fiscalyear.py::TestFiscalDate::test_next_quarter", "test_fiscalyear.py::TestFiscalDateTime::test_basic", "test_fiscalyear.py::TestFiscalDateTime::test_fiscal_year", "test_fiscalyear.py::TestFiscalDateTime::test_prev_fiscal_year", 
"test_fiscalyear.py::TestFiscalDateTime::test_next_fiscal_year", "test_fiscalyear.py::TestFiscalDateTime::test_prev_quarter", "test_fiscalyear.py::TestFiscalDateTime::test_next_quarter" ]
[]
MIT License
2,133
[ "README.rst", "setup.py", ".travis.yml", "setup.cfg", "fiscalyear.py", "LICENSE", "requirements.txt", "docs/basic_usage.rst" ]
[ "README.rst", "setup.py", ".travis.yml", "setup.cfg", "fiscalyear.py", "LICENSE", "requirements.txt", "docs/basic_usage.rst" ]
nipy__nipype-2432
d7a8085d9230c4f43489ba93742ea1f6401f3ede
2018-02-08 16:41:43
704b97dee7848283692bac38f04541c5af2a87b5
diff --git a/nipype/pipeline/engine/base.py b/nipype/pipeline/engine/base.py index 51449632b..0883023f6 100644 --- a/nipype/pipeline/engine/base.py +++ b/nipype/pipeline/engine/base.py @@ -84,9 +84,12 @@ class EngineBase(object): A clone of node or workflow must have a new name """ if name == self.name: - raise ValueError('Cloning requires a new name, "%s" is in use.' % name) + raise ValueError('Cloning requires a new name, "%s" is ' + 'in use.' % name) clone = deepcopy(self) clone.name = name + if hasattr(clone, '_id'): + clone._id = name return clone def _check_outputs(self, parameter):
workflow with iterables and cloned nodes fail when expanding iterables ### Summary When running a workflow which includes a cloned node and iterables the workflow will fail when expanding the iterables because the id of the cloned node will be the same as the original one. ### Actual behavior Will result in an error: Traceback (most recent call last): File "<ipython-input-55-177d6eaeef2c>", line 27, in <module> workflow.run() File "/data/eaxfjord/anaconda2/lib/python2.7/site-packages/nipype/pipeline/engine/workflows.py", line 592, in run execgraph = generate_expanded_graph(deepcopy(flatgraph)) File "/data/eaxfjord/anaconda2/lib/python2.7/site-packages/nipype/pipeline/engine/utils.py", line 1042, in generate_expanded_graph iterable_prefix, inode.synchronize) File "/data/eaxfjord/anaconda2/lib/python2.7/site-packages/nipype/pipeline/engine/utils.py", line 733, in _merge_graphs raise Exception(("Execution graph does not have a unique set of node " Exception: Execution graph does not have a unique set of node names. Please rerun the workflow ### Expected behavior Will execute normally without the errors. ### How to replicate the behavior The following workflow will produce the error. ### Script/Workflow details ```python from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe def addstr(string): string = ('%s+2' % string ) return string subject_list = ['sub-001', 'sub-002'] inputnode = pe.Node(niu.IdentityInterface(fields = ['subject']), name = 'inputnode') inputnode.iterables = [('subject', subject_list)] node_1 = pe.Node(niu.Function(input_names='string', output_names= 'string', function = addstr),name='node_1') node_2 = node_1.clone('node_2') workflow = pe.Workflow(name='my_workflow') workflow.connect([(inputnode, node_1, [('subject','string')]), (node_1, node_2, [('string','string')])]) workflow.run() ``` ### Platform details: /data/eaxfjord/anaconda2/lib/python2.7/site-packages/h5py/__init__.py:34: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`. from ._conv import register_converters as _register_converters {'nibabel_version': '2.2.1', 'sys_executable': '/data/eaxfjord/anaconda2/bin/python', 'networkx_version': '2.1', 'numpy_version': '1.14.0', 'sys_platform': 'linux2', 'sys_version': '2.7.13 |Anaconda custom (64-bit)| (default, Dec 20 2016, 23:09:15) \n[GCC 4.4.7 20120313 (Red Hat 4.4.7-1)]', 'commit_source': 'installation', 'commit_hash': '0a5948a0', 'pkg_path': '/data/eaxfjord/anaconda2/lib/python2.7/site-packages/nipype', 'nipype_version': '1.0.0', 'traits_version': '4.6.0', 'scipy_version': '1.0.0'} 1.0.0 ### Execution environment - My python environment outside container
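A stripped-down sketch of the failure mode and the one-line fix: `clone()` deep-copies the node, so the private `_id` of the copy still equals the original's, and `generate_expanded_graph` later sees duplicate node names when expanding iterables. Resetting `_id` alongside `name`, as the patch does, restores uniqueness. Illustrative only, not nipype's actual `EngineBase`:

```python
from copy import deepcopy

class Node(object):
    def __init__(self, name):
        self.name = name
        self._id = name

    def clone(self, name):
        if name == self.name:
            raise ValueError('Cloning requires a new name, "%s" is in use.' % name)
        clone = deepcopy(self)
        clone.name = name
        clone._id = name  # without this, node_1 and node_2 both report id 'node_1'
        return clone

node_2 = Node('node_1').clone('node_2')
assert node_2._id == 'node_2'
```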
nipy/nipype
diff --git a/nipype/pipeline/engine/tests/test_base.py b/nipype/pipeline/engine/tests/test_base.py index 54356fd6c..fd87aa687 100644 --- a/nipype/pipeline/engine/tests/test_base.py +++ b/nipype/pipeline/engine/tests/test_base.py @@ -6,6 +6,8 @@ from __future__ import print_function, unicode_literals import pytest from ..base import EngineBase from ....interfaces import base as nib +from ....interfaces import utility as niu +from ... import engine as pe class InputSpec(nib.TraitedSpec): @@ -64,3 +66,24 @@ def test_clone(): with pytest.raises(ValueError): base.clone('nodename') + +def test_clone_node_iterables(tmpdir): + tmpdir.chdir() + + def addstr(string): + return ('%s + 2' % string) + + subject_list = ['sub-001', 'sub-002'] + inputnode = pe.Node(niu.IdentityInterface(fields=['subject']), + name='inputnode') + inputnode.iterables = [('subject', subject_list)] + + node_1 = pe.Node(niu.Function(input_names='string', + output_names='string', + function=addstr), name='node_1') + node_2 = node_1.clone('node_2') + + workflow = pe.Workflow(name='iter_clone_wf') + workflow.connect([(inputnode, node_1, [('subject', 'string')]), + (node_1, node_2, [('string', 'string')])]) + workflow.run()
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 click==8.0.4 configparser==5.2.0 decorator==4.4.2 funcsigs==1.0.2 future==1.0.0 importlib-metadata==4.8.3 iniconfig==1.1.1 isodate==0.6.1 lxml==5.3.1 mock==5.2.0 networkx==2.5.1 nibabel==3.2.2 -e git+https://github.com/nipy/nipype.git@d7a8085d9230c4f43489ba93742ea1f6401f3ede#egg=nipype numpy==1.19.5 packaging==21.3 pluggy==1.0.0 prov==1.5.0 py==1.11.0 pydot==1.4.2 pydotplus==2.0.2 pyparsing==3.1.4 pytest==7.0.1 python-dateutil==2.9.0.post0 rdflib==5.0.0 scipy==1.5.4 simplejson==3.20.1 six==1.17.0 tomli==1.2.3 traits==6.4.1 typing_extensions==4.1.1 zipp==3.6.0
name: nipype channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - click==8.0.4 - configparser==5.2.0 - decorator==4.4.2 - funcsigs==1.0.2 - future==1.0.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - isodate==0.6.1 - lxml==5.3.1 - mock==5.2.0 - networkx==2.5.1 - nibabel==3.2.2 - numpy==1.19.5 - packaging==21.3 - pluggy==1.0.0 - prov==1.5.0 - py==1.11.0 - pydot==1.4.2 - pydotplus==2.0.2 - pyparsing==3.1.4 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - rdflib==5.0.0 - scipy==1.5.4 - simplejson==3.20.1 - six==1.17.0 - tomli==1.2.3 - traits==6.4.1 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/nipype
[ "nipype/pipeline/engine/tests/test_base.py::test_clone_node_iterables" ]
[]
[ "nipype/pipeline/engine/tests/test_base.py::test_create[valid1]", "nipype/pipeline/engine/tests/test_base.py::test_create[valid_node]", "nipype/pipeline/engine/tests/test_base.py::test_create[valid-node]", "nipype/pipeline/engine/tests/test_base.py::test_create[ValidNode0]", "nipype/pipeline/engine/tests/test_base.py::test_create_invalid[invalid*1]", "nipype/pipeline/engine/tests/test_base.py::test_create_invalid[invalid.1]", "nipype/pipeline/engine/tests/test_base.py::test_create_invalid[invalid@]", "nipype/pipeline/engine/tests/test_base.py::test_create_invalid[in/valid]", "nipype/pipeline/engine/tests/test_base.py::test_create_invalid[None]", "nipype/pipeline/engine/tests/test_base.py::test_hierarchy", "nipype/pipeline/engine/tests/test_base.py::test_clone" ]
[]
Apache License 2.0
2,134
[ "nipype/pipeline/engine/base.py" ]
[ "nipype/pipeline/engine/base.py" ]
adamjstewart__fiscalyear-5
77c5c0c82a62de36e77284e924f744bb1e770a31
2018-02-08 16:57:16
a59cde7a881a85c5a65e523623e23668c2cb991c
codecov-io: # [Codecov](https://codecov.io/gh/adamjstewart/fiscalyear/pull/5?src=pr&el=h1) Report > Merging [#5](https://codecov.io/gh/adamjstewart/fiscalyear/pull/5?src=pr&el=desc) into [master](https://codecov.io/gh/adamjstewart/fiscalyear/commit/77c5c0c82a62de36e77284e924f744bb1e770a31?src=pr&el=desc) will **decrease** coverage by `0.43%`. > The diff coverage is `93.75%`. [![Impacted file tree graph](https://codecov.io/gh/adamjstewart/fiscalyear/pull/5/graphs/tree.svg?width=650&src=pr&token=bzOeexdvrN&height=150)](https://codecov.io/gh/adamjstewart/fiscalyear/pull/5?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #5 +/- ## ========================================== - Coverage 100% 99.56% -0.44% ========================================== Files 1 1 Lines 233 229 -4 ========================================== - Hits 233 228 -5 - Misses 0 1 +1 ``` | [Impacted Files](https://codecov.io/gh/adamjstewart/fiscalyear/pull/5?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [fiscalyear.py](https://codecov.io/gh/adamjstewart/fiscalyear/pull/5/diff?src=pr&el=tree#diff-ZmlzY2FseWVhci5weQ==) | `99.56% <93.75%> (-0.44%)` | :arrow_down: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/adamjstewart/fiscalyear/pull/5?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/adamjstewart/fiscalyear/pull/5?src=pr&el=footer). Last update [77c5c0c...d01eec3](https://codecov.io/gh/adamjstewart/fiscalyear/pull/5?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
diff --git a/fiscalyear.py b/fiscalyear.py index c6f8ddc..f3a7640 100644 --- a/fiscalyear.py +++ b/fiscalyear.py @@ -24,10 +24,41 @@ START_MONTH = 10 START_DAY = 1 +def _validate_fiscal_calendar_params(start_year, start_month, start_day): + """Raise an Exception if the calendar parameters are invalid. + + :param start_year: Relationship between the start of the fiscal year and + the calendar year. Possible values: ``'previous'`` or ``'same'``. + :type start_year: str + :param start_month: The first month of the fiscal year + :type start_month: int or str + :param start_day: The first day of the first month of the fiscal year + :type start_day: int or str + :raises TypeError: If ``start_year`` is not a ``str``. + :raises ValueError: If ``start_year`` is not ``'previous'`` or ``'same'`` + :raises ValueError: If ``start_month`` or ``start_day`` is not an int or + int-like string + :raises ValueError: If ``start_month`` or ``start_day`` is out of range + """ + if not isinstance(start_year, str): + raise TypeError("'start_year' must be a 'str', not: '%s'" % type(str)) + if start_year not in ('previous', 'same'): + msg = "'start_year' must be either 'previous' or 'same', not: '%s'" + raise ValueError(msg % start_year) + _check_day(start_month, start_day) + + +def setup_fiscal_calendar(start_year, start_month, start_day): + """Change the global calendar settings.""" + _validate_fiscal_calendar_params(start_year, start_month, start_day) + global START_YEAR, START_MONTH, START_DAY + START_YEAR = start_year + START_MONTH = start_month + START_DAY = start_day + + @contextlib.contextmanager -def fiscal_calendar(start_year=None, - start_month=None, - start_day=None): +def fiscal_calendar(start_year=None, start_month=None, start_day=None): """A context manager that lets you modify the start of the fiscal calendar inside the scope of a with-statement. @@ -38,43 +69,22 @@ def fiscal_calendar(start_year=None, :type start_month: int or str :param start_day: The first day of the first month of the fiscal year :type start_day: int or str - :raises AssertionError: If ``start_year`` is not ``'previous'`` or ``'same'`` - :raises TypeError: If ``start_month`` or ``start_day`` is not an int or int-like string + :raises ValueError: If ``start_year`` is not ``'previous'`` or ``'same'`` + :raises TypeError: If ``start_month`` or ``start_day`` is not an int or + int-like string :raises ValueError: If ``start_month`` or ``start_day`` is out of range """ - global START_YEAR - global START_MONTH - global START_DAY - - # Use default values if not changed - if start_year is None: - start_year = START_YEAR - if start_month is None: - start_month = START_MONTH - if start_day is None: - start_day = START_DAY - - assert isinstance(start_year, str) - assert start_year == 'previous' or start_year == 'same' - start_month = _check_month(start_month) - start_day = _check_day(start_month, start_day) - - # Backup previous values - old_start_year = START_YEAR - old_start_month = START_MONTH - old_start_day = START_DAY + # If arguments are omitted, use the currently active values. 
+ start_year = START_YEAR if start_year is None else start_year + start_month = START_MONTH if start_month is None else start_month + start_day = START_DAY if start_day is None else start_day # Temporarily change global variables - START_YEAR = start_year - START_MONTH = start_month - START_DAY = start_day - + previous_values = (START_YEAR, START_MONTH, START_DAY) + setup_fiscal_calendar(start_year, start_month, start_day) yield - # Restore previous values - START_YEAR = old_start_year - START_MONTH = old_start_month - START_DAY = old_start_day + setup_fiscal_calendar(*previous_values) def _check_int(value): @@ -225,11 +235,9 @@ class FiscalYear(object): return self == item elif isinstance(item, FiscalQuarter): return self._fiscal_year == item.fiscal_year - elif (isinstance(item, FiscalDateTime) or - isinstance(item, datetime.datetime)): + elif isinstance(item, datetime.datetime): return self.start <= item <= self.end - elif (isinstance(item, FiscalDate) or - isinstance(item, datetime.date)): + elif isinstance(item, datetime.date): return self.start.date() <= item <= self.end.date() else: raise TypeError("can't compare '%s' to '%s'" % ( @@ -405,11 +413,9 @@ class FiscalQuarter(object): """ if isinstance(item, FiscalQuarter): return self == item - elif (isinstance(item, FiscalDateTime) or - isinstance(item, datetime.datetime)): + elif isinstance(item, datetime.datetime): return self.start <= item <= self.end - elif (isinstance(item, FiscalDate) or - isinstance(item, datetime.date)): + elif isinstance(item, datetime.date): return self.start.date() <= item <= self.end.date() else: raise TypeError("can't compare '%s' to '%s'" % (
Add a function for easily changing the global "START_*" parameters I think it would make sense to have a function that would make changing the global parameters easier. E.g. ``` python def setup_fiscal_year(start_year, start_month, start_day): global START_YEAR, START_MONTH, START_DAY START_YEAR = start_year START_MONTH = start_month START_DAY = start_day def test_setup_fiscal_year(): # test defaults day = fiscalyear.FiscalDate(2017, 12, 1) assert day.fiscal_year == 2018 assert day.quarter == 1 # change fiscal year settings fiscalyear.setup_fiscal_year("same", 1, 1) assert day.fiscal_year == 2017 assert day.quarter == 4 # restore defaults and re-test fiscalyear.setup_fiscal_year("previous", 10, 1) assert day.fiscal_year == 2018 assert day.quarter == 1 ``` This could also make it possible to change the Fiscal Year settings even if you don't import the whole module. E.g. ``` python In [4]: from fiscalyear import FiscalQuarter, setup_fiscal_year In [5]: quarter = FiscalQuarter(2018, 1) In [6]: quarter.start Out[6]: FiscalDateTime(2017, 10, 1, 0, 0) In [7]: setup_fiscal_year('same', 1, 1) In [8]: quarter.start Out[8]: FiscalDateTime(2018, 1, 1, 0, 0) ```
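Usage of the API as merged: the function landed as `setup_fiscal_calendar` (with parameter validation), not the `setup_fiscal_year` name sketched above. The expected values below mirror the issue's own IPython session:

```python
import fiscalyear

quarter = fiscalyear.FiscalQuarter(2018, 1)
assert quarter.start == fiscalyear.FiscalDateTime(2017, 10, 1)  # US federal default

fiscalyear.setup_fiscal_calendar('same', 1, 1)                  # calendar-year fiscal year
assert quarter.start == fiscalyear.FiscalDateTime(2018, 1, 1)

fiscalyear.setup_fiscal_calendar('previous', 10, 1)             # restore defaults
```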
adamjstewart/fiscalyear
diff --git a/test_fiscalyear.py b/test_fiscalyear.py index 690835f..5b2779f 100644 --- a/test_fiscalyear.py +++ b/test_fiscalyear.py @@ -9,8 +9,143 @@ import pytest US_FEDERAL = ('previous', 10, 1) UK_PERSONAL = ('same', 4, 6) -# Default to U.S. -fiscalyear.START_YEAR, fiscalyear.START_MONTH, fiscalyear.START_DAY = US_FEDERAL + +class TestCheckInt(object): + @pytest.mark.parametrize("value, exception", [ + ('asdf', TypeError), + ("-999", TypeError), + # Technically speaking, _check_int should accept negative integers + # but this isn't a public function + datetime doesn't handle them + # anyway. + (float(), TypeError), + (object(), TypeError), + ]) + def test_invalid_input(self, value, exception): + with pytest.raises(exception): + fiscalyear._check_int(value) + + @pytest.mark.parametrize("value", [1, 2, 0, -1, -2, "1", "0", "999"]) + def test_valid_input(self, value): + assert int(value) == fiscalyear._check_int(value) + + +class TestCheckYear(object): + @pytest.mark.parametrize("value, exception", [ + ('asdf', TypeError), + (float(), TypeError), + (object(), TypeError), + ("-1", TypeError), + (-1, ValueError), + (0, ValueError), + ("0", ValueError), + (10000, ValueError), + ("10000", ValueError), + ]) + def test_invalid_input(self, value, exception): + with pytest.raises(exception): + fiscalyear._check_year(value) + + @pytest.mark.parametrize("value", [1, 2, "1", "999"]) + def test_valid_input(self, value): + assert int(value) == fiscalyear._check_year(value) + + +class TestCheckDay(object): + @pytest.mark.parametrize("month, day, exception", [ + (1, 'asdf', TypeError), + (1, "-999", TypeError), + (1, float(), TypeError), + (1, object(), TypeError), + (1, -1, ValueError), + (1, "-1", TypeError), + (1, 0, ValueError), + (1, "0", ValueError), + (1, 32, ValueError), + (1, 32, ValueError), + ]) + def test_invalid_input(self, month, day, exception): + with pytest.raises(exception): + fiscalyear._check_day(month, day) + + @pytest.mark.parametrize("month, day", [(1, 1), (1, 2), (1, "1"), (1, 31), (1, "31")]) + def test_valid_input(self, month, day): + assert int(day) == fiscalyear._check_day(month, day) + + +class TestCheckQuarter(object): + @pytest.mark.parametrize("value, exception", [ + ('asdf', TypeError), + (float(), TypeError), + (object(), TypeError), + ("-1", TypeError), + (-1, ValueError), + (0, ValueError), + ("0", ValueError), + (5, ValueError), + ("5", ValueError), + ]) + def test_invalid_input(self, value, exception): + with pytest.raises(exception): + fiscalyear._check_quarter(value) + + @pytest.mark.parametrize("value", [1, 2, "1", "4"]) + def test_valid_input(self, value): + assert int(value) == fiscalyear._check_quarter(value) + + +class TestCalendarSettingsValidator(object): + @pytest.mark.parametrize("arguments, exception", [ + (dict(start_year='asdf', start_month=12, start_day=1), ValueError), + (dict(start_year=float(1999), start_month=12, start_day=1), TypeError), + (dict(start_year=object(), start_month=12, start_day=1), TypeError), + + (dict(start_year='same', start_month='asdf', start_day=1), TypeError), + (dict(start_year='same', start_month=float(12), start_day=1), TypeError), + (dict(start_year='same', start_month=object(), start_day=1), TypeError), + (dict(start_year='same', start_month=-1, start_day=1), ValueError), + (dict(start_year='same', start_month=0, start_day=1), ValueError), + (dict(start_year='same', start_month=13, start_day=1), ValueError), + + (dict(start_year='same', start_month=12, start_day='asdf'), TypeError), + (dict(start_year='same', 
start_month=12, start_day=float(1)), TypeError), + (dict(start_year='same', start_month=12, start_day=object()), TypeError), + (dict(start_year='same', start_month=12, start_day=0), ValueError), + (dict(start_year='same', start_month=12, start_day=-1), ValueError), + (dict(start_year='same', start_month=12, start_day=32), ValueError), + ]) + def test_invalid_input(self, arguments, exception): + with pytest.raises(exception): + fiscalyear._validate_fiscal_calendar_params(**arguments) + + @pytest.mark.parametrize("arguments", [ + dict(start_year='same', start_month=1, start_day=1), + dict(start_year='same', start_month=1, start_day=31), + dict(start_year='same', start_month=12, start_day=1), + dict(start_year='previous', start_month=1, start_day=1), + dict(start_year='previous', start_month=1, start_day=31), + dict(start_year='previous', start_month=12, start_day=1), + ]) + def test_valid_input(self, arguments): + fiscalyear._validate_fiscal_calendar_params(**arguments) + + +class TestSetupFiscalCalendar(object): + + def test_setup_fiscal_calendar(self): + # Test defaults + day = fiscalyear.FiscalDate(2017, 12, 1) + assert day.fiscal_year == 2018 + assert day.quarter == 1 + + # Change fiscal year settings + fiscalyear.setup_fiscal_calendar("same", 1, 1) + assert day.fiscal_year == 2017 + assert day.quarter == 4 + + # Restore defaults and re-test + fiscalyear.setup_fiscal_calendar("previous", 10, 1) + assert day.fiscal_year == 2018 + assert day.quarter == 1 class TestFiscalCalendar: @@ -86,6 +221,10 @@ class TestFiscalCalendar: assert fiscalyear.START_DAY == 1 def test_wrong_type(self): + with pytest.raises(TypeError): + with fiscalyear.fiscal_calendar(start_year=6.5): + pass + with pytest.raises(TypeError): with fiscalyear.fiscal_calendar(start_month=6.5): pass
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 3 }, "num_modified_files": 1 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.16 babel==2.17.0 certifi==2025.1.31 charset-normalizer==3.4.1 codecov==2.1.13 coverage==7.8.0 docutils==0.21.2 exceptiongroup==1.2.2 -e git+https://github.com/adamjstewart/fiscalyear.git@77c5c0c82a62de36e77284e924f744bb1e770a31#egg=fiscalyear idna==3.10 imagesize==1.4.1 importlib_metadata==8.6.1 iniconfig==2.1.0 Jinja2==3.1.6 MarkupSafe==3.0.2 packaging==24.2 pluggy==1.5.0 Pygments==2.19.1 pytest==8.3.5 pytest-cov==6.0.0 pytest-runner==6.0.1 requests==2.32.3 snowballstemmer==2.2.0 Sphinx==7.4.7 sphinx-rtd-theme==3.0.2 sphinxcontrib-applehelp==2.0.0 sphinxcontrib-devhelp==2.0.0 sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 tomli==2.2.1 urllib3==2.3.0 zipp==3.21.0
name: fiscalyear channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.16 - babel==2.17.0 - certifi==2025.1.31 - charset-normalizer==3.4.1 - codecov==2.1.13 - coverage==7.8.0 - docutils==0.21.2 - exceptiongroup==1.2.2 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==8.6.1 - iniconfig==2.1.0 - jinja2==3.1.6 - markupsafe==3.0.2 - packaging==24.2 - pluggy==1.5.0 - pygments==2.19.1 - pytest==8.3.5 - pytest-cov==6.0.0 - pytest-runner==6.0.1 - requests==2.32.3 - snowballstemmer==2.2.0 - sphinx==7.4.7 - sphinx-rtd-theme==3.0.2 - sphinxcontrib-applehelp==2.0.0 - sphinxcontrib-devhelp==2.0.0 - sphinxcontrib-htmlhelp==2.1.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==2.0.0 - sphinxcontrib-serializinghtml==2.0.0 - tomli==2.2.1 - urllib3==2.3.0 - zipp==3.21.0 prefix: /opt/conda/envs/fiscalyear
[ "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments0-ValueError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments1-TypeError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments2-TypeError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments3-TypeError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments4-TypeError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments5-TypeError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments6-ValueError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments7-ValueError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments8-ValueError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments9-TypeError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments10-TypeError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments11-TypeError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments12-ValueError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments13-ValueError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_invalid_input[arguments14-ValueError]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments0]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments1]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments2]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments3]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments4]", "test_fiscalyear.py::TestCalendarSettingsValidator::test_valid_input[arguments5]", "test_fiscalyear.py::TestSetupFiscalCalendar::test_setup_fiscal_calendar", "test_fiscalyear.py::TestFiscalCalendar::test_wrong_type" ]
[]
[ "test_fiscalyear.py::TestCheckInt::test_invalid_input[asdf-TypeError]", "test_fiscalyear.py::TestCheckInt::test_invalid_input[-999-TypeError]", "test_fiscalyear.py::TestCheckInt::test_invalid_input[0.0-TypeError]", "test_fiscalyear.py::TestCheckInt::test_invalid_input[value3-TypeError]", "test_fiscalyear.py::TestCheckInt::test_valid_input[1_0]", "test_fiscalyear.py::TestCheckInt::test_valid_input[2]", "test_fiscalyear.py::TestCheckInt::test_valid_input[0_0]", "test_fiscalyear.py::TestCheckInt::test_valid_input[-1]", "test_fiscalyear.py::TestCheckInt::test_valid_input[-2]", "test_fiscalyear.py::TestCheckInt::test_valid_input[1_1]", "test_fiscalyear.py::TestCheckInt::test_valid_input[0_1]", "test_fiscalyear.py::TestCheckInt::test_valid_input[999]", "test_fiscalyear.py::TestCheckYear::test_invalid_input[asdf-TypeError]", "test_fiscalyear.py::TestCheckYear::test_invalid_input[0.0-TypeError]", "test_fiscalyear.py::TestCheckYear::test_invalid_input[value2-TypeError]", "test_fiscalyear.py::TestCheckYear::test_invalid_input[-1-TypeError]", "test_fiscalyear.py::TestCheckYear::test_invalid_input[-1-ValueError]", "test_fiscalyear.py::TestCheckYear::test_invalid_input[0-ValueError0]", "test_fiscalyear.py::TestCheckYear::test_invalid_input[0-ValueError1]", "test_fiscalyear.py::TestCheckYear::test_invalid_input[10000-ValueError0]", "test_fiscalyear.py::TestCheckYear::test_invalid_input[10000-ValueError1]", "test_fiscalyear.py::TestCheckYear::test_valid_input[1_0]", "test_fiscalyear.py::TestCheckYear::test_valid_input[2]", "test_fiscalyear.py::TestCheckYear::test_valid_input[1_1]", "test_fiscalyear.py::TestCheckYear::test_valid_input[999]", "test_fiscalyear.py::TestCheckDay::test_invalid_input[1-asdf-TypeError]", "test_fiscalyear.py::TestCheckDay::test_invalid_input[1--999-TypeError]", "test_fiscalyear.py::TestCheckDay::test_invalid_input[1-0.0-TypeError]", "test_fiscalyear.py::TestCheckDay::test_invalid_input[1-day3-TypeError]", "test_fiscalyear.py::TestCheckDay::test_invalid_input[1--1-ValueError]", "test_fiscalyear.py::TestCheckDay::test_invalid_input[1--1-TypeError]", "test_fiscalyear.py::TestCheckDay::test_invalid_input[1-0-ValueError0]", "test_fiscalyear.py::TestCheckDay::test_invalid_input[1-0-ValueError1]", "test_fiscalyear.py::TestCheckDay::test_invalid_input[1-32-ValueError0]", "test_fiscalyear.py::TestCheckDay::test_invalid_input[1-32-ValueError1]", "test_fiscalyear.py::TestCheckDay::test_valid_input[1-1_0]", "test_fiscalyear.py::TestCheckDay::test_valid_input[1-2]", "test_fiscalyear.py::TestCheckDay::test_valid_input[1-1_1]", "test_fiscalyear.py::TestCheckDay::test_valid_input[1-31_0]", "test_fiscalyear.py::TestCheckDay::test_valid_input[1-31_1]", "test_fiscalyear.py::TestCheckQuarter::test_invalid_input[asdf-TypeError]", "test_fiscalyear.py::TestCheckQuarter::test_invalid_input[0.0-TypeError]", "test_fiscalyear.py::TestCheckQuarter::test_invalid_input[value2-TypeError]", "test_fiscalyear.py::TestCheckQuarter::test_invalid_input[-1-TypeError]", "test_fiscalyear.py::TestCheckQuarter::test_invalid_input[-1-ValueError]", "test_fiscalyear.py::TestCheckQuarter::test_invalid_input[0-ValueError0]", "test_fiscalyear.py::TestCheckQuarter::test_invalid_input[0-ValueError1]", "test_fiscalyear.py::TestCheckQuarter::test_invalid_input[5-ValueError0]", "test_fiscalyear.py::TestCheckQuarter::test_invalid_input[5-ValueError1]", "test_fiscalyear.py::TestCheckQuarter::test_valid_input[1_0]", "test_fiscalyear.py::TestCheckQuarter::test_valid_input[2]", 
"test_fiscalyear.py::TestCheckQuarter::test_valid_input[1_1]", "test_fiscalyear.py::TestCheckQuarter::test_valid_input[4]", "test_fiscalyear.py::TestFiscalCalendar::test_start_year", "test_fiscalyear.py::TestFiscalCalendar::test_start_month", "test_fiscalyear.py::TestFiscalCalendar::test_start_day", "test_fiscalyear.py::TestFiscalCalendar::test_complex", "test_fiscalyear.py::TestFiscalCalendar::test_nested", "test_fiscalyear.py::TestFiscalCalendar::test_out_of_range", "test_fiscalyear.py::TestFiscalYear::test_basic", "test_fiscalyear.py::TestFiscalYear::test_repr", "test_fiscalyear.py::TestFiscalYear::test_str", "test_fiscalyear.py::TestFiscalYear::test_from_string", "test_fiscalyear.py::TestFiscalYear::test_wrong_type", "test_fiscalyear.py::TestFiscalYear::test_out_of_range", "test_fiscalyear.py::TestFiscalYear::test_prev_fiscal_year", "test_fiscalyear.py::TestFiscalYear::test_next_fiscal_year", "test_fiscalyear.py::TestFiscalYear::test_start", "test_fiscalyear.py::TestFiscalYear::test_end", "test_fiscalyear.py::TestFiscalYear::test_q1", "test_fiscalyear.py::TestFiscalYear::test_q2", "test_fiscalyear.py::TestFiscalYear::test_q3", "test_fiscalyear.py::TestFiscalYear::test_q4", "test_fiscalyear.py::TestFiscalYear::test_contains", "test_fiscalyear.py::TestFiscalYear::test_less_than", "test_fiscalyear.py::TestFiscalYear::test_less_than_equals", "test_fiscalyear.py::TestFiscalYear::test_equals", "test_fiscalyear.py::TestFiscalYear::test_not_equals", "test_fiscalyear.py::TestFiscalYear::test_greater_than", "test_fiscalyear.py::TestFiscalYear::test_greater_than_equals", "test_fiscalyear.py::TestFiscalQuarter::test_basic", "test_fiscalyear.py::TestFiscalQuarter::test_repr", "test_fiscalyear.py::TestFiscalQuarter::test_str", "test_fiscalyear.py::TestFiscalQuarter::test_from_string", "test_fiscalyear.py::TestFiscalQuarter::test_wrong_type", "test_fiscalyear.py::TestFiscalQuarter::test_out_of_range", "test_fiscalyear.py::TestFiscalQuarter::test_prev_quarter", "test_fiscalyear.py::TestFiscalQuarter::test_next_quarter", "test_fiscalyear.py::TestFiscalQuarter::test_start", "test_fiscalyear.py::TestFiscalQuarter::test_end", "test_fiscalyear.py::TestFiscalQuarter::test_bad_start_year", "test_fiscalyear.py::TestFiscalQuarter::test_q1_start", "test_fiscalyear.py::TestFiscalQuarter::test_q1_end", "test_fiscalyear.py::TestFiscalQuarter::test_q2_start", "test_fiscalyear.py::TestFiscalQuarter::test_q2_end", "test_fiscalyear.py::TestFiscalQuarter::test_q3_start", "test_fiscalyear.py::TestFiscalQuarter::test_q3_end", "test_fiscalyear.py::TestFiscalQuarter::test_q4_start", "test_fiscalyear.py::TestFiscalQuarter::test_q4_end", "test_fiscalyear.py::TestFiscalQuarter::test_contains", "test_fiscalyear.py::TestFiscalQuarter::test_less_than", "test_fiscalyear.py::TestFiscalQuarter::test_less_than_equals", "test_fiscalyear.py::TestFiscalQuarter::test_equals", "test_fiscalyear.py::TestFiscalQuarter::test_not_equals", "test_fiscalyear.py::TestFiscalQuarter::test_greater_than", "test_fiscalyear.py::TestFiscalQuarter::test_greater_than_equals", "test_fiscalyear.py::TestFiscalDate::test_basic", "test_fiscalyear.py::TestFiscalDate::test_fiscal_year", "test_fiscalyear.py::TestFiscalDate::test_prev_fiscal_year", "test_fiscalyear.py::TestFiscalDate::test_next_fiscal_year", "test_fiscalyear.py::TestFiscalDate::test_prev_quarter", "test_fiscalyear.py::TestFiscalDate::test_next_quarter", "test_fiscalyear.py::TestFiscalDateTime::test_basic", "test_fiscalyear.py::TestFiscalDateTime::test_fiscal_year", 
"test_fiscalyear.py::TestFiscalDateTime::test_prev_fiscal_year", "test_fiscalyear.py::TestFiscalDateTime::test_next_fiscal_year", "test_fiscalyear.py::TestFiscalDateTime::test_prev_quarter", "test_fiscalyear.py::TestFiscalDateTime::test_next_quarter" ]
[]
MIT License
2,135
[ "fiscalyear.py" ]
[ "fiscalyear.py" ]
pytorch__ignite-69
a0235df55650ec5368c0cd9f84a3a34b92c37273
2018-02-09 11:11:39
a0235df55650ec5368c0cd9f84a3a34b92c37273
diff --git a/ignite/engine.py b/ignite/engine.py index f81753a1..aa7b2357 100644 --- a/ignite/engine.py +++ b/ignite/engine.py @@ -111,15 +111,16 @@ class Engine(object): try: start_time = time.time() for batch in dataset: + self.current_iteration += 1 self._fire_event(Events.ITERATION_STARTED) step_result = self._process_function(batch) if step_result is not None: self.history.append(step_result) - self.current_iteration += 1 self._fire_event(Events.ITERATION_COMPLETED) if self.should_terminate: break + time_taken = time.time() - start_time hours, mins, secs = _to_hours_mins_secs(time_taken) return hours, mins, secs diff --git a/ignite/trainer.py b/ignite/trainer.py index e06805af..f5f33c6c 100644 --- a/ignite/trainer.py +++ b/ignite/trainer.py @@ -67,12 +67,12 @@ class Trainer(Engine): self._fire_event(Events.STARTED) while self.current_epoch < max_epochs and not self.should_terminate: + self.current_epoch += 1 self._fire_event(Events.EPOCH_STARTED) self._train_one_epoch(training_data) if self.should_terminate: break self._fire_event(Events.EPOCH_COMPLETED) - self.current_epoch += 1 self._fire_event(Events.COMPLETED) time_taken = time.time() - start_time
Start current_epoch and current_iteration from 1 instead of 0. Also increment these counters *after* the `ITERATION_COMPLETED`/`EPOCH_COMPLETED` events. See the discussion in #59 for more details.
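For illustration only, a minimal self-contained sketch of the counter semantics this settles on (a toy loop modelled on the patch above, not ignite's real `Engine` API; the function and handler names are hypothetical): the counter is bumped just before `ITERATION_STARTED` fires, so every handler sees 1-based values, and terminating mid-epoch finishes the current iteration without a further increment.

```python
# Toy model of the patched counter behavior; not ignite's actual API.
def run_epoch(batches, on_started, on_completed, should_terminate):
    current_iteration = 0
    for batch in batches:
        current_iteration += 1        # incremented first, so counting starts at 1
        on_started(current_iteration)
        # ... process `batch` here ...
        on_completed(current_iteration)
        if should_terminate(current_iteration):
            break                     # iteration completes; counter does not advance
    return current_iteration

seen = []
last = run_epoch(
    batches=range(10),
    on_started=seen.append,
    on_completed=lambda i: None,
    should_terminate=lambda i: i == 3,
)
assert seen == [1, 2, 3] and last == 3
```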
pytorch/ignite
diff --git a/tests/ignite/test_evaluator.py b/tests/ignite/test_evaluator.py index b0c54408..4450e93e 100644 --- a/tests/ignite/test_evaluator.py +++ b/tests/ignite/test_evaluator.py @@ -14,7 +14,7 @@ def test_current_validation_iteration_counter_increases_every_iteration(): class IterationCounter(object): def __init__(self): - self.current_iteration_count = 0 + self.current_iteration_count = 1 self.total_count = 0 def __call__(self, evaluator): @@ -23,7 +23,7 @@ def test_current_validation_iteration_counter_increases_every_iteration(): self.total_count += 1 def clear(self): - self.current_iteration_count = 0 + self.current_iteration_count = 1 iteration_counter = IterationCounter() @@ -71,15 +71,15 @@ def test_terminate_stops_evaluator_when_called_during_iteration(): iteration_to_stop = 3 # i.e. part way through the 3rd validation run evaluator = Evaluator(MagicMock(return_value=1)) - def end_of_iteration_handler(evaluator): + def start_of_iteration_handler(evaluator): if evaluator.current_iteration == iteration_to_stop: evaluator.terminate() - evaluator.add_event_handler(Events.ITERATION_STARTED, end_of_iteration_handler) + evaluator.add_event_handler(Events.ITERATION_STARTED, start_of_iteration_handler) evaluator.run([None] * num_iterations) - # should complete the iteration when terminate called - assert evaluator.current_iteration == iteration_to_stop + 1 + # should complete the iteration when terminate called but not increment counter + assert evaluator.current_iteration == iteration_to_stop def test_create_supervised(): diff --git a/tests/ignite/test_trainer.py b/tests/ignite/test_trainer.py index b7b96177..8c3117c1 100644 --- a/tests/ignite/test_trainer.py +++ b/tests/ignite/test_trainer.py @@ -60,7 +60,7 @@ def test_current_epoch_counter_increases_every_epoch(): class EpochCounter(object): def __init__(self): - self.current_epoch_count = 0 + self.current_epoch_count = 1 def __call__(self, trainer): assert trainer.current_epoch == self.current_epoch_count @@ -80,7 +80,7 @@ def test_current_iteration_counter_increases_every_iteration(): class IterationCounter(object): def __init__(self): - self.current_iteration_count = 0 + self.current_iteration_count = 1 def __call__(self, trainer): assert trainer.current_iteration == self.current_iteration_count @@ -115,7 +115,7 @@ def test_terminate_at_end_of_epoch_stops_training(): trainer.run([1], max_epochs=max_epochs) - assert trainer.current_epoch == last_epoch_to_run + 1 # counter is incremented at end of loop + assert trainer.current_epoch == last_epoch_to_run assert trainer.should_terminate @@ -139,24 +139,23 @@ def test_terminate_at_start_of_epoch_stops_training_after_completing_iteration() assert trainer.current_epoch == epoch_to_terminate_on assert trainer.should_terminate # completes first iteration - assert trainer.current_iteration == (epoch_to_terminate_on * len(batches_per_epoch)) + 1 + assert trainer.current_iteration == ((epoch_to_terminate_on - 1) * len(batches_per_epoch)) + 1 def test_terminate_stops_training_mid_epoch(): num_iterations_per_epoch = 10 - iteration_to_stop = num_iterations_per_epoch + 3 # i.e. part way through the 2nd epoch + iteration_to_stop = num_iterations_per_epoch + 3 # i.e. 
part way through the 3rd epoch trainer = Trainer(MagicMock(return_value=1)) - def end_of_iteration_handler(trainer): + def start_of_iteration_handler(trainer): if trainer.current_iteration == iteration_to_stop: trainer.terminate() - trainer.add_event_handler(Events.ITERATION_STARTED, end_of_iteration_handler) + trainer.add_event_handler(Events.ITERATION_STARTED, start_of_iteration_handler) trainer.run(training_data=[None] * num_iterations_per_epoch, max_epochs=3) - assert (trainer.current_iteration == iteration_to_stop + - 1) # completes the iteration when terminate called - assert trainer.current_epoch == np.ceil( - iteration_to_stop / num_iterations_per_epoch) - 1 # it starts from 0 + # completes the iteration but doesn't increment counter (this happens just before a new iteration starts) + assert (trainer.current_iteration == iteration_to_stop) + assert trainer.current_epoch == np.ceil(iteration_to_stop / num_iterations_per_epoch) # it starts from 0 def _create_mock_data_loader(epochs, batches_per_epoch): @@ -199,7 +198,7 @@ def test_training_iteration_events_are_fired(): assert mock_manager.mock_calls == expected_calls -def test_create_supervised(): +def test_create_supervised_trainer(): model = Linear(1, 1) model.weight.data.zero_() model.bias.data.zero_()
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 2 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "numpy", "mock", "pytest", "codecov", "pytest-cov", "tqdm", "scikit-learn", "visdom", "torchvision", "tensorboardX", "gym" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 charset-normalizer==2.0.12 cloudpickle==2.2.1 codecov==2.1.13 coverage==6.2 dataclasses==0.8 decorator==4.4.2 enum34==1.1.10 gym==0.26.2 gym-notices==0.0.8 idna==3.10 -e git+https://github.com/pytorch/ignite.git@a0235df55650ec5368c0cd9f84a3a34b92c37273#egg=ignite importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work importlib-resources==5.4.0 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work joblib==1.1.1 jsonpatch==1.32 jsonpointer==2.3 mock==5.2.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work networkx==2.5.1 numpy==1.19.5 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work Pillow==8.4.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work protobuf==4.21.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-cov==4.0.0 requests==2.27.1 scikit-learn==0.24.2 scipy==1.5.4 six==1.17.0 tensorboardX==2.6.2.2 threadpoolctl==3.1.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 torch==1.10.1 torchvision==0.11.2 tornado==6.1 tqdm==4.64.1 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 visdom==0.2.4 websocket-client==1.3.1 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: ignite channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - charset-normalizer==2.0.12 - cloudpickle==2.2.1 - codecov==2.1.13 - coverage==6.2 - dataclasses==0.8 - decorator==4.4.2 - enum34==1.1.10 - gym==0.26.2 - gym-notices==0.0.8 - idna==3.10 - importlib-resources==5.4.0 - joblib==1.1.1 - jsonpatch==1.32 - jsonpointer==2.3 - mock==5.2.0 - networkx==2.5.1 - numpy==1.19.5 - pillow==8.4.0 - protobuf==4.21.0 - pytest-cov==4.0.0 - requests==2.27.1 - scikit-learn==0.24.2 - scipy==1.5.4 - six==1.17.0 - tensorboardx==2.6.2.2 - threadpoolctl==3.1.0 - tomli==1.2.3 - torch==1.10.1 - torchvision==0.11.2 - tornado==6.1 - tqdm==4.64.1 - urllib3==1.26.20 - visdom==0.2.4 - websocket-client==1.3.1 prefix: /opt/conda/envs/ignite
[ "tests/ignite/test_evaluator.py::test_current_validation_iteration_counter_increases_every_iteration", "tests/ignite/test_evaluator.py::test_terminate_stops_evaluator_when_called_during_iteration", "tests/ignite/test_trainer.py::test_current_epoch_counter_increases_every_epoch", "tests/ignite/test_trainer.py::test_current_iteration_counter_increases_every_iteration", "tests/ignite/test_trainer.py::test_terminate_at_end_of_epoch_stops_training", "tests/ignite/test_trainer.py::test_terminate_at_start_of_epoch_stops_training_after_completing_iteration", "tests/ignite/test_trainer.py::test_terminate_stops_training_mid_epoch" ]
[ "tests/ignite/test_trainer.py::test_create_supervised_trainer" ]
[ "tests/ignite/test_evaluator.py::test_evaluation_iteration_events_are_fired", "tests/ignite/test_evaluator.py::test_create_supervised", "tests/ignite/test_trainer.py::test_exception_handler_called_on_error", "tests/ignite/test_trainer.py::test_stopping_criterion_is_max_epochs", "tests/ignite/test_trainer.py::test_training_iteration_events_are_fired" ]
[]
BSD 3-Clause "New" or "Revised" License
2,136
[ "ignite/engine.py", "ignite/trainer.py" ]
[ "ignite/engine.py", "ignite/trainer.py" ]
castle__castle-python-29
288780af56d67af5757c6b0d17a74a24c29a9a60
2018-02-09 13:40:37
c54fd46f2f076a6a53e24225a1223a01952cb408
diff --git a/HISTORY.md b/HISTORY.md index c705403..e66e5a4 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,3 +1,8 @@ +## master + +### Breaking Changes: +- switched configuration request_timeout from seconds to milliseconds + ## 2.0.0 (2018-02-09) ### Features: diff --git a/README.rst b/README.rst index 5fc110f..1436405 100644 --- a/README.rst +++ b/README.rst @@ -29,8 +29,8 @@ import and configure the library with your Castle API secret. # For authenticate method you can set failover strategies: allow(default), deny, challenge, throw configuration.failover_strategy = 'deny' - # Castle::RequestError is raised when timing out in seconds (default: 0.5 of the second) - configuration.request_timeout = 1 + # Castle::RequestError is raised when timing out in milliseconds (default: 500 milliseconds) + configuration.request_timeout = 1000 # Whitelisted and Blacklisted headers are case insensitive and allow to use _ and - as a separator, http prefixes are removed # Whitelisted headers diff --git a/castle/configuration.py b/castle/configuration.py index 1a9a6dc..c406191 100644 --- a/castle/configuration.py +++ b/castle/configuration.py @@ -19,7 +19,7 @@ WHITELISTED = [ BLACKLISTED = ['HTTP_COOKIE'] # 500 milliseconds -REQUEST_TIMEOUT = 0.5 +REQUEST_TIMEOUT = 500 FAILOVER_STRATEGIES = ['allow', 'deny', 'challenge', 'throw'] diff --git a/castle/request.py b/castle/request.py index 7e38c79..230787c 100644 --- a/castle/request.py +++ b/castle/request.py @@ -13,7 +13,7 @@ class Request(object): method, self.build_url(path), auth=('', configuration.api_secret), - timeout=configuration.request_timeout, + timeout=configuration.request_timeout / 1000.0, headers=self.headers, verify=Request.verify(), data=None if params is None else json.dumps(params)
Change the timeout configuration from seconds to milliseconds.
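A minimal sketch of what the unit change means in practice, grounded in the patch above (the `to_requests_timeout` helper name is ours, added for illustration): the configured value is now milliseconds, and it is divided by 1000.0 at request time because `requests` expects its `timeout` argument in seconds.

```python
REQUEST_TIMEOUT = 500  # new default from the patch: milliseconds, not seconds

def to_requests_timeout(timeout_ms):
    """Convert the millisecond setting into the seconds `requests` expects."""
    return timeout_ms / 1000.0

assert to_requests_timeout(REQUEST_TIMEOUT) == 0.5  # same 0.5 s as the old default
assert to_requests_timeout(1000) == 1.0             # the README's example value
```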
castle/castle-python
diff --git a/castle/test/configuration_test.py b/castle/test/configuration_test.py index 67fd056..6c80f05 100644 --- a/castle/test/configuration_test.py +++ b/castle/test/configuration_test.py @@ -15,7 +15,7 @@ class ConfigurationTestCase(unittest.TestCase): HeadersFormatter.call(v) for v in WHITELISTED]) self.assertEqual(config.blacklisted, [ HeadersFormatter.call(v) for v in BLACKLISTED]) - self.assertEqual(config.request_timeout, 0.5) + self.assertEqual(config.request_timeout, 500) self.assertEqual(config.failover_strategy, 'allow') def test_api_secret_setter(self): @@ -70,8 +70,8 @@ class ConfigurationTestCase(unittest.TestCase): def test_request_timeout_setter(self): config = Configuration() - config.request_timeout = 5 - self.assertEqual(config.request_timeout, 5.0) + config.request_timeout = 5000 + self.assertEqual(config.request_timeout, 5000) def test_failover_strategy_setter_valid(self): config = Configuration()
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 4 }
2.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pylint", "setuptools-lint", "pep8", "autopep8", "coverage", "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==2.11.7 attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work autopep8==2.0.4 -e git+https://github.com/castle/castle-python.git@288780af56d67af5757c6b0d17a74a24c29a9a60#egg=castle certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 dill==0.3.4 idna==3.10 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work isort==5.10.1 lazy-object-proxy==1.7.1 mccabe==0.7.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pep8==1.7.1 platformdirs==2.4.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pycodestyle==2.10.0 pylint==2.13.9 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 requests==2.27.1 setuptools-lint==0.6.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 typed-ast==1.5.5 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 wrapt==1.16.0 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: castle-python channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==2.11.7 - autopep8==2.0.4 - charset-normalizer==2.0.12 - coverage==6.2 - dill==0.3.4 - idna==3.10 - isort==5.10.1 - lazy-object-proxy==1.7.1 - mccabe==0.7.0 - pep8==1.7.1 - platformdirs==2.4.0 - pycodestyle==2.10.0 - pylint==2.13.9 - requests==2.27.1 - setuptools-lint==0.6.0 - tomli==1.2.3 - typed-ast==1.5.5 - urllib3==1.26.20 - wrapt==1.16.0 prefix: /opt/conda/envs/castle-python
[ "castle/test/configuration_test.py::ConfigurationTestCase::test_default_values" ]
[]
[ "castle/test/configuration_test.py::ConfigurationTestCase::test_api_secret_setter", "castle/test/configuration_test.py::ConfigurationTestCase::test_blacklisted_setter_empty", "castle/test/configuration_test.py::ConfigurationTestCase::test_blacklisted_setter_list", "castle/test/configuration_test.py::ConfigurationTestCase::test_blacklisted_setter_none", "castle/test/configuration_test.py::ConfigurationTestCase::test_failover_strategy_setter_invalid", "castle/test/configuration_test.py::ConfigurationTestCase::test_failover_strategy_setter_valid", "castle/test/configuration_test.py::ConfigurationTestCase::test_host_setter", "castle/test/configuration_test.py::ConfigurationTestCase::test_port_setter", "castle/test/configuration_test.py::ConfigurationTestCase::test_request_timeout_setter", "castle/test/configuration_test.py::ConfigurationTestCase::test_url_prefix_setter", "castle/test/configuration_test.py::ConfigurationTestCase::test_whitelisted_setter_empty", "castle/test/configuration_test.py::ConfigurationTestCase::test_whitelisted_setter_list", "castle/test/configuration_test.py::ConfigurationTestCase::test_whitelisted_setter_none" ]
[]
MIT License
2,137
[ "README.rst", "castle/configuration.py", "HISTORY.md", "castle/request.py" ]
[ "README.rst", "castle/configuration.py", "HISTORY.md", "castle/request.py" ]
pyout__pyout-40
558c1fc9b760146b5a9b794f7a8c8dab2b378863
2018-02-09 22:16:40
5c6e7d9ea60e63704333e554567561df1123bd3f
diff --git a/pyout/elements.py b/pyout/elements.py index 160a1f8..700f53d 100644 --- a/pyout/elements.py +++ b/pyout/elements.py @@ -28,6 +28,12 @@ schema = { {"$ref": "#/definitions/interval"}], "default": "black", "scope": "field"}, + "missing": { + "description": "Text to display for missing values", + "type": "string", + "default": "", + "scope": "column" + }, "underline": { "description": "Whether text is underlined", "oneOf": [{"type": "boolean"}, @@ -52,6 +58,7 @@ schema = { "properties": {"align": {"$ref": "#/definitions/align"}, "bold": {"$ref": "#/definitions/bold"}, "color": {"$ref": "#/definitions/color"}, + "missing": {"$ref": "#/definitions/missing"}, "transform": {"$ref": "#/definitions/transform"}, "underline": {"$ref": "#/definitions/underline"}, "width": {"$ref": "#/definitions/width"}}, diff --git a/pyout/field.py b/pyout/field.py index 9dee4fc..bb20027 100644 --- a/pyout/field.py +++ b/pyout/field.py @@ -145,6 +145,42 @@ class Field(object): return result +class Nothing(object): + """Internal class to represent missing values. + + This is used instead of a built-ins like None, "", or 0 to allow + us to unambiguously identify a missing value. In terms of + methods, it tries to mimic the string `text` (an empty string by + default) because that behavior is the most useful internally for + formatting the output. + + Parameters + ---------- + text : str, optional + Text to use for string representation of this object. + """ + + def __init__(self, text=""): + self._text = text + + def __str__(self): + return self._text + + def __add__(self, right): + return str(self) + right + + def __radd__(self, left): + return left + str(self) + + def __bool__(self): + return False + + __nonzero__ = __bool__ # py2 + + def __format__(self, format_spec): + return str.__format__(self._text, format_spec) + + class StyleFunctionError(Exception): """Signal that a style function failed. """ @@ -224,6 +260,8 @@ class StyleProcessors(object): """Return a processor for a style's "transform" function. """ def transform_fn(_, result): + if isinstance(result, Nothing): + return result try: return function(result) except: @@ -292,7 +330,11 @@ class StyleProcessors(object): A function. 
""" def by_interval_lookup_fn(value, result): - value = float(value) + try: + value = float(value) + except TypeError: + return result + for start, end, lookup_value in intervals: if start is None: start = float("-inf") diff --git a/pyout/tabular.py b/pyout/tabular.py index a63713f..ec16773 100644 --- a/pyout/tabular.py +++ b/pyout/tabular.py @@ -13,7 +13,9 @@ from multiprocessing.dummy import Pool from blessings import Terminal from pyout import elements -from pyout.field import Field, StyleProcessors +from pyout.field import Field, StyleProcessors, Nothing + +NOTHING = Nothing() class TermProcessors(StyleProcessors): @@ -133,7 +135,7 @@ class Tabular(object): self._init_style = style self._style = None - + self._nothings = {} # column => missing value self._autowidth_columns = {} if columns is not None: @@ -171,6 +173,12 @@ class Tabular(object): elements.validate(self._style) + for col in self._columns: + if "missing" in self._style[col]: + self._nothings[col] = Nothing(self._style[col]["missing"]) + else: + self._nothings[col] = NOTHING + def _setup_fields(self): self._fields = {} for column in self._columns: @@ -181,7 +189,7 @@ class Tabular(object): is_auto = style_width == "auto" or _safe_get(style_width, "auto") if is_auto: - width = _safe_get(style_width, "min", 1) + width = _safe_get(style_width, "min", 0) wmax = _safe_get(style_width, "max") self._autowidth_columns[column] = {"max": wmax} @@ -234,7 +242,7 @@ class Tabular(object): return dict(zip(self._columns, row)) def _attrs_to_dict(self, row): - return {c: getattr(row, c) for c in self._columns} + return {c: getattr(row, c, self._nothings[c]) for c in self._columns} def _choose_normalizer(self, row): if isinstance(row, Mapping): @@ -416,7 +424,7 @@ class Tabular(object): if isinstance(value, tuple): initial, fn = value else: - initial = "" + initial = NOTHING # Value could be a normal (non-callable) value or a # callable with no initial value. fn = value @@ -526,6 +534,16 @@ class Tabular(object): row = self._normalizer(row) callables = self._strip_callables(row) + # Fill in any missing values. Note: If the un-normalized data is an + # object, we already handle this in its normalizer, _attrs_to_dict. + # When the data is given as a dict, we do it here instead of its + # normalizer because there may be multi-column tuple keys. + if self._normalizer == self._identity: + for column in self._columns: + if column in row: + continue + row[column] = self._nothings[column] + with self._write_lock(): if not self._rows: self._maybe_write_header()
Allow for incomplete records So it must not crash; instead it should place some "default" or specified "absent" value (e.g. `""` or `"-"`) for each missing field. Values that were not provided should not be passed into the summary callables; we might just provide an `exclude_absent` decorator for summary transformers.
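As a rough sketch of the "absent value" idea, condensed from the `Nothing` sentinel that the patch above introduces (method set trimmed for brevity): the sentinel is unambiguously falsy, yet renders and formats like its placeholder text, so it lines up in columns.

```python
class Nothing(object):
    """Falsy sentinel for a missing value; behaves like its placeholder text."""

    def __init__(self, text=""):
        self._text = text

    def __str__(self):
        return self._text

    def __bool__(self):
        return False

    __nonzero__ = __bool__  # Python 2 spelling of __bool__

    def __format__(self, format_spec):
        return format(self._text, format_spec)

nada = Nothing("-")
assert not nada                        # missing values are still detectable
assert "{:5}".format(nada) == "-    "  # but align in columns like real text
```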
pyout/pyout
diff --git a/pyout/tests/test_field.py b/pyout/tests/test_field.py index 2a36ff1..0c1f9fa 100644 --- a/pyout/tests/test_field.py +++ b/pyout/tests/test_field.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- import pytest -from pyout.field import Field, StyleProcessors +from pyout.field import Field, Nothing, StyleProcessors def test_field_base(): @@ -37,6 +37,17 @@ def test_field_processors(): field.add("pre", "not registered key") [email protected]("text", ["", "-"], ids=["text=''", "text='-'"]) +def test_something_about_nothing(text): + nada = Nothing(text=text) + assert not nada + + assert str(nada) == text + assert "{:5}".format(nada) == "{:5}".format(text) + assert "x" + nada == "x" + text + assert nada + "x" == text + "x" + + def test_truncate_mark_true(): fn = StyleProcessors.truncate(7, marker=True) diff --git a/pyout/tests/test_tabular.py b/pyout/tests/test_tabular.py index d695b83..1964f88 100644 --- a/pyout/tests/test_tabular.py +++ b/pyout/tests/test_tabular.py @@ -51,6 +51,48 @@ def test_tabular_write_color(): assert eq_repr(fd.getvalue(), expected) +@patch("pyout.tabular.Terminal", TestTerminal) +def test_tabular_write_empty_string(): + fd = StringIO() + out = Tabular(stream=fd) + out({"name": ""}) + assert eq_repr(fd.getvalue(), "\n") + + +@patch("pyout.tabular.Terminal", TestTerminal) +def test_tabular_write_missing_column(): + fd = StringIO() + out = Tabular(columns=["name", "status"], stream=fd) + out({"name": "solo"}) + assert eq_repr(fd.getvalue(), "solo \n") + + +@patch("pyout.tabular.Terminal", TestTerminal) +def test_tabular_write_missing_column_missing_text(): + fd = StringIO() + out = Tabular(columns=["name", "status"], + style={"status": + {"missing": "-"}}, + stream=fd) + out({"name": "solo"}) + assert eq_repr(fd.getvalue(), "solo -\n") + + +@patch("pyout.tabular.Terminal", TestTerminal) +def test_tabular_write_missing_column_missing_object_data(): + class Data(object): + name = "solo" + data = Data() + + fd = StringIO() + out = Tabular(columns=["name", "status"], + style={"status": + {"missing": "-"}}, + stream=fd) + out(data) + assert eq_repr(fd.getvalue(), "solo -\n") + + @patch("pyout.tabular.Terminal", TestTerminal) def test_tabular_write_columns_from_orderdict_row(): fd = StringIO() @@ -601,6 +643,27 @@ def test_tabular_write_intervals_bold(): assert eq_repr(fd.getvalue(), expected) + +@patch("pyout.tabular.Terminal", TestTerminal) +def test_tabular_write_intervals_missing(): + fd = StringIO() + out = Tabular(style={"name": {"width": 3}, + "percent": {"bold": {"interval": + [[30, 50, False], + [50, 80, True]]}, + "width": 2}}, + stream=fd, force_styling=True) + out(OrderedDict([("name", "foo"), + ("percent", 78)])) + # Interval lookup function can handle a missing value. + out(OrderedDict([("name", "bar")])) + + expected = "foo " + unicode_cap("bold") + \ + "78" + unicode_cap("sgr0") + "\n" + \ + "bar \n" + assert eq_repr(fd.getvalue(), expected) + + @patch("pyout.tabular.Terminal", TestTerminal) def test_tabular_write_transform(): fd = StringIO() @@ -888,6 +951,26 @@ def test_tabular_write_callable_values(): assert len([ln for ln in lines if ln.endswith("baz over ")]) == 1 [email protected](10) +@patch("pyout.tabular.Terminal", TestTerminal) +def test_tabular_write_callable_transform_nothing(): + delay0 = Delayed(3) + + fd = StringIO() + out = Tabular(["name", "status"], + style={"status": {"transform": lambda n: n + 2}}, + stream=fd) + with out: + # The unspecified initial value is set to Nothing(). 
The + # transform function above, which is designed to take a + # number, won't be called with it. + out({"name": "foo", "status": delay0.run}) + assert eq_repr(fd.getvalue(), "foo \n") + delay0.now = True + lines = fd.getvalue().splitlines() + assert len([ln for ln in lines if ln.endswith("foo 5")]) == 1 + + @pytest.mark.timeout(10) @patch("pyout.tabular.Terminal", TestTerminal) def test_tabular_write_callable_values_multi_return():
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 3 }, "num_modified_files": 3 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[full]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "coverage" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work blessings==1.7 certifi==2021.5.30 coverage==6.2 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work jsonschema==3.2.0 mock==5.2.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work -e git+https://github.com/pyout/pyout.git@558c1fc9b760146b5a9b794f7a8c8dab2b378863#egg=pyout pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pyrsistent==0.18.0 pytest==6.2.4 pytest-timeout==2.1.0 six==1.17.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: pyout channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - blessings==1.7 - coverage==6.2 - jsonschema==3.2.0 - mock==5.2.0 - pyrsistent==0.18.0 - pytest-timeout==2.1.0 - six==1.17.0 prefix: /opt/conda/envs/pyout
[ "pyout/tests/test_field.py::test_field_base", "pyout/tests/test_field.py::test_field_update", "pyout/tests/test_field.py::test_field_processors", "pyout/tests/test_field.py::test_something_about_nothing[text='']", "pyout/tests/test_field.py::test_something_about_nothing[text='-']", "pyout/tests/test_field.py::test_truncate_mark_true", "pyout/tests/test_field.py::test_truncate_mark_string", "pyout/tests/test_field.py::test_truncate_mark_short", "pyout/tests/test_field.py::test_truncate_nomark", "pyout/tests/test_field.py::test_style_value_type", "pyout/tests/test_field.py::test_style_processor_translate", "pyout/tests/test_tabular.py::test_tabular_write_color", "pyout/tests/test_tabular.py::test_tabular_write_empty_string", "pyout/tests/test_tabular.py::test_tabular_write_missing_column", "pyout/tests/test_tabular.py::test_tabular_write_missing_column_missing_text", "pyout/tests/test_tabular.py::test_tabular_write_missing_column_missing_object_data", "pyout/tests/test_tabular.py::test_tabular_write_columns_from_orderdict_row", "pyout/tests/test_tabular.py::test_tabular_write_columns_orderdict_mapping[sequence]", "pyout/tests/test_tabular.py::test_tabular_write_columns_orderdict_mapping[dict]", "pyout/tests/test_tabular.py::test_tabular_write_data_as_list", "pyout/tests/test_tabular.py::test_tabular_write_header", "pyout/tests/test_tabular.py::test_tabular_write_data_as_object", "pyout/tests/test_tabular.py::test_tabular_write_different_data_types_same_output", "pyout/tests/test_tabular.py::test_tabular_write_header_with_style", "pyout/tests/test_tabular.py::test_tabular_nondefault_separator", "pyout/tests/test_tabular.py::test_tabular_write_data_as_list_no_columns", "pyout/tests/test_tabular.py::test_tabular_write_style_override", "pyout/tests/test_tabular.py::test_tabular_default_style", "pyout/tests/test_tabular.py::test_tabular_write_multicolor", "pyout/tests/test_tabular.py::test_tabular_write_align", "pyout/tests/test_tabular.py::test_tabular_rewrite", "pyout/tests/test_tabular.py::test_tabular_rewrite_notfound", "pyout/tests/test_tabular.py::test_tabular_rewrite_multi_id", "pyout/tests/test_tabular.py::test_tabular_rewrite_multi_value", "pyout/tests/test_tabular.py::test_tabular_rewrite_with_ids_property", "pyout/tests/test_tabular.py::test_tabular_rewrite_auto_width", "pyout/tests/test_tabular.py::test_tabular_rewrite_data_as_list", "pyout/tests/test_tabular.py::test_tabular_repaint", "pyout/tests/test_tabular.py::test_tabular_repaint_with_header", "pyout/tests/test_tabular.py::test_tabular_write_label_color", "pyout/tests/test_tabular.py::test_tabular_write_label_bold", "pyout/tests/test_tabular.py::test_tabular_write_label_bold_false", "pyout/tests/test_tabular.py::test_tabular_write_intervals_color", "pyout/tests/test_tabular.py::test_tabular_write_intervals_color_open_ended", "pyout/tests/test_tabular.py::test_tabular_write_intervals_color_outside_intervals", "pyout/tests/test_tabular.py::test_tabular_write_intervals_bold", "pyout/tests/test_tabular.py::test_tabular_write_intervals_missing", "pyout/tests/test_tabular.py::test_tabular_write_transform", "pyout/tests/test_tabular.py::test_tabular_write_transform_with_header", "pyout/tests/test_tabular.py::test_tabular_write_transform_autowidth", "pyout/tests/test_tabular.py::test_tabular_write_transform_on_header", "pyout/tests/test_tabular.py::test_tabular_write_transform_func_error", "pyout/tests/test_tabular.py::test_tabular_write_width_truncate_long", "pyout/tests/test_tabular.py::test_tabular_write_autowidth", 
"pyout/tests/test_tabular.py::test_tabular_write_autowidth_with_header", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max[marker=True]", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max[marker=False]", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max[marker=\\u2026]", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max_with_header", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_different_data_types_same_output", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_auto_false_exception", "pyout/tests/test_tabular.py::test_tabular_write_callable_values", "pyout/tests/test_tabular.py::test_tabular_write_callable_transform_nothing", "pyout/tests/test_tabular.py::test_tabular_write_callable_values_multi_return", "pyout/tests/test_tabular.py::test_tabular_write_callable_values_multicol_key_infer_column[result=tuple]", "pyout/tests/test_tabular.py::test_tabular_write_callable_values_multicol_key_infer_column[result=dict]", "pyout/tests/test_tabular.py::test_tabular_write_generator_function_values[gen_func]", "pyout/tests/test_tabular.py::test_tabular_write_generator_function_values[generator]", "pyout/tests/test_tabular.py::test_tabular_write_generator_values_multireturn", "pyout/tests/test_tabular.py::test_tabular_write_wait_noop_if_nothreads" ]
[]
[]
[]
MIT License
2,141
[ "pyout/field.py", "pyout/tabular.py", "pyout/elements.py" ]
[ "pyout/field.py", "pyout/tabular.py", "pyout/elements.py" ]
SethMMorton__natsort-49
2e474460d65998dd72f1db56556b03cb3f69f806
2018-02-10 21:44:11
2e474460d65998dd72f1db56556b03cb3f69f806
diff --git a/natsort/compat/locale.py b/natsort/compat/locale.py index 4cbe5f5..ab392ee 100644 --- a/natsort/compat/locale.py +++ b/natsort/compat/locale.py @@ -6,8 +6,21 @@ from __future__ import ( absolute_import ) +# Std. lib imports. +import sys + # Local imports. -from natsort.compat.py23 import PY_VERSION, cmp_to_key +from natsort.compat.py23 import ( + PY_VERSION, + cmp_to_key, + py23_unichr, +) + +# This string should be sorted after any other byte string because +# it contains the max unicode character repeated 20 times. +# You would need some odd data to come after that. +null_string = '' +null_string_max = py23_unichr(sys.maxunicode) * 20 # Make the strxfrm function from strcoll on Python2 # It can be buggy (especially on BSD-based systems), @@ -16,7 +29,12 @@ try: import icu from locale import getlocale - null_string = b'' + null_string_locale = b'' + + # This string should in theory be sorted after any other byte + # string because it contains the max byte char repeated many times. + # You would need some odd data to come after that. + null_string_locale_max = b'x7f' * 50 def dumb_sort(): return False @@ -43,11 +61,28 @@ except ImportError: import locale if PY_VERSION < 3: from locale import strcoll - strxfrm = cmp_to_key(strcoll) - null_string = strxfrm('') + sentinel = object() + + def custom_strcoll(a, b, last=sentinel): + """strcoll that can handle a sentinel that is always last.""" + if a is last: + return 0 if a is b else 1 + elif b is last: # a cannot also be sentinel b/c above logic + return -1 + else: # neither are sentinel + return strcoll(a, b) + + strxfrm = cmp_to_key(custom_strcoll) + null_string_locale = strxfrm('') + null_string_locale_max = strxfrm(sentinel) else: from locale import strxfrm - null_string = '' + null_string_locale = '' + + # This string should be sorted after any other byte string because + # it contains the max unicode character repeated 20 times. + # You would need some odd data to come after that. + null_string_locale_max = py23_unichr(sys.maxunicode) * 20 # On some systems, locale is broken and does not sort in the expected # order. We will try to detect this and compensate. diff --git a/natsort/natsort.py b/natsort/natsort.py index be887c7..557e07b 100644 --- a/natsort/natsort.py +++ b/natsort/natsort.py @@ -207,13 +207,24 @@ def natsort_keygen(key=None, alg=0, **_kwargs): alg |= ns._DUMB # Set some variables that will be passed to the factory functions - sep = natsort.compat.locale.null_string if alg & ns.LOCALEALPHA else '' + if alg & ns.NUMAFTER: + if alg & ns.LOCALEALPHA: + sep = natsort.compat.locale.null_string_locale_max + else: + sep = natsort.compat.locale.null_string_max + pre_sep = natsort.compat.locale.null_string_max + else: + if alg & ns.LOCALEALPHA: + sep = natsort.compat.locale.null_string_locale + else: + sep = natsort.compat.locale.null_string + pre_sep = natsort.compat.locale.null_string regex = _regex_chooser[alg & ns._NUMERIC_ONLY] # Create the functions that will be used to split strings. input_transform = _input_string_transform_factory(alg) component_transform = _string_component_transform_factory(alg) - final_transform = _final_data_transform_factory(alg, sep) + final_transform = _final_data_transform_factory(alg, sep, pre_sep) # Create the high-level parsing functions for strings, bytes, and numbers. 
string_func = _parse_string_factory( @@ -223,7 +234,7 @@ def natsort_keygen(key=None, alg=0, **_kwargs): if alg & ns.PATH: string_func = _parse_path_factory(string_func) bytes_func = _parse_bytes_factory(alg) - num_func = _parse_number_factory(alg, sep) + num_func = _parse_number_factory(alg, sep, pre_sep) # Return the natsort key with the parsing path pre-chosen. return partial( diff --git a/natsort/ns_enum.py b/natsort/ns_enum.py index 37a00de..4216ddd 100644 --- a/natsort/ns_enum.py +++ b/natsort/ns_enum.py @@ -42,6 +42,9 @@ class ns(object): Tell `natsort` to not search for exponents as part of a float number. For example, with `NOEXP` the number "5.6E5" would be interpreted as `5.6`, `"E"`, and `5` instead of `560000`. + NUMAFTER, NA + Tell `natsort` to sort numbers after non-numbers. By default + numbers will be ordered before non-numbers. PATH, P Tell `natsort` to interpret strings as filesystem paths, so they will be split according to the filesystem separator @@ -151,6 +154,7 @@ class ns(object): CAPITALFIRST = C = UNGROUPLETTERS NANLAST = NL = 1 << 10 COMPATIBILITYNORMALIZE = CN = 1 << 11 + NUMAFTER = NA = 1 << 12 # The below are private options for internal use only. _NUMERIC_ONLY = REAL | NOEXP diff --git a/natsort/utils.py b/natsort/utils.py index c33de1d..b6484b0 100644 --- a/natsort/utils.py +++ b/natsort/utils.py @@ -197,7 +197,7 @@ def _parse_bytes_factory(alg): return lambda x: (x,) -def _parse_number_factory(alg, sep): +def _parse_number_factory(alg, sep, pre_sep): """Create a function that will properly format a number in a tuple.""" nan_replace = float('+inf') if alg & ns.NANLAST else float('-inf') @@ -207,9 +207,9 @@ def _parse_number_factory(alg, sep): # Return the function, possibly wrapping in tuple if PATH is selected. if alg & ns.PATH and alg & ns.UNGROUPLETTERS and alg & ns.LOCALEALPHA: - return lambda x: ((('',), func(x)),) + return lambda x: (((pre_sep,), func(x)),) elif alg & ns.UNGROUPLETTERS and alg & ns.LOCALEALPHA: - return lambda x: (('',), func(x)) + return lambda x: ((pre_sep,), func(x)) elif alg & ns.PATH: return lambda x: (func(x),) else: @@ -363,7 +363,7 @@ def _string_component_transform_factory(alg): return partial(fast_int, **kwargs) -def _final_data_transform_factory(alg, sep): +def _final_data_transform_factory(alg, sep, pre_sep): """ Given a set of natsort algorithms, return the function to operate on the post-parsed strings according to the user's request. @@ -383,7 +383,7 @@ def _final_data_transform_factory(alg, sep): if not split_val: return (), () elif split_val[0] == sep: - return ('',), split_val + return (pre_sep,), split_val else: return (transform(val[0]),), split_val return func
Feature request natsorted(): Sort by letters, then numbers I'd like to submit a feature request to add an option that would allow natsorted() to sort by letters first, then numbers instead of numbers first, then letters. Here is an example of the list that I am looking to sort - I am sorting by index 2 ('ABCDE', 'BCDEF' and '10001'): ```python from natsort import natsorted a = [ ['192.168.1.1', 'generic_description_1', 'ABCDE', 'apples', '172.16.1.1', u'\nCircuit is up\n'], ['192.168.1.1', 'generic_description_2', 'BCDEF', 'oranges', '172.16.2.1', u'\nCircuit is up\n'], ['192.168.2.1', 'generic_description_3', '10001', 'bananas', '172.16.3.1', u'\nCircuit is up\n'] ] print(natsorted(a, key = lambda x: x[2])) ``` Here are the results that are printed: ``` [['192.168.2.1', 'generic_description_3', '10001', 'bananas', '172.16.3.1', u'\nCircuit is up\n'], ['192.168.1.1', 'generic_description_1', 'ABCDE', 'apples', '172.16.1.1', u'\nCircuit is up\n'], ['192.168.1.1', 'generic_description_2', 'BCDEF', 'oranges', '172.16.2.1', u'\nCircuit is up\n']] ``` And as you can see, it sorts by index 2 by numbers first, then letters: 10001 ABCDE BCDEF The option I am requesting would allow the sorting by letters first, then numbers - like this: ABCDE BCDEF 10001 Thanks for the consideration! -Matt
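A short usage sketch of the option this request led to: the patch above adds an `ns.NUMAFTER` (alias `ns.NA`) flag, so a natsort release containing that change is assumed.

```python
from natsort import natsorted, ns

keys = ['10001', 'ABCDE', 'BCDEF']
print(natsorted(keys))                   # ['10001', 'ABCDE', 'BCDEF']
print(natsorted(keys, alg=ns.NUMAFTER))  # ['ABCDE', 'BCDEF', '10001']

# For the nested rows in the example above, combine the flag with the same key:
# natsorted(a, key=lambda x: x[2], alg=ns.NUMAFTER)
```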
SethMMorton/natsort
diff --git a/test_natsort/test_final_data_transform_factory.py b/test_natsort/test_final_data_transform_factory.py index bfd3dfd..f0207e6 100644 --- a/test_natsort/test_final_data_transform_factory.py +++ b/test_natsort/test_final_data_transform_factory.py @@ -20,38 +20,38 @@ from hypothesis.strategies import ( def test_final_data_transform_factory_with_iterable_returns_tuple_with_no_options_example(): - assert _final_data_transform_factory(0, '')(iter([7]), '') == (7,) + assert _final_data_transform_factory(0, '', '')(iter([7]), '') == (7,) @given(text()) def test_final_data_transform_factory_with_iterable_returns_tuple_with_no_options(x): - assert _final_data_transform_factory(0, '')(iter([x]), '') == (x,) + assert _final_data_transform_factory(0, '', '')(iter([x]), '') == (x,) # UNGROUPLETTERS without LOCALE does nothing, as does LOCALE without UNGROUPLETTERS - assert _final_data_transform_factory(ns.UNGROUPLETTERS, '')(iter([x]), '') == _final_data_transform_factory(0, '')(iter([x]), '') - assert _final_data_transform_factory(ns.LOCALE, '')(iter([x]), '') == _final_data_transform_factory(0, '')(iter([x]), '') + assert _final_data_transform_factory(ns.UNGROUPLETTERS, '', '')(iter([x]), '') == _final_data_transform_factory(0, '', '')(iter([x]), '') + assert _final_data_transform_factory(ns.LOCALE, '', '')(iter([x]), '') == _final_data_transform_factory(0, '', '')(iter([x]), '') def test_final_data_transform_factory_with_empty_tuple_returns_double_empty_tuple(): - assert _final_data_transform_factory(ns.LOCALE | ns.UNGROUPLETTERS, '')((), '') == ((), ()) + assert _final_data_transform_factory(ns.LOCALE | ns.UNGROUPLETTERS, '', '')((), '') == ((), ()) def test_final_data_transform_factory_with_null_string_first_element_adds_empty_string_on_first_tuple_element(): - assert _final_data_transform_factory(ns.LOCALE | ns.UNGROUPLETTERS, '')(('', 60), '') == (('',), ('', 60)) + assert _final_data_transform_factory(ns.LOCALE | ns.UNGROUPLETTERS, '', 'xx')(('', 60), '') == (('xx',), ('', 60)) def test_final_data_transform_factory_returns_first_element_in_first_tuple_element_example(): - assert _final_data_transform_factory(ns.LOCALE | ns.UNGROUPLETTERS, '')(('this', 60), 'this60') == (('t',), ('this', 60)) + assert _final_data_transform_factory(ns.LOCALE | ns.UNGROUPLETTERS, '', '')(('this', 60), 'this60') == (('t',), ('this', 60)) @given(x=text().filter(bool), y=floats(allow_nan=False, allow_infinity=False) | integers()) def test_final_data_transform_factory_returns_first_element_in_first_tuple_element(x, y): - assert _final_data_transform_factory(ns.LOCALE | ns.UNGROUPLETTERS, '')((x, y), ''.join(map(py23_str, [x, y]))) == ((x[0],), (x, y)) + assert _final_data_transform_factory(ns.LOCALE | ns.UNGROUPLETTERS, '', '')((x, y), ''.join(map(py23_str, [x, y]))) == ((x[0],), (x, y)) def test_final_data_transform_factory_returns_first_element_in_first_tuple_element_caseswapped_with_DUMB_and_LOWERCASEFIRST_example(): - assert _final_data_transform_factory(ns.LOCALE | ns.UNGROUPLETTERS | ns._DUMB | ns.LOWERCASEFIRST, '')(('this', 60), 'this60') == (('T',), ('this', 60)) + assert _final_data_transform_factory(ns.LOCALE | ns.UNGROUPLETTERS | ns._DUMB | ns.LOWERCASEFIRST, '', '')(('this', 60), 'this60') == (('T',), ('this', 60)) @given(x=text().filter(bool), y=floats(allow_nan=False, allow_infinity=False) | integers()) def test_final_data_transform_factory_returns_first_element_in_first_tuple_element_caseswapped_with_DUMB_and_LOWERCASEFIRST(x, y): - assert _final_data_transform_factory(ns.LOCALE | 
ns.UNGROUPLETTERS | ns._DUMB | ns.LOWERCASEFIRST, '')((x, y), ''.join(map(py23_str, [x, y]))) == ((x[0].swapcase(),), (x, y)) + assert _final_data_transform_factory(ns.LOCALE | ns.UNGROUPLETTERS | ns._DUMB | ns.LOWERCASEFIRST, '', '')((x, y), ''.join(map(py23_str, [x, y]))) == ((x[0].swapcase(),), (x, y)) diff --git a/test_natsort/test_natsort_key.py b/test_natsort/test_natsort_key.py index a675cb2..9aabd11 100644 --- a/test_natsort/test_natsort_key.py +++ b/test_natsort/test_natsort_key.py @@ -34,10 +34,10 @@ if PY_VERSION >= 3: regex = _regex_chooser[ns.INT] pre = _input_string_transform_factory(ns.INT) post = _string_component_transform_factory(ns.INT) -after = _final_data_transform_factory(ns.INT, '') +after = _final_data_transform_factory(ns.INT, '', '') string_func = _parse_string_factory(ns.INT, '', regex.split, pre, post, after) bytes_func = _parse_bytes_factory(ns.INT) -num_func = _parse_number_factory(ns.INT, '') +num_func = _parse_number_factory(ns.INT, '', '') def test__natsort_key_with_numeric_input_and_PATH_returns_number_in_nested_tuple(): @@ -45,7 +45,7 @@ def test__natsort_key_with_numeric_input_and_PATH_returns_number_in_nested_tuple # so it will sort against the other as_path results. sfunc = _parse_path_factory(string_func) bytes_func = _parse_bytes_factory(ns.PATH) - num_func = _parse_number_factory(ns.PATH, '') + num_func = _parse_number_factory(ns.PATH, '', '') assert _natsort_key(10, None, sfunc, bytes_func, num_func) == (('', 10),) @@ -55,7 +55,7 @@ def test__natsort_key_with_bytes_input_and_PATH_returns_number_in_nested_tuple() # so it will sort against the other as_path results. sfunc = _parse_path_factory(string_func) bytes_func = _parse_bytes_factory(ns.PATH) - num_func = _parse_number_factory(ns.PATH, '') + num_func = _parse_number_factory(ns.PATH, '', '') assert _natsort_key(b'/hello/world', None, sfunc, bytes_func, num_func) == ((b'/hello/world',),) @@ -63,7 +63,7 @@ def test__natsort_key_with_tuple_of_paths_and_PATH_returns_triply_nested_tuple() # PATH also handles recursion well. 
sfunc = _parse_path_factory(string_func) bytes_func = _parse_bytes_factory(ns.PATH) - num_func = _parse_number_factory(ns.PATH, '') + num_func = _parse_number_factory(ns.PATH, '', '') assert _natsort_key(('/Folder', '/Folder (1)'), None, sfunc, bytes_func, num_func) == ((('/',), ('Folder',)), (('/',), ('Folder (', 1, ')'))) diff --git a/test_natsort/test_natsort_keygen.py b/test_natsort/test_natsort_keygen.py index e94d3d3..9ea408a 100644 --- a/test_natsort/test_natsort_keygen.py +++ b/test_natsort/test_natsort_keygen.py @@ -16,7 +16,7 @@ from natsort import ( ) from natsort.compat.py23 import PY_VERSION from natsort.compat.locale import ( - null_string, + null_string_locale, get_strxfrm, ) from compat.mock import patch @@ -79,9 +79,9 @@ def test_natsort_keygen_splits_input_with_locale(): load_locale('en_US') strxfrm = get_strxfrm() with patch('natsort.compat.locale.dumb_sort', return_value=False): - assert natsort_keygen(alg=ns.L)(INPUT) == ((null_string, 6, strxfrm('A-'), 5, strxfrm('.'), 34, strxfrm('e+'), 1), (strxfrm('/Folder ('), 1, strxfrm(')/Foo')), (null_string, 56.7)) + assert natsort_keygen(alg=ns.L)(INPUT) == ((null_string_locale, 6, strxfrm('A-'), 5, strxfrm('.'), 34, strxfrm('e+'), 1), (strxfrm('/Folder ('), 1, strxfrm(')/Foo')), (null_string_locale, 56.7)) with patch('natsort.compat.locale.dumb_sort', return_value=True): - assert natsort_keygen(alg=ns.L)(INPUT) == ((null_string, 6, strxfrm('aa--'), 5, strxfrm('..'), 34, strxfrm('eE++'), 1), (strxfrm('//ffoOlLdDeErR (('), 1, strxfrm('))//ffoOoO')), (null_string, 56.7)) + assert natsort_keygen(alg=ns.L)(INPUT) == ((null_string_locale, 6, strxfrm('aa--'), 5, strxfrm('..'), 34, strxfrm('eE++'), 1), (strxfrm('//ffoOlLdDeErR (('), 1, strxfrm('))//ffoOoO')), (null_string_locale, 56.7)) if PY_VERSION >= 3: assert natsort_keygen(alg=ns.LA)(b'6A-5.034e+1') == (b'6A-5.034e+1',) locale.setlocale(locale.LC_ALL, str('')) @@ -90,7 +90,7 @@ def test_natsort_keygen_splits_input_with_locale_and_capitalfirst(): load_locale('en_US') strxfrm = get_strxfrm() with patch('natsort.compat.locale.dumb_sort', return_value=False): - assert natsort_keygen(alg=ns.LA | ns.C)(INPUT) == ((('',), (null_string, 6, strxfrm('A-'), 5, strxfrm('.'), 34, strxfrm('e+'), 1)), (('/',), (strxfrm('/Folder ('), 1, strxfrm(')/Foo'))), (('',), (null_string, 56.7))) + assert natsort_keygen(alg=ns.LA | ns.C)(INPUT) == ((('',), (null_string_locale, 6, strxfrm('A-'), 5, strxfrm('.'), 34, strxfrm('e+'), 1)), (('/',), (strxfrm('/Folder ('), 1, strxfrm(')/Foo'))), (('',), (null_string_locale, 56.7))) if PY_VERSION >= 3: assert natsort_keygen(alg=ns.LA | ns.C)(b'6A-5.034e+1') == (b'6A-5.034e+1',) locale.setlocale(locale.LC_ALL, str('')) diff --git a/test_natsort/test_natsorted.py b/test_natsort/test_natsorted.py index fcbf75b..71ffbf6 100644 --- a/test_natsort/test_natsorted.py +++ b/test_natsort/test_natsorted.py @@ -82,8 +82,10 @@ def test_natsorted_returns_sorted_list_with_mixed_type_input_and_does_not_raise_ def test_natsorted_with_mixed_input_returns_sorted_results_without_error(): a = ['0', 'Á', '2', 'Z'] assert natsorted(a) == ['0', '2', 'Á', 'Z'] + assert natsorted(a, alg=ns.NUMAFTER) == ['Á', 'Z', '0', '2'] a = ['2', 'ä', 'b', 1.5, 3] assert natsorted(a) == [1.5, '2', 3, 'ä', 'b'] + assert natsorted(a, alg=ns.NUMAFTER) == ['ä', 'b', 1.5, '2', 3] def test_natsorted_with_nan_input_returns_sorted_results_with_nan_last_with_NANLAST(): @@ -243,8 +245,10 @@ def test_natsorted_with_LOCALE_and_mixed_input_returns_sorted_results_without_er load_locale('en_US') a = ['0', 'Á', '2', 
'Z'] assert natsorted(a, alg=ns.LOCALE) == ['0', '2', 'Á', 'Z'] + assert natsorted(a, alg=ns.LOCALE | ns.NUMAFTER) == ['Á', 'Z', '0', '2'] a = ['2', 'ä', 'b', 1.5, 3] assert natsorted(a, alg=ns.LOCALE) == [1.5, '2', 3, 'ä', 'b'] + assert natsorted(a, alg=ns.LOCALE | ns.NUMAFTER) == ['ä', 'b', 1.5, '2', 3] locale.setlocale(locale.LC_ALL, str('')) @@ -252,8 +256,10 @@ def test_natsorted_with_LOCALE_and_UNGROUPLETTERS_and_mixed_input_returns_sorted load_locale('en_US') a = ['0', 'Á', '2', 'Z'] assert natsorted(a, alg=ns.LOCALE | ns.UNGROUPLETTERS) == ['0', '2', 'Á', 'Z'] + assert natsorted(a, alg=ns.LOCALE | ns.UNGROUPLETTERS | ns.NUMAFTER) == ['Á', 'Z', '0', '2'] a = ['2', 'ä', 'b', 1.5, 3] assert natsorted(a, alg=ns.LOCALE | ns.UNGROUPLETTERS) == [1.5, '2', 3, 'ä', 'b'] + assert natsorted(a, alg=ns.LOCALE | ns.UNGROUPLETTERS | ns.NUMAFTER) == ['ä', 'b', 1.5, '2', 3] locale.setlocale(locale.LC_ALL, str('')) @@ -261,6 +267,16 @@ def test_natsorted_with_PATH_and_LOCALE_and_UNGROUPLETTERS_and_mixed_input_retur load_locale('en_US') a = ['0', 'Á', '2', 'Z'] assert natsorted(a, alg=ns.PATH | ns.LOCALE | ns.UNGROUPLETTERS) == ['0', '2', 'Á', 'Z'] + assert natsorted(a, alg=ns.PATH | ns.LOCALE | ns.UNGROUPLETTERS | ns.NUMAFTER) == ['Á', 'Z', '0', '2'] a = ['2', 'ä', 'b', 1.5, 3] assert natsorted(a, alg=ns.PATH | ns.LOCALE | ns.UNGROUPLETTERS) == [1.5, '2', 3, 'ä', 'b'] + assert natsorted(a, alg=ns.PATH | ns.LOCALE | ns.UNGROUPLETTERS | ns.NUMAFTER) == ['ä', 'b', 1.5, '2', 3] locale.setlocale(locale.LC_ALL, str('')) + + +def test_natsorted_sorts_an_odd_collection_of_string(): + a = ['Corn', 'apple', 'Banana', '73', 'Apple', '5039', 'corn', '~~~~~~', 'banana'] + assert natsorted(a) == ['73', '5039', 'Apple', 'Banana', 'Corn', + 'apple', 'banana', 'corn', '~~~~~~'] + assert natsorted(a, alg=ns.NUMAFTER) == ['Apple', 'Banana', 'Corn', + 'apple', 'banana', 'corn', '~~~~~~', '73', '5039'] diff --git a/test_natsort/test_parse_number_function.py b/test_natsort/test_parse_number_function.py index 163d066..2e7a9fe 100644 --- a/test_natsort/test_parse_number_function.py +++ b/test_natsort/test_parse_number_function.py @@ -18,38 +18,38 @@ from hypothesis.strategies import ( def test_parse_number_factory_makes_function_that_returns_tuple_example(): - assert _parse_number_factory(0, '')(57) == ('', 57) - assert _parse_number_factory(0, '')(float('nan')) == ('', float('-inf')) - assert _parse_number_factory(ns.NANLAST, '')(float('nan')) == ('', float('+inf')) + assert _parse_number_factory(0, '', '')(57) == ('', 57) + assert _parse_number_factory(0, '', '')(float('nan')) == ('', float('-inf')) + assert _parse_number_factory(ns.NANLAST, '', '')(float('nan')) == ('', float('+inf')) @given(floats(allow_nan=False) | integers()) def test_parse_number_factory_makes_function_that_returns_tuple(x): - assert _parse_number_factory(0, '')(x) == ('', x) + assert _parse_number_factory(0, '', '')(x) == ('', x) def test_parse_number_factory_with_PATH_makes_function_that_returns_nested_tuple_example(): - assert _parse_number_factory(ns.PATH, '')(57) == (('', 57),) + assert _parse_number_factory(ns.PATH, '', '')(57) == (('', 57),) @given(floats(allow_nan=False) | integers()) def test_parse_number_factory_with_PATH_makes_function_that_returns_nested_tuple(x): - assert _parse_number_factory(ns.PATH, '')(x) == (('', x),) + assert _parse_number_factory(ns.PATH, '', '')(x) == (('', x),) def test_parse_number_factory_with_UNGROUPLETTERS_LOCALE_makes_function_that_returns_nested_tuple_example(): - assert 
_parse_number_factory(ns.UNGROUPLETTERS | ns.LOCALE, '')(57) == (('',), ('', 57)) + assert _parse_number_factory(ns.UNGROUPLETTERS | ns.LOCALE, '', 'xx')(57) == (('xx',), ('', 57)) @given(floats(allow_nan=False) | integers()) def test_parse_number_factory_with_UNGROUPLETTERS_LOCALE_makes_function_that_returns_nested_tuple(x): - assert _parse_number_factory(ns.UNGROUPLETTERS | ns.LOCALE, '')(x) == (('',), ('', x)) + assert _parse_number_factory(ns.UNGROUPLETTERS | ns.LOCALE, '', 'xx')(x) == (('xx',), ('', x)) def test_parse_number_factory_with_PATH_UNGROUPLETTERS_LOCALE_makes_function_that_returns_nested_tuple_example(): - assert _parse_number_factory(ns.PATH | ns.UNGROUPLETTERS | ns.LOCALE, '')(57) == ((('',), ('', 57)),) + assert _parse_number_factory(ns.PATH | ns.UNGROUPLETTERS | ns.LOCALE, '', 'xx')(57) == ((('xx',), ('', 57)),) @given(floats(allow_nan=False) | integers()) def test_parse_number_factory_with_PATH_UNGROUPLETTERS_LOCALE_makes_function_that_returns_nested_tuple(x): - assert _parse_number_factory(ns.PATH | ns.UNGROUPLETTERS | ns.LOCALE, '')(x) == ((('',), ('', x)),) + assert _parse_number_factory(ns.PATH | ns.UNGROUPLETTERS | ns.LOCALE, '', 'xx')(x) == ((('xx',), ('', x)),) diff --git a/test_natsort/test_utils.py b/test_natsort/test_utils.py index cfa06b6..10ad0fa 100644 --- a/test_natsort/test_utils.py +++ b/test_natsort/test_utils.py @@ -24,7 +24,7 @@ from natsort.utils import ( chain_functions, ) from natsort.compat.py23 import py23_str, py23_cmp -from natsort.compat.locale import null_string +from natsort.compat.locale import null_string_locale from slow_splitters import ( sep_inserter, add_leading_space_if_first_is_num, @@ -149,6 +149,7 @@ def test_ns_enum_values_have_are_as_expected(): assert ns.UNGROUPLETTERS == ns.CAPITALFIRST assert ns.NANLAST == ns.NL assert ns.COMPATIBILITYNORMALIZE == ns.CN + assert ns.NUMAFTER == ns.NA # Convenience assert ns.LOCALE == ns.LOCALEALPHA | ns.LOCALENUM @@ -195,7 +196,7 @@ def test_sep_inserter_does_nothing_if_only_one_number_example(): def test_sep_inserter_inserts_separator_string_between_two_numbers_example(): assert list(_sep_inserter(iter([5, 9]), '')) == ['', 5, '', 9] - assert list(_sep_inserter(iter([5, 9]), null_string)) == [null_string, 5, null_string, 9] + assert list(_sep_inserter(iter([5, 9]), null_string_locale)) == [null_string_locale, 5, null_string_locale, 9] @given(lists(elements=text().filter(bool) | integers()))
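The test_patch above exercises natsort's new `ns.NUMAFTER` flag (alongside the `null_string` → `null_string_locale` rename). As a reading aid, a minimal sketch of the behavior those assertions encode — inputs and expected outputs are taken verbatim from the tests, and it assumes a natsort build that includes this change:

```python
from natsort import natsorted, ns

a = ['0', 'Á', '2', 'Z']
natsorted(a)                   # ['0', '2', 'Á', 'Z']  (default: numbers sort first)
natsorted(a, alg=ns.NUMAFTER)  # ['Á', 'Z', '0', '2']  (NUMAFTER: numbers sort last)
```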
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 4 }
5.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "hypothesis>=3.8.0" ], "pre_install": [ "apt-get update", "apt-get install -y gcc g++ libicu-dev" ], "python": "3.9", "reqs_path": [ "requirements.txt", "testing-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
astroid==1.5.3 attrs==25.3.0 coverage==7.8.0 exceptiongroup==1.2.2 execnet==2.1.1 hypothesis==6.130.6 iniconfig==2.1.0 lazy-object-proxy==1.10.0 -e git+https://github.com/SethMMorton/natsort.git@2e474460d65998dd72f1db56556b03cb3f69f806#egg=natsort packaging==24.2 pep8==1.7.1 pluggy==1.5.0 pyflakes==3.3.2 pytest==8.3.5 pytest-cache==1.0 pytest-cov==6.0.0 pytest-flakes==4.0.5 pytest-pep8==1.0.6 six==1.17.0 sortedcontainers==2.4.0 tomli==2.2.1 wrapt==1.17.2
name: natsort channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - astroid==1.5.3 - attrs==25.3.0 - coverage==7.8.0 - exceptiongroup==1.2.2 - execnet==2.1.1 - hypothesis==6.130.6 - iniconfig==2.1.0 - lazy-object-proxy==1.10.0 - packaging==24.2 - pep8==1.7.1 - pluggy==1.5.0 - pyflakes==3.3.2 - pytest==8.3.5 - pytest-cache==1.0 - pytest-cov==6.0.0 - pytest-flakes==4.0.5 - pytest-pep8==1.0.6 - six==1.17.0 - sortedcontainers==2.4.0 - tomli==2.2.1 - wrapt==1.17.2 prefix: /opt/conda/envs/natsort
[ "test_natsort/test_final_data_transform_factory.py::test_final_data_transform_factory_with_iterable_returns_tuple_with_no_options_example", "test_natsort/test_final_data_transform_factory.py::test_final_data_transform_factory_with_iterable_returns_tuple_with_no_options", "test_natsort/test_final_data_transform_factory.py::test_final_data_transform_factory_with_empty_tuple_returns_double_empty_tuple", "test_natsort/test_final_data_transform_factory.py::test_final_data_transform_factory_with_null_string_first_element_adds_empty_string_on_first_tuple_element", "test_natsort/test_final_data_transform_factory.py::test_final_data_transform_factory_returns_first_element_in_first_tuple_element_example", "test_natsort/test_final_data_transform_factory.py::test_final_data_transform_factory_returns_first_element_in_first_tuple_element", "test_natsort/test_final_data_transform_factory.py::test_final_data_transform_factory_returns_first_element_in_first_tuple_element_caseswapped_with_DUMB_and_LOWERCASEFIRST_example", "test_natsort/test_final_data_transform_factory.py::test_final_data_transform_factory_returns_first_element_in_first_tuple_element_caseswapped_with_DUMB_and_LOWERCASEFIRST", "test_natsort/test_natsort_key.py::test__natsort_key_with_numeric_input_and_PATH_returns_number_in_nested_tuple", "test_natsort/test_natsort_key.py::test__natsort_key_with_bytes_input_and_PATH_returns_number_in_nested_tuple", "test_natsort/test_natsort_key.py::test__natsort_key_with_tuple_of_paths_and_PATH_returns_triply_nested_tuple", "test_natsort/test_natsort_key.py::test__natsort_key_with_numeric_input_takes_number_path", "test_natsort/test_natsort_key.py::test__natsort_key_with_bytes_input_takes_bytes_path", "test_natsort/test_natsort_key.py::test__natsort_key_with_text_input_takes_string_path", "test_natsort/test_natsort_key.py::test__natsort_key_with_nested_input_takes_nested_path", "test_natsort/test_natsort_key.py::test__natsort_key_with_key_argument_applies_key_before_processing", "test_natsort/test_natsort_keygen.py::test_natsort_key_public_raises_DeprecationWarning_when_called", "test_natsort/test_natsort_keygen.py::test_natsort_keygen_with_invalid_alg_input_raises_ValueError", "test_natsort/test_natsort_keygen.py::test_natsort_keygen_returns_natsort_key_that_parses_input", "test_natsort/test_natsort_keygen.py::test_natsort_keygen_returns_key_that_can_be_used_to_sort_list_in_place_with_same_result_as_natsorted", "test_natsort/test_natsort_keygen.py::test_natsort_keygen_splits_input_with_defaults", "test_natsort/test_natsort_keygen.py::test_natsort_keygen_splits_input_with_real", "test_natsort/test_natsort_keygen.py::test_natsort_keygen_splits_input_with_lowercasefirst_noexp_float", "test_natsort/test_natsort_keygen.py::test_natsort_keygen_splits_input_with_path", "test_natsort/test_natsort_keygen.py::test_natsort_keygen_splits_input_with_ignorecase", "test_natsort/test_natsorted.py::test_natsorted_returns_strings_with_numbers_in_ascending_order", "test_natsort/test_natsorted.py::test_natsorted_returns_list_of_numbers_sorted_as_signed_floats_with_exponents", "test_natsort/test_natsorted.py::test_natsorted_returns_list_of_numbers_sorted_as_unsigned_floats_without_exponents_with_NOEXP_option", "test_natsort/test_natsorted.py::test_natsorted_returns_list_of_numbers_sorted_as_unsigned_ints_with_INT_option", "test_natsort/test_natsorted.py::test_natsorted_returns_list_of_numbers_sorted_as_unsigned_ints_with_DIGIT_and_VERSION_option", 
"test_natsort/test_natsorted.py::test_natsorted_returns_list_of_numbers_sorted_as_signed_ints_with_SIGNED_option", "test_natsort/test_natsorted.py::test_natsorted_returns_list_of_numbers_sorted_accounting_for_sign_with_SIGNED_option", "test_natsort/test_natsorted.py::test_natsorted_returns_list_of_numbers_sorted_not_accounting_for_sign_without_SIGNED_option", "test_natsort/test_natsorted.py::test_natsorted_returns_sorted_list_of_version_numbers_by_default_or_with_VERSION_option", "test_natsort/test_natsorted.py::test_natsorted_returns_sorted_list_with_mixed_type_input_and_does_not_raise_TypeError_on_Python3", "test_natsort/test_natsorted.py::test_natsorted_with_mixed_input_returns_sorted_results_without_error", "test_natsort/test_natsorted.py::test_natsorted_with_nan_input_returns_sorted_results_with_nan_last_with_NANLAST", "test_natsort/test_natsorted.py::test_natsorted_with_nan_input_returns_sorted_results_with_nan_first_without_NANLAST", "test_natsort/test_natsorted.py::test_natsorted_with_mixed_input_raises_TypeError_if_bytes_type_is_involved_on_Python3", "test_natsort/test_natsorted.py::test_natsorted_raises_ValueError_for_non_iterable_input", "test_natsort/test_natsorted.py::test_natsorted_recursivley_applies_key_to_nested_lists_to_return_sorted_nested_list", "test_natsort/test_natsorted.py::test_natsorted_applies_key_to_each_list_element_before_sorting_list", "test_natsort/test_natsorted.py::test_natsorted_returns_list_in_reversed_order_with_reverse_option", "test_natsort/test_natsorted.py::test_natsorted_sorts_OS_generated_paths_incorrectly_without_PATH_option", "test_natsort/test_natsorted.py::test_natsorted_sorts_OS_generated_paths_correctly_with_PATH_option", "test_natsort/test_natsorted.py::test_natsorted_can_handle_sorting_paths_and_numbers_with_PATH", "test_natsort/test_natsorted.py::test_natsorted_returns_results_in_ASCII_order_with_no_case_options", "test_natsort/test_natsorted.py::test_natsorted_returns_results_sorted_by_lowercase_ASCII_order_with_IGNORECASE", "test_natsort/test_natsorted.py::test_natsorted_returns_results_in_ASCII_order_but_with_lowercase_letters_first_with_LOWERCASEFIRST", "test_natsort/test_natsorted.py::test_natsorted_returns_results_with_uppercase_and_lowercase_letters_grouped_together_with_GROUPLETTERS", "test_natsort/test_natsorted.py::test_natsorted_returns_results_in_natural_order_with_GROUPLETTERS_and_LOWERCASEFIRST", "test_natsort/test_natsorted.py::test_natsorted_places_uppercase_letters_before_lowercase_letters_for_nested_input", "test_natsort/test_natsorted.py::test_natsorted_with_LOWERCASEFIRST_places_lowercase_letters_before_uppercase_letters_for_nested_input", "test_natsort/test_natsorted.py::test_natsorted_with_IGNORECASE_sorts_without_regard_to_case_for_nested_input", "test_natsort/test_natsorted.py::test_natsorted_sorts_an_odd_collection_of_string", "test_natsort/test_parse_number_function.py::test_parse_number_factory_makes_function_that_returns_tuple_example", "test_natsort/test_parse_number_function.py::test_parse_number_factory_makes_function_that_returns_tuple", "test_natsort/test_parse_number_function.py::test_parse_number_factory_with_PATH_makes_function_that_returns_nested_tuple_example", "test_natsort/test_parse_number_function.py::test_parse_number_factory_with_PATH_makes_function_that_returns_nested_tuple", "test_natsort/test_parse_number_function.py::test_parse_number_factory_with_UNGROUPLETTERS_LOCALE_makes_function_that_returns_nested_tuple_example", 
"test_natsort/test_parse_number_function.py::test_parse_number_factory_with_UNGROUPLETTERS_LOCALE_makes_function_that_returns_nested_tuple", "test_natsort/test_parse_number_function.py::test_parse_number_factory_with_PATH_UNGROUPLETTERS_LOCALE_makes_function_that_returns_nested_tuple_example", "test_natsort/test_parse_number_function.py::test_parse_number_factory_with_PATH_UNGROUPLETTERS_LOCALE_makes_function_that_returns_nested_tuple", "test_natsort/test_utils.py::test_do_decoding_decodes_bytes_string_to_unicode", "test_natsort/test_utils.py::test_args_to_enum_raises_TypeError_for_invalid_argument", "test_natsort/test_utils.py::test_args_to_enum_converts_signed_exp_float_to_ns_F", "test_natsort/test_utils.py::test_args_to_enum_converts_signed_noexp_float_to_ns_FN", "test_natsort/test_utils.py::test_args_to_enum_converts_unsigned_exp_float_to_ns_FU", "test_natsort/test_utils.py::test_args_to_enum_converts_unsigned_unexp_float_to_ns_FNU", "test_natsort/test_utils.py::test_args_to_enum_converts_float_and_path_and_py3safe_to_ns_FPT", "test_natsort/test_utils.py::test_args_to_enum_converts_int_and_path_to_ns_IP", "test_natsort/test_utils.py::test_args_to_enum_converts_unsigned_int_and_py3safe_to_ns_IUT", "test_natsort/test_utils.py::test_args_to_enum_converts_None_to_ns_IU", "test_natsort/test_utils.py::test_regex_chooser_returns_correct_regular_expression_object", "test_natsort/test_utils.py::test_ns_enum_values_have_are_as_expected", "test_natsort/test_utils.py::test_chain_functions_is_a_no_op_if_no_functions_are_given", "test_natsort/test_utils.py::test_chain_functions_does_one_function_if_one_function_is_given", "test_natsort/test_utils.py::test_chain_functions_combines_functions_in_given_order", "test_natsort/test_utils.py::test_groupletters_returns_letters_with_lowercase_transform_of_letter_example", "test_natsort/test_utils.py::test_groupeletters_returns_letters_with_lowercase_transform_of_letter", "test_natsort/test_utils.py::test_sep_inserter_does_nothing_if_no_numbers_example", "test_natsort/test_utils.py::test_sep_inserter_does_nothing_if_only_one_number_example", "test_natsort/test_utils.py::test_sep_inserter_inserts_separator_string_between_two_numbers_example", "test_natsort/test_utils.py::test_path_splitter_splits_path_string_by_separator_example", "test_natsort/test_utils.py::test_path_splitter_splits_path_string_by_separator", "test_natsort/test_utils.py::test_path_splitter_splits_path_string_by_separator_and_removes_extension_example", "test_natsort/test_utils.py::test_path_splitter_splits_path_string_by_separator_and_removes_extension", "test_natsort/test_utils.py::test_py23_cmp" ]
[ "test_natsort/test_natsort_keygen.py::test_natsort_keygen_splits_input_with_locale", "test_natsort/test_natsort_keygen.py::test_natsort_keygen_splits_input_with_locale_and_capitalfirst", "test_natsort/test_natsorted.py::test_natsorted_with_LOCALE_returns_results_sorted_by_lowercase_first_and_grouped_letters", "test_natsort/test_natsorted.py::test_natsorted_with_LOCALE_and_CAPITALFIRST_returns_results_sorted_by_capital_first_and_ungrouped", "test_natsort/test_natsorted.py::test_natsorted_with_LOCALE_and_LOWERCASEFIRST_returns_results_sorted_by_uppercase_first_and_grouped_letters", "test_natsort/test_natsorted.py::test_natsorted_with_LOCALE_and_CAPITALFIRST_and_LOWERCASE_returns_results_sorted_by_capital_last_and_ungrouped", "test_natsort/test_natsorted.py::test_natsorted_with_LOCALE_and_en_setting_returns_results_sorted_by_en_language", "test_natsort/test_natsorted.py::test_natsorted_with_LOCALE_and_mixed_input_returns_sorted_results_without_error", "test_natsort/test_natsorted.py::test_natsorted_with_LOCALE_and_UNGROUPLETTERS_and_mixed_input_returns_sorted_results_without_error", "test_natsort/test_natsorted.py::test_natsorted_with_PATH_and_LOCALE_and_UNGROUPLETTERS_and_mixed_input_returns_sorted_results_without_error", "test_natsort/test_utils.py::test_sep_inserter_inserts_separator_between_two_numbers" ]
[]
[]
MIT License
2,143
[ "natsort/ns_enum.py", "natsort/natsort.py", "natsort/compat/locale.py", "natsort/utils.py" ]
[ "natsort/ns_enum.py", "natsort/natsort.py", "natsort/compat/locale.py", "natsort/utils.py" ]
pyout__pyout-48
9b85940318d2b583a2e7f4bc1fd8474d08aab679
2018-02-11 16:40:57
5c6e7d9ea60e63704333e554567561df1123bd3f
kyleam: Woops, tests failing because I didn't account for the merge #47. Will rebase.
diff --git a/CHANGELOG.md b/CHANGELOG.md index 58115f6..1942ecc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,8 @@ TODO Summary The only associated user-facing change is the rename of `pyout.SCHEMA` to `pyout.schema`. +- The style attribute "label" has been renamed to "lookup". + ### Deprecated ### Fixed ### Removed diff --git a/pyout/elements.py b/pyout/elements.py index 700f53d..d88c4b8 100644 --- a/pyout/elements.py +++ b/pyout/elements.py @@ -15,7 +15,7 @@ schema = { "bold": { "description": "Whether text is bold", "oneOf": [{"type": "boolean"}, - {"$ref": "#/definitions/label"}, + {"$ref": "#/definitions/lookup"}, {"$ref": "#/definitions/interval"}], "default": False, "scope": "field"}, @@ -24,7 +24,7 @@ schema = { "oneOf": [{"type": "string", "enum": ["black", "red", "green", "yellow", "blue", "magenta", "cyan", "white"]}, - {"$ref": "#/definitions/label"}, + {"$ref": "#/definitions/lookup"}, {"$ref": "#/definitions/interval"}], "default": "black", "scope": "field"}, @@ -37,7 +37,7 @@ schema = { "underline": { "description": "Whether text is underlined", "oneOf": [{"type": "boolean"}, - {"$ref": "#/definitions/label"}, + {"$ref": "#/definitions/lookup"}, {"$ref": "#/definitions/interval"}], "default": False, "scope": "field"}, @@ -76,10 +76,10 @@ schema = { {"type": ["string", "boolean"]}], "additionalItems": False}]}}, "additionalProperties": False}, - "label": { + "lookup": { "description": "Map a value to a style", "type": "object", - "properties": {"label": {"type": "object"}}, + "properties": {"lookup": {"type": "object"}}, "additionalProperties": False}, "transform": { "description": """An arbitrary function. diff --git a/pyout/field.py b/pyout/field.py index 3b0664c..12cc4a7 100644 --- a/pyout/field.py +++ b/pyout/field.py @@ -360,13 +360,13 @@ class StyleProcessors(object): Returns ------- - str, {"simple", "label", "interval"} + str, {"simple", "lookup", "interval"} """ try: keys = list(value.keys()) except AttributeError: return "simple" - if keys in [["label"], ["interval"]]: + if keys in [["lookup"], ["interval"]]: return keys[0] raise ValueError("Type of `value` could not be determined") @@ -412,7 +412,7 @@ class StyleProcessors(object): yield self.by_key(key) elif key_type is str: yield self.by_key(column_style[key]) - elif vtype == "label": + elif vtype == "lookup": yield self.by_lookup(column_style[key][vtype], attr_key) elif vtype == "interval": yield self.by_interval_lookup(column_style[key][vtype],
"label" -> "enum" or alike? Although originally `enum` is for enumerating and in our case to assign arbitrary style values, I feel that it might be a better fit instead of a `label`. just an idea, feel free to close
pyout/pyout
diff --git a/pyout/tests/test_field.py b/pyout/tests/test_field.py index 0c1f9fa..b4d72a1 100644 --- a/pyout/tests/test_field.py +++ b/pyout/tests/test_field.py @@ -82,7 +82,7 @@ def test_style_value_type(): assert fn(True) == "simple" assert fn("red") == "simple" - assert fn({"label": {"BAD": "red"}}) == "label" + assert fn({"lookup": {"BAD": "red"}}) == "lookup" interval = {"interval": [(0, 50, "red"), (50, 80, "yellow")]} assert fn(interval) == "interval" diff --git a/pyout/tests/test_tabular.py b/pyout/tests/test_tabular.py index 49affe1..aa342e8 100644 --- a/pyout/tests/test_tabular.py +++ b/pyout/tests/test_tabular.py @@ -517,10 +517,10 @@ def test_tabular_repaint_with_header(): @patch("pyout.tabular.Terminal", TestTerminal) -def test_tabular_write_label_color(): +def test_tabular_write_lookup_color(): fd = StringIO() out = Tabular(style={"name": {"width": 3}, - "status": {"color": {"label": {"BAD": "red"}}, + "status": {"color": {"lookup": {"BAD": "red"}}, "width": 6}}, stream=fd, force_styling=True) out(OrderedDict([("name", "foo"), @@ -535,10 +535,10 @@ def test_tabular_write_label_color(): @patch("pyout.tabular.Terminal", TestTerminal) -def test_tabular_write_label_bold(): +def test_tabular_write_lookup_bold(): fd = StringIO() out = Tabular(style={"name": {"width": 3}, - "status": {"bold": {"label": {"BAD": True}}, + "status": {"bold": {"lookup": {"BAD": True}}, "width": 6}}, stream=fd, force_styling=True) out(OrderedDict([("name", "foo"), @@ -553,10 +553,10 @@ def test_tabular_write_label_bold(): @patch("pyout.tabular.Terminal", TestTerminal) -def test_tabular_write_label_bold_false(): +def test_tabular_write_lookup_bold_false(): fd = StringIO() out = Tabular(style={"name": {"width": 3}, - "status": {"bold": {"label": {"BAD": False}}, + "status": {"bold": {"lookup": {"BAD": False}}, "width": 6}}, stream=fd, force_styling=True) out(OrderedDict([("name", "foo"), @@ -570,9 +570,9 @@ def test_tabular_write_label_bold_false(): @patch("pyout.tabular.Terminal", TestTerminal) -def test_tabular_write_label_non_hashable(): +def test_tabular_write_lookup_non_hashable(): fd = StringIO() - out = Tabular(style={"status": {"color": {"label": {"BAD": "red"}}}}, + out = Tabular(style={"status": {"color": {"lookup": {"BAD": "red"}}}}, stream=fd) out(OrderedDict([("status", [0, 1])])) expected = ("[0, 1]\n")
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 3 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[full]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-cov", "pytest-timeout", "pytest-mock" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work blessings==1.7 certifi==2021.5.30 coverage==6.2 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work jsonschema==3.2.0 mock==5.2.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work -e git+https://github.com/pyout/pyout.git@9b85940318d2b583a2e7f4bc1fd8474d08aab679#egg=pyout pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pyrsistent==0.18.0 pytest==6.2.4 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-timeout==2.1.0 six==1.17.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: pyout channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - blessings==1.7 - coverage==6.2 - jsonschema==3.2.0 - mock==5.2.0 - pyrsistent==0.18.0 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-timeout==2.1.0 - six==1.17.0 - tomli==1.2.3 prefix: /opt/conda/envs/pyout
[ "pyout/tests/test_field.py::test_style_value_type", "pyout/tests/test_tabular.py::test_tabular_write_lookup_color", "pyout/tests/test_tabular.py::test_tabular_write_lookup_bold", "pyout/tests/test_tabular.py::test_tabular_write_lookup_bold_false", "pyout/tests/test_tabular.py::test_tabular_write_lookup_non_hashable" ]
[]
[ "pyout/tests/test_field.py::test_field_base", "pyout/tests/test_field.py::test_field_update", "pyout/tests/test_field.py::test_field_processors", "pyout/tests/test_field.py::test_something_about_nothing[text='']", "pyout/tests/test_field.py::test_something_about_nothing[text='-']", "pyout/tests/test_field.py::test_truncate_mark_true", "pyout/tests/test_field.py::test_truncate_mark_string", "pyout/tests/test_field.py::test_truncate_mark_short", "pyout/tests/test_field.py::test_truncate_nomark", "pyout/tests/test_field.py::test_style_processor_translate", "pyout/tests/test_tabular.py::test_tabular_write_color", "pyout/tests/test_tabular.py::test_tabular_write_empty_string", "pyout/tests/test_tabular.py::test_tabular_write_missing_column", "pyout/tests/test_tabular.py::test_tabular_write_missing_column_missing_text", "pyout/tests/test_tabular.py::test_tabular_write_list_value", "pyout/tests/test_tabular.py::test_tabular_write_missing_column_missing_object_data", "pyout/tests/test_tabular.py::test_tabular_write_columns_from_orderdict_row", "pyout/tests/test_tabular.py::test_tabular_write_columns_orderdict_mapping[sequence]", "pyout/tests/test_tabular.py::test_tabular_write_columns_orderdict_mapping[dict]", "pyout/tests/test_tabular.py::test_tabular_write_data_as_list", "pyout/tests/test_tabular.py::test_tabular_write_header", "pyout/tests/test_tabular.py::test_tabular_write_data_as_object", "pyout/tests/test_tabular.py::test_tabular_write_different_data_types_same_output", "pyout/tests/test_tabular.py::test_tabular_write_header_with_style", "pyout/tests/test_tabular.py::test_tabular_nondefault_separator", "pyout/tests/test_tabular.py::test_tabular_write_data_as_list_no_columns", "pyout/tests/test_tabular.py::test_tabular_write_style_override", "pyout/tests/test_tabular.py::test_tabular_default_style", "pyout/tests/test_tabular.py::test_tabular_write_multicolor", "pyout/tests/test_tabular.py::test_tabular_write_align", "pyout/tests/test_tabular.py::test_tabular_rewrite", "pyout/tests/test_tabular.py::test_tabular_rewrite_notfound", "pyout/tests/test_tabular.py::test_tabular_rewrite_multi_id", "pyout/tests/test_tabular.py::test_tabular_rewrite_multi_value", "pyout/tests/test_tabular.py::test_tabular_rewrite_with_ids_property", "pyout/tests/test_tabular.py::test_tabular_rewrite_auto_width", "pyout/tests/test_tabular.py::test_tabular_rewrite_data_as_list", "pyout/tests/test_tabular.py::test_tabular_repaint", "pyout/tests/test_tabular.py::test_tabular_repaint_with_header", "pyout/tests/test_tabular.py::test_tabular_write_intervals_color", "pyout/tests/test_tabular.py::test_tabular_write_intervals_color_open_ended", "pyout/tests/test_tabular.py::test_tabular_write_intervals_color_outside_intervals", "pyout/tests/test_tabular.py::test_tabular_write_intervals_bold", "pyout/tests/test_tabular.py::test_tabular_write_intervals_missing", "pyout/tests/test_tabular.py::test_tabular_write_transform", "pyout/tests/test_tabular.py::test_tabular_write_transform_with_header", "pyout/tests/test_tabular.py::test_tabular_write_transform_autowidth", "pyout/tests/test_tabular.py::test_tabular_write_transform_on_header", "pyout/tests/test_tabular.py::test_tabular_write_transform_func_error", "pyout/tests/test_tabular.py::test_tabular_write_width_truncate_long", "pyout/tests/test_tabular.py::test_tabular_write_autowidth", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_with_header", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min", 
"pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max[marker=True]", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max[marker=False]", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max[marker=\\u2026]", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_min_max_with_header", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_different_data_types_same_output", "pyout/tests/test_tabular.py::test_tabular_write_autowidth_auto_false_exception", "pyout/tests/test_tabular.py::test_tabular_write_callable_values", "pyout/tests/test_tabular.py::test_tabular_write_callable_transform_nothing", "pyout/tests/test_tabular.py::test_tabular_write_callable_values_multi_return", "pyout/tests/test_tabular.py::test_tabular_write_callable_values_multicol_key_infer_column[result=tuple]", "pyout/tests/test_tabular.py::test_tabular_write_callable_values_multicol_key_infer_column[result=dict]", "pyout/tests/test_tabular.py::test_tabular_write_generator_function_values[gen_func]", "pyout/tests/test_tabular.py::test_tabular_write_generator_function_values[generator]", "pyout/tests/test_tabular.py::test_tabular_write_generator_values_multireturn", "pyout/tests/test_tabular.py::test_tabular_write_wait_noop_if_nothreads" ]
[]
MIT License
2,144
[ "pyout/field.py", "pyout/elements.py", "CHANGELOG.md" ]
[ "pyout/field.py", "pyout/elements.py", "CHANGELOG.md" ]
pika__pika-954
3d3b95d31b67dfeaf5ef43650c162e25169336e6
2018-02-11 22:47:04
7b6d7983db021ae4b84d08ea9cee4b8f960ada43
codecov[bot]: # [Codecov](https://codecov.io/gh/pika/pika/pull/954?src=pr&el=h1) Report > Merging [#954](https://codecov.io/gh/pika/pika/pull/954?src=pr&el=desc) into [master](https://codecov.io/gh/pika/pika/commit/3d3b95d31b67dfeaf5ef43650c162e25169336e6?src=pr&el=desc) will **increase** coverage by `0.01%`. > The diff coverage is `78.57%`. [![Impacted file tree graph](https://codecov.io/gh/pika/pika/pull/954/graphs/tree.svg?src=pr&token=cJFWQg66l4&width=650&height=150)](https://codecov.io/gh/pika/pika/pull/954?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #954 +/- ## ========================================== + Coverage 82.33% 82.34% +0.01% ========================================== Files 19 19 Lines 3690 3694 +4 Branches 548 549 +1 ========================================== + Hits 3038 3042 +4 Misses 502 502 Partials 150 150 ``` | [Impacted Files](https://codecov.io/gh/pika/pika/pull/954?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [pika/channel.py](https://codecov.io/gh/pika/pika/pull/954/diff?src=pr&el=tree#diff-cGlrYS9jaGFubmVsLnB5) | `94.34% <100%> (+0.05%)` | :arrow_up: | | [pika/adapters/blocking\_connection.py](https://codecov.io/gh/pika/pika/pull/954/diff?src=pr&el=tree#diff-cGlrYS9hZGFwdGVycy9ibG9ja2luZ19jb25uZWN0aW9uLnB5) | `84.72% <62.5%> (ø)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/pika/pika/pull/954?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/pika/pika/pull/954?src=pr&el=footer). Last update [3d3b95d...8ac57ae](https://codecov.io/gh/pika/pika/pull/954?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
diff --git a/pika/adapters/base_connection.py b/pika/adapters/base_connection.py index ffeeadc..cecd3b7 100644 --- a/pika/adapters/base_connection.py +++ b/pika/adapters/base_connection.py @@ -219,7 +219,7 @@ class BaseConnection(connection.Connection): error = 'Connection to %s:%s failed: %s' % (sock_addr_tuple[4][0], sock_addr_tuple[4][1], error) - LOGGER.warning(error) + LOGGER.error(error) return error # Handle SSL Connection Negotiation diff --git a/pika/adapters/blocking_connection.py b/pika/adapters/blocking_connection.py index ea9b6c1..d021e94 100644 --- a/pika/adapters/blocking_connection.py +++ b/pika/adapters/blocking_connection.py @@ -705,7 +705,8 @@ class BlockingConnection(object): with self._acquire_event_dispatch() as dispatch_acquired: # Check if we can actually process pending events common_terminator = lambda: bool(dispatch_acquired and - (self._channels_pending_dispatch or self._ready_events)) + (self._channels_pending_dispatch or + self._ready_events)) if time_limit is None: self._flush_output(common_terminator) else: @@ -954,7 +955,7 @@ class ReturnedMessage(object): class _ConsumerInfo(object): """Information about an active consumer""" - __slots__ = ('consumer_tag', 'no_ack', 'callback', + __slots__ = ('consumer_tag', 'no_ack', 'on_message_callback', 'alternate_event_sink', 'state') # Consumer states @@ -963,16 +964,16 @@ class _ConsumerInfo(object): TEARING_DOWN = 3 CANCELLED_BY_BROKER = 4 - def __init__(self, consumer_tag, no_ack, callback=None, + def __init__(self, consumer_tag, no_ack, on_message_callback=None, alternate_event_sink=None): """ NOTE: exactly one of callback/alternate_event_sink musts be non-None. :param str consumer_tag: :param bool no_ack: the no-ack value for the consumer - :param callable callback: The function for dispatching messages to + :param callable on_message_callback: The function for dispatching messages to user, having the signature: - callback(channel, method, properties, body) + on_message_callback(channel, method, properties, body) channel: BlockingChannel method: spec.Basic.Deliver properties: spec.BasicProperties @@ -983,12 +984,12 @@ class _ConsumerInfo(object): `_pending_events` container. Signature: alternate_event_sink(evt) """ - assert (callback is None) != (alternate_event_sink is None), ( - 'exactly one of callback/alternate_event_sink must be non-None', - callback, alternate_event_sink) + assert (on_message_callback is None) != (alternate_event_sink is None), ( + 'exactly one of on_message_callback/alternate_event_sink must be non-None', + on_message_callback, alternate_event_sink) self.consumer_tag = consumer_tag self.no_ack = no_ack - self.callback = callback + self.on_message_callback = on_message_callback self.alternate_event_sink = alternate_event_sink self.state = self.SETTING_UP @@ -1316,7 +1317,7 @@ class BlockingChannel(object): self._cleanup() method = method_frame.method raise exceptions.ChannelClosed(method.reply_code, - method.reply_text) + method.reply_text) def _on_consumer_cancelled_by_broker(self, method_frame): """Called by impl when broker cancels consumer via Basic.Cancel. 
@@ -1402,8 +1403,8 @@ class BlockingChannel(object): if type(evt) is _ConsumerDeliveryEvt: consumer_info = self._consumer_infos[evt.method.consumer_tag] - consumer_info.callback(self, evt.method, - evt.properties, evt.body) + consumer_info.on_message_callback(self, evt.method, + evt.properties, evt.body) elif type(evt) is _ConsumerCancellationEvt: del self._consumer_infos[evt.method_frame.method.consumer_tag] @@ -1500,7 +1501,7 @@ class BlockingChannel(object): def basic_consume(self, queue, - callback, + on_message_callback, no_ack=False, exclusive=False, consumer_tag=None, @@ -1520,9 +1521,9 @@ class BlockingChannel(object): :param queue: The queue from which to consume :type queue: str or unicode - :param callable callback: Required function for dispatching messages + :param callable on_message_callback: Required function for dispatching messages to user, having the signature: - callback(channel, method, properties, body) + on_message_callback(channel, method, properties, body) channel: BlockingChannel method: spec.Basic.Deliver properties: spec.BasicProperties @@ -1541,17 +1542,17 @@ class BlockingChannel(object): consumer_tag is already present. """ - if not callable(callback): - raise ValueError('callback callback must be callable; got %r' - % callback) + if not callable(on_message_callback): + raise ValueError('callback on_message_callback must be callable; got %r' + % on_message_callback) return self._basic_consume_impl( queue=queue, + on_message_callback=on_message_callback, no_ack=no_ack, exclusive=exclusive, consumer_tag=consumer_tag, - arguments=arguments, - callback=callback) + arguments=arguments) def _basic_consume_impl(self, queue, @@ -1559,12 +1560,12 @@ class BlockingChannel(object): exclusive, consumer_tag, arguments=None, - callback=None, + on_message_callback=None, alternate_event_sink=None): """The low-level implementation used by `basic_consume` and `consume`. See `basic_consume` docstring for more info. - NOTE: exactly one of callback/alternate_event_sink musts be + NOTE: exactly one of on_message_callback/alternate_event_sink musts be non-None. This method has one additional parameter alternate_event_sink over the @@ -1580,10 +1581,10 @@ class BlockingChannel(object): consumer_tag is already present. 
""" - if (callback is None) == (alternate_event_sink is None): + if (on_message_callback is None) == (alternate_event_sink is None): raise ValueError( - ('exactly one of callback/alternate_event_sink must ' - 'be non-None', callback, alternate_event_sink)) + ('exactly one of on_message_callback/alternate_event_sink must ' + 'be non-None', on_message_callback, alternate_event_sink)) if not consumer_tag: # Need a consumer tag to register consumer info before sending @@ -1599,13 +1600,13 @@ class BlockingChannel(object): self._consumer_infos[consumer_tag] = _ConsumerInfo( consumer_tag, no_ack=no_ack, - callback=callback, + on_message_callback=on_message_callback, alternate_event_sink=alternate_event_sink) try: with self._basic_consume_ok_result as ok_result: tag = self._impl.basic_consume( - callback=self._on_consumer_message_delivery, + on_message_callback=self._on_consumer_message_delivery, queue=queue, no_ack=no_ack, exclusive=exclusive, diff --git a/pika/channel.py b/pika/channel.py index 2ec16f6..b1ff5cc 100644 --- a/pika/channel.py +++ b/pika/channel.py @@ -262,11 +262,12 @@ class Channel(object): def basic_consume(self, queue, - callback, + on_message_callback, no_ack=False, exclusive=False, consumer_tag=None, - arguments=None): + arguments=None, + callback=None): """Sends the AMQP 0-9-1 command Basic.Consume to the broker and binds messages for the consumer_tag to the consumer callback. If you do not pass in a consumer_tag, one will be automatically generated for you. Returns @@ -280,8 +281,8 @@ class Channel(object): :param queue: The queue to consume from. Use the empty string to specify the most recent server-named queue for this channel. :type queue: str or unicode - :param callable callback: The function to call when consuming - with the signature callback(channel, method, properties, body), where + :param callable on_message_callback: The function to call when consuming + with the signature on_message_callback(channel, method, properties, body), where channel: pika.Channel method: pika.spec.Basic.Deliver properties: pika.spec.BasicProperties @@ -292,12 +293,15 @@ class Channel(object): :param consumer_tag: Specify your own consumer tag :type consumer_tag: str or unicode :param dict arguments: Custom key/value pair arguments for the consumer + :param callable callback: callback(pika.frame.Method) for method + Basic.ConsumeOk. :rtype: str :raises ValueError: """ - self._require_callback(callback) + self._require_callback(on_message_callback) self._validate_channel() + self._validate_rpc_completion_callback(callback) # If a consumer tag was not passed, create one if not consumer_tag: @@ -309,14 +313,17 @@ class Channel(object): if no_ack: self._consumers_with_noack.add(consumer_tag) - self._consumers[consumer_tag] = callback + self._consumers[consumer_tag] = on_message_callback + + rpc_callback = self._on_eventok if callback is None else callback + self._rpc(spec.Basic.Consume(queue=queue, consumer_tag=consumer_tag, no_ack=no_ack, exclusive=exclusive, arguments=arguments or dict()), - self._on_eventok, [(spec.Basic.ConsumeOk, - {'consumer_tag': consumer_tag})]) + rpc_callback, [(spec.Basic.ConsumeOk, + {'consumer_tag': consumer_tag})]) return consumer_tag
Channel.basic_consume doesn't support consume-ok/completion callback

[Channel.basic_consume](https://github.com/pika/pika/blob/3d3b95d31b67dfeaf5ef43650c162e25169336e6/pika/channel.py#L263) doesn't provide an argument for specifying the completion callback to be called upon [consume-ok](https://www.rabbitmq.com/amqp-0-9-1-reference.html#basic.consume-ok). (The `callback` arg that it presently supports is for consuming incoming messages.) A client's logic might need to know that the `basic-consume` request succeeded before initiating other operations (e.g., when requesting exclusive access). Lack of the completion callback prevents the application from doing so easily.

Implementation notes:

1. Rename `Channel.basic_consume`'s consumer callback (presently named `callback`) to `consumer_callback`, `message_callback`, `message_rx_callback`, or similar, and name the completion callback `callback` to follow the recently-established convention for naming completion callbacks. Also rename the consumer callback arg in `BlockingChannel.basic_consume` for consistency with `Channel.basic_consume`.
2. If the completion callback isn't specified, auto-generate and return the consumer tag, following the legacy behavior. If the completion callback is specified, return None from `Channel.basic_consume` and let the server generate the consumer tag and return it via [consume-ok](https://www.rabbitmq.com/amqp-0-9-1-reference.html#basic.consume-ok). Update the docstring in `Channel.basic_consume` to reflect this.
3. Make sure pika's [Channel._generate_consumer_tag](https://github.com/pika/pika/blob/3d3b95d31b67dfeaf5ef43650c162e25169336e6/pika/channel.py#L323) doesn't generate consumer tags that can collide with server-generated ones, by prefixing "pika" to the consumer tag names it generates.
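As a reading aid, a hedged sketch of the call shape the patch above introduces — parameter names are taken from the diff, `channel` is assumed to be an already-open async `pika.channel.Channel`, and the queue name is hypothetical:

```python
# Consumer callback: invoked for each delivered message.
def on_message(channel, method, properties, body):
    channel.basic_ack(delivery_tag=method.delivery_tag)

# Completion callback: invoked on Basic.ConsumeOk, i.e. once the broker
# has confirmed that the basic-consume request succeeded.
def on_consume_ok(method_frame):
    print('consume-ok for tag:', method_frame.method.consumer_tag)

channel.basic_consume('my-queue', on_message, callback=on_consume_ok)
```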
pika/pika
diff --git a/tests/acceptance/async_adapter_tests.py b/tests/acceptance/async_adapter_tests.py index 48be9a6..81a8c3c 100644 --- a/tests/acceptance/async_adapter_tests.py +++ b/tests/acceptance/async_adapter_tests.py @@ -340,7 +340,6 @@ class TestZ_PublishAndConsume(BoundQueueTestCase, AsyncAdapters): # pylint: dis self.channel.basic_cancel(self.ctag, callback=self.on_cancelled) - class TestZ_PublishAndConsumeBig(BoundQueueTestCase, AsyncAdapters): # pylint: disable=C0103 DESCRIPTION = "Publish a big message and consume it" diff --git a/tests/acceptance/blocking_adapter_test.py b/tests/acceptance/blocking_adapter_test.py index 6f7120b..7628f89 100644 --- a/tests/acceptance/blocking_adapter_test.py +++ b/tests/acceptance/blocking_adapter_test.py @@ -1768,14 +1768,12 @@ class TestBasicCancelPurgesPendingConsumerCancellationEvt(BlockingTestCaseBase): ch.publish('', routing_key=q_name, body='via-publish', mandatory=True) - # Create a consumer + # Create a consumer. Not passing a 'callback' to test client-generated + # consumer tags rx_messages = [] consumer_tag = ch.basic_consume( q_name, - lambda *args: rx_messages.append(args), - no_ack=False, - exclusive=False, - arguments=None) + lambda *args: rx_messages.append(args)) # Wait for the published message to arrive, but don't consume it while not ch._pending_events: @@ -1848,7 +1846,8 @@ class TestBasicPublishWithoutPubacks(BlockingTestCaseBase): queue=q_name, expected_count=2) - # Create a consumer + # Create a consumer. Not passing a 'callback' to test client-generated + # consumer tags rx_messages = [] consumer_tag = ch.basic_consume( q_name, diff --git a/tests/unit/channel_tests.py b/tests/unit/channel_tests.py index eba7b70..c1f8b67 100644 --- a/tests/unit/channel_tests.py +++ b/tests/unit/channel_tests.py @@ -1,6 +1,5 @@ """ Tests for pika.channel.Channel - """ import collections import logging @@ -12,6 +11,9 @@ import mock from pika import channel, connection, exceptions, frame, spec +# Disable protected-access, missing-docstring, and invalid-name, +# too-many-public-methods, too-many-lines +# pylint: disable=W0212,C0111,C0103,R0904,C0302 class ConnectionTemplate(connection.Connection): """Template for using as mock spec_set for the pika Connection class. It @@ -243,67 +245,88 @@ class ChannelTests(unittest.TestCase): def test_basic_consume_channel_closed(self): mock_callback = mock.Mock() + mock_on_msg_callback = mock.Mock() self.assertRaises(exceptions.ChannelClosed, self.obj.basic_consume, - 'test-queue', mock_callback) + 'test-queue', mock_on_msg_callback, + callback=mock_callback) @mock.patch('pika.channel.Channel._validate_channel') - def test_basic_consume_calls_validate(self, validate): + @mock.patch('pika.channel.Channel._require_callback') + def test_basic_consume_calls_validate(self, require, validate): self.obj._set_state(self.obj.OPEN) mock_callback = mock.Mock() - self.obj.basic_consume('test-queue', mock_callback) + mock_on_msg_callback = mock.Mock() + self.obj.basic_consume('test-queue', mock_on_msg_callback, + callback=mock_callback) + require.assert_called_once_with(mock_on_msg_callback) validate.assert_called_once() - def test_basic_consume_consumer_tag(self): + def test_basic_consume_consumer_tag_no_completion_callback(self): + self.obj._set_state(self.obj.OPEN) + expectation = 'ctag1.' 
+ mock_on_msg_callback = mock.Mock() + consumer_tag = self.obj.basic_consume('test-queue', + mock_on_msg_callback)[:6] + self.assertEqual(consumer_tag, expectation) + + def test_basic_consume_consumer_tag_with_completion_callback(self): self.obj._set_state(self.obj.OPEN) expectation = 'ctag1.' mock_callback = mock.Mock() - self.assertEqual( - self.obj.basic_consume('test-queue', mock_callback)[:6], - expectation) + mock_on_msg_callback = mock.Mock() + consumer_tag = self.obj.basic_consume('test-queue', + mock_on_msg_callback, + callback=mock_callback)[:6] + self.assertEqual(consumer_tag, expectation) def test_basic_consume_consumer_tag_cancelled_full(self): self.obj._set_state(self.obj.OPEN) expectation = 'ctag1.' - mock_callback = mock.Mock() + mock_on_msg_callback = mock.Mock() for ctag in ['ctag1.%i' % ii for ii in range(11)]: self.obj._cancelled.add(ctag) self.assertEqual( - self.obj.basic_consume('test-queue', mock_callback)[:6], + self.obj.basic_consume('test-queue', mock_on_msg_callback)[:6], expectation) def test_basic_consume_consumer_tag_in_consumers(self): self.obj._set_state(self.obj.OPEN) consumer_tag = 'ctag1.0' + mock_on_msg_callback = mock.Mock() mock_callback = mock.Mock() self.obj.basic_consume( - 'test-queue', mock_callback, consumer_tag=consumer_tag) + 'test-queue', mock_on_msg_callback, + consumer_tag=consumer_tag, callback=mock_callback) self.assertIn(consumer_tag, self.obj._consumers) def test_basic_consume_duplicate_consumer_tag_raises(self): self.obj._set_state(self.obj.OPEN) consumer_tag = 'ctag1.0' + mock_on_msg_callback = mock.Mock() mock_callback = mock.Mock() self.obj._consumers[consumer_tag] = logging.debug self.assertRaises(exceptions.DuplicateConsumerTag, self.obj.basic_consume, 'test-queue', - mock_callback, False, False, consumer_tag) + mock_on_msg_callback, False, False, + consumer_tag, None, mock_callback) def test_basic_consume_consumers_callback_value(self): self.obj._set_state(self.obj.OPEN) consumer_tag = 'ctag1.0' - mock_callback = mock.Mock() + mock_on_msg_callback = mock.Mock() self.obj.basic_consume( - 'test-queue', mock_callback, consumer_tag=consumer_tag) - self.assertEqual(self.obj._consumers[consumer_tag], mock_callback) + 'test-queue', mock_on_msg_callback, consumer_tag=consumer_tag) + self.assertEqual(self.obj._consumers[consumer_tag], mock_on_msg_callback) @mock.patch('pika.spec.Basic.Consume') @mock.patch('pika.channel.Channel._rpc') - def test_basic_consume_consumers_rpc_called(self, rpc, _unused): + def test_basic_consume_consumers_rpc_with_no_completion_callback(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) consumer_tag = 'ctag1.0' - mock_callback = mock.Mock() + mock_on_msg_callback = mock.Mock() self.obj.basic_consume( - 'test-queue', mock_callback, consumer_tag=consumer_tag) + 'test-queue', mock_on_msg_callback, + consumer_tag=consumer_tag) expectation = spec.Basic.Consume( queue='test-queue', consumer_tag=consumer_tag, @@ -314,6 +337,26 @@ class ChannelTests(unittest.TestCase): 'consumer_tag': consumer_tag })]) + @mock.patch('pika.spec.Basic.Consume') + @mock.patch('pika.channel.Channel._rpc') + def test_basic_consume_consumers_rpc_with_completion_callback(self, rpc, _unused): + self.obj._set_state(self.obj.OPEN) + consumer_tag = 'ctag1.0' + mock_on_msg_callback = mock.Mock() + mock_callback = mock.Mock() + self.obj.basic_consume( + 'test-queue', mock_on_msg_callback, + consumer_tag=consumer_tag, callback=mock_callback) + expectation = spec.Basic.Consume( + queue='test-queue', + consumer_tag=consumer_tag, + no_ack=False, 
+ exclusive=False) + rpc.assert_called_once_with(expectation, mock_callback, + [(spec.Basic.ConsumeOk, { + 'consumer_tag': consumer_tag + })]) + @mock.patch('pika.channel.Channel._require_callback') def test_basic_get_calls_require_callback(self, require): self.obj._set_state(self.obj.OPEN) @@ -514,12 +557,13 @@ class ChannelTests(unittest.TestCase): def test_confirm_delivery_with_bad_callback_raises_value_error(self): self.assertRaises(ValueError, - self.obj.confirm_delivery, 'bad-callback') + self.obj.confirm_delivery, + 'bad-callback') def test_confirm_delivery_raises_channel_closed(self): cb = mock.Mock() self.assertRaises(exceptions.ChannelClosed, - self.obj.confirm_delivery, cb) + self.obj.confirm_delivery, cb) def test_confirm_delivery_raises_method_not_implemented_for_confirms(self): self.obj._set_state(self.obj.OPEN) @@ -653,7 +697,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_exchange_bind_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.exchange_bind('foo', 'bar', 'baz', callback=None) rpc.assert_called_once_with( spec.Exchange.Bind(0, 'foo', 'bar', 'baz'), None, []) @@ -683,7 +726,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_exchange_declare_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.exchange_declare('foo', callback=None) rpc.assert_called_once_with( spec.Exchange.Declare(0, 'foo'), None, []) @@ -711,7 +753,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_exchange_delete_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.exchange_delete('foo', callback=None) rpc.assert_called_once_with( spec.Exchange.Delete(0, 'foo'), None, []) @@ -824,7 +865,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_queue_bind_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.queue_bind('foo', 'bar', 'baz', callback=None) rpc.assert_called_once_with( spec.Queue.Bind(0, 'foo', 'bar', 'baz'), None, []) @@ -857,7 +897,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_queue_declare_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.queue_declare('foo', callback=None) rpc.assert_called_once_with( spec.Queue.Declare(0, 'foo'), None, []) @@ -884,7 +923,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_queue_delete_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.queue_delete('foo', callback=None) rpc.assert_called_once_with( spec.Queue.Delete(0, 'foo'), None, []) @@ -911,7 +949,6 @@ class ChannelTests(unittest.TestCase): @mock.patch('pika.channel.Channel._rpc') def test_queue_purge_rpc_request_nowait(self, rpc, _unused): self.obj._set_state(self.obj.OPEN) - mock_callback = mock.Mock() self.obj.queue_purge('foo', callback=None) rpc.assert_called_once_with( spec.Queue.Purge(0, 'foo'), None, []) @@ -1461,4 +1498,5 @@ class ChannelTests(unittest.TestCase): self): self.obj._set_state(self.obj.OPEN) self.assertRaises(TypeError, - self.obj._validate_rpc_completion_callback, 'foo') + self.obj._validate_rpc_completion_callback, + 'foo')
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 3 }
0.11
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "test-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 charset-normalizer==2.0.12 codecov==2.1.13 coverage==6.2 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 mock==5.2.0 nose==1.3.7 packaging==21.3 -e git+https://github.com/pika/pika.git@3d3b95d31b67dfeaf5ef43650c162e25169336e6#egg=pika pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 requests==2.27.1 tomli==1.2.3 tornado==6.1 Twisted==15.3.0 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0 zope.interface==5.5.2
name: pika channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - charset-normalizer==2.0.12 - codecov==2.1.13 - coverage==6.2 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - mock==5.2.0 - nose==1.3.7 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - requests==2.27.1 - tomli==1.2.3 - tornado==6.1 - twisted==15.3.0 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 - zope-interface==5.5.2 prefix: /opt/conda/envs/pika
[ "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_calls_validate", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_in_consumers", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_with_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_rpc_with_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_duplicate_consumer_tag_raises" ]
[ "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestMultiCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesOriginalException::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesSystemException::test", "tests/acceptance/blocking_adapter_test.py::TestLostConnectionResultsInIsClosedConnectionAndChannel::test", "tests/acceptance/blocking_adapter_test.py::TestInvalidExchangeTypeRaisesConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnectionWithChannelAndConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestSuddenBrokerDisconnectBeforeChannel::test", "tests/acceptance/blocking_adapter_test.py::TestNoAccessToFileDescriptorAfterConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionStart::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionTune::test", "tests/acceptance/blocking_adapter_test.py::TestProcessDataEvents::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionRegisterForBlockAndUnblock::test", "tests/acceptance/blocking_adapter_test.py::TestBlockedConnectionTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestAddTimeoutRemoveTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestRemoveTimeoutFromTimeoutCallback::test", "tests/acceptance/blocking_adapter_test.py::TestSleep::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionProperties::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseChannel::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeBindAndUnbind::test", "tests/acceptance/blocking_adapter_test.py::TestQueueDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestPassiveQueueDeclareOfUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestQueueBindAndUnbindAndPurge::test", "tests/acceptance/blocking_adapter_test.py::TestBasicGet::test", "tests/acceptance/blocking_adapter_test.py::TestBasicReject::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRejectNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNack::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackMultiple::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRecoverWithRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestTxCommit::test", "tests/acceptance/blocking_adapter_test.py::TestTxRollback::test", "tests/acceptance/blocking_adapter_test.py::TestBasicConsumeFromUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestPublishAndBasicPublishWithPubacksUnroutable::test", "tests/acceptance/blocking_adapter_test.py::TestConfirmDeliveryAfterUnroutableMessage::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessagesReturnedInNonPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessageReturnedInPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishDeliveredWhenPendingUnroutable::test", 
"tests/acceptance/blocking_adapter_test.py::TestPublishAndConsumeWithPubacksAndQosOfOne::test", "tests/acceptance/blocking_adapter_test.py::TestTwoBasicConsumersOnSameChannel::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelPurgesPendingConsumerCancellationEvt::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishWithoutPubacks::test", "tests/acceptance/blocking_adapter_test.py::TestPublishFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestStopConsumingFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseChannelFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseConnectionFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubAckPublishAndConsumeHugeMessage::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubackPublishAndConsumeManyMessages::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithNonAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestUnackedMessageAutoRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestNoAckMessageNotRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeInactivityTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestChannelFlow::test" ]
[ "tests/acceptance/blocking_adapter_test.py::TestConnectWithDownedBroker::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionProtocol::test", "tests/unit/channel_tests.py::ChannelTests::test_add_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_callback_multiple_replies", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_cancel_empty_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_get_empty_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_close_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_flow_added", "tests/unit/channel_tests.py::ChannelTests::test_add_on_cancel_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_close_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_flow_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_return_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_ack_calls_send_method", "tests/unit/channel_tests.py::ChannelTests::test_basic_ack_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_asynch", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_asynch_with_user_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_calls_validate", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_synch", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_synch_no_user_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_then_close", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_unknown_consumer_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_cancelled_full", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_no_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_callback_value", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_rpc_with_no_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_calls_require_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_send_method_called", "tests/unit/channel_tests.py::ChannelTests::test_basic_nack_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_nack_send_method_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_publish_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_publish_send_method_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_recover_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_recover_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_int_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_long_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_int_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_long_tag", "tests/unit/channel_tests.py::ChannelTests::test_channel_open_add_callbacks_called", 
"tests/unit/channel_tests.py::ChannelTests::test_cleanup", "tests/unit/channel_tests.py::ChannelTests::test_close_basic_cancel_called", "tests/unit/channel_tests.py::ChannelTests::test_close_in_closed_state_raises_channel_error_and_stays_closed", "tests/unit/channel_tests.py::ChannelTests::test_close_in_closing_state_raises_already_closing", "tests/unit/channel_tests.py::ChannelTests::test_close_in_open_state_transitions_to_closing", "tests/unit/channel_tests.py::ChannelTests::test_close_in_opening_state", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_async", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_ack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_nack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_without_nowait_selectok", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_yes_basic_ack_callback", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_yes_basic_nack_callback", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_no_callback_callback_call_count", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_confirms", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_nack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_with_bad_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_consumer_tags", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_flow_off_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_flow_on_flowok_callback", "tests/unit/channel_tests.py::ChannelTests::test_flow_on_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_flow_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_flow_raises_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_deliver_called", 
"tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_get_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_return_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_method_returns_none", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_header_frame", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_method_frame", "tests/unit/channel_tests.py::ChannelTests::test_has_content_false", "tests/unit/channel_tests.py::ChannelTests::test_has_content_true", "tests/unit/channel_tests.py::ChannelTests::test_immediate_called_logger_warning", "tests/unit/channel_tests.py::ChannelTests::test_init_blocked", "tests/unit/channel_tests.py::ChannelTests::test_init_blocking", "tests/unit/channel_tests.py::ChannelTests::test_init_callbacks", "tests/unit/channel_tests.py::ChannelTests::test_init_cancelled", "tests/unit/channel_tests.py::ChannelTests::test_init_channel_number", "tests/unit/channel_tests.py::ChannelTests::test_init_connection", "tests/unit/channel_tests.py::ChannelTests::test_init_consumers", "tests/unit/channel_tests.py::ChannelTests::test_init_content_frame_assembler", "tests/unit/channel_tests.py::ChannelTests::test_init_flow", "tests/unit/channel_tests.py::ChannelTests::test_init_has_on_flow_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_invalid_channel_number", "tests/unit/channel_tests.py::ChannelTests::test_init_on_flowok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_on_getok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_on_openok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_state", "tests/unit/channel_tests.py::ChannelTests::test_is_closed_false", "tests/unit/channel_tests.py::ChannelTests::test_is_closed_true", "tests/unit/channel_tests.py::ChannelTests::test_is_closing_false", "tests/unit/channel_tests.py::ChannelTests::test_is_closing_true", "tests/unit/channel_tests.py::ChannelTests::test_on_cancel_not_appended_cancelled", "tests/unit/channel_tests.py::ChannelTests::test_on_cancel_removed_consumer", "tests/unit/channel_tests.py::ChannelTests::test_on_cancelok_removed_consumer", "tests/unit/channel_tests.py::ChannelTests::test_on_close_in_closing_state", "tests/unit/channel_tests.py::ChannelTests::test_on_close_in_open_state", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_closed_state_is_suppressed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_closing_state_transitions_to_closed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_open_state_transitions_to_closed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_opening_state_transitions_to_closed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_warning", "tests/unit/channel_tests.py::ChannelTests::test_on_closeok", "tests/unit/channel_tests.py::ChannelTests::test_on_closeok_following_close_from_broker", "tests/unit/channel_tests.py::ChannelTests::test_on_confirm_selectok", "tests/unit/channel_tests.py::ChannelTests::test_on_deliver_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_eventok", "tests/unit/channel_tests.py::ChannelTests::test_on_flow", "tests/unit/channel_tests.py::ChannelTests::test_on_flow_with_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok_callback_reset", 
"tests/unit/channel_tests.py::ChannelTests::test_on_flowok_calls_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_getempty", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_reset", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_no_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_openok_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_openok_no_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_synchronous_complete", "tests/unit/channel_tests.py::ChannelTests::test_onreturn", "tests/unit/channel_tests.py::ChannelTests::test_onreturn_warning", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_repr", "tests/unit/channel_tests.py::ChannelTests::test_rpc_adds_callback", "tests/unit/channel_tests.py::ChannelTests::test_rpc_enters_blocking_and_adds_on_synchronous_complete", "tests/unit/channel_tests.py::ChannelTests::test_rpc_not_blocking_and_no_on_synchronous_complete_when_no_replies", "tests/unit/channel_tests.py::ChannelTests::test_rpc_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_type_error_with_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_value_error_with_unacceptable_replies", "tests/unit/channel_tests.py::ChannelTests::test_rpc_while_blocking_appends_blocked_collection", "tests/unit/channel_tests.py::ChannelTests::test_send_method", "tests/unit/channel_tests.py::ChannelTests::test_set_state", "tests/unit/channel_tests.py::ChannelTests::test_tx_commit_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_tx_commit_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_tx_rollback_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_tx_select_rpc_request", 
"tests/unit/channel_tests.py::ChannelTests::test_validate_callback_raises_value_error_not_callable", "tests/unit/channel_tests.py::ChannelTests::test_validate_channel_raises_channel_closed" ]
[]
BSD 3-Clause "New" or "Revised" License
2,145
[ "pika/adapters/blocking_connection.py", "pika/adapters/base_connection.py", "pika/channel.py" ]
[ "pika/adapters/blocking_connection.py", "pika/adapters/base_connection.py", "pika/channel.py" ]
Azure__WALinuxAgent-1039
53a429b06b67031d30351b45e798ec204484b8ef
2018-02-12 05:42:23
6e9b985c1d7d564253a1c344bab01b45093103cd
diff --git a/azurelinuxagent/common/event.py b/azurelinuxagent/common/event.py index 84a439f5..7fc084d9 100644 --- a/azurelinuxagent/common/event.py +++ b/azurelinuxagent/common/event.py @@ -254,7 +254,11 @@ __event_logger__ = EventLogger() def elapsed_milliseconds(utc_start): - d = datetime.utcnow() - utc_start + now = datetime.utcnow() + if now < utc_start: + return 0 + + d = now - utc_start return int(((d.days * 24 * 60 * 60 + d.seconds) * 1000) + \ (d.microseconds / 1000.0))
The ProcessGoalState Duration is Too Large

The Duration telemetry value exceeds the range of an Int64, which is impossible for a genuine elapsed time. This has been seen in at least two different agent versions (2.1.3 and 2.2.21).
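For reference, a self-contained sketch of the guarded helper from the patch above; the function body mirrors the diff, and the final check reuses the one-day-in-the-future start time from this instance's test patch.

```python
from datetime import datetime, timedelta

def elapsed_milliseconds(utc_start):
    """Elapsed wall-clock time since utc_start, clamped to 0 on clock skew."""
    now = datetime.utcnow()
    if now < utc_start:   # clock moved backwards (or utc_start is in the future):
        return 0          # report 0 rather than a bogus negative duration
    d = now - utc_start
    # timedelta normalizes to (days, seconds, microseconds); fold them into ms
    return int((d.days * 24 * 60 * 60 + d.seconds) * 1000 + d.microseconds / 1000.0)

# With a start time one day in the future, the unguarded version returns a
# negative value (~-86,400,000 ms); the guarded version returns 0.
assert elapsed_milliseconds(datetime.utcnow() + timedelta(days=1)) == 0
```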
Azure/WALinuxAgent
diff --git a/tests/common/test_event.py b/tests/common/test_event.py index 01bcd7b9..4d9afeff 100644 --- a/tests/common/test_event.py +++ b/tests/common/test_event.py @@ -17,14 +17,10 @@ from __future__ import print_function -from datetime import datetime - -import azurelinuxagent.common.event as event -import azurelinuxagent.common.logger as logger +from datetime import datetime, timedelta from azurelinuxagent.common.event import add_event, \ - mark_event_status, should_emit_event, \ - WALAEventOperation + WALAEventOperation, elapsed_milliseconds from azurelinuxagent.common.future import ustr from azurelinuxagent.common.version import CURRENT_VERSION @@ -217,3 +213,7 @@ class TestEvent(AgentTestCase): with open(last_event) as last_fh: last_event_text = last_fh.read() self.assertTrue('last event' in last_event_text) + + def test_elapsed_milliseconds(self): + utc_start = datetime.utcnow() + timedelta(days=1) + self.assertEqual(0, elapsed_milliseconds(utc_start))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 1 }
2.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pyasn1" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///croot/attrs_1668696182826/work certifi @ file:///croot/certifi_1671487769961/work/certifi flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1648562407465/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work packaging @ file:///croot/packaging_1671697413597/work pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyasn1==0.5.1 pytest==7.1.2 tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work typing_extensions @ file:///croot/typing_extensions_1669924550328/work -e git+https://github.com/Azure/WALinuxAgent.git@53a429b06b67031d30351b45e798ec204484b8ef#egg=WALinuxAgent zipp @ file:///croot/zipp_1672387121353/work
name: WALinuxAgent channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=22.1.0=py37h06a4308_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - flit-core=3.6.0=pyhd3eb1b0_0 - importlib-metadata=4.11.3=py37h06a4308_0 - importlib_metadata=4.11.3=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=22.0=py37h06a4308_0 - pip=22.3.1=py37h06a4308_0 - pluggy=1.0.0=py37h06a4308_1 - py=1.11.0=pyhd3eb1b0_0 - pytest=7.1.2=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py37h06a4308_0 - typing_extensions=4.4.0=py37h06a4308_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zipp=3.11.0=py37h06a4308_0 - zlib=1.2.13=h5eee18b_1 - pip: - pyasn1==0.5.1 prefix: /opt/conda/envs/WALinuxAgent
[ "tests/common/test_event.py::TestEvent::test_elapsed_milliseconds" ]
[]
[ "tests/common/test_event.py::TestEvent::test_event_status_defaults_to_success", "tests/common/test_event.py::TestEvent::test_event_status_event_marked", "tests/common/test_event.py::TestEvent::test_event_status_preserves_state", "tests/common/test_event.py::TestEvent::test_event_status_records_status", "tests/common/test_event.py::TestEvent::test_periodic_does_not_emit_if_previously_sent", "tests/common/test_event.py::TestEvent::test_periodic_emits_after_elapsed_delta", "tests/common/test_event.py::TestEvent::test_periodic_emits_if_forced", "tests/common/test_event.py::TestEvent::test_periodic_emits_if_not_previously_sent", "tests/common/test_event.py::TestEvent::test_periodic_forwards_args", "tests/common/test_event.py::TestEvent::test_save_event", "tests/common/test_event.py::TestEvent::test_save_event_cleanup", "tests/common/test_event.py::TestEvent::test_save_event_rollover", "tests/common/test_event.py::TestEvent::test_should_emit_event_handles_known_operations", "tests/common/test_event.py::TestEvent::test_should_emit_event_ignores_unknown_operations" ]
[]
Apache License 2.0
2,146
[ "azurelinuxagent/common/event.py" ]
[ "azurelinuxagent/common/event.py" ]
conan-io__conan-2458
544682392b632d8e7cc51372d9e51c887af5139e
2018-02-12 13:32:04
c8ee776992121b27d2dcb54be835b501326254bc
diff --git a/conans/client/build/autotools_environment.py b/conans/client/build/autotools_environment.py index 2d99729ba..2dba8e3ad 100644 --- a/conans/client/build/autotools_environment.py +++ b/conans/client/build/autotools_environment.py @@ -1,42 +1,17 @@ import copy -import platform import os +import platform from conans.client import join_arguments -from conans.tools import environment_append, args_to_string, cpu_count, cross_building, detected_architecture -from conans.client.tools.win import unix_path +from conans.client.build.compiler_flags import (architecture_flag, format_libraries, + format_library_paths, format_defines, + sysroot_flag, format_include_paths, + build_type_flag, libcxx_flag, build_type_define, + libcxx_define, pic_flag, rpath_flags) from conans.client.tools.oss import OSInfo - -sun_cc_libcxx_flags_dict = {"libCstd": "-library=Cstd", - "libstdcxx": "-library=stdcxx4", - "libstlport": "-library=stlport4", - "libstdc++": "-library=stdcpp"} - -architecture_dict = {"x86_64": "-m64", "x86": "-m32"} - - -def stdlib_flags(compiler, libcxx): - ret = [] - if compiler and "clang" in compiler: - if libcxx == "libc++": - ret.append("-stdlib=libc++") - else: - ret.append("-stdlib=libstdc++") - elif compiler == "sun-cc": - flag = sun_cc_libcxx_flags_dict.get(libcxx, None) - if flag: - ret.append(flag) - return ret - - -def stdlib_defines(compiler, libcxx): - ret = [] - if compiler == "gcc" or compiler == "clang": # Maybe clang is using the standard library from g++ - if libcxx == "libstdc++": - ret.append("_GLIBCXX_USE_CXX11_ABI=0") - elif str(libcxx) == "libstdc++11": - ret.append("_GLIBCXX_USE_CXX11_ABI=1") - return ret +from conans.client.tools.win import unix_path +from conans.tools import (environment_append, args_to_string, cpu_count, cross_building, + detected_architecture) class AutoToolsBuildEnvironment(object): @@ -47,9 +22,13 @@ class AutoToolsBuildEnvironment(object): - LDFLAGS (-L, others like -m64 -m32) linker """ - def __init__(self, conanfile, win_bash=False): + def __init__(self, conanfile, win_bash=False, include_rpath_flags=False): + """ + FIXME: include_rpath_flags CONAN 2.0 to default True? Could break many packages in center + """ self._conanfile = conanfile self._win_bash = win_bash + self._include_rpath_flags = include_rpath_flags self.subsystem = OSInfo().detect_windows_subsystem() if self._win_bash else None self._deps_cpp_info = conanfile.deps_cpp_info self._arch = conanfile.settings.get_safe("arch") @@ -122,8 +101,10 @@ class AutoToolsBuildEnvironment(object): :param args: Optional arguments to pass to configure. :param build: In which system the program will be built. "False" skips the --build flag :param host: In which system the generated program will run. "False" skips the --host flag - :param target: This option is only used to build a cross-compiling toolchain. "False" skips the --target flag - When the tool chain generates executable program, in which target system the program will run. + :param target: This option is only used to build a cross-compiling toolchain. + "False" skips the --target flag + When the tool chain generates executable program, in which target system + the program will run. :return: None http://jingfenghanmax.blogspot.com.es/2010/09/configure-with-host-target-and-build.html @@ -136,9 +117,8 @@ class AutoToolsBuildEnvironment(object): configure_dir = "." 
auto_build, auto_host, auto_target = None, None, None if build is None or host is None or target is None: - auto_build, auto_host, auto_target = self._get_host_build_target_flags(detected_architecture(), - platform.system()) - + flags = self._get_host_build_target_flags(detected_architecture(), platform.system()) + auto_build, auto_host, auto_target = flags triplet_args = [] if build is not False: # Skipped by user @@ -179,40 +159,52 @@ class AutoToolsBuildEnvironment(object): with environment_append(self.vars): str_args = args_to_string(args) cpu_count_option = ("-j%s" % cpu_count()) if "-j" not in str_args else None - self._conanfile.run("%s" % join_arguments([make_program, target, str_args, cpu_count_option]), + self._conanfile.run("%s" % join_arguments([make_program, target, str_args, + cpu_count_option]), win_bash=self._win_bash, subsystem=self.subsystem) - @property - def _sysroot_flag(self): - if self._compiler == 'Visual Studio': - return None - else: - return "--sysroot=%s" % self._adjust_path(self._deps_cpp_info.sysroot) if self._deps_cpp_info.sysroot else None - def _configure_link_flags(self): """Not the -L""" ret = copy.copy(self._deps_cpp_info.sharedlinkflags) ret.extend(self._deps_cpp_info.exelinkflags) - ret.append(self._architecture_flag) - if self._sysroot_flag: - ret.append(self._sysroot_flag) + arch_flag = architecture_flag(compiler=self._compiler, arch=self._arch) + if arch_flag: + ret.append(arch_flag) + + sysf = sysroot_flag(self._deps_cpp_info.sysroot, win_bash=self._win_bash, + subsystem=self.subsystem, + compiler=self._compiler) + if sysf: + ret.append(sysf) + + if self._include_rpath_flags: + the_os = self._conanfile.settings.get_safe("os_build") or \ + self._conanfile.settings.get_safe("os") + ret.extend(rpath_flags(the_os, self._compiler, self._deps_cpp_info.lib_paths)) + return ret def _configure_flags(self): ret = copy.copy(self._deps_cpp_info.cflags) - ret.append(self._architecture_flag) - if self._build_type == "Debug" and str(self._compiler) in ['gcc', 'clang', 'apple-clang', 'sun-cc']: - ret.append("-g") # default debug information - elif self._build_type == "Release" and self._compiler == "gcc": - # Remove all symbol table and relocation information from the executable. 
- ret.append("-s") - if self._sysroot_flag: - ret.append(self._sysroot_flag) + arch_flag = architecture_flag(compiler=self._compiler, arch=self._arch) + if arch_flag: + ret.append(arch_flag) + btf = build_type_flag(compiler=self._compiler, build_type=self._build_type) + if btf: + ret.append(btf) + srf = sysroot_flag(self._deps_cpp_info.sysroot, win_bash=self._win_bash, + subsystem=self.subsystem, + compiler=self._compiler) + if srf: + ret.append(srf) + return ret def _configure_cxx_flags(self): ret = copy.copy(self._deps_cpp_info.cppflags) - ret.extend(stdlib_flags(self._compiler, self._libcxx)) + cxxf = libcxx_flag(compiler=self._compiler, libcxx=self._libcxx) + if cxxf: + ret.append(cxxf) return ret def _configure_defines(self): @@ -220,19 +212,16 @@ class AutoToolsBuildEnvironment(object): ret = copy.copy(self._deps_cpp_info.defines) # Debug definition for GCC - if self._build_type == "Release" and self._compiler == "gcc": - ret.append("NDEBUG") + btf = build_type_define(build_type=self._build_type) + if btf: + ret.append(btf) # CXX11 ABI - ret.extend(stdlib_defines(self._compiler, self._libcxx)) + abif = libcxx_define(compiler=self._compiler, libcxx=self._libcxx) + if abif: + ret.append(abif) return ret - @property - def _architecture_flag(self): - if str(self._compiler) in ['gcc', 'clang', 'apple-clang', 'sun-cc']: - return architecture_dict.get(self._arch, "") - return "" - def _get_vars(self): def append(*args): ret = [] @@ -244,22 +233,18 @@ class AutoToolsBuildEnvironment(object): ret.append(arg) return ret - if self._compiler == 'Visual Studio': - lib_paths = ['/LIBPATH:%s' % x.replace("/", "\\") for x in self.library_paths] - include_paths = ['-I%s' % x.replace("/", "\\") for x in self.include_paths] - libs = [lib for lib in self.libs] - else: - lib_paths = ['-L%s' % self._adjust_path(x.replace("\\", "/")) for x in self.library_paths] - include_paths = ['-I%s' % self._adjust_path(x.replace("\\", "/")) for x in self.include_paths] - libs = ['-l%s' % lib for lib in self.libs] + lib_paths = format_library_paths(self.library_paths, win_bash=self._win_bash, + subsystem=self.subsystem, compiler=self._compiler) + include_paths = format_include_paths(self.include_paths, win_bash=self._win_bash, + subsystem=self.subsystem, compiler=self._compiler) ld_flags = append(self.link_flags, lib_paths) - - cpp_flags = append(include_paths, ["-D%s" % x for x in self.defines]) + cpp_flags = append(include_paths, format_defines(self.defines, self._compiler)) + libs = format_libraries(self.libs, compiler=self._compiler) tmp_compilation_flags = copy.copy(self.flags) - if self.fpic and not self._compiler == 'Visual Studio': - tmp_compilation_flags.append("-fPIC") + if self.fpic: + tmp_compilation_flags.append(pic_flag(self._compiler)) cxx_flags = append(tmp_compilation_flags, self.cxx_flags) c_flags = tmp_compilation_flags diff --git a/conans/client/build/cmake.py b/conans/client/build/cmake.py index 264fb76fb..d57a117cb 100644 --- a/conans/client/build/cmake.py +++ b/conans/client/build/cmake.py @@ -15,6 +15,7 @@ from conans.tools import cpu_count, args_to_string from conans import tools from conans.util.log import logger from conans.util.config_parser import get_bool_from_text +from conans.client.build.compiler_flags import architecture_flag def _get_env_cmake_system_name(): @@ -25,7 +26,7 @@ def _get_env_cmake_system_name(): class CMake(object): def __init__(self, conanfile, generator=None, cmake_system_name=True, - parallel=True, build_type=None, toolset=None, make_program=None): + parallel=True, 
build_type=None, toolset=None, make_program=None, set_cmake_flags=False): """ :param settings_or_conanfile: Conanfile instance (or settings for retro compatibility) :param generator: Generator name to use or none to autodetect @@ -35,6 +36,8 @@ class CMake(object): :param build_type: Overrides default build type comming from settings :param toolset: Toolset name to use (such as llvm-vs2014) or none for default one, applies only to certain generators (e.g. Visual Studio) + :param set_cmake_flags: whether or not to set CMake flags like CMAKE_CXX_FLAGS, CMAKE_C_FLAGS, etc. + it's vital to set for certain projects (e.g. using CMAKE_SIZEOF_VOID_P or CMAKE_LIBRARY_ARCHITECTURE) """ if not isinstance(conanfile, ConanFile): raise ConanException("First argument of CMake() has to be ConanFile. Use CMake(self)") @@ -60,6 +63,7 @@ class CMake(object): if self._cmake_system_name is None: # Not overwritten using environment self._cmake_system_name = cmake_system_name self.parallel = parallel + self._set_cmake_flags = set_cmake_flags self.definitions = self._get_cmake_definitions() if build_type and build_type != self._build_type: # Call the setter to warn and update the definitions if needed @@ -255,6 +259,17 @@ class CMake(object): return "" def _get_cmake_definitions(self): + def add_cmake_flag(cmake_flags, name, flag): + """ + appends compiler linker flags (if already present), or just sets + """ + if flag: + if name not in cmake_flags: + cmake_flags[name] = flag + else: + cmake_flags[name] = ' ' + flag + return cmake_flags + ret = OrderedDict() ret.update(self._build_type_definition()) ret.update(self._runtime_definition()) @@ -268,16 +283,14 @@ class CMake(object): ret["CONAN_COMPILER_VERSION"] = str(self._compiler_version) # Force compiler flags -- TODO: give as environment/setting parameter? 
- if self._compiler in ("gcc", "clang", "apple-clang", "sun-cc"): - if self._arch == "x86" or self._arch == "sparc": - ret["CONAN_CXX_FLAGS"] = "-m32" - ret["CONAN_SHARED_LINKER_FLAGS"] = "-m32" - ret["CONAN_C_FLAGS"] = "-m32" - - if self._arch == "x86_64" or self._arch == "sparcv9": - ret["CONAN_CXX_FLAGS"] = "-m64" - ret["CONAN_SHARED_LINKER_FLAGS"] = "-m64" - ret["CONAN_C_FLAGS"] = "-m64" + arch_flag = architecture_flag(compiler=self._compiler, arch=self._arch) + ret = add_cmake_flag(ret, 'CONAN_CXX_FLAGS', arch_flag) + ret = add_cmake_flag(ret, 'CONAN_SHARED_LINKER_FLAGS', arch_flag) + ret = add_cmake_flag(ret, 'CONAN_C_FLAGS', arch_flag) + if self._set_cmake_flags: + ret = add_cmake_flag(ret, 'CMAKE_CXX_FLAGS', arch_flag) + ret = add_cmake_flag(ret, 'CMAKE_SHARED_LINKER_FLAGS', arch_flag) + ret = add_cmake_flag(ret, 'CMAKE_C_FLAGS', arch_flag) if self._libcxx: ret["CONAN_LIBCXX"] = self._libcxx @@ -328,7 +341,6 @@ class CMake(object): def configure(self, args=None, defs=None, source_dir=None, build_dir=None, source_folder=None, build_folder=None, cache_build_folder=None): - # TODO: Deprecate source_dir and build_dir in favor of xxx_folder args = args or [] defs = defs or {} diff --git a/conans/client/build/compiler_flags.py b/conans/client/build/compiler_flags.py new file mode 100644 index 000000000..05b3c929e --- /dev/null +++ b/conans/client/build/compiler_flags.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +from conans.tools import unix_path + + +def rpath_flags(os_build, compiler, lib_paths): + if not os_build: + return [] + if compiler in ("clang", "apple-clang", "gcc"): + rpath_separator = "," if os_build in ["Macos", "iOS", "watchOS", "tvOS"] else "=" + return ['-Wl,-rpath%s"%s"' % (rpath_separator, x.replace("\\", "/")) + for x in lib_paths if x] + return [] + + +def architecture_flag(compiler, arch): + """ + returns flags specific to the target architecture and compiler + """ + if not compiler or not arch: + return "" + + if str(compiler) in ['gcc', 'apple-clang', 'clang', 'sun-cc']: + if str(arch) in ['x86_64', 'sparcv9']: + return '-m64' + elif str(arch) in ['x86', 'sparc']: + return '-m32' + return "" + + +def libcxx_define(compiler, libcxx): + + if not compiler or not libcxx: + return "" + + if str(compiler) in ['gcc', 'clang', 'apple-clang']: + if str(libcxx) == 'libstdc++': + return '_GLIBCXX_USE_CXX11_ABI=0' + elif str(libcxx) == 'libstdc++11': + return '_GLIBCXX_USE_CXX11_ABI=1' + return "" + + +def libcxx_flag(compiler, libcxx): + """ + returns flag specific to the target C++ standard library + """ + if not compiler or not libcxx: + return "" + if str(compiler) in ['clang', 'apple-clang']: + if str(libcxx) in ['libstdc++', 'libstdc++11']: + return '-stdlib=libstdc++' + elif str(libcxx) == 'libc++': + return '-stdlib=libc++' + elif str(compiler) == 'sun-cc': + return ({"libCstd": "-library=Cstd", + "libstdcxx": "-library=stdcxx4", + "libstlport": "-library=stlport4", + "libstdc++": "-library=stdcpp"}.get(libcxx, "")) + return "" + + +def pic_flag(compiler=None): + """ + returns PIC (position independent code) flags, such as -fPIC + """ + if not compiler or compiler == 'Visual Studio': + return "" + return '-fPIC' + + +def build_type_flag(compiler, build_type): + """ + returns flags specific to the build type (Debug, Release, etc.) + (-s, -g, /Zi, etc.) 
+ """ + if not compiler or not build_type: + return "" + + if str(compiler) == 'Visual Studio': + if build_type == 'Debug': + return '/Zi' + else: + if build_type == 'Debug': + return '-g' + elif build_type == 'Release' and str(compiler) == 'gcc': + return '-s' + return "" + + +def build_type_define(build_type=None): + """ + returns definitions specific to the build type (Debug, Release, etc.) + like DEBUG, _DEBUG, NDEBUG + """ + return 'NDEBUG' if build_type == 'Release' else "" + + +def adjust_path(path, win_bash=False, subsystem=None, compiler=None): + """ + adjusts path to be safely passed to the compiler command line + for Windows bash, ensures path is in format according to the subsystem + for path with spaces, places double quotes around it + converts slashes to backslashes, or vice versa + """ + if str(compiler) == 'Visual Studio': + path = path.replace('/', '\\') + else: + path = path.replace('\\', '/') + if win_bash: + path = unix_path(path, subsystem) + return '"%s"' % path if ' ' in path else path + + +def sysroot_flag(sysroot, win_bash=False, subsystem=None, compiler=None): + if str(compiler) != 'Visual Studio' and sysroot: + sysroot = adjust_path(sysroot, win_bash=win_bash, subsystem=subsystem, compiler=compiler) + return '--sysroot=%s' % sysroot + return "" + + +def visual_runtime(runtime): + if runtime: + return "/%s" % runtime + return "" + +def _option_char(compiler): + """-L vs /L""" + return "-" if compiler != "Visual Studio" else "/" + + +def format_defines(defines, compiler): + return ["%sD%s" % (_option_char(compiler), define) for define in defines if define] + + +def format_include_paths(include_paths, win_bash=False, subsystem=None, compiler=None): + return ["%sI%s" % (_option_char(compiler), + adjust_path(include_path, win_bash=win_bash, + subsystem=subsystem, compiler=compiler)) + for include_path in include_paths if include_path] + + +def format_library_paths(library_paths, win_bash=False, subsystem=None, compiler=None): + + pattern = "/LIBPATH:%s" if str(compiler) == 'Visual Studio' else "-L%s" + return [pattern % adjust_path(library_path, win_bash=win_bash, + subsystem=subsystem, compiler=compiler) + for library_path in library_paths if library_path] + + +def format_libraries(libraries, compiler=None): + pattern = "%s.lib" if str(compiler) == 'Visual Studio' else "-l%s" + return [pattern % library for library in libraries if library] diff --git a/conans/client/build/visual_environment.py b/conans/client/build/visual_environment.py index 60f86a2af..c3004e9d7 100644 --- a/conans/client/build/visual_environment.py +++ b/conans/client/build/visual_environment.py @@ -1,6 +1,8 @@ import copy import os +from conans.client.build.compiler_flags import build_type_define, build_type_flag, visual_runtime, format_defines + class VisualStudioBuildEnvironment(object): """ @@ -21,6 +23,8 @@ class VisualStudioBuildEnvironment(object): self._settings = conanfile.settings self._options = conanfile.options self._deps_cpp_info = conanfile.deps_cpp_info + self._build_type = self._settings.get_safe("build_type") + self._runtime = self._settings.get_safe("runtime") self.include_paths = conanfile.deps_cpp_info.include_paths self.lib_paths = conanfile.deps_cpp_info.lib_paths @@ -37,20 +41,27 @@ class VisualStudioBuildEnvironment(object): def _configure_flags(self): ret = copy.copy(self._deps_cpp_info.cflags) - if self._settings.get_safe("build_type") == "Debug": - ret.append("/Zi") # default debug information + btd = build_type_define(build_type=self._build_type) + if btd: + 
ret.extend(format_defines([btd], compiler="Visual Studio")) + btf = build_type_flag("Visual Studio", build_type=self._build_type) + if btf: + ret.append(btf) return ret def _get_cl_list(self, quotes=True): + # FIXME: It should be managed with the compiler_flags module + # But need further investigation about the quotes and so on, so better to not break anything if quotes: ret = ['/I"%s"' % lib for lib in self.include_paths] else: ret = ['/I%s' % lib for lib in self.include_paths] - if self.runtime: - ret.append("/%s" % self.runtime) + runtime = visual_runtime(self._runtime) + if runtime: + ret.append(runtime) - ret.extend(['/D%s' % lib for lib in self.defines]) + ret.extend(format_defines(self.defines, "Visual Studio")) ret.extend(self.flags) ret.extend(self.cxx_flags) ret.extend(self.link_flags) diff --git a/conans/client/generators/__init__.py b/conans/client/generators/__init__.py index 679f4ae5c..626dd42b2 100644 --- a/conans/client/generators/__init__.py +++ b/conans/client/generators/__init__.py @@ -1,5 +1,6 @@ from os.path import join +from conans.client.generators.compiler_args import CompilerArgsGenerator from conans.client.generators.pkg_config import PkgConfigGenerator from conans.errors import ConanException from conans.util.files import save, normalize @@ -47,6 +48,7 @@ registered_generators = _GeneratorManager() registered_generators.add("txt", TXTGenerator) registered_generators.add("gcc", GCCGenerator) +registered_generators.add("compiler_args", CompilerArgsGenerator) registered_generators.add("cmake", CMakeGenerator) registered_generators.add("cmake_multi", CMakeMultiGenerator) registered_generators.add("qmake", QmakeGenerator) diff --git a/conans/client/generators/cmake_multi.py b/conans/client/generators/cmake_multi.py index 290e217ff..3853f91dd 100644 --- a/conans/client/generators/cmake_multi.py +++ b/conans/client/generators/cmake_multi.py @@ -1,7 +1,9 @@ -from conans.model import Generator -from conans.client.generators.cmake_common import cmake_dependency_vars, cmake_package_info,\ - cmake_macros_multi, generate_targets_section, cmake_dependencies, cmake_global_vars, cmake_user_info_vars from conans.client.generators.cmake import DepsCppCmake +from conans.client.generators.cmake_common import (cmake_dependency_vars, cmake_package_info, + cmake_macros_multi, generate_targets_section, + cmake_dependencies, cmake_global_vars, + cmake_user_info_vars) +from conans.model import Generator from conans.model.build_info import CppInfo diff --git a/conans/client/generators/compiler_args.py b/conans/client/generators/compiler_args.py new file mode 100644 index 000000000..66bd528ca --- /dev/null +++ b/conans/client/generators/compiler_args.py @@ -0,0 +1,79 @@ +from conans.model import Generator +from conans.paths import BUILD_INFO_COMPILER_ARGS +from conans.client.build.compiler_flags import (architecture_flag, sysroot_flag, + format_defines, format_include_paths, + format_library_paths, format_libraries, + build_type_flag, build_type_define, libcxx_flag, + libcxx_define, rpath_flags, visual_runtime) + + +class CompilerArgsGenerator(Generator): + + @property + def filename(self): + return BUILD_INFO_COMPILER_ARGS + + @property + def compiler(self): + return self.conanfile.settings.get_safe("compiler") + + @property + def content(self): + """With compiler_args you can invoke your compiler: + $ gcc main.c @conanbuildinfo.args -o main + $ clang main.c @conanbuildinfo.args -o main + $ cl /EHsc main.c @conanbuildinfo.args + """ + flags = [] + 
flags.extend(format_defines(self._deps_build_info.defines, compiler=self.compiler)) + flags.extend(format_include_paths(self._deps_build_info.include_paths, compiler=self.compiler)) + + flags.extend(self._deps_build_info.cppflags) + flags.extend(self._deps_build_info.cflags) + + arch_flag = architecture_flag(arch=self.conanfile.settings.get_safe("arch"), compiler=self.compiler) + if arch_flag: + flags.append(arch_flag) + + build_type = self.conanfile.settings.get_safe("build_type") + btf = build_type_flag(compiler=self.compiler, build_type=build_type) + if btf: + flags.append(btf) + btd = build_type_define(build_type=build_type) + if btd: + flags.extend(format_defines([btd], self.compiler)) + + if self.compiler == "Visual Studio": + runtime = visual_runtime(self.conanfile.settings.get_safe("compiler.runtime")) + if runtime: + flags.append(runtime) + # Necessary in the "cl" invocation before specify the rest of linker flags + flags.append("/link") + + the_os = self.conanfile.settings.get_safe("os_build") or \ + self.conanfile.settings.get_safe("os") + flags.extend(rpath_flags(the_os, self.compiler, self._deps_build_info.lib_paths)) + flags.extend(format_library_paths(self._deps_build_info.lib_paths, compiler=self.compiler)) + flags.extend(format_libraries(self._deps_build_info.libs, compiler=self.compiler)) + flags.extend(self._deps_build_info.sharedlinkflags) + flags.extend(self._deps_build_info.exelinkflags) + flags.extend(self._libcxx_flags()) + sysrf = sysroot_flag(self._deps_build_info.sysroot, compiler=self.compiler) + if sysrf: + flags.append(sysrf) + + return " ".join(flag for flag in flags if flag) + + def _libcxx_flags(self): + libcxx = self.conanfile.settings.get_safe("compiler.libcxx") + compiler = self.conanfile.settings.get_safe("compiler") + + lib_flags = [] + if libcxx: + stdlib_define = libcxx_define(compiler=compiler, libcxx=libcxx) + lib_flags.extend(format_defines([stdlib_define], compiler=compiler)) + cxxf = libcxx_flag(compiler=compiler, libcxx=libcxx) + if cxxf: + lib_flags.append(cxxf) + + return lib_flags diff --git a/conans/client/generators/gcc.py b/conans/client/generators/gcc.py index 3bf43b869..d1eeb4608 100644 --- a/conans/client/generators/gcc.py +++ b/conans/client/generators/gcc.py @@ -1,57 +1,13 @@ -from conans.client.build.autotools_environment import architecture_dict, stdlib_flags, \ - stdlib_defines -from conans.model import Generator +from conans.client.generators.compiler_args import CompilerArgsGenerator from conans.paths import BUILD_INFO_GCC -import platform -class GCCGenerator(Generator): +class GCCGenerator(CompilerArgsGenerator): + """Backwards compatibility with 'gcc' generator, there the compiler was fixed to gcc always""" @property def filename(self): return BUILD_INFO_GCC @property - def content(self): - """With gcc_flags you can invoke gcc like that: - $ gcc main.c @conanbuildinfo.gcc -o main - """ - flags = [] - flags.extend(["-D%s" % x for x in self._deps_build_info.defines]) - flags.extend(['-I"%s"' % x.replace("\\", "/") for x in self._deps_build_info.include_paths]) - rpath_separator = "," if platform.system() == "Darwin" else "=" - flags.extend(['-Wl,-rpath%s"%s"' % (rpath_separator, x.replace("\\", "/")) - for x in self._deps_build_info.lib_paths]) # rpaths - flags.extend(['-L"%s"' % x.replace("\\", "/") for x in self._deps_build_info.lib_paths]) - flags.extend(["-l%s" % x for x in self._deps_build_info.libs]) - flags.extend(self._deps_build_info.cppflags) - flags.extend(self._deps_build_info.cflags) - 
flags.extend(self._deps_build_info.sharedlinkflags) - flags.extend(self._deps_build_info.exelinkflags) - flags.extend(self._libcxx_flags()) - if self._deps_build_info.sysroot: - flags.append("--sysroot=%s" % self._deps_build_info.sysroot) - arch = self.conanfile.settings.get_safe("arch") - flags.append(architecture_dict.get(arch, "")) - - build_type = self.conanfile.settings.get_safe("build_type") - if build_type == "Release": - compiler = self.conanfile.settings.get_safe("compiler") - if compiler == "gcc": - flags.append("-s") - flags.append("-DNDEBUG") - elif build_type == "Debug": - flags.append("-g") - - return " ".join(flags) - - def _libcxx_flags(self): - libcxx = self.conanfile.settings.get_safe("compiler.libcxx") - compiler = self.conanfile.settings.get_safe("compiler") - - lib_flags = [] - if libcxx: - lib_flags.extend(["-D%s" % define for define in stdlib_defines(compiler, libcxx)]) - lib_flags.extend(stdlib_flags(compiler, libcxx)) - - return lib_flags - + def compiler(self): + return "gcc" diff --git a/conans/client/manager.py b/conans/client/manager.py index ad3316a6c..146a144be 100644 --- a/conans/client/manager.py +++ b/conans/client/manager.py @@ -237,8 +237,8 @@ class ConanManager(object): conanfile.requires(str(inject_require)) def _get_graph_builder(self, loader, update, remote_proxy): - local_search = self._search_manager - resolver = RequireResolver(self._user_io.out, local_search, remote_proxy, update=update) + local_search = None if update else self._search_manager + resolver = RequireResolver(self._user_io.out, local_search, remote_proxy) graph_builder = DepsGraphBuilder(remote_proxy, self._user_io.out, loader, resolver) return graph_builder diff --git a/conans/client/require_resolver.py b/conans/client/require_resolver.py index e59029363..8b1f99196 100644 --- a/conans/client/require_resolver.py +++ b/conans/client/require_resolver.py @@ -22,11 +22,10 @@ def satisfying(list_versions, versionexpr, output): class RequireResolver(object): - def __init__(self, output, local_search, remote_search, update): + def __init__(self, output, local_search, remote_search): self._output = output self._local_search = local_search self._remote_search = remote_search - self._update = update def resolve(self, require, base_conanref): version_range = require.version_range @@ -49,24 +48,20 @@ class RequireResolver(object): ref = require.conan_reference # The search pattern must be a string search_ref = str(ConanFileReference(ref.name, "*", ref.user, ref.channel)) - - if self._update: - searches = (self._resolve_remote, self._resolve_local) - else: - searches = (self._resolve_local, self._resolve_remote) - - for fcn in searches: - resolved = fcn(search_ref, version_range) - if resolved: - break + resolved = self._resolve_local(search_ref, version_range) + if not resolved: + # We should use ignorecase=False, we want the exact case! 
+ remote_found = self._remote_search.search_remotes(search_ref, ignorecase=False) + if remote_found: + resolved = self._resolve_version(version_range, remote_found) if resolved: self._output.success("Version range '%s' required by '%s' resolved to '%s'" % (version_range, base_conanref, str(resolved))) require.conan_reference = resolved else: - raise ConanException("The version in '%s' from requirement '%s' could not be resolved" - % (version_range, require)) + raise ConanException( + "The version in '%s' from requirement '%s' could not be resolved" % (version_range, require)) def _resolve_local(self, search_ref, version_range): if self._local_search: @@ -76,12 +71,6 @@ class RequireResolver(object): if resolved_version: return resolved_version - def _resolve_remote(self, search_ref, version_range): - # We should use ignorecase=False, we want the exact case! - remote_found = self._remote_search.search_remotes(search_ref, ignorecase=False) - if remote_found: - return self._resolve_version(version_range, remote_found) - def _resolve_version(self, version_range, local_found): versions = {ref.version: ref for ref in local_found} result = satisfying(versions, version_range, self._output) diff --git a/conans/client/tools/__init__.py b/conans/client/tools/__init__.py index c1f321c38..d5577b118 100644 --- a/conans/client/tools/__init__.py +++ b/conans/client/tools/__init__.py @@ -12,4 +12,4 @@ from .system_pm import * # noinspection PyUnresolvedReferences from .win import * # noinspection PyUnresolvedReferences -from .pkg_config import * \ No newline at end of file +from .pkg_config import * diff --git a/conans/client/tools/win.py b/conans/client/tools/win.py index b29e77e85..99ea22167 100644 --- a/conans/client/tools/win.py +++ b/conans/client/tools/win.py @@ -110,7 +110,7 @@ def vswhere(all_=False, prerelease=False, products=None, requires=None, version= arguments = list() arguments.append(vswhere_path) - + # Output json format arguments.append("-format") arguments.append("json") @@ -148,7 +148,7 @@ def vswhere(all_=False, prerelease=False, products=None, requires=None, version= try: output = subprocess.check_output(arguments) - vswhere_out = decode_text(output).strip() + vswhere_out = output.decode().strip() except (ValueError, subprocess.CalledProcessError, UnicodeDecodeError) as e: raise ConanException("vswhere error: %s" % str(e)) @@ -229,7 +229,7 @@ def vcvars_command(settings, arch=None, compiler_version=None, force=False): "and vswhere didn't find it" % env_var) if not os.path.isdir(vs_path): _global_output.warn('VS variable %s points to the non-existing path "%s",' - 'please check that you have set it correctly' % (env_var, vs_path)) + 'please check that you have set it correctly' % (env_var, vs_path)) vcvars_path = os.path.join(vs_path, "../../VC/Auxiliary/Build/vcvarsall.bat") command = ('set "VSCMD_START_DIR=%%CD%%" && ' 'call "%s" %s' % (vcvars_path, vcvars_arch)) @@ -240,7 +240,7 @@ def vcvars_command(settings, arch=None, compiler_version=None, force=False): raise ConanException("VS '%s' variable not defined. 
Please install VS" % env_var) if not os.path.isdir(vs_path): _global_output.warn('VS variable %s points to the non-existing path "%s",' - 'please check that you have set it correctly' % (env_var, vs_path)) + 'please check that you have set it correctly' % (env_var, vs_path)) vcvars_path = os.path.join(vs_path, "../../VC/vcvarsall.bat") command = ('call "%s" %s' % (vcvars_path, vcvars_arch)) @@ -321,7 +321,6 @@ def get_cased_path(name): return None return res[0] - MSYS2 = 'msys2' MSYS = 'msys' CYGWIN = 'cygwin' @@ -334,6 +333,7 @@ def unix_path(path, path_flavor=None): c/users/path/to/file. Not working in a regular console or MinGW!""" if not path: return None + from conans.client.tools.oss import os_info if os.path.exists(path): path = get_cased_path(path) # if the path doesn't exist (and abs) we cannot guess the casing diff --git a/conans/paths.py b/conans/paths.py index 973410b70..1e8719966 100644 --- a/conans/paths.py +++ b/conans/paths.py @@ -28,6 +28,7 @@ CONANFILE_TXT = "conanfile.txt" CONAN_MANIFEST = "conanmanifest.txt" BUILD_INFO = 'conanbuildinfo.txt' BUILD_INFO_GCC = 'conanbuildinfo.gcc' +BUILD_INFO_COMPILER_ARGS = 'conanbuildinfo.args' BUILD_INFO_CMAKE = 'conanbuildinfo.cmake' BUILD_INFO_QMAKE = 'conanbuildinfo.pri' BUILD_INFO_QBS = 'conanbuildinfo.qbs' diff --git a/conans/tools.py b/conans/tools.py index 3289293a8..45d360092 100644 --- a/conans/tools.py +++ b/conans/tools.py @@ -5,8 +5,6 @@ import requests from conans.client.tools import * from conans.client.output import ConanOutput # noinspection PyUnresolvedReferences -from conans.util.env_reader import get_env -# noinspection PyUnresolvedReferences from conans.util.files import (_generic_algorithm_sum, load, save, sha256sum, sha1sum, md5sum, md5, touch, relative_dirs, rmdir, mkdir) diff --git a/conans/util/env_reader.py b/conans/util/env_reader.py index cd6f77bb8..8628843c8 100644 --- a/conans/util/env_reader.py +++ b/conans/util/env_reader.py @@ -9,11 +9,8 @@ import os -def get_env(env_key, default=None, environment=None): +def get_env(env_key, default=None, environment=os.environ): """Get the env variable associated with env_key""" - if environment is None: - environment = os.environ - env_var = environment.get(env_key, default) if env_var != default: if isinstance(default, str):
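For reference, a small sketch of how the per-compiler formatting helpers added in `conans/client/build/compiler_flags.py` above behave; it assumes a conan checkout that already contains that module from this diff, and the define, include path, and library names are invented for illustration.

```python
# Illustration only: exercises helpers introduced by the patch above.
from conans.client.build.compiler_flags import (architecture_flag, format_defines,
                                                format_include_paths, format_libraries)

for compiler in ("gcc", "Visual Studio"):
    flags = []
    flags.extend(format_defines(["NDEBUG"], compiler))             # -DNDEBUG vs /DNDEBUG
    flags.extend(format_include_paths(["path/includes"], compiler=compiler))
    arch = architecture_flag(compiler=compiler, arch="x86_64")     # "-m64" for gcc; "" for
    if arch:                                                       # MSVC, which has no such flag
        flags.append(arch)
    flags.extend(format_libraries(["onelib"], compiler=compiler))  # -lonelib vs onelib.lib
    print(compiler, "->", " ".join(flags))

# Expected, derived from the helper implementations in the diff:
#   gcc -> -DNDEBUG -Ipath/includes -m64 -lonelib
#   Visual Studio -> /DNDEBUG /Ipath\includes onelib.lib
```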
"gcc" generator could be generalised to any compiler even "cl" The `gcc` generator could be a `compiler_args` generator (not breaking), even managing flags for compiling in command line using the `cl` compiler. ``` cl /ehSC main.cpp @conanbuildinfo.args gcc main.cpp -o main @conanbuildinfo.args clang main.cpp -o main @conanbuildinfo.args ```
conan-io/conan
diff --git a/conans/test/build_helpers/autotools_configure_test.py b/conans/test/build_helpers/autotools_configure_test.py
index d4ecf2233..3564b51ae 100644
--- a/conans/test/build_helpers/autotools_configure_test.py
+++ b/conans/test/build_helpers/autotools_configure_test.py
@@ -55,13 +55,12 @@ class AutoToolsConfigureTest(unittest.TestCase):
         be = AutoToolsBuildEnvironment(conanfile)
         expected = {'CFLAGS': 'a_c_flag',
-                    'CPPFLAGS': '-Ipath\\includes -Iother\\include\\path -Donedefinition -Dtwodefinition',
-                    'CXXFLAGS': 'a_c_flag a_cpp_flag',
-                    'LDFLAGS': 'shared_link_flag exe_link_flag /LIBPATH:one\\lib\\path',
-                    'LIBS': 'onelib twolib'}
+                    'CPPFLAGS': '/Ipath\\includes /Iother\\include\\path /Donedefinition /Dtwodefinition /DNDEBUG',
+                    'CXXFLAGS': 'a_c_flag a_cpp_flag',
+                    'LDFLAGS': 'shared_link_flag exe_link_flag /LIBPATH:one\\lib\\path',
+                    'LIBS': 'onelib.lib twolib.lib'}
         self.assertEquals(be.vars, expected)

-        # GCC 32
         settings = MockSettings({"build_type": "Release",
                                  "arch": "x86",
@@ -107,7 +106,7 @@ class AutoToolsConfigureTest(unittest.TestCase):
         self._set_deps_info(conanfile)
         expected = {'CFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder',
                     'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition'
-                                ' -D_GLIBCXX_USE_CXX11_ABI=0',
+                                ' -DNDEBUG -D_GLIBCXX_USE_CXX11_ABI=0',
                     'CXXFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder a_cpp_flag -stdlib=libstdc++',
                     'LDFLAGS': 'shared_link_flag exe_link_flag -m64 --sysroot=/path/to/folder -Lone/lib/path',
                     'LIBS': '-lonelib -ltwolib'}
@@ -123,7 +122,7 @@ class AutoToolsConfigureTest(unittest.TestCase):
         conanfile.settings = settings
         self._set_deps_info(conanfile)
         expected = {'CFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder',
-                    'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition',
+                    'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition -DNDEBUG',
                     'CXXFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder a_cpp_flag -stdlib=libc++',
                     'LDFLAGS': 'shared_link_flag exe_link_flag -m64 --sysroot=/path/to/folder -Lone/lib/path',
                     'LIBS': '-lonelib -ltwolib'}
@@ -156,7 +155,7 @@ class AutoToolsConfigureTest(unittest.TestCase):
         conanfile.settings = settings
         self._set_deps_info(conanfile)
         expected = {'CFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder',
-                    'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition',
+                    'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition -DNDEBUG',
                     'CXXFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder a_cpp_flag -library=Cstd',
                     'LDFLAGS': 'shared_link_flag exe_link_flag -m64 --sysroot=/path/to/folder -Lone/lib/path',
                     'LIBS': '-lonelib -ltwolib'}
@@ -171,7 +170,7 @@ class AutoToolsConfigureTest(unittest.TestCase):
         conanfile.settings = settings
         self._set_deps_info(conanfile)
         expected = {'CFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder',
-                    'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition',
+                    'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition -DNDEBUG',
                     'CXXFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder a_cpp_flag -library=stdcxx4',
                     'LDFLAGS': 'shared_link_flag exe_link_flag -m64 --sysroot=/path/to/folder -Lone/lib/path',
                     'LIBS': '-lonelib -ltwolib'}
@@ -186,7 +185,7 @@ class AutoToolsConfigureTest(unittest.TestCase):
         conanfile.settings = settings
         self._set_deps_info(conanfile)
         expected = {'CFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder',
-                    'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition',
+                    'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition -DNDEBUG',
                     'CXXFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder a_cpp_flag -library=stlport4',
                     'LDFLAGS': 'shared_link_flag exe_link_flag -m64 --sysroot=/path/to/folder -Lone/lib/path',
                     'LIBS': '-lonelib -ltwolib'}
@@ -201,13 +200,32 @@ class AutoToolsConfigureTest(unittest.TestCase):
         conanfile.settings = settings
         self._set_deps_info(conanfile)
         expected = {'CFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder',
-                    'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition',
+                    'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition -DNDEBUG',
                     'CXXFLAGS': 'a_c_flag -m64 --sysroot=/path/to/folder a_cpp_flag -library=stdcpp',
                     'LDFLAGS': 'shared_link_flag exe_link_flag -m64 --sysroot=/path/to/folder -Lone/lib/path',
                     'LIBS': '-lonelib -ltwolib'}
         be = AutoToolsBuildEnvironment(conanfile)
         self.assertEquals(be.vars, expected)

+    def rpath_optin_test(self):
+        settings = MockSettings({"os_build": "Linux",
+                                 "build_type": "Release",
+                                 "arch": "x86_64",
+                                 "compiler": "gcc",
+                                 "compiler.libcxx": "libstdc++11"})
+        conanfile = MockConanfile(settings)
+        conanfile.settings = settings
+        self._set_deps_info(conanfile)
+        expected = {'CFLAGS': 'a_c_flag -m64 -s --sysroot=/path/to/folder',
+                    'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -Dtwodefinition -DNDEBUG '
+                                '-D_GLIBCXX_USE_CXX11_ABI=1',
+                    'CXXFLAGS': 'a_c_flag -m64 -s --sysroot=/path/to/folder a_cpp_flag',
+                    'LDFLAGS': 'shared_link_flag exe_link_flag -m64 --sysroot=/path/to/folder '
+                               '-Wl,-rpath="one/lib/path" -Lone/lib/path',
+                    'LIBS': '-lonelib -ltwolib'}
+        be = AutoToolsBuildEnvironment(conanfile, include_rpath_flags=True)
+        self.assertEquals(be.vars, expected)
+
     def environment_append_test(self):
         settings = MockSettings({"build_type": "Debug",
                                  "arch": "x86_64",
@@ -225,11 +243,11 @@ class AutoToolsConfigureTest(unittest.TestCase):
         with(tools.environment_append(env_vars)):
             be = AutoToolsBuildEnvironment(conanfile)
             expected = {'CPPFLAGS': '-Ipath/includes -Iother/include/path -Donedefinition -'
-                        'Dtwodefinition -D_GLIBCXX_USE_CXX11_ABI=0 -additionalcppflag',
-                        'CXXFLAGS': 'a_c_flag -m64 -g --sysroot=/path/to/folder a_cpp_flag -additionalcxxflag',
-                        'LIBS': '-lonelib -ltwolib -additionallibs',
+                                    'Dtwodefinition -D_GLIBCXX_USE_CXX11_ABI=0 -additionalcppflag',
+                        'CXXFLAGS': 'a_c_flag -m64 -g --sysroot=/path/to/folder a_cpp_flag -additionalcxxflag',
+                        'LIBS': '-lonelib -ltwolib -additionallibs',
                         'LDFLAGS': 'shared_link_flag exe_link_flag -m64 '
-                        '--sysroot=/path/to/folder -Lone/lib/path -additionalldflag',
+                                   '--sysroot=/path/to/folder -Lone/lib/path -additionalldflag',
                         'CFLAGS': 'a_c_flag -m64 -g --sysroot=/path/to/folder -additionalcflag'}
         self.assertEquals(be.vars, expected)
diff --git a/conans/test/build_helpers/cmake_test.py b/conans/test/build_helpers/cmake_test.py
index 4c418f83c..d46dfcd65 100644
--- a/conans/test/build_helpers/cmake_test.py
+++ b/conans/test/build_helpers/cmake_test.py
@@ -180,7 +180,7 @@ class CMakeTest(unittest.TestCase):
         conan_file = ConanFileMock()
         conan_file.settings = settings

-        def check(text, build_config, generator=None):
+        def check(text, build_config, generator=None, set_cmake_flags=False):
            os = str(settings.os)
            os_ver = str(settings.os.version) if settings.get_safe('os.version') else None
            for cmake_system_name in (True, False):
@@ -188,7 +188,8 @@ class CMakeTest(unittest.TestCase):
                cross = ("-DCMAKE_SYSTEM_NAME=\"%s\" %s-DCMAKE_SYSROOT=\"/path/to/sysroot\" "
                         % ({"Macos": "Darwin"}.get(os, os), cross_ver)
                         if (platform.system() != os and cmake_system_name) else "")
-               cmake = CMake(conan_file, generator=generator, cmake_system_name=cmake_system_name)
+               cmake = CMake(conan_file, generator=generator, cmake_system_name=cmake_system_name,
+                             set_cmake_flags=set_cmake_flags)
                new_text = text.replace("-DCONAN_EXPORTED", "%s-DCONAN_EXPORTED" % cross)
                if "Visual Studio" in text:
                    cores = ('-DCONAN_CXX_FLAGS="/MP{0}" '
@@ -205,6 +206,10 @@ class CMakeTest(unittest.TestCase):
               '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev',
               '', generator="Custom Generator")

+        check('-G "Custom Generator" -DCONAN_EXPORTED="1" '
+              '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev',
+              '', generator="Custom Generator", set_cmake_flags=True)
+
         settings.build_type = "Debug"
         check('-G "Visual Studio 12 2013" -DCONAN_EXPORTED="1" '
               '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev',
@@ -218,10 +223,9 @@ class CMakeTest(unittest.TestCase):
         settings.compiler = "gcc"
         settings.compiler.version = "4.8"
         generator = "MinGW Makefiles" if platform.system() == "Windows" else "Unix Makefiles"
-        check('-G "%s" -DCMAKE_BUILD_TYPE="Debug" -DCONAN_EXPORTED="1" -DCONAN_COMPILER="gcc" '
-              '-DCONAN_COMPILER_VERSION="4.8" -DCONAN_CXX_FLAGS="-m64" '
-              '-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" '
-              '-Wno-dev' % generator, "")
+        check('-G "%s" -DCMAKE_BUILD_TYPE="Debug" -DCONAN_EXPORTED="1" '
+              '-DCONAN_COMPILER="gcc" -DCONAN_COMPILER_VERSION="4.8" -DCONAN_CXX_FLAGS="-m64" '
+              '-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" -Wno-dev' % generator, "")

         settings.os = "Linux"
         settings.arch = "x86"
@@ -238,6 +242,14 @@ class CMakeTest(unittest.TestCase):
               '-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" -Wno-dev' % generator,
               "")

+        check('-G "%s" -DCMAKE_BUILD_TYPE="Debug"'
+              ' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="gcc" '
+              '-DCONAN_COMPILER_VERSION="4.8" -DCONAN_CXX_FLAGS="-m64" '
+              '-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" '
+              '-DCMAKE_CXX_FLAGS="-m64" -DCMAKE_SHARED_LINKER_FLAGS="-m64" -DCMAKE_C_FLAGS="-m64" '
+              '-Wno-dev' % generator,
+              "", set_cmake_flags=True)
+
         settings.os = "FreeBSD"
         settings.compiler = "clang"
         settings.compiler.version = "3.8"
diff --git a/conans/test/build_helpers/compiler_flags_test.py b/conans/test/build_helpers/compiler_flags_test.py
new file mode 100644
index 000000000..629508d9a
--- /dev/null
+++ b/conans/test/build_helpers/compiler_flags_test.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import unittest
+import platform
+from nose.plugins.attrib import attr
+
+from conans.client.build.compiler_flags import architecture_flag, libcxx_flag, libcxx_define, \
+    pic_flag, build_type_flag, build_type_define, adjust_path, sysroot_flag, format_defines, \
+    format_include_paths, format_library_paths, format_libraries
+
+
+class CompilerFlagsTest(unittest.TestCase):
+
+    def test_arch_flag(self):
+        for compiler in ("gcc", "clang", "sun-cc"):
+            arch_flag = architecture_flag(arch='x86', compiler=compiler)
+            self.assertEquals(arch_flag, '-m32')
+
+        arch_flag = architecture_flag(arch='sparc', compiler='sun-cc')
+        self.assertEquals(arch_flag, '-m32')
+
+        for compiler in ("gcc", "clang", "sun-cc"):
+            arch_flag = architecture_flag(arch='x86_64', compiler=compiler)
+            self.assertEquals(arch_flag, '-m64')
+
+        arch_flag = architecture_flag(arch='sparcv9', compiler='sun-cc')
+        self.assertEquals(arch_flag, '-m64')
+
+        for compiler in ("gcc", "clang", "sun-cc"):
+            arch_flag = architecture_flag(arch='armv7', compiler=compiler)
+            self.assertEquals(arch_flag, '')
+
+        arch_flag = architecture_flag(arch='x86', compiler='Visual Studio')
+        self.assertEquals(arch_flag, '')
+
+        arch_flag = architecture_flag(arch='x86_64', compiler='Visual Studio')
+        self.assertEquals(arch_flag, '')
+
+    def test_libcxx_flags(self):
+        arch_define = libcxx_define(compiler='gcc', libcxx='libstdc++')
+        self.assertEquals(arch_define, '_GLIBCXX_USE_CXX11_ABI=0')
+
+        arch_define = libcxx_define(compiler='gcc', libcxx='libstdc++11')
+        self.assertEquals(arch_define, '_GLIBCXX_USE_CXX11_ABI=1')
+
+        arch_flags = libcxx_flag(compiler='clang', libcxx='libc++')
+        self.assertEquals(arch_flags, '-stdlib=libc++')
+
+        arch_flags = libcxx_flag(compiler='clang', libcxx='libstdc++')
+        self.assertEquals(arch_flags, '-stdlib=libstdc++')
+
+        arch_define = libcxx_define(compiler='clang', libcxx='libstdc++')
+        self.assertEquals(arch_define, '_GLIBCXX_USE_CXX11_ABI=0')
+
+        arch_flags = libcxx_flag(compiler='clang', libcxx='libstdc++')
+        self.assertEquals(arch_flags, '-stdlib=libstdc++')
+        arch_define = libcxx_define(compiler='clang', libcxx='libstdc++')
+        self.assertEquals(arch_define, '_GLIBCXX_USE_CXX11_ABI=0')
+
+        arch_flags = libcxx_flag(compiler='apple-clang', libcxx='libc++')
+        self.assertEquals(arch_flags, '-stdlib=libc++')
+
+        arch_flags = libcxx_flag(compiler='Visual Studio', libcxx=None)
+        self.assertEquals(arch_flags, "")
+
+        arch_flags = libcxx_flag(compiler='sun-cc', libcxx='libCstd')
+        self.assertEquals(arch_flags, '-library=Cstd')
+
+        arch_flags = libcxx_flag(compiler='sun-cc', libcxx='libstdcxx')
+        self.assertEquals(arch_flags, '-library=stdcxx4')
+
+        arch_flags = libcxx_flag(compiler='sun-cc', libcxx='libstlport')
+        self.assertEquals(arch_flags, '-library=stlport4')
+
+        arch_flags = libcxx_flag(compiler='sun-cc', libcxx='libstdc++')
+        self.assertEquals(arch_flags, '-library=stdcpp')
+
+    def test_pic_flags(self):
+        flag = pic_flag()
+        self.assertEquals(flag, '')
+
+        flags = pic_flag(compiler='gcc')
+        self.assertEquals(flags, '-fPIC')
+
+        flags = pic_flag(compiler='Visual Studio')
+        self.assertEquals(flags, "")
+
+    def test_build_type_flags(self):
+        flags = build_type_flag(compiler='Visual Studio', build_type='Debug')
+        self.assertEquals(flags, '/Zi')
+
+        flags = build_type_flag(compiler='Visual Studio', build_type='Release')
+        self.assertEquals(flags, "")
+
+        flags = build_type_flag(compiler='gcc', build_type='Debug')
+        self.assertEquals(flags, '-g')
+
+        flags = build_type_flag(compiler='gcc', build_type='Release')
+        self.assertEquals(flags, '-s')
+        define = build_type_define(build_type='Release')
+        self.assertEquals(define, 'NDEBUG')
+
+        flags = build_type_flag(compiler='clang', build_type='Debug')
+        self.assertEquals(flags, '-g')
+
+        flags = build_type_flag(compiler='clang', build_type='Release')
+        self.assertEquals(flags, '')
+
+    def test_adjust_path(self):
+        self.assertEquals('home/www', adjust_path('home\\www'))
+        self.assertEquals('home/www', adjust_path('home\\www', compiler='gcc'))
+
+        self.assertEquals('"home/www root"', adjust_path('home\\www root'))
+        self.assertEquals('"home/www root"', adjust_path('home\\www root', compiler='gcc'))
+
+    @attr('visual_studio')
+    def test_adjust_path_visual_studio(self):
+        #  NOTE : test cannot be run on *nix systems, as adjust_path uses
+        # tools.unix_path which is Windows-only
+        if platform.system() != "Windows":
+            return
+        self.assertEquals('home\\www', adjust_path('home/www', compiler='Visual Studio'))
+        self.assertEquals('"home\\www root"',
+                          adjust_path('home/www root', compiler='Visual Studio'))
+        self.assertEquals('home/www',
+                          adjust_path('home\\www', compiler='Visual Studio', win_bash=True))
+        self.assertEquals('home/www',
+                          adjust_path('home/www', compiler='Visual Studio', win_bash=True))
+        self.assertEquals('"home/www root"',
+                          adjust_path('home\\www root', compiler='Visual Studio', win_bash=True))
+        self.assertEquals('"home/www root"',
+                          adjust_path('home/www root', compiler='Visual Studio', win_bash=True))
+
+    def test_sysroot_flag(self):
+        sysroot = sysroot_flag(sysroot=None)
+        self.assertEquals(sysroot, "")
+
+        sysroot = sysroot_flag(sysroot='sys/root', compiler='Visual Studio')
+        self.assertEquals(sysroot, "")
+
+        sysroot = sysroot_flag(sysroot='sys/root')
+        self.assertEquals(sysroot, "--sysroot=sys/root")
+
+    def test_format_defines(self):
+        self.assertEquals(['-DFOO', '-DBAR=1'], format_defines(['FOO', 'BAR=1'], "gcc"))
+
+    def test_format_include_paths(self):
+        self.assertEquals(['-Ipath1', '-I"with spaces"'], format_include_paths(['path1', 'with spaces']))
+
+    def test_format_library_paths(self):
+        self.assertEquals(['-Lpath1', '-L"with spaces"'], format_library_paths(['path1', 'with spaces']))
+        self.assertEquals(['/LIBPATH:path1', '/LIBPATH:"with spaces"'],
+                          format_library_paths(['path1', 'with spaces'], compiler='Visual Studio'))
+
+    def test_format_libraries(self):
+        self.assertEquals(['-llib1', '-llib2'], format_libraries(['lib1', 'lib2']))
+        self.assertEquals(['lib1.lib', 'lib2.lib'], format_libraries(['lib1', 'lib2'],
+                                                                     compiler='Visual Studio'))
diff --git a/conans/test/generators/compiler_args_test.py b/conans/test/generators/compiler_args_test.py
new file mode 100644
index 000000000..3db09256a
--- /dev/null
+++ b/conans/test/generators/compiler_args_test.py
@@ -0,0 +1,114 @@
+import unittest
+
+from conans.client.conf import default_settings_yml
+from conans.client.generators.compiler_args import CompilerArgsGenerator
+from conans.client.generators.gcc import GCCGenerator
+from conans.model.build_info import DepsCppInfo, CppInfo
+from conans.model.env_info import DepsEnvInfo, EnvInfo
+from conans.model.settings import Settings
+from conans.model.user_info import DepsUserInfo
+from conans.test.build_helpers.cmake_test import ConanFileMock
+
+
+class CompilerArgsTest(unittest.TestCase):
+
+    def _get_conanfile(self, settings):
+        conan_file = ConanFileMock()
+        conan_file.settings = settings
+        conan_file.source_folder = "my_cache_source_folder"
+        conan_file.build_folder = "my_cache_build_folder"
+        conan_file.deps_env_info = DepsEnvInfo()
+        conan_file.deps_user_info = DepsUserInfo()
+        conan_file.deps_cpp_info = DepsCppInfo()
+        cpp_info = CppInfo("/root")
+        cpp_info.include_paths.append("path/to/include1")
+        cpp_info.lib_paths.append("path/to/lib1")
+        cpp_info.libs.append("mylib")
+        cpp_info.bindirs = "path/to/bin1"
+        cpp_info.cflags.append("c_flag1")
+        cpp_info.cppflags.append("cxx_flag1")
+        cpp_info.defines.append("mydefine1")
+
+        conan_file.deps_cpp_info.update(cpp_info, "zlib")
+        conan_file.env_info = EnvInfo()
+        return conan_file
+
+
+    def gcc_test(self):
+        settings = Settings.loads(default_settings_yml)
+        settings.os = "Linux"
+        settings.compiler = "gcc"
+        settings.compiler.version = "6.3"
+        settings.arch = "x86"
+        settings.build_type = "Release"
+
+        conan_file = self._get_conanfile(settings)
+        gcc = GCCGenerator(conan_file)
+        self.assertEquals('-Dmydefine1 -Ipath/to/include1 cxx_flag1 c_flag1 -m32 -s -DNDEBUG '
+                          '-Wl,-rpath="path/to/lib1" '
+                          '-Lpath/to/lib1 -lmylib', gcc.content)
+
+        settings.arch = "x86_64"
+        settings.build_type = "Debug"
+        settings.compiler.libcxx = "libstdc++11"
+
+        gcc = GCCGenerator(conan_file)
+        self.assertEquals('-Dmydefine1 -Ipath/to/include1 cxx_flag1 c_flag1 -m64 -g '
+                          '-Wl,-rpath="path/to/lib1" -Lpath/to/lib1 -lmylib '
+                          '-D_GLIBCXX_USE_CXX11_ABI=1',
+                          gcc.content)
+
+        settings.compiler.libcxx = "libstdc++"
+        gcc = GCCGenerator(conan_file)
+        self.assertEquals('-Dmydefine1 -Ipath/to/include1 cxx_flag1 c_flag1 -m64 -g '
+                          '-Wl,-rpath="path/to/lib1" -Lpath/to/lib1 -lmylib '
+                          '-D_GLIBCXX_USE_CXX11_ABI=0',
+                          gcc.content)
+
+        settings.os = "Windows"
+        settings.compiler = "Visual Studio"
+        settings.compiler.version = "15"
+        settings.arch = "x86"
+        settings.build_type = "Release"
+        gcc = GCCGenerator(conan_file)
+        # GCC generator ignores the compiler setting, it is always gcc
+        self.assertEquals('-Dmydefine1 -Ipath/to/include1 cxx_flag1 c_flag1 -m32 -s '
+                          '-DNDEBUG -Wl,-rpath="path/to/lib1" -Lpath/to/lib1 -lmylib', gcc.content)
+
+    def compiler_args_test(self):
+        settings = Settings.loads(default_settings_yml)
+        settings.os = "Windows"
+        settings.compiler = "Visual Studio"
+        settings.compiler.version = "15"
+        settings.arch = "x86"
+        settings.build_type = "Release"
+
+        conan_file = self._get_conanfile(settings)
+        gen = CompilerArgsGenerator(conan_file)
+        self.assertEquals('/Dmydefine1 /Ipath\\to\\include1 cxx_flag1 c_flag1 /DNDEBUG '
+                          '/link /LIBPATH:path\\to\\lib1 mylib.lib', gen.content)
+
+        settings = Settings.loads(default_settings_yml)
+        settings.os = "Macos"
+        settings.compiler = "apple-clang"
+        settings.compiler.version = "9.0"
+        settings.arch = "x86"
+        settings.build_type = "Release"
+        conan_file = self._get_conanfile(settings)
+        gen = CompilerArgsGenerator(conan_file)
+        self.assertEquals('-Dmydefine1 -Ipath/to/include1 cxx_flag1 c_flag1 -m32 -DNDEBUG '
+                          '-Wl,-rpath,"path/to/lib1" -Lpath/to/lib1 -lmylib', gen.content)
+
+        settings = Settings.loads(default_settings_yml)
+        settings.os = "Linux"
+        settings.os_build = "Macos"
+        settings.compiler = "apple-clang"
+        settings.compiler.version = "9.0"
+        settings.arch = "x86"
+        settings.build_type = "Release"
+
+        conan_file = self._get_conanfile(settings)
+        args = CompilerArgsGenerator(conan_file)
+        self.assertEquals('-Dmydefine1 -Ipath/to/include1 cxx_flag1 c_flag1 -m32 -DNDEBUG '
+                          '-Wl,-rpath,"path/to/lib1" '
+                          '-Lpath/to/lib1 -lmylib', args.content)
diff --git a/conans/test/integration/version_range_error_test.py b/conans/test/integration/version_range_error_test.py
index ae48a5dc1..b05d20edb 100644
--- a/conans/test/integration/version_range_error_test.py
+++ b/conans/test/integration/version_range_error_test.py
@@ -12,23 +12,3 @@ class VersionRangesErrorTest(unittest.TestCase):
         error = client.run("install . --build", ignore_error=True)
         self.assertTrue(error)
         self.assertIn("from requirement 'MyOtherPkg/[~0.1]@user/testing'", client.user_io.out)
-
-    def werror_fail_test(self):
-        client = TestClient()
-
-        def add(name, version, requires=None):
-            conanfile = TestConanFile(name, version, requires=requires)
-            client.save({CONANFILE: str(conanfile)})
-            client.run("export . user/testing")
-
-        add("MyPkg1", "0.1.0")
-        add("MyPkg1", "0.2.0")
-        add("MyPkg2", "0.1", ["MyPkg1/[~0.1]@user/testing"])
-        add("MyPkg3", "0.1", ["MyPkg1/[~0.2]@user/testing", "MyPkg2/[~0.1]@user/testing"])
-
-        error = client.run("install . --build", ignore_error=True)
-        self.assertTrue(error)
-        self.assertNotIn("WARN: Version range '~0.1' required", client.out)
-        self.assertIn("ERROR: Version range '~0.1' required by 'MyPkg2/0.1@user/testing' "
-                      "not valid for downstream requirement 'MyPkg1/0.2.0@user/testing'",
-                      client.out)
diff --git a/conans/test/integration/version_ranges_conflict_test.py b/conans/test/integration/version_ranges_conflict_test.py
new file mode 100644
index 000000000..9d68de64a
--- /dev/null
+++ b/conans/test/integration/version_ranges_conflict_test.py
@@ -0,0 +1,27 @@
+import unittest
+from conans.test.utils.tools import TestClient
+from conans.paths import CONANFILE
+from conans.test.utils.conanfile import TestConanFile
+
+
+class VersionRangesConflictTest(unittest.TestCase):
+
+    def setUp(self):
+        self.client = TestClient()
+
+        def add(name, version, requires=None):
+            conanfile = TestConanFile(name, version, requires=requires)
+            self.client.save({CONANFILE: str(conanfile)})
+            self.client.run("export . user/testing")
+
+        add("MyPkg1", "0.1.0")
+        add("MyPkg1", "0.2.0")
+        add("MyPkg2", "0.1", ["MyPkg1/[~0.1]@user/testing"])
+        add("MyPkg3", "0.1", ["MyPkg1/[~0.2]@user/testing", "MyPkg2/[~0.1]@user/testing"])
+
+    def werror_fail_test(self):
+        error = self.client.run("install . --build", ignore_error=True)
+        self.assertTrue(error)
+        self.assertNotIn("WARN: Version range '~0.1' required", self.client.user_io.out)
+        self.assertIn("ERROR: Version range '~0.1' required by 'MyPkg2/0.1@user/testing' "
+                      "not valid for downstream requirement 'MyPkg1/0.2.0@user/testing'",
+                      self.client.user_io.out)
diff --git a/conans/test/integration/version_ranges_diamond_test.py b/conans/test/integration/version_ranges_diamond_test.py
index 29a86264e..acd72efd9 100644
--- a/conans/test/integration/version_ranges_diamond_test.py
+++ b/conans/test/integration/version_ranges_diamond_test.py
@@ -1,96 +1,9 @@
 import unittest
 from conans.test.utils.tools import TestClient, TestServer
 from conans.paths import CONANFILE
-from conans.util.files import load, save
+from conans.util.files import load
 import os
 from nose_parameterized import parameterized
-from conans.model.ref import ConanFileReference
-
-
-class VersionRangesUpdatingTest(unittest.TestCase):
-
-    def update_test(self):
-        client = TestClient(servers={"default": TestServer()},
-                            users={"default": [("lasote", "mypass")]})
-        conanfile = """from conans import ConanFile
-class HelloReuseConan(ConanFile):
-    pass
-"""
-        client.save({"conanfile.py": conanfile})
-        client.run("create . Pkg/1.1@lasote/testing")
-        client.run("create . Pkg/1.2@lasote/testing")
-        client.run("upload Pkg* -r=default --all --confirm")
-        client.run("remove Pkg/1.2@lasote/testing -f")
-        conanfile = """from conans import ConanFile
-class HelloReuseConan(ConanFile):
-    requires = "Pkg/[~1]@lasote/testing"
-"""
-        client.save({"conanfile.py": conanfile})
-        client.run("install .")
-        # Resolves to local package
-        self.assertIn("Pkg/1.1@lasote/testing: Already installed!", client.out)
-        client.run("install . --update")
-        # Resolves to remote package
-        self.assertIn("Pkg/1.2@lasote/testing: Package installed", client.out)
-        self.assertNotIn("Pkg/1.1", client.out)
-
-        # removes remote
-        client.run("remove Pkg* -r=default --f")
-        # Resolves to local package
-        client.run("install .")
-        self.assertIn("Pkg/1.2@lasote/testing: Already installed!", client.out)
-        # Update also resolves to local package
-        client.run("install . --update")
-        self.assertIn("Pkg/1.2@lasote/testing: Already installed!", client.out)
-        self.assertNotIn("Pkg/1.1", client.out)
-
-    def update_pkg_test(self):
-        server = TestServer()
-        client = TestClient(servers={"default": server},
-                            users={"default": [("lasote", "mypass")]})
-        conanfile = """from conans import ConanFile
-class HelloReuseConan(ConanFile):
-    def package_info(self):
-        self.output.info("PACKAGE_INFO {}")
-"""
-        client.save({"conanfile.py": conanfile.format("1.1")})
-        client.run("create . Pkg/1.1@lasote/testing")
-        client.save({"conanfile.py": conanfile.format("1.2")})
-        client.run("create . Pkg/1.2@lasote/testing")
-        client.run("upload Pkg* -r=default --all --confirm")
-        consumer = """from conans import ConanFile
-class HelloReuseConan(ConanFile):
-    requires = "Pkg/[~1]@lasote/testing"
-"""
-        client.save({"conanfile.py": consumer})
-        client.run("install .")
-        # Resolves to local package
-        self.assertIn("Pkg/1.2@lasote/testing: Already installed!", client.out)
-        self.assertIn("Pkg/1.2@lasote/testing: PACKAGE_INFO 1.2", client.out)
-
-        # modify remote 1.2
-        client2 = TestClient(servers={"default": server},
-                             users={"default": [("lasote", "mypass")]})
-        client2.save({"conanfile.py": conanfile.format("*1.2*")})
-        client2.run("create . Pkg/1.2@lasote/testing")
-        conan_reference = ConanFileReference.loads("Pkg/1.2@lasote/testing")
-        manifest = client2.client_cache.digestfile_conanfile(conan_reference)
-        # Make sure timestamp increases, in some machines in testing, it can fail due to same timestamp
-        content = load(manifest).splitlines()
-        content[0] = str(int(content[0]) + 1)
-        save(manifest, "\n".join(content))
-        client2.run("upload Pkg* -r=default --all --confirm")
-
-        client.run("install .")
-        # Resolves to local package
-        self.assertIn("Pkg/1.2@lasote/testing: Already installed!", client.out)
-        self.assertIn("Pkg/1.2@lasote/testing: PACKAGE_INFO 1.2", client.out)
-
-        client.run("install . --update")
-        # Resolves to local package
-        self.assertIn("Pkg/1.2@lasote/testing: Package installed", client.out)
-        self.assertNotIn("Pkg/1.2@lasote/testing: PACKAGE_INFO 1.2", client.out)
-        self.assertIn("Pkg/1.2@lasote/testing: PACKAGE_INFO *1.2*", client.out)


 class VersionRangesMultiRemoteTest(unittest.TestCase):
diff --git a/conans/test/model/version_ranges_test.py b/conans/test/model/version_ranges_test.py
index 9d2ce94bc..66ef14142 100644
--- a/conans/test/model/version_ranges_test.py
+++ b/conans/test/model/version_ranges_test.py
@@ -171,7 +171,7 @@ class VersionRangesTest(unittest.TestCase):
         self.loader = ConanFileLoader(None, Settings.loads(""), Profile())
         self.retriever = Retriever(self.loader, self.output)
         self.remote_search = MockSearchRemote()
-        self.resolver = RequireResolver(self.output, self.retriever, self.remote_search, update=False)
+        self.resolver = RequireResolver(self.output, self.retriever, self.remote_search)
         self.builder = DepsGraphBuilder(self.retriever, self.output, self.loader, self.resolver)

         for v in ["0.1", "0.2", "0.3", "1.1", "1.1.2", "1.2.1", "2.1", "2.2.1"]:
diff --git a/conans/test/util/tools_test.py b/conans/test/util/tools_test.py
index 7c690dc47..c66818d0f 100644
--- a/conans/test/util/tools_test.py
+++ b/conans/test/util/tools_test.py
@@ -71,85 +71,6 @@ class ToolsTest(unittest.TestCase):
         with tools.environment_append({"CONAN_CPU_COUNT": "34"}):
             self.assertEquals(tools.cpu_count(), 34)

-    def get_env_unit_test(self):
-        """
-        Unit tests tools.get_env
-        """
-        # Test default
-        self.assertIsNone(
-            tools.get_env("NOT_DEFINED", environment={}),
-            None
-        )
-        # Test defined default
-        self.assertEqual(
-            tools.get_env("NOT_DEFINED_KEY", default="random_default", environment={}),
-            "random_default"
-        )
-        # Test return defined string
-        self.assertEqual(
-            tools.get_env("FROM_STR", default="", environment={"FROM_STR": "test_string_value"}),
-            "test_string_value"
-        )
-        # Test boolean conversion
-        self.assertEqual(
-            tools.get_env("BOOL_FROM_STR", default=False, environment={"BOOL_FROM_STR": "1"}),
-            True
-        )
-        self.assertEqual(
-            tools.get_env("BOOL_FROM_STR", default=True, environment={"BOOL_FROM_STR": "0"}),
-            False
-        )
-        self.assertEqual(
-            tools.get_env("BOOL_FROM_STR", default=False, environment={"BOOL_FROM_STR": "True"}),
-            True
-        )
-        self.assertEqual(
-            tools.get_env("BOOL_FROM_STR", default=True, environment={"BOOL_FROM_STR": ""}),
-            False
-        )
-        # Test int conversion
-        self.assertEqual(
-            tools.get_env("TO_INT", default=2, environment={"TO_INT": "1"}),
-            1
-        )
-        # Test float conversion
-        self.assertEqual(
-            tools.get_env("TO_FLOAT", default=2.0, environment={"TO_FLOAT": "1"}),
-            1.0
-        ),
-        # Test list conversion
-        self.assertEqual(
-            tools.get_env("TO_LIST", default=[], environment={"TO_LIST": "1,2,3"}),
-            ["1", "2", "3"]
-        )
-        self.assertEqual(
-            tools.get_env("TO_LIST_NOT_TRIMMED", default=[], environment={"TO_LIST_NOT_TRIMMED": " 1 , 2 , 3 "}),
-            [" 1 ", " 2 ", " 3 "]
-        )
-
-    def test_get_env_in_conanfile(self):
-        """
-        Test get_env is available and working in conanfile
-        """
-        client = TestClient()
-
-        conanfile = """from conans import ConanFile, tools
-
-class HelloConan(ConanFile):
-    name = "Hello"
-    version = "0.1"
-
-    def build(self):
-        run_tests = tools.get_env("CONAN_RUN_TESTS", default=False)
-        print("test_get_env_in_conafile CONAN_RUN_TESTS=%r" % run_tests)
-        assert(run_tests == True)
-"""
-        client.save({"conanfile.py": conanfile})
-
-        with tools.environment_append({"CONAN_RUN_TESTS": "1"}):
-            client.run("install .")
-            client.run("build .")
-
     def test_global_tools_overrided(self):
         client = TestClient()
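As a quick orientation on the helpers exercised by the new `compiler_flags_test.py` above, here is a minimal sketch of calling them directly. The expected values are copied verbatim from the assertions in the test patch; nothing beyond those signatures is assumed.

    from conans.client.build.compiler_flags import (architecture_flag,
                                                    build_type_flag,
                                                    format_defines,
                                                    format_libraries)

    # Expected outputs taken from the assertions in compiler_flags_test.py
    assert architecture_flag(arch='x86', compiler='gcc') == '-m32'
    assert build_type_flag(compiler='Visual Studio', build_type='Debug') == '/Zi'
    assert format_defines(['FOO', 'BAR=1'], 'gcc') == ['-DFOO', '-DBAR=1']
    assert format_libraries(['lib1', 'lib2'],
                            compiler='Visual Studio') == ['lib1.lib', 'lib2.lib']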
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 3, "issue_text_score": 2, "test_score": 0 }, "num_modified_files": 13 }
1.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "nose-cov", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc g++-multilib" ], "python": "3.6", "reqs_path": [ "conans/requirements.txt", "conans/requirements_osx.txt", "conans/requirements_server.txt", "conans/requirements_dev.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
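The install config above is machine-readable. As a rough sketch of how its fields might be replayed to build the test environment (the `install_config` dict below simply mirrors the values shown; driving shell commands this way is an assumption, not part of the dataset):

    import subprocess

    install_config = {
        "pre_install": ["apt-get update", "apt-get install -y gcc g++-multilib"],
        "install": "pip install -e .[dev]",
    }

    for cmd in install_config["pre_install"]:      # system packages first
        subprocess.check_call(cmd, shell=True)
    subprocess.check_call(install_config["install"], shell=True)  # then the editable install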
asn1crypto==1.5.1
astroid==1.6.6
attrs==22.2.0
beautifulsoup4==4.12.3
bottle==0.12.25
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
codecov==2.1.13
colorama==0.3.9
-e git+https://github.com/conan-io/conan.git@544682392b632d8e7cc51372d9e51c887af5139e#egg=conan
cov-core==1.15.0
coverage==4.2
cryptography==2.1.4
distro==1.1.0
fasteners==0.19
future==0.16.0
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
isort==5.10.1
lazy-object-proxy==1.7.1
mccabe==0.7.0
mock==1.3.0
ndg-httpsclient==0.4.4
node-semver==0.2.0
nose==1.3.7
nose-cov==1.6
nose-parameterized==0.5.0
packaging==21.3
patch==1.16
pbr==6.1.1
pluggy==1.0.0
pluginbase==0.7
py==1.11.0
pyasn==1.5.0b7
pyasn1==0.5.1
pycparser==2.21
Pygments==2.14.0
PyJWT==1.7.1
pylint==1.8.4
pyOpenSSL==17.5.0
pyparsing==3.1.4
pytest==7.0.1
PyYAML==3.12
requests==2.27.1
six==1.17.0
soupsieve==2.3.2.post1
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
waitress==2.0.0
WebOb==1.8.9
WebTest==2.0.35
wrapt==1.16.0
zipp==3.6.0
name: conan
channels:
  - defaults
  - https://repo.anaconda.com/pkgs/main
  - https://repo.anaconda.com/pkgs/r
  - conda-forge
dependencies:
  - _libgcc_mutex=0.1=main
  - _openmp_mutex=5.1=1_gnu
  - ca-certificates=2025.2.25=h06a4308_0
  - certifi=2021.5.30=py36h06a4308_0
  - ld_impl_linux-64=2.40=h12ee557_0
  - libffi=3.3=he6710b0_2
  - libgcc-ng=11.2.0=h1234567_1
  - libgomp=11.2.0=h1234567_1
  - libstdcxx-ng=11.2.0=h1234567_1
  - ncurses=6.4=h6a678d5_0
  - openssl=1.1.1w=h7f8727e_0
  - pip=21.2.2=py36h06a4308_0
  - python=3.6.13=h12debd9_1
  - readline=8.2=h5eee18b_0
  - setuptools=58.0.4=py36h06a4308_0
  - sqlite=3.45.3=h5eee18b_0
  - tk=8.6.14=h39e8969_0
  - wheel=0.37.1=pyhd3eb1b0_0
  - xz=5.6.4=h5eee18b_1
  - zlib=1.2.13=h5eee18b_1
  - pip:
    - asn1crypto==1.5.1
    - astroid==1.6.6
    - attrs==22.2.0
    - beautifulsoup4==4.12.3
    - bottle==0.12.25
    - cffi==1.15.1
    - charset-normalizer==2.0.12
    - codecov==2.1.13
    - colorama==0.3.9
    - cov-core==1.15.0
    - coverage==4.2
    - cryptography==2.1.4
    - distro==1.1.0
    - fasteners==0.19
    - future==0.16.0
    - idna==3.10
    - importlib-metadata==4.8.3
    - iniconfig==1.1.1
    - isort==5.10.1
    - lazy-object-proxy==1.7.1
    - mccabe==0.7.0
    - mock==1.3.0
    - ndg-httpsclient==0.4.4
    - node-semver==0.2.0
    - nose==1.3.7
    - nose-cov==1.6
    - nose-parameterized==0.5.0
    - packaging==21.3
    - patch==1.16
    - pbr==6.1.1
    - pluggy==1.0.0
    - pluginbase==0.7
    - py==1.11.0
    - pyasn==1.5.0b7
    - pyasn1==0.5.1
    - pycparser==2.21
    - pygments==2.14.0
    - pyjwt==1.7.1
    - pylint==1.8.4
    - pyopenssl==17.5.0
    - pyparsing==3.1.4
    - pytest==7.0.1
    - pyyaml==3.12
    - requests==2.27.1
    - six==1.17.0
    - soupsieve==2.3.2.post1
    - tomli==1.2.3
    - typing-extensions==4.1.1
    - urllib3==1.26.20
    - waitress==2.0.0
    - webob==1.8.9
    - webtest==2.0.35
    - wrapt==1.16.0
    - zipp==3.6.0
prefix: /opt/conda/envs/conan
[ "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_make_targets", "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_mocked_methods", "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_previous_env", "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_variables", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_clean_sh_path", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_cores_ancient_visual", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_deprecated_behaviour", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_run_tests", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_shared", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_sysroot", "conans/test/build_helpers/cmake_test.py::CMakeTest::test_verbose", "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_adjust_path", "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_adjust_path_visual_studio", "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_arch_flag", "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_build_type_flags", "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_format_defines", "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_format_include_paths", "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_format_libraries", "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_format_library_paths", "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_libcxx_flags", "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_pic_flags", "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_sysroot_flag", "conans/test/model/version_ranges_test.py::VersionRangesTest::test_local_basic", "conans/test/model/version_ranges_test.py::VersionRangesTest::test_remote_basic", "conans/test/util/tools_test.py::ReplaceInFileTest::test_replace_in_file", "conans/test/util/tools_test.py::ToolsTest::test_environment_nested" ]
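The identifiers above are already in pytest's node-id format, so (as a sketch, assuming the environment described above is installed and the repo is checked out at the base commit) they can be run directly to confirm that the patch flips them from failing to passing:

    import pytest

    # A few ids from the FAIL_TO_PASS list above; the full list works the same way
    fail_to_pass = [
        "conans/test/build_helpers/compiler_flags_test.py::CompilerFlagsTest::test_arch_flag",
        "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_variables",
    ]
    exit_code = pytest.main(fail_to_pass + ["-rA", "--tb=line"])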
[ "conans/test/build_helpers/autotools_configure_test.py::AutoToolsConfigureTest::test_pkg_config_paths", "conans/test/util/tools_test.py::ToolsTest::test_global_tools_overrided" ]
[]
[]
MIT License
2,147
[ "conans/client/generators/gcc.py", "conans/client/tools/win.py", "conans/client/generators/__init__.py", "conans/client/build/compiler_flags.py", "conans/client/manager.py", "conans/client/require_resolver.py", "conans/client/build/cmake.py", "conans/tools.py", "conans/util/env_reader.py", "conans/client/build/autotools_environment.py", "conans/client/generators/cmake_multi.py", "conans/paths.py", "conans/client/build/visual_environment.py", "conans/client/generators/compiler_args.py", "conans/client/tools/__init__.py" ]
[ "conans/client/generators/gcc.py", "conans/client/tools/win.py", "conans/client/generators/__init__.py", "conans/client/build/compiler_flags.py", "conans/client/manager.py", "conans/client/require_resolver.py", "conans/client/build/cmake.py", "conans/tools.py", "conans/util/env_reader.py", "conans/client/build/autotools_environment.py", "conans/client/generators/cmake_multi.py", "conans/paths.py", "conans/client/build/visual_environment.py", "conans/client/generators/compiler_args.py", "conans/client/tools/__init__.py" ]
dask__dask-3160
246d2ce2ef5bb306b7b6e26e7bc6cfaea492b26b
2018-02-12 16:51:06
48c4a589393ebc5b335cc5c7df291901401b0b15
martindurant: Note that the simple http server used for testing here does not actually support Range, so the code is not very complete; should I write a little tornado thing, actually hosting some sorts of files that we might want to work with (csv...)?

mrocklin: Testing against tornado would be fine. Alternatively I think that some of the dask.bag test suite still tests against the open internet. They mark with `@pytest.mark.network` for people who want to avoid network-based tests.

martindurant: A couple of things to consider here:

- some servers do not provide the content-length with a HEAD call, so we cannot know the size of those objects before downloading them
- some servers do not respect the Range header, and send the whole object every time
- some servers still do not provide the length even when getting the data. In other cases they might, and streamed download mode could be used to bail in the case that the download looks too big

If the length is not known, or Range is ignored, clearly we cannot use intra-file partitioning. Is it then an error to do anything other than `read()` from position 0? For a smallish file (less than the block-size), we would just download that block and be able to seek within it.

So perhaps the fail condition is: we are doing something other than `tell()==0, read()`, we tried to download a block (say 5MB), and while streaming, the header says the arriving data is bigger than we asked for OR we are streaming and have already seen more data than we asked for. Thoughts on this?

mrocklin: My hope would be that the servers that would serve large datasets *would* support these features. Is that likely to be true? Is it possible to check if they support these and, if not, err?

martindurant: We can know up front whether the content-length is missing in the HEAD call, we could error early on that, but it should not prevent the simple file-access pattern of one full file per call. We cannot know whether Range is supported without trying with a Range header. [This SO answer](https://stackoverflow.com/questions/720419/how-can-i-find-out-whether-a-server-supports-the-range-header) says that you can combine HEAD and Range to see if the server supports it without getting data. That could mean three calls to get any data: one HEAD to get the size, one HEAD to check Range, and then a GET to actually fetch data.

mrocklin: That sounds unpleasant. What do you think is best?

martindurant: Here is my stab at putting checks and expressive error messages. I don't know how to go about testing these bits, though.

martindurant: I think this is a useful addition at this point. There may be more polish needed down the road, but I would defer them to future PRs. Any thoughts?

martindurant: (appveyor failure appears unrelated, possibly the numpy version problem that has been noticed elsewhere)

mrocklin: Appveyor issues resolved

mrocklin: +1 from me on the code. Should this capability be mentioned in documentation somewhere?

martindurant: Yes, it should be in the changelog, and the limitations mentioned on http://dask.pydata.org/en/latest/remote-data-services.html
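The HEAD-plus-Range probe the discussion settles on can be sketched with `requests`; the helper name and semantics below are illustrative only and are not part of the patch:

    import requests

    def server_supports_ranges(url, session=None):
        """Probe Range support without downloading data (per the SO answer above).

        A HEAD request carrying a Range header should come back as 206
        (Partial Content), or at least advertise 'Accept-Ranges: bytes'.
        """
        session = session or requests.Session()
        r = session.head(url, headers={'Range': 'bytes=0-0'}, allow_redirects=True)
        r.raise_for_status()
        return r.status_code == 206 or r.headers.get('Accept-Ranges') == 'bytes'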
diff --git a/dask/bytes/core.py b/dask/bytes/core.py
index 3e99a7d7a..5585ae068 100644
--- a/dask/bytes/core.py
+++ b/dask/bytes/core.py
@@ -449,6 +449,15 @@ def get_fs(protocol, storage_options=None):
     elif protocol == 'hdfs':
         cls = get_hdfs_driver(_globals.get("hdfs_driver", "auto"))

+    elif protocol in ['http', 'https']:
+        import_required('requests',
+                        "Need to install `requests` for HTTP support\n"
+                        "    conda install requests\n"
+                        " or\n"
+                        "    pip install requests")
+        import dask.bytes.http  # noqa, registers HTTP backend
+        cls = _filesystems[protocol]
+
     else:
         raise ValueError("Unknown protocol %s" % protocol)
diff --git a/dask/bytes/http.py b/dask/bytes/http.py
new file mode 100644
index 000000000..b0563a2e0
--- /dev/null
+++ b/dask/bytes/http.py
@@ -0,0 +1,283 @@
+from __future__ import print_function, division, absolute_import
+
+import requests
+import uuid
+
+from . import core
+
+DEFAULT_BLOCK_SIZE = 5 * 2 ** 20
+
+
+class HTTPFileSystem(core.FileSystem):
+    """
+    Simple File-System for fetching data via HTTP(S)
+
+    Unlike other file-systems, HTTP is limited in that it does not provide glob
+    or write capability.
+    """
+    sep = '/'
+
+    def __init__(self, **storage_options):
+        """
+        Parameters
+        ----------
+        storage_options: key-value
+            May be credentials, e.g., `{'auth': ('username', 'pword')}` or any
+            other parameters for requests
+        """
+        self.block_size = storage_options.pop('block_size', DEFAULT_BLOCK_SIZE)
+        self.kwargs = storage_options
+        self.session = requests.Session()
+
+    def glob(self, url):
+        """For a template path, return matching files"""
+        raise NotImplementedError
+
+    def mkdirs(self, url):
+        """Make any intermediate directories to make path writable"""
+        raise NotImplementedError
+
+    def open(self, url, mode='rb', block_size=None, **kwargs):
+        """Make a file-like object
+
+        Parameters
+        ----------
+        url: str
+            Full URL with protocol
+        mode: string
+            must be "rb"
+        kwargs: key-value
+            Any other parameters, passed to requests calls
+        """
+        if mode != 'rb':
+            raise NotImplementedError
+        block_size = block_size if block_size is not None else self.block_size
+        return HTTPFile(url, self.session, block_size, **self.kwargs)
+
+    def ukey(self, url):
+        """Unique identifier; implied file might have changed every time"""
+        return uuid.uuid1().hex
+
+    def size(self, url):
+        """Size in bytes of the file at path"""
+        return file_size(url, session=self.session, **self.kwargs)
+
+
+core._filesystems['http'] = HTTPFileSystem
+core._filesystems['https'] = HTTPFileSystem
+
+
+class HTTPFile(object):
+    """
+    A file-like object pointing to a remote HTTP(S) resource
+
+    Supports only reading, with read-ahead of a predetermined block-size.
+
+    In the case that the server does not supply the filesize, only reading of
+    the complete file in one go is supported.
+
+    Parameters
+    ----------
+    url: str
+        Full URL of the remote resource, including the protocol
+    session: requests.Session or None
+        All calls will be made within this session, to avoid restarting
+        connections where the server allows this
+    block_size: int or None
+        The amount of read-ahead to do, in bytes. Default is 5MB, or the value
+        configured for the FileSystem creating this file
+    kwargs: all other key-values are passed to requests calls.
+    """
+
+    def __init__(self, url, session=None, block_size=None, **kwargs):
+        self.url = url
+        self.kwargs = kwargs
+        self.loc = 0
+        self.session = session if session is not None else requests.Session()
+        self.blocksize = (block_size if block_size is not None
+                          else DEFAULT_BLOCK_SIZE)
+        try:
+            self.size = file_size(url, self.session, allow_redirects=True,
+                                  **self.kwargs)
+        except ValueError:
+            # No size information - only allow read() and no seek()
+            self.size = None
+        self.cache = None
+        self.closed = False
+        self.start = None
+        self.end = None
+
+    def seek(self, where, whence=0):
+        """Set file position
+
+        Parameters
+        ----------
+        where: int
+            Location to set
+        whence: int (default 0)
+            If zero, set from start of file (value should be positive); if 1,
+            set relative to current position; if 2, set relative to end of file
+            (value should be negative)
+
+        Returns the position.
+        """
+        if self.size is None:
+            raise ValueError('Cannot seek since size of file is not known')
+        if whence == 0:
+            nloc = where
+        elif whence == 1:
+            nloc = self.loc + where
+        elif whence == 2:
+            nloc = self.size + where
+        else:
+            raise ValueError('Whence must be in [0, 1, 2], but got %s' % whence)
+        if nloc < 0:
+            raise ValueError('Seek before start of file')
+        self.loc = nloc
+        return nloc
+
+    def tell(self):
+        """Get current file byte position"""
+        return self.loc
+
+    def read(self, length=-1):
+        """Read bytes from file
+
+        Parameters
+        ----------
+        length: int
+            Read up to this many bytes. If negative, read all content to end of
+            file. If the server has not supplied the filesize, attempting to
+            read only part of the data will raise a ValueError.
+        """
+        if self.size is None:
+            if length >= 0:
+                raise ValueError('File size is unknown, must read all data')
+            else:
+                return self._fetch_all()
+        if length < 0 or self.loc + length > self.size:
+            end = self.size
+        else:
+            end = self.loc + length
+        if self.loc >= self.size:
+            return b''
+        self._fetch(self.loc, end)
+        data = self.cache[self.loc - self.start:end - self.start]
+        self.loc = end
+        return data
+
+    def _fetch(self, start, end):
+        """Set new bounds for data cache and fetch data, if required"""
+        if self.start is None and self.end is None:
+            # First read
+            self.start = start
+            self.end = end + self.blocksize
+            self.cache = self._fetch_range(start, self.end)
+        elif start < self.start:
+            if self.end - end > self.blocksize:
+                self.start = start
+                self.end = end + self.blocksize
+                self.cache = self._fetch_range(self.start, self.end)
+            else:
+                new = self._fetch_range(start, self.start)
+                self.start = start
+                self.cache = new + self.cache
+        elif end > self.end:
+            if self.end > self.size:
+                return
+            if end - self.end > self.blocksize:
+                self.start = start
+                self.end = end + self.blocksize
+                self.cache = self._fetch_range(self.start, self.end)
+            else:
+                new = self._fetch_range(self.end, end + self.blocksize)
+                self.end = end + self.blocksize
+                self.cache = self.cache + new
+
+    def _fetch_all(self):
+        """Read whole file in one shot, without caching
+
+        This is only called when size is None and read() is called without a
+        byte-count.
+        """
+        r = self.session.get(self.url, **self.kwargs)
+        r.raise_for_status()
+        return r.content
+
+    def _fetch_range(self, start, end):
+        """Download a block of data
+
+        The expectation is that the server returns only the requested bytes,
+        with HTTP code 206. If this is not the case, we first check the headers,
+        and then stream the output - if the data size is bigger than we
+        requested, an exception is raised.
+        """
+        kwargs = self.kwargs.copy()
+        headers = kwargs.pop('headers', {})
+        headers['Range'] = 'bytes=%i-%i' % (start, end - 1)
+        r = self.session.get(self.url, headers=headers, stream=True, **kwargs)
+        r.raise_for_status()
+        if r.status_code == 206:
+            # partial content, as expected
+            return r.content
+        if 'Content-Length' in r.headers:
+            cl = int(r.headers['Content-Length'])
+            if cl <= end - start:
+                # data size OK
+                return r.content
+            else:
+                raise ValueError('Got more bytes (%i) than requested (%i)' % (
+                    cl, end - start))
+        cl = 0
+        out = []
+        for chunk in r.iter_content(chunk_size=2 ** 20):
+            # data size unknown, let's see if it goes too big
+            if chunk:
+                out.append(chunk)
+                cl += len(chunk)
+                if cl > end - start:
+                    raise ValueError(
+                        'Got more bytes so far (>%i) than requested (%i)' % (
+                            cl, end - start))
+            else:
+                break
+        return b''.join(out)
+
+    def __enter__(self):
+        self.loc = 0
+        return self
+
+    def __exit__(self, *args):
+        self.close()
+
+    def __iter__(self):
+        # no text lines here, use TextIOWrapper
+        raise NotImplementedError
+
+    def write(self):
+        raise NotImplementedError
+
+    def flush(self):
+        pass
+
+    def close(self):
+        self.closed = True
+
+    def seekable(self):
+        return True
+
+    def writable(self):
+        return False
+
+    def readable(self):
+        return True
+
+
+def file_size(url, session, **kwargs):
+    """Call HEAD on the server to get file size"""
+    r = session.head(url, **kwargs)
+    r.raise_for_status()
+    if 'Content-Length' in r.headers:
+        return int(r.headers['Content-Length'])
+    else:
+        raise ValueError("Server did not supply size of %s" % url)
diff --git a/dask/bytes/utils.py b/dask/bytes/utils.py
index ad0fb3968..619eaf385 100644
--- a/dask/bytes/utils.py
+++ b/dask/bytes/utils.py
@@ -51,6 +51,10 @@ def infer_storage_options(urlpath, inherit_storage_options=None):
     if windows_path:
         path = '%s:%s' % windows_path.groups()

+    if protocol in ['http', 'https']:
+        # for HTTP, we don't want to parse, as requests will anyway
+        return {'protocol': protocol, 'path': urlpath}
+
     options = {
         'protocol': protocol,
         'path': path,
diff --git a/docs/source/array-creation.rst b/docs/source/array-creation.rst
index d7bf27c0e..26be15e01 100644
--- a/docs/source/array-creation.rst
+++ b/docs/source/array-creation.rst
@@ -1,22 +1,28 @@
 Create Dask Arrays
 ==================

-We store and manipulate large arrays in a wide variety of ways.  There are some
-standards like HDF5 and NetCDF but just as often people use custom storage
-solutions.  This page talks about how to build dask graphs to interact with
-your array.
+You can load or store dask arrays from a variety of common sources like HDF5
+and NetCDF, `Zarr <http://zarr.readthedocs.io/en/stable/>`_, or any format that
+supports Numpy-style slicing.

-In principle we need functions that return NumPy arrays.  These functions and
-their arrangement can be as simple or as complex as the situation dictates.
+.. currentmodule:: dask.array
+
+.. autosummary::
+   from_array
+   from_delayed
+   from_npy_stack
+   stack
+   concatenate

-Simple case - Format Supports NumPy Slicing
--------------------------------------------
+NumPy Slicing
+-------------
+
+.. autosummary::
+   from_array

 Many storage formats have Python projects that expose storage using NumPy
 slicing syntax.  These include HDF5, NetCDF, BColz, Zarr, GRIB, etc..  For
-example the ``HDF5`` file format has the ``h5py`` Python project, which
-provides a ``Dataset`` object into which we can slice in NumPy fashion.
+example we can load a Dask array from an HDF5 file using `h5py <http://www.h5py.org/>`_:

 .. code-block:: Python

@@ -28,45 +34,79 @@ provides a ``Dataset`` object into which we can slice in NumPy fashion.

     >>> x = d[:5, :5]  # We slice to get numpy arrays

-It is common for Python wrappers of on-disk array formats to present a NumPy
-slicing syntax.  The full dataset looks like a NumPy array with ``.shape`` and
-``.dtype`` attributes even though the data hasn't yet been loaded in and still
-lives on disk.  Slicing in to this array-like object fetches the appropriate
-data from disk and returns that region as an in-memory NumPy array.
-
-For this common case ``dask.array`` presents the convenience function
-``da.from_array``
+Given an object like ``d`` above that has ``dtype`` and ``shape`` properties
+and that supports Numpy style slicing, we can construct a lazy Dask array.

 .. code-block:: Python

     >>> import dask.array as da
     >>> x = da.from_array(d, chunks=(1000, 1000))

+This process is entirely lazy.  Neither creating the h5py object nor wrapping
+it with ``da.from_array`` has loaded any data.
+

 Concatenation and Stacking
 --------------------------

-Often we store data in several different locations and want to stitch them
-together.
+.. autosummary::
+   stack
+   concatenate
+
+Often we store data in several different locations and want to stitch them together.

 .. code-block:: Python

-    >>> filenames = sorted(glob('2015-*-*.hdf5')
-    >>> dsets = [h5py.File(fn)['/data'] for fn in filenames]
-    >>> arrays = [da.from_array(dset, chunks=(1000, 1000)) for dset in dsets]
-    >>> x = da.concatenate(arrays, axis=0)  # Concatenate arrays along first axis
+    dask_arrays = []
+    for fn in filenames:
+        f = h5py.File(fn)
+        d = f['/data']
+        x = da.from_array(d, chunks=(1000, 1000))
+        dask_arrays.append(x)
+
+    x = da.concatenate(dask_arrays, axis=0)  # concatenate arrays along first axis

 For more information see :doc:`concatenation and stacking <array-stack>` docs.

+
 Using ``dask.delayed``
 ----------------------

-You can create a plan to arrange many numpy arrays into a grid with normal for
-loops using :doc:`dask.delayed<delayed-overview>` and then convert each of these
-Dask.delayed objects into a single-chunk Dask array with ``da.from_delayed``.
-You can then arrange these single-chunk Dask arrays into a larger
-multiple-chunk Dask array using :doc:`concatenation and stacking <array-stack>`,
-as described above.
+.. autosummary::
+   from_delayed
+   stack
+   concatenate
+
+Sometimes Numpy-style data resides in formats that do not support numpy-style
+slicing.  We can still construct Dask arrays around this data if we have a
+Python function that can generate pieces of the full array, using
+:doc:`dask.delayed <delayed>`.  Dask delayed lets us delay a single function
+call that would create a numpy array.  We can then wrap this delayed object
+with ``da.from_delayed``, providing a dtype and shape to produce a
+single-chunked Dask array.  We can then use ``stack`` or ``concatenate`` from
+before to construct a larger lazy array.
+
+
+As an example, consider loading a stack of images using ``skimage.io.imread``:
+
+.. code-block:: python
+
+    import glob
+    import skimage.io
+    import dask.array as da
+    import dask
+
+    imread = dask.delayed(skimage.io.imread, pure=True)  # Lazy version of imread
+
+    filenames = sorted(glob.glob('*.jpg'))
+
+    lazy_images = [imread(fn) for fn in filenames]   # Lazily evaluate imread on each filename
+
+    sample = lazy_images[0].compute()  # load one image to get dtype and shape
+
+    arrays = [da.from_delayed(lazy_image,           # Construct a small Dask array
+                              dtype=sample.dtype,   # for every lazy value
+                              shape=sample.shape)
+              for lazy_image in lazy_images]
+
+    stack = da.stack(arrays, axis=0)                # Stack all small Dask arrays into one

 See :doc:`documentation on using dask.delayed with collections<delayed-collections>`.

@@ -260,9 +300,18 @@ For example, if you plan to take out thin slices along the first dimension then
 Store Dask Arrays
 =================

+.. autosummary::
+   store
+   to_hdf5
+   to_npy_stack
+   compute
+
 In Memory
 ---------

+.. autosummary::
+   compute
+
 If you have a small amount of data, you can call ``np.array`` or ``.compute()``
 on your Dask array to turn in to a normal NumPy array:

@@ -277,41 +326,48 @@ on your Dask array to turn in to a normal NumPy array:
     array([0, 1, 4, 9, 16, 25])


-HDF5
-----
+Numpy style slicing
+-------------------
+
+.. autosummary::
+   store

-Use the ``to_hdf5`` function to store data into HDF5 using ``h5py``:
+You can store dask arrays in any object that supports numpy-style slice
+assignment like ``h5py.Dataset``:

 .. code-block:: Python

-    >>> da.to_hdf5('myfile.hdf5', '/y', y)  # doctest: +SKIP
+    >>> import h5py
+    >>> f = h5py.File('myfile.hdf5')
+    >>> d = f.require_dataset('/data', shape=x.shape, dtype=x.dtype)
+    >>> da.store(x, d)

-Store several arrays in one computation with the function
-``da.to_hdf5`` by passing in a dict:
+You can store several arrays in one computation by passing lists of sources and
+destinations:

 .. code-block:: Python

-    >>> da.to_hdf5('myfile.hdf5', {'/x': x, '/y': y})  # doctest: +SKIP
+    >>> da.store([array1, array2], [output1, output2])  # doctest: +SKIP

+HDF5
+----

-Other On-Disk Storage
----------------------
+.. autosummary::
+   to_hdf5

-Alternatively, you can store dask arrays in any object that supports numpy-style
-slice assignment like ``h5py.Dataset``, or ``bcolz.carray``:
+HDF5 is sufficiently common that there is a special function, ``to_hdf5`` to
+store data into HDF5 files using ``h5py``:

 .. code-block:: Python

-    >>> import bcolz  # doctest: +SKIP
-    >>> out = bcolz.zeros(shape=y.shape, rootdir='myfile.bcolz')  # doctest: +SKIP
-    >>> da.store(y, out)  # doctest: +SKIP
+    >>> da.to_hdf5('myfile.hdf5', '/y', y)  # doctest: +SKIP

-You can store several arrays in one computation by passing lists of sources and
-destinations:
+Store several arrays in one computation with the function
+``da.to_hdf5`` by passing in a dict:

 .. code-block:: Python

-    >>> da.store([array1, array2], [output1, output2])  # doctest: +SKIP
+    >>> da.to_hdf5('myfile.hdf5', {'/x': x, '/y': y})  # doctest: +SKIP


 Plugins
diff --git a/docs/source/array-ghost.rst b/docs/source/array-ghost.rst
index 33bad3a0b..b92d39c35 100644
--- a/docs/source/array-ghost.rst
+++ b/docs/source/array-ghost.rst
@@ -1,5 +1,5 @@
-Overlapping Blocks with Ghost Cells
-===================================
+Overlapping Computations
+========================

 Some array operations require communication of borders between neighboring
 blocks.  Example operations include the following:
diff --git a/docs/source/array-overview.rst b/docs/source/array-overview.rst
deleted file mode 100644
index 6977e0adb..000000000
--- a/docs/source/array-overview.rst
+++ /dev/null
@@ -1,73 +0,0 @@
-Overview
-========
-
-Dask Array implements a subset of the NumPy ndarray interface using blocked
-algorithms, cutting up the large array into many small arrays.  This lets us
-compute on arrays larger than memory using all of our cores.  We coordinate
-these blocked algorithms using dask graphs.
-
-Design
-------
-
-.. image:: images/dask-array-black-text.svg
-   :alt: Dask arrays coordinate many numpy arrays
-   :align: right
-
-Dask arrays coordinate many NumPy arrays arranged into a grid.  These
-NumPy arrays may live on disk or on other machines.
-
-Common Uses
------------
-
-Today Dask array is commonly used in the sort of gridded data analysis that
-arises in weather, climate modeling, or oceanography, especially when data
-sizes become inconveniently large.  Dask array complements large on-disk array
-stores like HDF5, NetCDF, and BColz.  Additionally Dask array is commonly used
-to speed up expensive in-memory computations using multiple cores, such as you
-might find in image analysis or statistical and machine learning applications.
-
-Scope
------
-
-The ``dask.array`` library supports the following interface from ``numpy``:
-
-*  Arithmetic and scalar mathematics, ``+, *, exp, log, ...``
-*  Reductions along axes, ``sum(), mean(), std(), sum(axis=0), ...``
-*  Tensor contractions / dot products / matrix multiply, ``tensordot``
-*  Axis reordering / transpose, ``transpose``
-*  Slicing, ``x[:100, 500:100:-2]``
-*  Fancy indexing along single axes with lists or numpy arrays, ``x[:, [10, 1, 5]]``
-*  The array protocol ``__array__``
-*  Some linear algebra ``svd, qr, solve, solve_triangular, lstsq``
-
-See :doc:`the dask.array API<array-api>` for a more extensive list of
-functionality.
-
-Execution
----------
-
-By default Dask array uses the threaded scheduler in order to avoid data
-transfer costs and because NumPy releases the GIL well.  It is also quite
-effective on a cluster using the `dask.distributed`_ scheduler.
-
-.. _`dask.distributed`: https://distributed.readthedocs.io/en/latest/
-
-Limitations
------------
-
-Dask array does not implement the entire numpy interface.  Users expecting this
-will be disappointed.  Notably, Dask array has the following limitations:
-
-1.  Dask array does not implement all of ``np.linalg``.  This has been done by a
-    number of excellent BLAS/LAPACK implementations, and is the focus of
-    numerous ongoing academic research projects.
-2.  Dask array with unknown shapes do not support all operations
-3.  Dask array does not attempt operations like ``sort`` which are notoriously
-    difficult to do in parallel, and are of somewhat diminished value on very
-    large data (you rarely actually need a full sort).
-    Often we include parallel-friendly alternatives like ``topk``.
-4.  Dask array doesn't implement operations like ``tolist`` that would be very
-    inefficient for larger datasets.  Likewise it is very inefficient to iterate
-    over a Dask array with for loops.
-5.  Dask development is driven by immediate need, and so many lesser used
-    functions have not been implemented.  Community contributions are encouraged.
diff --git a/docs/source/array.rst b/docs/source/array.rst
index 16b52df8d..e8208b29c 100644
--- a/docs/source/array.rst
+++ b/docs/source/array.rst
@@ -1,25 +1,83 @@
 Array
 =====

-Dask arrays implement a subset of the NumPy interface on large arrays using
-blocked algorithms and task scheduling.
-
 .. toctree::
    :maxdepth: 1
+   :hidden:

-   array-overview.rst
-   array-creation.rst
    array-api.rst
-
-Other topics
-
-.. toctree::
-   :maxdepth: 1
-
-   array-slicing.rst
-   array-stack.rst
+   array-creation.rst
    array-ghost.rst
    array-design.rst
-   array-linear-operator.rst
    array-sparse.rst
    array-stats.rst
+   array-linear-operator.rst
+   array-slicing.rst
+   array-stack.rst
+
+Dask Array implements a subset of the NumPy ndarray interface using blocked
+algorithms, cutting up the large array into many small arrays.  This lets us
+compute on arrays larger than memory using all of our cores.  We coordinate
+these blocked algorithms using dask graphs.
+
+Design
+------
+
+.. image:: images/dask-array-black-text.svg
+   :alt: Dask arrays coordinate many numpy arrays
+   :align: right
+
+Dask arrays coordinate many NumPy arrays arranged into a grid.  These
+NumPy arrays may live on disk or on other machines.
+
+Common Uses
+-----------
+
+Dask Array is used in fields like atmospheric and oceanographic science, large
+scale imaging, genomics, numerical algorithms for optimization or statistics,
+and more.
+
+Scope
+-----
+
+Dask array supports most of the NumPy interface, like the following:
+
+-  Arithmetic and scalar mathematics, ``+, *, exp, log, ...``
+-  Reductions along axes, ``sum(), mean(), std(), sum(axis=0), ...``
+-  Tensor contractions / dot products / matrix multiply, ``tensordot``
+-  Axis reordering / transpose, ``transpose``
+-  Slicing, ``x[:100, 500:100:-2]``
+-  Fancy indexing along single axes with lists or numpy arrays, ``x[:, [10, 1, 5]]``
+-  Array protocols like ``__array__``, and ``__array_ufunc__``
+-  Some linear algebra ``svd, qr, solve, solve_triangular, lstsq``
+-  ...
+
+
+However, Dask array does not implement the entire numpy interface.  Users expecting this
+will be disappointed.  Notably, Dask array lacks the following features:
+
+-  Much of ``np.linalg`` has not been implemented.
+   This has been done by a number of excellent BLAS/LAPACK implementations,
+   and is the focus of numerous ongoing academic research projects.
+-  Arrays with unknown shapes do not support all operations
+-  Operations like ``sort`` which are notoriously
+   difficult to do in parallel, and are of somewhat diminished value on very
+   large data (you rarely actually need a full sort).
+   Often we include parallel-friendly alternatives like ``topk``.
+-  Dask array doesn't implement operations like ``tolist`` that would be very
+   inefficient for larger datasets.  Likewise it is very inefficient to iterate
+   over a Dask array with for loops.
+-  Dask development is driven by immediate need, and so many lesser used
+   functions have not been implemented.  Community contributions are encouraged.
+
+See :doc:`the dask.array API<array-api>` for a more extensive list of
+functionality.
+
+Execution
+---------
+
+By default Dask array uses the threaded scheduler in order to avoid data
+transfer costs and because NumPy releases the GIL well.  It is also quite
+effective on a cluster using the `dask.distributed`_ scheduler.
+
+.. _`dask.distributed`: https://distributed.readthedocs.io/en/latest/
diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst
index 0d6de29b8..5a6ee716b 100644
--- a/docs/source/changelog.rst
+++ b/docs/source/changelog.rst
@@ -24,6 +24,7 @@ Bag
 Core
 ++++

+- New file-system for HTTP(S), allowing direct loading from specific URLs (:pr:`3160`) `Martin Durant`_

 0.17.0 / 2018-02-09
 -------------------
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 72713486a..326fd2337 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -12,6 +12,7 @@
 # serve to show the default.

 import sys, os
+from shutil import copyfile

 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
@@ -312,3 +313,37 @@ intersphinx_mapping = {'pandas': ('http://pandas.pydata.org/pandas-docs/stable/'
                                   'http://pandas.pydata.org/pandas-docs/stable/objects.inv'),
                        'numpy': ('https://docs.scipy.org/doc/numpy/',
                                  'https://docs.scipy.org/doc/numpy/objects.inv')}
+
+# Redirects
+# https://tech.signavio.com/2017/managing-sphinx-redirects
+redirect_files = [
+    # old html, new html
+    ('array-overview.html', 'array.html'),
+    ('dataframe-overview.html', 'dataframe.html'),
+    ('delayed-overview.html', 'delayed.html'),
+]
+
+
+redirect_template = """\
+<html>
+  <head>
+    <meta http-equiv="refresh" content="1; url={new}" />
+    <script>
+      window.location.href = "{new}"
+    </script>
+  </head>
+</html>
+"""
+
+
+def copy_legacy_redirects(app, docname):
+    if app.builder.name == 'html':
+        for html_src_path, new in redirect_files:
+            page = redirect_template.format(new=new)
+            target_path = app.outdir + '/' + html_src_path
+            with open(target_path, 'w') as f:
+                f.write(page)
+
+
+def setup(app):
+    app.connect('build-finished', copy_legacy_redirects)
diff --git a/docs/source/dataframe-create.rst b/docs/source/dataframe-create.rst
index 759e4b0e6..f969b6674 100644
--- a/docs/source/dataframe-create.rst
+++ b/docs/source/dataframe-create.rst
@@ -22,7 +22,6 @@ file formats, and other Dask or Python collections.
 File Formats:

 .. autosummary::
-
     read_csv
     read_parquet
     read_hdf
diff --git a/docs/source/dataframe-overview.rst b/docs/source/dataframe-overview.rst
deleted file mode 100644
index f821a06f8..000000000
--- a/docs/source/dataframe-overview.rst
+++ /dev/null
@@ -1,162 +0,0 @@
-Overview
-========
-
-Dask Dataframe implements a subset of the Pandas Dataframe interface using
-blocked algorithms, cutting up the large DataFrame into many small Pandas
-DataFrames.  This lets us compute on dataframes that are larger than memory
-using all of our cores or on many dataframes spread across a cluster.  One
-operation on a dask.dataframe triggers many operations on the constituent
-Pandas dataframes.
-
-Design
-------
-
-.. image:: images/dask-dataframe.svg
-   :alt: Dask DataFrames coordinate many Pandas DataFrames
-   :align: right
-   :width: 40%
-
-Dask dataframes coordinate many Pandas DataFrames/Series arranged along the
-index.  Dask.dataframe is partitioned *row-wise*, grouping rows by index value
-for efficiency.  These Pandas objects may live on disk or on other machines.
-
-
-Common Uses and Anti-Uses
--------------------------
-
-Dask.dataframe is particularly useful in the following situations:
-
-*  Manipulating large datasets on a single machine, even when those datasets
-   don't fit comfortably into memory.
-*  Fast computation on large workstation machines by parallelizing many Pandas
-   calls across many cores.
-*  Distributed computing of very large tables stored in the Hadoop File System
-   (HDFS), S3, or other parallel file systems.
-*  Parallel groupby, join, or time series computations
-
-However in the following situations Dask.dataframe may not be the best choice:
-
-*  If your dataset fits comfortably into RAM on your laptop then you may be
-   better off just using Pandas_.  There may be simpler ways to improve
-   performance than through parallelism.
-*  If your dataset doesn't fit neatly into the Pandas tabular model then you
-   might find more use in :doc:`dask.bag <bag>` or :doc:`dask.array <array>`
-*  If you need functions that are not implemented in dask.dataframe then you
-   might want to look at :doc:`dask.delayed <delayed>` which offers more
-   flexibility.
-*  If you need a proper database with all that databases offer you might prefer
-   something like Postgres_
-
-.. _Pandas: https://pandas.pydata.org/
-.. _Postgres: https://www.postgresql.org/
-
-
-Dask.dataframe copies the pandas API
-------------------------------------
-
-Because the ``dask.dataframe`` application programming interface (API) is a
-subset of the pandas API it should be familiar to pandas users.  There are some
-slight alterations due to the parallel nature of dask:
-
-.. code-block:: python
-
-   >>> import dask.dataframe as dd
-   >>> df = dd.read_csv('2014-*.csv')
-   >>> df.head()
-      x  y
-   0  1  a
-   1  2  b
-   2  3  c
-   3  4  a
-   4  5  b
-   5  6  c
-
-   >>> df2 = df[df.y == 'a'].x + 1
-
-As with all dask collections (for example Array, Bag, DataFrame) one triggers
-computation by calling the ``.compute()`` method:
-
-.. code-block:: python
-
-   >>> df2.compute()
-   0    2
-   3    5
-   Name: x, dtype: int64
-
-
-Scope
------
-
-Dask.dataframe covers a small but well-used portion of the pandas API.
-This limitation is for two reasons:
-
-1.  The pandas API is *huge*
-2.  Some operations are genuinely hard to do in parallel (for example sort).
-
-Additionally, some important operations like ``set_index`` work, but are slower
-than in pandas because they may write out to disk.
- -The following class of computations works well: - -* Trivially parallelizable operations (fast): - * Elementwise operations: ``df.x + df.y``, ``df * df`` - * Row-wise selections: ``df[df.x > 0]`` - * Loc: ``df.loc[4.0:10.5]`` - * Common aggregations: ``df.x.max()``, ``df.max()`` - * Is in: ``df[df.x.isin([1, 2, 3])]`` - * Datetime/string accessors: ``df.timestamp.month`` -* Cleverly parallelizable operations (fast): - * groupby-aggregate (with common aggregations): ``df.groupby(df.x).y.max()``, - ``df.groupby('x').max()`` - * groupby-apply on index: ``df.groupby(['idx', 'x']).apply(myfunc)``, where - ``idx`` is the index level name - * value_counts: ``df.x.value_counts()`` - * Drop duplicates: ``df.x.drop_duplicates()`` - * Join on index: ``dd.merge(df1, df2, left_index=True, right_index=True)`` - or ``dd.merge(df1, df2, on=['idx', 'x'])`` where ``idx`` is the index - name for both ``df1`` and ``df2`` - * Join with Pandas DataFrames: ``dd.merge(df1, df2, on='id')`` - * Elementwise operations with different partitions / divisions: ``df1.x + df2.y`` - * Datetime resampling: ``df.resample(...)`` - * Rolling averages: ``df.rolling(...)`` - * Pearson Correlations: ``df[['col1', 'col2']].corr()`` -* Operations requiring a shuffle (slow-ish, unless on index) - * Set index: ``df.set_index(df.x)`` - * groupby-apply not on index (with anything): ``df.groupby(df.x).apply(myfunc)`` - * Join not on the index: ``dd.merge(df1, df2, on='name')`` - -See :doc:`DataFrame API documentation<dataframe-api>` for a more extensive -list. - - -Execution ---------- - -By default ``dask.dataframe`` uses the multi-threaded scheduler. -This exposes some parallelism when pandas or the underlying numpy operations -release the global interpreter lock (GIL). Generally pandas is more GIL -bound than NumPy, so multi-core speed-ups are not as pronounced for -``dask.dataframe`` as they are for ``dask.array``. This is changing, and -the pandas development team is actively working on releasing the GIL. - -In some cases you may experience speedups by switching to the multiprocessing -or distributed scheduler. - -.. code-block:: python - - >>> dask.set_options(get=dask.multiprocessing.get) - -See :doc:`scheduler docs<scheduler-overview>` for more information. - - -Limitations ------------ - -Dask.DataFrame does not implement the entire Pandas interface. Users expecting this -will be disappointed. Notably, dask.dataframe has the following limitations: - -1. Setting a new index from an unsorted column is expensive -2. Many operations, like groupby-apply and join on unsorted columns require - setting the index, which as mentioned above, is expensive -3. The Pandas API is very large. Dask.dataframe does not attempt to implement - many pandas features or any of the more exotic data structures like NDFrames diff --git a/docs/source/dataframe.rst b/docs/source/dataframe.rst index 623ce8e9b..45d31d787 100644 --- a/docs/source/dataframe.rst +++ b/docs/source/dataframe.rst @@ -1,32 +1,155 @@ -DataFrame +Dataframe ========= +.. toctree:: + :maxdepth: 1 + :hidden: + + dataframe-api.rst + dataframe-create.rst + dataframe-performance.rst + dataframe-design.rst + dataframe-groupby.rst + A Dask DataFrame is a large parallel dataframe composed of many smaller Pandas dataframes, split along the index. These pandas dataframes may live on disk for larger-than-memory computing on a single machine, or on many different -machines in a cluster. +machines in a cluster. 
One Dask dataframe operation triggers many operations +on the constituent Pandas dataframes. -Dask.dataframe implements a commonly used subset of the Pandas_ interface -including elementwise operations, reductions, grouping operations, joins, -timeseries algorithms, and more. It copies the Pandas interface for these -operations exactly and so should be very familiar to Pandas users. Because -Dask.dataframe operations merely coordinate Pandas operations they usually -exhibit similar performance characteristics as are found in Pandas. +Design +------ -.. _Pandas: http://pandas.pydata.org/ +Dask dataframes coordinate many Pandas DataFrames/Series arranged along the +index. Dask.dataframe is partitioned *row-wise*, grouping rows by index value +for efficiency. These Pandas objects may live on disk or on other machines. -.. toctree:: - :maxdepth: 1 +.. image:: images/dask-dataframe.svg + :alt: Dask DataFrames coordinate many Pandas DataFrames + :width: 40% - dataframe-overview.rst - dataframe-create.rst - dataframe-api.rst - dataframe-performance.rst -Other topics +Dask.dataframe copies the Pandas API +------------------------------------ -.. toctree:: - :maxdepth: 1 +Because the ``dask.dataframe`` application programming interface (API) is a +subset of the Pandas API, it should be familiar to Pandas users. There are some +slight alterations due to the parallel nature of dask: - dataframe-design.rst - dataframe-groupby.rst +.. code-block:: python + + >>> import dask.dataframe as dd + >>> df = dd.read_csv('2014-*.csv') + >>> df.head() + x y + 0 1 a + 1 2 b + 2 3 c + 3 4 a + 4 5 b + 5 6 c + + >>> df2 = df[df.y == 'a'].x + 1 + +As with all dask collections, one triggers computation by calling the +``.compute()`` method: + +.. code-block:: python + + >>> df2.compute() + 0 2 + 3 5 + Name: x, dtype: int64 + + +Common Uses and Anti-Uses +------------------------- + +Dask.dataframe is used in situations where Pandas is commonly needed, but where +Pandas fails due to data size or computation speed. + +- Manipulating large datasets, even when those datasets don't fit in memory +- Accelerating long computations by using many cores +- Distributed computing on large datasets with standard Pandas operations like + groupby, join, and time series computations + +Dask dataframe may not be the best choice in the following situations: + +* If your dataset fits comfortably into RAM on your laptop then you may be + better off just using Pandas_. There may be simpler ways to improve + performance than through parallelism. +* If your dataset doesn't fit neatly into the Pandas tabular model then you + might find more use in :doc:`dask.bag <bag>` or :doc:`dask.array <array>` +* If you need functions that are not implemented in Dask dataframe then you + might want to look at :doc:`dask.delayed <delayed>` which offers more + flexibility. +* If you need a proper database with all that databases offer you might prefer + something like Postgres_ + +.. _Pandas: https://pandas.pydata.org/ +.. _Postgres: https://www.postgresql.org/ + + +Scope +----- + +Dask.dataframe covers a well-used portion of the Pandas API. 
+The following class of computations works well: + +* Trivially parallelizable operations (fast): + * Elementwise operations: ``df.x + df.y``, ``df * df`` + * Row-wise selections: ``df[df.x > 0]`` + * Loc: ``df.loc[4.0:10.5]`` + * Common aggregations: ``df.x.max()``, ``df.max()`` + * Is in: ``df[df.x.isin([1, 2, 3])]`` + * Datetime/string accessors: ``df.timestamp.month`` +* Cleverly parallelizable operations (fast): + * groupby-aggregate (with common aggregations): ``df.groupby(df.x).y.max()``, + ``df.groupby('x').max()`` + * groupby-apply on index: ``df.groupby(['idx', 'x']).apply(myfunc)``, where + ``idx`` is the index level name + * value_counts: ``df.x.value_counts()`` + * Drop duplicates: ``df.x.drop_duplicates()`` + * Join on index: ``dd.merge(df1, df2, left_index=True, right_index=True)`` + or ``dd.merge(df1, df2, on=['idx', 'x'])`` where ``idx`` is the index + name for both ``df1`` and ``df2`` + * Join with Pandas DataFrames: ``dd.merge(df1, df2, on='id')`` + * Elementwise operations with different partitions / divisions: ``df1.x + df2.y`` + * Datetime resampling: ``df.resample(...)`` + * Rolling averages: ``df.rolling(...)`` + * Pearson Correlations: ``df[['col1', 'col2']].corr()`` +* Operations requiring a shuffle (slow-ish, unless on index) + * Set index: ``df.set_index(df.x)`` + * groupby-apply not on index (with anything): ``df.groupby(df.x).apply(myfunc)`` + * Join not on the index: ``dd.merge(df1, df2, on='name')`` + +However, Dask dataframe does not implement the entire Pandas interface. Users +expecting this will be disappointed. Notably, Dask dataframe has the following +limitations: + +1. Setting a new index from an unsorted column is expensive +2. Many operations, like groupby-apply and join on unsorted columns, require + setting the index, which, as mentioned above, is expensive +3. The Pandas API is very large. Dask dataframe does not attempt to implement + many Pandas features or any of the more exotic data structures like NDFrames +4. Operations that were slow on Pandas, like iterating row-by-row, + remain slow on Dask dataframe + +See :doc:`DataFrame API documentation<dataframe-api>` for a more extensive list. + + +Execution +--------- + +By default ``dask.dataframe`` uses the multi-threaded scheduler. +This exposes some parallelism when Pandas or the underlying numpy operations +release the global interpreter lock (GIL). Generally Pandas is more GIL +bound than NumPy, so multi-core speed-ups are not as pronounced for +``dask.dataframe`` as they are for ``dask.array``. This is changing, and +the Pandas development team is actively working on releasing the GIL. + +When dealing with text data, you may see speedups by switching to the newer +:doc:`distributed scheduler <setup/single-distributed>` either on a cluster or +single machine. + +.. _Pandas: http://pandas.pydata.org/ diff --git a/docs/source/delayed-api.rst b/docs/source/delayed-api.rst index f322b420c..0b88bd71b 100644 --- a/docs/source/delayed-api.rst +++ b/docs/source/delayed-api.rst @@ -1,6 +1,43 @@ API === +The ``dask.delayed`` interface consists of one function, ``delayed``: + +- ``delayed`` wraps functions + + Wraps functions. Can be used as a decorator, or around function calls + directly (i.e. ``delayed(foo)(a, b, c)``). Outputs from functions wrapped in + ``delayed`` are proxy objects of type ``Delayed`` that contain a graph of + all operations done to get to this result. + +- ``delayed`` wraps objects + + Wraps objects. Used to create ``Delayed`` proxies directly. 
+ +``Delayed`` objects can be thought of as representing a key in the dask. A +``Delayed`` supports *most* python operations, each of which creates another +``Delayed`` representing the result: + +- Most operators (``*``, ``-``, and so on) +- Item access and slicing (``a[0]``) +- Attribute access (``a.size``) +- Method calls (``a.index(0)``) + +Operations that aren't supported include: + +- Mutating operators (``a += 1``) +- Mutating magics such as ``__setitem__``/``__setattr__`` (``a[0] = 1``, ``a.foo = 1``) +- Iteration (``for i in a: ...``) +- Use as a predicate (``if a: ...``) + +The last two points in particular mean that ``Delayed`` objects cannot be used for +control flow, meaning that no ``Delayed`` can appear in a loop or if statement. +In other words, you can't iterate over a ``Delayed`` object, or use it as part of +a condition in an if statement, but a ``Delayed`` object can be used in the body of a loop +or if statement (i.e. the example above is fine, but if ``data`` was a ``Delayed`` +object it wouldn't be). +Even with this limitation, many workflows can easily be parallelized. + .. currentmodule:: dask.delayed .. autosummary:: diff --git a/docs/source/delayed-overview.rst b/docs/source/delayed-overview.rst deleted file mode 100644 index 12bccf61a..000000000 --- a/docs/source/delayed-overview.rst +++ /dev/null @@ -1,119 +0,0 @@ -Overview -======== - -Motivation and Example ---------------------- - -Dask.delayed lets you parallelize custom code. It is useful whenever your -problem doesn't quite fit a high-level parallel object like dask.array or -dask.dataframe but could still benefit from parallelism. Dask.delayed works by -delaying your function evaluations and putting them into a dask graph. -Dask.delayed is useful when wrapping existing code or when handling -non-standard problems. - -Consider the following example: - -.. code-block:: python - - def inc(x): - return x + 1 - - def double(x): - return x + 2 - - def add(x, y): - return x + y - - data = [1, 2, 3, 4, 5] - - output = [] - for x in data: - a = inc(x) - b = double(x) - c = add(a, b) - output.append(c) - - total = sum(output) - -As written this code runs sequentially in a single thread. However we see that -a lot of this could be executed in parallel. We use the ``delayed`` function -to parallelize this code by turning it into a dask graph. We slightly modify -our code by wrapping functions in ``delayed``. This delays the execution of -the function and generates a dask graph instead. - -.. code-block:: python - - from dask import delayed - - output = [] - for x in data: - a = delayed(inc)(x) - b = delayed(double)(x) - c = delayed(add)(a, b) - output.append(c) - - total = delayed(sum)(output) - -We used the ``delayed`` function to wrap the function calls that we want -to turn into tasks. None of the ``inc``, ``double``, ``add`` or ``sum`` calls -have happened yet, instead the object ``total`` is a ``Delayed`` result that -contains a task graph of the entire computation. Looking at the graph we see -clear opportunities for parallel execution. The dask schedulers will exploit -this parallelism, generally improving performance. (although not in this -example, because these functions are already very small and fast.) - -.. code-block:: python - - total.visualize() # see image to the right - -.. image:: images/delayed-inc-double-add.svg - :align: right - :alt: simple task graph created with dask.delayed - -We can now compute this lazy result to execute the graph in parallel: - -.. 
code-block:: python - - >>> total.compute() - 45 - - -Delayed Function ---------------- - -The ``dask.delayed`` interface consists of one function, ``delayed``: - -- ``delayed`` wraps functions - - Wraps functions. Can be used as a decorator, or around function calls - directly (i.e. ``delayed(foo)(a, b, c)``). Outputs from functions wrapped in - ``delayed`` are proxy objects of type ``Delayed`` that contain a graph of - all operations done to get to this result. - -- ``delayed`` wraps objects - - Wraps objects. Used to create ``Delayed`` proxies directly. - -``Delayed`` objects can be thought of as representing a key in the dask. A -``Delayed`` supports *most* python operations, each of which creates another -``Delayed`` representing the result: - -- Most operators (``*``, ``-``, and so on) -- Item access and slicing (``a[0]``) -- Attribute access (``a.size``) -- Method calls (``a.index(0)``) - -Operations that aren't supported include: - -- Mutating operators (``a += 1``) -- Mutating magics such as ``__setitem__``/``__setattr__`` (``a[0] = 1``, ``a.foo = 1``) -- Iteration. (``for i in a: ...``) -- Use as a predicate (``if a: ...``) - -The last two points in particular mean that ``Delayed`` objects cannot be used for -control flow, meaning that no ``Delayed`` can appear in a loop or if statement. -In other words you can't iterate over a ``Delayed`` object, or use it as part of -a condition in an if statement, but ``Delayed`` object can be used in a body of a loop -or if statement (i.e. the example above is fine, but if ``data`` was a ``Delayed`` -object it wouldn't be). -Even with this limitation, many workflows can easily be parallelized. diff --git a/docs/source/delayed.rst b/docs/source/delayed.rst index c7190893b..ec098cf8a 100644 --- a/docs/source/delayed.rst +++ b/docs/source/delayed.rst @@ -1,6 +1,13 @@ Delayed ======= +.. toctree:: + :maxdepth: 1 + :hidden: + + delayed-api.rst + delayed-collections.rst + Sometimes problems don't fit into one of the collections like ``dask.array`` or ``dask.dataframe``. In these cases, users can parallelize custom algorithms using the simpler ``dask.delayed`` interface. This allows one to create graphs @@ -18,9 +25,125 @@ directly with a light annotation of normal python code. .. image:: images/inc-add.svg :alt: simple task graph created with dask.delayed -.. toctree:: - :maxdepth: 1 +Example +------- - delayed-overview.rst - delayed-api.rst - delayed-collections.rst +Sometimes we face problems that are parallelizable, but don't fit high-level +abstractions like Dask array or Dask dataframe. Consider the following example: + +.. code-block:: python + + def inc(x): + return x + 1 + + def double(x): + return x + 2 + + def add(x, y): + return x + y + + data = [1, 2, 3, 4, 5] + + output = [] + for x in data: + a = inc(x) + b = double(x) + c = add(a, b) + output.append(c) + + total = sum(output) + +There is clearly parallelism in this problem (many of the ``inc`` and +``double`` and ``add`` functions can evaluate independently), but it's not +clear how to convert this to a big array or big dataframe computation. + +As written, this code runs sequentially in a single thread. However, we see that +a lot of this could be executed in parallel. + +The Dask ``delayed`` function decorates your functions so that they operate +*lazily*. Rather than executing your function immediately, it will defer +execution, placing the function and its arguments into a task graph. + +.. currentmodule:: dask.delayed + +.. 
autosummary:: + delayed + +We slightly modify our code by wrapping functions in ``delayed``. +This delays the execution of the function and generates a dask graph instead. + +.. code-block:: python + + import dask + + output = [] + for x in data: + a = dask.delayed(inc)(x) + b = dask.delayed(double)(x) + c = dask.delayed(add)(a, b) + output.append(c) + + total = dask.delayed(sum)(output) + +We used the ``dask.delayed`` function to wrap the function calls that we want +to turn into tasks. None of the ``inc``, ``double``, ``add`` or ``sum`` calls +have happened yet; instead, the object ``total`` is a ``Delayed`` result that +contains a task graph of the entire computation. Looking at the graph we see +clear opportunities for parallel execution. The dask schedulers will exploit +this parallelism, generally improving performance (although not in this +example, because these functions are already very small and fast). + +.. code-block:: python + + total.visualize() # see image to the right + +.. image:: images/delayed-inc-double-add.svg + :align: right + :alt: simple task graph created with dask.delayed + +We can now compute this lazy result to execute the graph in parallel: + +.. code-block:: python + + >>> total.compute() + 45 + +Decorator +--------- + +It is also common to see the delayed function used as a decorator. Here is a +reproduction of our original problem as parallel code. + +.. code-block:: python + + import dask + + @dask.delayed + def inc(x): + return x + 1 + + @dask.delayed + def double(x): + return x + 2 + + @dask.delayed + def add(x, y): + return x + y + + data = [1, 2, 3, 4, 5] + + output = [] + for x in data: + a = inc(x) + b = double(x) + c = add(a, b) + output.append(c) + + total = dask.delayed(sum)(output) + + +Real time +--------- + +Sometimes you want to create and destroy work during execution, launch tasks +from other tasks, etc. For this, see the :doc:`Futures <futures>` interface. diff --git a/docs/source/futures.rst b/docs/source/futures.rst index 718a3b131..8a5ecfb95 100644 --- a/docs/source/futures.rst +++ b/docs/source/futures.rst @@ -316,7 +316,9 @@ Submit Tasks from Tasks ----------------------- .. autosummary:: + compute get_client + rejoin secede Tasks can launch other tasks by getting their own client. This enables complex @@ -372,6 +374,41 @@ thread that does not take up a slot within the Dask worker: future = client.submit(process, data) fire_and_forget(future) +If you intend to do more work in the same thread after waiting on client work, +you may want to explicitly block until the thread is able to *rejoin* the +thread pool. This allows some control over the number of threads that are +created. + +.. code-block:: python + + def f(n): + client = get_client() + + secede() # secede while we wait for results to come back + futures = client.map(func, range(n)) + results = client.gather(futures) + + rejoin() # block until a slot is open in the thread pool + result = analyze(results) + return result + + +Alternatively, you can just use the normal ``dask.compute`` function *within* a +task. This will automatically call ``secede`` and ``rejoin`` appropriately. + +.. 
code-block:: python + + def f(name, fn): + df = dd.read_csv(fn) # note that this is a dask collection + result = df[df.name == name].count() + + # This calls secede + # Then runs the computation on the cluster (including this worker) + # Then blocks on rejoin, and finally delivers the answer + result = result.compute() + + return result + Coordinate Data Between Clients ------------------------------- @@ -449,6 +486,33 @@ If you want to share large pieces of information then scatter the data first >>> future = client.scatter(parameters) >>> var.set(future) + +Locks +----- + +.. autosummary:: + Lock + +You can also hold onto cluster-wide locks using the ``Lock`` object. +This lock can either be given a consistent name, or you can pass the lock +object around itself. + +.. code-block:: python + + from dask.distributed import Lock + lock = Lock() + + def load(fn, lock=None): + with lock: + # read data from filename using some sensitive source + return ... + + futures = client.map(load, filenames, lock=lock) + +This can be useful if you want to control concurrent access to some external +resource like a database or un-thread-safe library. + + API --- @@ -523,3 +587,6 @@ API .. autoclass:: Variable :members: + +.. autoclass:: Lock + :members: diff --git a/docs/source/remote-data-services.rst b/docs/source/remote-data-services.rst index f5f9a9caa..8fe0f913a 100644 --- a/docs/source/remote-data-services.rst +++ b/docs/source/remote-data-services.rst @@ -224,6 +224,23 @@ Possible additional storage options: a JSON file created by gcloud. +HTTP +---- + +Direct file-like access to arbitrary URLs is available over HTTP and HTTPS. However, +there is no such thing as ``glob`` functionality over HTTP, so only explicit lists +of files can be used. + +Server implementations differ in the information they provide - they may or may +not specify the size of a file via a HEAD request or at the start of a download - +and some servers may not respect bytes range requests. The HTTPFileSystem therefore +offers best-effort behaviour: the download is streamed, but if more data is seen +than the configured block-size, an error will be raised. To be able to access such +data, you must read the whole file in one shot (and it must fit in memory). + +Note that, currently, ``http://`` and ``https://`` are treated as separate protocols, +and cannot be mixed. + Developer API ~~~~~~~~~~~~~
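The HTTP documentation added above describes best-effort, read-only access; a minimal usage sketch consistent with the tests added in this PR (the localhost URLs are placeholders for any explicit list of files):

from dask.bytes.core import open_files

# No glob over HTTP -- pass an explicit list of URLs.
files = open_files(['http://localhost:8999/a', 'http://localhost:8999/b'])
for of in files:
    with of as f:
        data = f.read()  # streams the whole file; it must fit in memory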
request: HTTP file system It would be convenient to be able to do `open_files` on a list of HTTP URLs. This cannot be a complete file-system like the other backends, because there is no possibility of `glob()`, but I still think it would be useful for accessing remote data. In general, the size of a given URL's data can be found through a HEAD command, and chunks read using the `Range` header keyword; some servers do not support that, however, and then downloads could only be parallelised across multiple files.
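A minimal sketch of the two HTTP mechanisms this request relies on -- a HEAD request for the size and a Range header for chunk reads -- using ``requests``; the helper names are hypothetical and this is not the implementation that landed:

import requests

def http_file_size(url):
    # Some servers report a size via HEAD; others do not.
    head = requests.head(url, allow_redirects=True)
    head.raise_for_status()
    length = head.headers.get('Content-Length')
    return int(length) if length is not None else None

def http_read_block(url, start, length):
    # Servers honoring ranges answer 206 (Partial Content); servers that
    # ignore the header return the whole body with status 200, in which
    # case downloads can only be parallelised across multiple files.
    headers = {'Range': 'bytes={}-{}'.format(start, start + length - 1)}
    r = requests.get(url, headers=headers)
    r.raise_for_status()
    if r.status_code == 206:
        return r.content
    return r.content[start:start + length]  # server ignored the range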
dask/dask
diff --git a/dask/bytes/tests/test_bytes_utils.py b/dask/bytes/tests/test_bytes_utils.py index 1972e0b16..aed29a6a9 100644 --- a/dask/bytes/tests/test_bytes_utils.py +++ b/dask/bytes/tests/test_bytes_utils.py @@ -91,7 +91,8 @@ def test_infer_storage_options(): assert so.pop('username') == 'User-name' assert so.pop('host') == 'Node-name.com' - assert infer_storage_options('http://127.0.0.1:8080/test.csv')['host'] == '127.0.0.1' + u = 'http://127.0.0.1:8080/test.csv' + assert infer_storage_options(u) == {'protocol': 'http', 'path': u} # For s3 and gcs the netloc is actually the bucket name, so we want to # include it in the path. Test that: diff --git a/dask/bytes/tests/test_http.py b/dask/bytes/tests/test_http.py new file mode 100644 index 000000000..ce877c524 --- /dev/null +++ b/dask/bytes/tests/test_http.py @@ -0,0 +1,135 @@ +import os +import pytest +import requests +import subprocess +import time + +from dask.bytes.core import open_files +from dask.compatibility import PY2 +from dask.utils import tmpdir + +files = ['a', 'b'] + + +@pytest.fixture(scope='module') +def dir_server(): + with tmpdir() as d: + for fn in files: + with open(os.path.join(d, fn), 'wb') as f: + f.write(b'a' * 10000) + + if PY2: + cmd = ['python', '-m', 'SimpleHTTPServer', '8999'] + else: + cmd = ['python', '-m', 'http.server', '8999'] + p = subprocess.Popen(cmd, cwd=d) + timeout = 10 + while True: + try: + requests.get('http://localhost:8999') + break + except requests.exceptions.ConnectionError: + time.sleep(0.1) + timeout -= 0.1 + if timeout < 0: + raise RuntimeError('Server did not appear') + yield d + p.terminate() + + +def test_simple(dir_server): + root = 'http://localhost:8999/' + fn = files[0] + f = open_files(root + fn)[0] + with f as f: + data = f.read() + assert data == open(os.path.join(dir_server, fn), 'rb').read() + + +@pytest.mark.parametrize('block_size', [None, 99999]) +def test_ops(dir_server, block_size): + root = 'http://localhost:8999/' + fn = files[0] + f = open_files(root + fn)[0] + data = open(os.path.join(dir_server, fn), 'rb').read() + with f as f: + # these pass because the default block size is large enough + assert f.read(10) == data[:10] + f.seek(0) + assert f.read(10) == data[:10] + assert f.read(10) == data[10:20] + f.seek(-10, 2) + assert f.read() == data[-10:] + + +def test_ops_blocksize(dir_server): + root = 'http://localhost:8999/' + fn = files[0] + f = open_files(root + fn, block_size=2)[0] + data = open(os.path.join(dir_server, fn), 'rb').read() + with f as f: + # it's OK to read the whole file + assert f.read() == data + + # note that if we reuse f from above, because it is tokenized, we get + # the same open file - where is this cached? 
+ fn = files[1] + f = open_files(root + fn, block_size=2)[0] + with f as f: + # fails because we want only 12 bytes + with pytest.raises(ValueError): + assert f.read(10) == data[:10] + + +def test_errors(dir_server): + f = open_files('http://localhost:8999/doesnotexist')[0] + with pytest.raises(requests.exceptions.RequestException): + with f: + pass + f = open_files('http://nohost/')[0] + with pytest.raises(requests.exceptions.RequestException): + with f: + pass + root = 'http://localhost:8999/' + fn = files[0] + f = open_files(root + fn, mode='wb')[0] + with pytest.raises(NotImplementedError): + with f: + pass + f = open_files(root + fn)[0] + with f as f: + with pytest.raises(ValueError): + f.seek(-1) + + +def test_files(dir_server): + root = 'http://localhost:8999/' + fs = open_files([root + f for f in files]) + for f, f2 in zip(fs, files): + with f as f: + assert f.read() == open(os.path.join(dir_server, f2), 'rb').read() + + +@pytest.mark.network +def test_parquet(): + dd = pytest.importorskip('dask.dataframe') + pytest.importorskip('fastparquet') # no pyarrow compatibility FS yet + df = dd.read_parquet([ + 'https://github.com/Parquet/parquet-compatibility/raw/' + 'master/parquet-testdata/impala/1.1.1-NONE/' + 'nation.impala.parquet']).compute() + assert df.n_nationkey.tolist() == list(range(25)) + assert df.columns.tolist() == ['n_nationkey', 'n_name', 'n_regionkey', + 'n_comment'] + + +@pytest.mark.network +def test_bag(): + # This test pulls from different hosts + db = pytest.importorskip('dask.bag') + urls = ['https://raw.githubusercontent.com/weierophinney/pastebin/' + 'master/public/js-src/dojox/data/tests/stores/patterns.csv', + 'https://en.wikipedia.org'] + b = db.read_text(urls) + assert b.npartitions == 2 + b.compute()
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_added_files", "has_removed_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 13 }
1.21
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[complete]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio", "numpydoc", "sphinx", "sphinx_rtd_theme", "cloudpickle", "pandas>=0.19.0", "distributed" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work Babel==2.11.0 certifi==2021.5.30 charset-normalizer==2.0.12 click==8.0.4 cloudpickle==2.2.1 contextvars==2.4 coverage==6.2 -e git+https://github.com/dask/dask.git@246d2ce2ef5bb306b7b6e26e7bc6cfaea492b26b#egg=dask distributed==1.21.8 docutils==0.18.1 execnet==1.9.0 HeapDict==1.0.1 idna==3.10 imagesize==1.4.1 immutables==0.19 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Jinja2==3.0.3 locket==1.0.0 MarkupSafe==2.0.1 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work msgpack==1.0.5 numpy==1.19.5 numpydoc==1.1.0 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pandas==1.1.5 partd==1.2.0 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work psutil==7.0.0 py @ file:///opt/conda/conda-bld/py_1644396412707/work Pygments==2.14.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytest-asyncio==0.16.0 pytest-cov==4.0.0 pytest-mock==3.6.1 pytest-xdist==3.0.2 python-dateutil==2.9.0.post0 pytz==2025.2 PyYAML==6.0.1 requests==2.27.1 six==1.17.0 snowballstemmer==2.2.0 sortedcontainers==2.4.0 Sphinx==5.3.0 sphinx-rtd-theme==2.0.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tblib==1.7.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 toolz==0.12.0 tornado==6.1 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 zict==2.1.0 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: dask channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - babel==2.11.0 - charset-normalizer==2.0.12 - click==8.0.4 - cloudpickle==2.2.1 - contextvars==2.4 - coverage==6.2 - distributed==1.21.8 - docutils==0.18.1 - execnet==1.9.0 - heapdict==1.0.1 - idna==3.10 - imagesize==1.4.1 - immutables==0.19 - jinja2==3.0.3 - locket==1.0.0 - markupsafe==2.0.1 - msgpack==1.0.5 - numpy==1.19.5 - numpydoc==1.1.0 - pandas==1.1.5 - partd==1.2.0 - psutil==7.0.0 - pygments==2.14.0 - pytest-asyncio==0.16.0 - pytest-cov==4.0.0 - pytest-mock==3.6.1 - pytest-xdist==3.0.2 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyyaml==6.0.1 - requests==2.27.1 - six==1.17.0 - snowballstemmer==2.2.0 - sortedcontainers==2.4.0 - sphinx==5.3.0 - sphinx-rtd-theme==2.0.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tblib==1.7.0 - tomli==1.2.3 - toolz==0.12.0 - tornado==6.1 - urllib3==1.26.20 - zict==2.1.0 prefix: /opt/conda/envs/dask
[ "dask/bytes/tests/test_bytes_utils.py::test_infer_storage_options", "dask/bytes/tests/test_http.py::test_simple", "dask/bytes/tests/test_http.py::test_ops[None]", "dask/bytes/tests/test_http.py::test_ops[99999]", "dask/bytes/tests/test_http.py::test_ops_blocksize", "dask/bytes/tests/test_http.py::test_errors", "dask/bytes/tests/test_http.py::test_files", "dask/bytes/tests/test_http.py::test_bag" ]
[]
[ "dask/bytes/tests/test_bytes_utils.py::test_read_block", "dask/bytes/tests/test_bytes_utils.py::test_seek_delimiter_endline", "dask/bytes/tests/test_bytes_utils.py::test_infer_storage_options_c[c:\\\\foo\\\\bar-c:\\\\foo\\\\bar]", "dask/bytes/tests/test_bytes_utils.py::test_infer_storage_options_c[C:\\\\\\\\foo\\\\bar-C:\\\\\\\\foo\\\\bar]", "dask/bytes/tests/test_bytes_utils.py::test_infer_storage_options_c[c:/foo/bar-c:/foo/bar]", "dask/bytes/tests/test_bytes_utils.py::test_infer_storage_options_c[file:///c|\\\\foo\\\\bar-c:\\\\foo\\\\bar]", "dask/bytes/tests/test_bytes_utils.py::test_infer_storage_options_c[file:///C|/foo/bar-C:/foo/bar]", "dask/bytes/tests/test_bytes_utils.py::test_infer_storage_options_c[file:///C:/foo/bar-C:/foo/bar]" ]
[]
BSD 3-Clause "New" or "Revised" License
2,149
[ "docs/source/remote-data-services.rst", "docs/source/dataframe-overview.rst", "docs/source/array-creation.rst", "docs/source/futures.rst", "docs/source/array.rst", "dask/bytes/core.py", "docs/source/changelog.rst", "docs/source/array-ghost.rst", "docs/source/dataframe.rst", "docs/source/dataframe-create.rst", "docs/source/array-overview.rst", "docs/source/delayed-api.rst", "docs/source/delayed-overview.rst", "dask/bytes/http.py", "dask/bytes/utils.py", "docs/source/delayed.rst", "docs/source/conf.py" ]
[ "docs/source/remote-data-services.rst", "docs/source/dataframe-overview.rst", "docs/source/array-creation.rst", "docs/source/futures.rst", "docs/source/array.rst", "dask/bytes/core.py", "docs/source/changelog.rst", "docs/source/array-ghost.rst", "docs/source/dataframe.rst", "docs/source/dataframe-create.rst", "docs/source/array-overview.rst", "docs/source/delayed-api.rst", "docs/source/delayed-overview.rst", "dask/bytes/http.py", "dask/bytes/utils.py", "docs/source/delayed.rst", "docs/source/conf.py" ]
Azure__WALinuxAgent-1043
b526b7ada84dfbf21bed8a3e7092ec23447fe14e
2018-02-12 21:11:33
6e9b985c1d7d564253a1c344bab01b45093103cd
diff --git a/azurelinuxagent/common/utils/restutil.py b/azurelinuxagent/common/utils/restutil.py index c521f63a..807be29c 100644 --- a/azurelinuxagent/common/utils/restutil.py +++ b/azurelinuxagent/common/utils/restutil.py @@ -28,7 +28,8 @@ import azurelinuxagent.common.utils.textutil as textutil from azurelinuxagent.common.exception import HttpError, ResourceGoneError from azurelinuxagent.common.future import httpclient, urlparse, ustr -from azurelinuxagent.common.version import PY_VERSION_MAJOR, AGENT_NAME, GOAL_STATE_AGENT_VERSION +from azurelinuxagent.common.version import PY_VERSION_MAJOR + SECURE_WARNING_EMITTED = False @@ -77,7 +78,6 @@ RETRY_EXCEPTIONS = [ HTTP_PROXY_ENV = "http_proxy" HTTPS_PROXY_ENV = "https_proxy" -HTTP_USER_AGENT = "{0}/{1}".format(AGENT_NAME, GOAL_STATE_AGENT_VERSION) DEFAULT_PROTOCOL_ENDPOINT='168.63.129.16' HOST_PLUGIN_PORT = 32526 @@ -175,13 +175,11 @@ def _http_request(method, host, rel_uri, port=None, data=None, secure=False, if port is None: port = 443 if secure else 80 - if 'User-Agent' not in headers: - headers['User-Agent'] = HTTP_USER_AGENT - if use_proxy: conn_host, conn_port = proxy_host, proxy_port scheme = "https" if secure else "http" url = "{0}://{1}:{2}{3}".format(scheme, host, port, rel_uri) + else: conn_host, conn_port = host, port url = rel_uri @@ -192,6 +190,7 @@ def _http_request(method, host, rel_uri, port=None, data=None, secure=False, timeout=10) if use_proxy: conn.set_tunnel(host, port) + else: conn = httpclient.HTTPConnection(conn_host, conn_port, diff --git a/azurelinuxagent/ga/env.py b/azurelinuxagent/ga/env.py index d9b7d823..fa39b84f 100644 --- a/azurelinuxagent/ga/env.py +++ b/azurelinuxagent/ga/env.py @@ -82,6 +82,12 @@ class EnvHandler(object): self.dhcp_handler.conf_routes() self.hostname = self.osutil.get_hostname_record() self.dhcp_id = self.osutil.get_dhcp_pid() + self.start() + + def is_alive(self): + return self.server_thread.is_alive() + + def start(self): self.server_thread = threading.Thread(target=self.monitor) self.server_thread.setDaemon(True) self.server_thread.start() diff --git a/azurelinuxagent/ga/monitor.py b/azurelinuxagent/ga/monitor.py index 71ac9b0b..02767651 100644 --- a/azurelinuxagent/ga/monitor.py +++ b/azurelinuxagent/ga/monitor.py @@ -94,13 +94,19 @@ class MonitorHandler(object): self.osutil = get_osutil() self.protocol_util = get_protocol_util() self.sysinfo = [] + self.event_thread = None def run(self): self.init_sysinfo() + self.start() - event_thread = threading.Thread(target=self.daemon) - event_thread.setDaemon(True) - event_thread.start() + def is_alive(self): + return self.event_thread.is_alive() + + def start(self): + self.event_thread = threading.Thread(target=self.daemon) + self.event_thread.setDaemon(True) + self.event_thread.start() def init_sysinfo(self): osversion = "{0}:{1}-{2}-{3}:{4}".format(platform.system(), diff --git a/azurelinuxagent/ga/update.py b/azurelinuxagent/ga/update.py index 2e430318..dcd2955d 100644 --- a/azurelinuxagent/ga/update.py +++ b/azurelinuxagent/ga/update.py @@ -249,10 +249,12 @@ class UpdateHandler(object): # Launch monitoring threads from azurelinuxagent.ga.monitor import get_monitor_handler - get_monitor_handler().run() + monitor_thread = get_monitor_handler() + monitor_thread.run() from azurelinuxagent.ga.env import get_env_handler - get_env_handler().run() + env_thread = get_env_handler() + env_thread.run() from azurelinuxagent.ga.exthandlers import get_exthandlers_handler, migrate_handler_state exthandlers_handler = get_exthandlers_handler() @@ -269,6 
+271,14 @@ class UpdateHandler(object): CURRENT_AGENT) break + if not monitor_thread.is_alive(): + logger.warn(u"Monitor thread died, restarting") + monitor_thread.start() + + if not env_thread.is_alive(): + logger.warn(u"Environment thread died, restarting") + env_thread.start() + if self._upgrade_available(): available_agent = self.get_latest_agent() if available_agent is None:
Ensure the Monitor Thread does not Die The monitoring thread is (ironically) not monitored. There have been cases where VMs stop sending telemetry data, which is the responsibility of the monitoring thread. The working theory is that the thread died, and was not automatically restarted.
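The fix in the patch above gives each handler ``start``/``is_alive`` methods and restarts dead threads from the agent's main loop. A generic sketch of that watchdog pattern (the class and function names here are illustrative, not the agent's real ones):

import threading
import time

class RestartableWorker(object):
    # Wraps a target callable in a daemon thread that can be restarted.
    def __init__(self, target):
        self.target = target
        self.thread = None

    def start(self):
        self.thread = threading.Thread(target=self.target)
        self.thread.setDaemon(True)  # setDaemon matches the agent's Python-2-era style
        self.thread.start()

    def is_alive(self):
        return self.thread is not None and self.thread.is_alive()

def watchdog_loop(workers, interval=3):
    # Runs forever, like the agent's main loop; restarts any dead worker.
    for w in workers:
        w.start()
    while True:
        time.sleep(interval)
        for w in workers:
            if not w.is_alive():
                w.start()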
Azure/WALinuxAgent
diff --git a/tests/ga/test_monitor.py b/tests/ga/test_monitor.py index c646cef9..51f12a0d 100644 --- a/tests/ga/test_monitor.py +++ b/tests/ga/test_monitor.py @@ -18,6 +18,7 @@ from tests.tools import * from azurelinuxagent.ga.monitor import * + class TestMonitor(AgentTestCase): def test_parse_xml_event(self): data_str = load_data('ext/event.xml') diff --git a/tests/ga/test_update.py b/tests/ga/test_update.py index 21c81e98..0726d4c2 100644 --- a/tests/ga/test_update.py +++ b/tests/ga/test_update.py @@ -1248,7 +1248,6 @@ class TestUpdate(UpdateTestCase): self.assertEqual(1, mock_env.call_count) self.assertEqual(1, mock_exit.call_count) - def test_run(self): self._test_run() @@ -1497,6 +1496,151 @@ class TestUpdate(UpdateTestCase): self.assertTrue(ga_manifest_2.allowed_versions[1] == '2.2.14') +class MonitorThreadTest(AgentTestCase): + def setUp(self): + AgentTestCase.setUp(self) + self.event_patch = patch('azurelinuxagent.common.event.add_event') + self.update_handler = get_update_handler() + self.update_handler.protocol_util = Mock() + + def _test_run(self, invocations=1): + iterations = [0] + def iterator(*args, **kwargs): + iterations[0] += 1 + if iterations[0] >= invocations: + self.update_handler.running = False + return + + with patch('os.getpid', return_value=42): + with patch.object(UpdateHandler, '_is_orphaned') as mock_is_orphaned: + mock_is_orphaned.__get__ = Mock(return_value=False) + with patch('azurelinuxagent.ga.exthandlers.get_exthandlers_handler') as mock_handler: + with patch('time.sleep', side_effect=iterator) as mock_sleep: + with patch('sys.exit') as mock_exit: + self.update_handler.run() + + @patch('azurelinuxagent.ga.monitor.get_monitor_handler') + @patch('azurelinuxagent.ga.env.get_env_handler') + def test_start_threads(self, mock_env, mock_monitor): + self.assertTrue(self.update_handler.running) + + mock_monitor_thread = MagicMock() + mock_monitor_thread.run = MagicMock() + mock_monitor.return_value = mock_monitor_thread + + mock_env_thread = MagicMock() + mock_env_thread.run = MagicMock() + mock_env.return_value = mock_env_thread + + self._test_run(invocations=0) + self.assertEqual(1, mock_monitor.call_count) + self.assertEqual(1, mock_monitor_thread.run.call_count) + self.assertEqual(1, mock_env.call_count) + self.assertEqual(1, mock_env_thread.run.call_count) + + @patch('azurelinuxagent.ga.monitor.get_monitor_handler') + @patch('azurelinuxagent.ga.env.get_env_handler') + def test_check_if_monitor_thread_is_alive(self, mock_env, mock_monitor): + self.assertTrue(self.update_handler.running) + + mock_monitor_thread = MagicMock() + mock_monitor_thread.run = MagicMock() + mock_monitor_thread.is_alive = MagicMock(return_value=True) + mock_monitor_thread.start = MagicMock() + mock_monitor.return_value = mock_monitor_thread + + self._test_run(invocations=0) + self.assertEqual(1, mock_monitor.call_count) + self.assertEqual(1, mock_monitor_thread.run.call_count) + self.assertEqual(1, mock_monitor_thread.is_alive.call_count) + self.assertEqual(0, mock_monitor_thread.start.call_count) + + @patch('azurelinuxagent.ga.monitor.get_monitor_handler') + @patch('azurelinuxagent.ga.env.get_env_handler') + def test_check_if_env_thread_is_alive(self, mock_env, mock_monitor): + self.assertTrue(self.update_handler.running) + + mock_env_thread = MagicMock() + mock_env_thread.run = MagicMock() + mock_env_thread.is_alive = MagicMock(return_value=True) + mock_env_thread.start = MagicMock() + mock_env.return_value = mock_env_thread + + self._test_run(invocations=1) + self.assertEqual(1, 
mock_env.call_count) + self.assertEqual(1, mock_env_thread.run.call_count) + self.assertEqual(1, mock_env_thread.is_alive.call_count) + self.assertEqual(0, mock_env_thread.start.call_count) + + @patch('azurelinuxagent.ga.monitor.get_monitor_handler') + @patch('azurelinuxagent.ga.env.get_env_handler') + def test_restart_monitor_thread_if_not_alive(self, mock_env, mock_monitor): + self.assertTrue(self.update_handler.running) + + mock_monitor_thread = MagicMock() + mock_monitor_thread.run = MagicMock() + mock_monitor_thread.is_alive = MagicMock(return_value=False) + mock_monitor_thread.start = MagicMock() + mock_monitor.return_value = mock_monitor_thread + + self._test_run(invocations=1) + self.assertEqual(1, mock_monitor.call_count) + self.assertEqual(1, mock_monitor_thread.run.call_count) + self.assertEqual(1, mock_monitor_thread.is_alive.call_count) + self.assertEqual(1, mock_monitor_thread.start.call_count) + + @patch('azurelinuxagent.ga.monitor.get_monitor_handler') + @patch('azurelinuxagent.ga.env.get_env_handler') + def test_restart_env_thread_if_not_alive(self, mock_env, mock_monitor): + self.assertTrue(self.update_handler.running) + + mock_env_thread = MagicMock() + mock_env_thread.run = MagicMock() + mock_env_thread.is_alive = MagicMock(return_value=False) + mock_env_thread.start = MagicMock() + mock_env.return_value = mock_env_thread + + self._test_run(invocations=1) + self.assertEqual(1, mock_env.call_count) + self.assertEqual(1, mock_env_thread.run.call_count) + self.assertEqual(1, mock_env_thread.is_alive.call_count) + self.assertEqual(1, mock_env_thread.start.call_count) + + @patch('azurelinuxagent.ga.monitor.get_monitor_handler') + @patch('azurelinuxagent.ga.env.get_env_handler') + def test_restart_monitor_thread(self, mock_env, mock_monitor): + self.assertTrue(self.update_handler.running) + + mock_monitor_thread = MagicMock() + mock_monitor_thread.run = MagicMock() + mock_monitor_thread.is_alive = MagicMock(return_value=False) + mock_monitor_thread.start = MagicMock() + mock_monitor.return_value = mock_monitor_thread + + self._test_run(invocations=0) + self.assertEqual(True, mock_monitor.called) + self.assertEqual(True, mock_monitor_thread.run.called) + self.assertEqual(True, mock_monitor_thread.is_alive.called) + self.assertEqual(True, mock_monitor_thread.start.called) + + @patch('azurelinuxagent.ga.monitor.get_monitor_handler') + @patch('azurelinuxagent.ga.env.get_env_handler') + def test_restart_env_thread(self, mock_env, mock_monitor): + self.assertTrue(self.update_handler.running) + + mock_env_thread = MagicMock() + mock_env_thread.run = MagicMock() + mock_env_thread.is_alive = MagicMock(return_value=False) + mock_env_thread.start = MagicMock() + mock_env.return_value = mock_env_thread + + self._test_run(invocations=0) + self.assertEqual(True, mock_env.called) + self.assertEqual(True, mock_env_thread.run.called) + self.assertEqual(True, mock_env_thread.is_alive.called) + self.assertEqual(True, mock_env_thread.start.called) + + class ChildMock(Mock): def __init__(self, return_value=0, side_effect=None): Mock.__init__(self, return_value=return_value, side_effect=side_effect) diff --git a/tests/utils/test_rest_util.py b/tests/utils/test_rest_util.py index 4f993227..bde0c3d0 100644 --- a/tests/utils/test_rest_util.py +++ b/tests/utils/test_rest_util.py @@ -15,11 +15,13 @@ # Requires Python 2.4+ and Openssl 1.0+ # +import os +import unittest + from azurelinuxagent.common.exception import HttpError, \ + ProtocolError, \ ResourceGoneError - import 
azurelinuxagent.common.utils.restutil as restutil -from azurelinuxagent.common.utils.restutil import HTTP_USER_AGENT from azurelinuxagent.common.future import httpclient, ustr @@ -195,7 +197,7 @@ class TestHttpOperations(AgentTestCase): ]) HTTPSConnection.assert_not_called() mock_conn.request.assert_has_calls([ - call(method="GET", url="/bar", body=None, headers={'User-Agent': HTTP_USER_AGENT}) + call(method="GET", url="/bar", body=None, headers={}) ]) mock_conn.getresponse.assert_called_once() self.assertNotEquals(None, resp) @@ -218,7 +220,7 @@ class TestHttpOperations(AgentTestCase): call("foo", 443, timeout=10) ]) mock_conn.request.assert_has_calls([ - call(method="GET", url="/bar", body=None, headers={'User-Agent': HTTP_USER_AGENT}) + call(method="GET", url="/bar", body=None, headers={}) ]) mock_conn.getresponse.assert_called_once() self.assertNotEquals(None, resp) @@ -242,7 +244,7 @@ class TestHttpOperations(AgentTestCase): ]) HTTPSConnection.assert_not_called() mock_conn.request.assert_has_calls([ - call(method="GET", url="http://foo:80/bar", body=None, headers={'User-Agent': HTTP_USER_AGENT}) + call(method="GET", url="http://foo:80/bar", body=None, headers={}) ]) mock_conn.getresponse.assert_called_once() self.assertNotEquals(None, resp) @@ -267,7 +269,7 @@ class TestHttpOperations(AgentTestCase): call("foo.bar", 23333, timeout=10) ]) mock_conn.request.assert_has_calls([ - call(method="GET", url="https://foo:443/bar", body=None, headers={'User-Agent': HTTP_USER_AGENT}) + call(method="GET", url="https://foo:443/bar", body=None, headers={}) ]) mock_conn.getresponse.assert_called_once() self.assertNotEquals(None, resp)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 4 }
2.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "nose", "pyasn1", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work nose==1.3.7 packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyasn1==0.5.1 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 toml @ file:///tmp/build/80754af9/toml_1616166611790/work typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work -e git+https://github.com/Azure/WALinuxAgent.git@b526b7ada84dfbf21bed8a3e7092ec23447fe14e#egg=WALinuxAgent zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: WALinuxAgent channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - nose==1.3.7 - pyasn1==0.5.1 prefix: /opt/conda/envs/WALinuxAgent
[ "tests/ga/test_update.py::MonitorThreadTest::test_check_if_env_thread_is_alive", "tests/ga/test_update.py::MonitorThreadTest::test_check_if_monitor_thread_is_alive", "tests/ga/test_update.py::MonitorThreadTest::test_restart_env_thread", "tests/ga/test_update.py::MonitorThreadTest::test_restart_env_thread_if_not_alive", "tests/ga/test_update.py::MonitorThreadTest::test_restart_monitor_thread", "tests/ga/test_update.py::MonitorThreadTest::test_restart_monitor_thread_if_not_alive", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_direct", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_direct_secure", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_proxy", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_proxy_secure" ]
[]
[ "tests/ga/test_monitor.py::TestMonitor::test_add_sysinfo", "tests/ga/test_monitor.py::TestMonitor::test_parse_xml_event", "tests/ga/test_update.py::TestGuestAgentError::test_clear", "tests/ga/test_update.py::TestGuestAgentError::test_creation", "tests/ga/test_update.py::TestGuestAgentError::test_mark_failure", "tests/ga/test_update.py::TestGuestAgentError::test_mark_failure_permanent", "tests/ga/test_update.py::TestGuestAgentError::test_save", "tests/ga/test_update.py::TestGuestAgentError::test_str", "tests/ga/test_update.py::TestGuestAgent::test_clear_error", "tests/ga/test_update.py::TestGuestAgent::test_creation", "tests/ga/test_update.py::TestGuestAgent::test_download", "tests/ga/test_update.py::TestGuestAgent::test_download_fail", "tests/ga/test_update.py::TestGuestAgent::test_download_fallback", "tests/ga/test_update.py::TestGuestAgent::test_ensure_download_skips_blacklisted", "tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded", "tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_download_fails", "tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_load_manifest_fails", "tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_unpack_fails", "tests/ga/test_update.py::TestGuestAgent::test_ioerror_not_blacklisted", "tests/ga/test_update.py::TestGuestAgent::test_is_available", "tests/ga/test_update.py::TestGuestAgent::test_is_blacklisted", "tests/ga/test_update.py::TestGuestAgent::test_is_downloaded", "tests/ga/test_update.py::TestGuestAgent::test_load_error", "tests/ga/test_update.py::TestGuestAgent::test_load_manifest", "tests/ga/test_update.py::TestGuestAgent::test_load_manifest_is_empty", "tests/ga/test_update.py::TestGuestAgent::test_load_manifest_is_malformed", "tests/ga/test_update.py::TestGuestAgent::test_load_manifest_missing", "tests/ga/test_update.py::TestGuestAgent::test_mark_failure", "tests/ga/test_update.py::TestGuestAgent::test_resource_gone_error_not_blacklisted", "tests/ga/test_update.py::TestGuestAgent::test_unpack", "tests/ga/test_update.py::TestGuestAgent::test_unpack_fail", "tests/ga/test_update.py::TestUpdate::test_creation", "tests/ga/test_update.py::TestUpdate::test_emit_restart_event_emits_event_if_not_clean_start", "tests/ga/test_update.py::TestUpdate::test_emit_restart_event_writes_sentinal_file", "tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans", "tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans_ignores_exceptions", "tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans_kills_after_interval", "tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans_skips_if_no_orphans", "tests/ga/test_update.py::TestUpdate::test_ensure_partition_assigned", "tests/ga/test_update.py::TestUpdate::test_ensure_readonly_leaves_unmodified", "tests/ga/test_update.py::TestUpdate::test_ensure_readonly_sets_readonly", "tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_ignores_installed_agent", "tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_raises_exception_for_restarting_agent", "tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_resets_with_new_agent", "tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_will_not_raise_exception_for_long_restarts", "tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_will_not_raise_exception_too_few_restarts", "tests/ga/test_update.py::TestUpdate::test_filter_blacklisted_agents", "tests/ga/test_update.py::TestUpdate::test_find_agents", "tests/ga/test_update.py::TestUpdate::test_find_agents_does_reload", 
"tests/ga/test_update.py::TestUpdate::test_find_agents_sorts", "tests/ga/test_update.py::TestUpdate::test_get_host_plugin_returns_host_for_wireserver", "tests/ga/test_update.py::TestUpdate::test_get_host_plugin_returns_none_otherwise", "tests/ga/test_update.py::TestUpdate::test_get_latest_agent", "tests/ga/test_update.py::TestUpdate::test_get_latest_agent_excluded", "tests/ga/test_update.py::TestUpdate::test_get_latest_agent_no_updates", "tests/ga/test_update.py::TestUpdate::test_get_latest_agent_skip_updates", "tests/ga/test_update.py::TestUpdate::test_get_latest_agent_skips_unavailable", "tests/ga/test_update.py::TestUpdate::test_get_pid_files", "tests/ga/test_update.py::TestUpdate::test_get_pid_files_returns_previous", "tests/ga/test_update.py::TestUpdate::test_is_clean_start_returns_false_for_exceptions", "tests/ga/test_update.py::TestUpdate::test_is_clean_start_returns_false_when_sentinal_exists", "tests/ga/test_update.py::TestUpdate::test_is_clean_start_returns_true_when_no_sentinal", "tests/ga/test_update.py::TestUpdate::test_is_orphaned_returns_false_if_parent_exists", "tests/ga/test_update.py::TestUpdate::test_is_orphaned_returns_true_if_parent_does_not_exist", "tests/ga/test_update.py::TestUpdate::test_is_orphaned_returns_true_if_parent_is_init", "tests/ga/test_update.py::TestUpdate::test_is_version_available", "tests/ga/test_update.py::TestUpdate::test_is_version_available_accepts_current", "tests/ga/test_update.py::TestUpdate::test_is_version_available_rejects", "tests/ga/test_update.py::TestUpdate::test_is_version_available_rejects_by_default", "tests/ga/test_update.py::TestUpdate::test_package_filter_for_agent_manifest", "tests/ga/test_update.py::TestUpdate::test_purge_agents", "tests/ga/test_update.py::TestUpdate::test_run", "tests/ga/test_update.py::TestUpdate::test_run_clears_sentinal_on_successful_exit", "tests/ga/test_update.py::TestUpdate::test_run_emits_restart_event", "tests/ga/test_update.py::TestUpdate::test_run_keeps_running", "tests/ga/test_update.py::TestUpdate::test_run_latest", "tests/ga/test_update.py::TestUpdate::test_run_latest_captures_signals", "tests/ga/test_update.py::TestUpdate::test_run_latest_creates_only_one_signal_handler", "tests/ga/test_update.py::TestUpdate::test_run_latest_defaults_to_current", "tests/ga/test_update.py::TestUpdate::test_run_latest_exception_blacklists", "tests/ga/test_update.py::TestUpdate::test_run_latest_exception_does_not_blacklist_if_terminating", "tests/ga/test_update.py::TestUpdate::test_run_latest_forwards_output", "tests/ga/test_update.py::TestUpdate::test_run_latest_nonzero_code_marks_failures", "tests/ga/test_update.py::TestUpdate::test_run_latest_passes_child_args", "tests/ga/test_update.py::TestUpdate::test_run_latest_polling_stops_at_failure", "tests/ga/test_update.py::TestUpdate::test_run_latest_polling_stops_at_success", "tests/ga/test_update.py::TestUpdate::test_run_latest_polls_and_waits_for_success", "tests/ga/test_update.py::TestUpdate::test_run_latest_polls_frequently_if_installed_is_latest", "tests/ga/test_update.py::TestUpdate::test_run_latest_polls_moderately_if_installed_not_latest", "tests/ga/test_update.py::TestUpdate::test_run_leaves_sentinal_on_unsuccessful_exit", "tests/ga/test_update.py::TestUpdate::test_run_stops_if_orphaned", "tests/ga/test_update.py::TestUpdate::test_run_stops_if_update_available", "tests/ga/test_update.py::TestUpdate::test_set_agents_sets_agents", "tests/ga/test_update.py::TestUpdate::test_set_agents_sorts_agents", "tests/ga/test_update.py::TestUpdate::test_set_sentinal", 
"tests/ga/test_update.py::TestUpdate::test_set_sentinal_writes_current_agent", "tests/ga/test_update.py::TestUpdate::test_shutdown", "tests/ga/test_update.py::TestUpdate::test_shutdown_ignores_exceptions", "tests/ga/test_update.py::TestUpdate::test_shutdown_ignores_missing_sentinal_file", "tests/ga/test_update.py::TestUpdate::test_update_available_returns_true_if_current_gets_blacklisted", "tests/ga/test_update.py::TestUpdate::test_upgrade_available_handles_missing_family", "tests/ga/test_update.py::TestUpdate::test_upgrade_available_includes_old_agents", "tests/ga/test_update.py::TestUpdate::test_upgrade_available_purges_old_agents", "tests/ga/test_update.py::TestUpdate::test_upgrade_available_returns_true_on_first_use", "tests/ga/test_update.py::TestUpdate::test_upgrade_available_skips_if_too_frequent", "tests/ga/test_update.py::TestUpdate::test_upgrade_available_skips_if_when_no_new_versions", "tests/ga/test_update.py::TestUpdate::test_upgrade_available_skips_when_no_versions", "tests/ga/test_update.py::TestUpdate::test_upgrade_available_skips_when_updates_are_disabled", "tests/ga/test_update.py::TestUpdate::test_upgrade_available_sorts", "tests/ga/test_update.py::TestUpdate::test_upgrade_available_will_refresh_goal_state", "tests/ga/test_update.py::TestUpdate::test_write_pid_file", "tests/ga/test_update.py::TestUpdate::test_write_pid_file_ignores_exceptions", "tests/ga/test_update.py::MonitorThreadTest::test_start_threads", "tests/utils/test_rest_util.py::TestIOErrorCounter::test_get_and_reset", "tests/utils/test_rest_util.py::TestIOErrorCounter::test_increment_hostplugin", "tests/utils/test_rest_util.py::TestIOErrorCounter::test_increment_other", "tests/utils/test_rest_util.py::TestIOErrorCounter::test_increment_protocol", "tests/utils/test_rest_util.py::TestHttpOperations::test_get_http_proxy_configuration_overrides_env", "tests/utils/test_rest_util.py::TestHttpOperations::test_get_http_proxy_configuration_requires_host", "tests/utils/test_rest_util.py::TestHttpOperations::test_get_http_proxy_http_uses_httpproxy", "tests/utils/test_rest_util.py::TestHttpOperations::test_get_http_proxy_https_uses_httpsproxy", "tests/utils/test_rest_util.py::TestHttpOperations::test_get_http_proxy_ignores_user_in_httpproxy", "tests/utils/test_rest_util.py::TestHttpOperations::test_get_http_proxy_none_is_default", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_raises_for_bad_request", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_raises_for_resource_gone", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_retries_exceptions", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_retries_for_safe_minimum_number_when_throttled", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_retries_ioerrors", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_retries_passed_status_codes", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_retries_status_codes", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_retries_with_constant_delay_when_throttled", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_retries_with_fibonacci_delay", "tests/utils/test_rest_util.py::TestHttpOperations::test_http_request_with_retry", "tests/utils/test_rest_util.py::TestHttpOperations::test_parse_url", "tests/utils/test_rest_util.py::TestHttpOperations::test_read_response_bytes", "tests/utils/test_rest_util.py::TestHttpOperations::test_read_response_error", 
"tests/utils/test_rest_util.py::TestHttpOperations::test_request_failed", "tests/utils/test_rest_util.py::TestHttpOperations::test_request_succeeded" ]
[]
Apache License 2.0
2,150
[ "azurelinuxagent/ga/env.py", "azurelinuxagent/ga/monitor.py", "azurelinuxagent/ga/update.py", "azurelinuxagent/common/utils/restutil.py" ]
[ "azurelinuxagent/ga/env.py", "azurelinuxagent/ga/monitor.py", "azurelinuxagent/ga/update.py", "azurelinuxagent/common/utils/restutil.py" ]
CartoDB__cartoframes-379
86069f44058986062a2af21ef1f6690864784596
2018-02-13 16:26:13
3f73c6e380983a820e7703bebea0b752618aa722
diff --git a/cartoframes/layer.py b/cartoframes/layer.py index 6ddaa641..49143e33 100644 --- a/cartoframes/layer.py +++ b/cartoframes/layer.py @@ -60,9 +60,7 @@ class BaseMap(AbstractLayer): self.source = source self.labels = labels - stem = 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/' - if source == 'voyager': - stem += 'rastertiles' + stem = 'https://{s}.basemaps.cartocdn.com/rastertiles/' if self.is_basic(): if only_labels:
Update basemap URLs Basemap URL domains currently use fastly.net (e.g. here: https://github.com/CartoDB/cartoframes/blob/master/cartoframes/layer.py#L63); this should be replaced by basemaps.cartocdn.com throughout the code and tests: * Old: https://cartodb-basemaps-{s}.global.ssl.fastly.net/ * New: https://{s}.basemaps.cartocdn.com/ Other URL parts remain the same. Needed for https://github.com/CartoDB/lbs-services/issues/16
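Purely as an illustration of the change this record describes (not part of the dataset row), a minimal sketch of the stem swap; `tile_url`, `OLD_STEM`, and `NEW_STEM` are hypothetical names, and per the patch above the new CDN serves every style under a rastertiles/ path:

```python
# Illustrative constants for the domain swap described in this record;
# per the patch, all basemap styles now live under rastertiles/.
OLD_STEM = 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/'
NEW_STEM = 'https://{s}.basemaps.cartocdn.com/rastertiles/'

def tile_url(style, stem=NEW_STEM):
    # The style name and {z}/{x}/{y} placeholders are unchanged.
    return '{}{}/{{z}}/{{x}}/{{y}}.png'.format(stem, style)
```

Under these assumptions, `tile_url('dark_all')` yields exactly the URL the updated assertions in the test_patch below expect.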
CartoDB/cartoframes
diff --git a/test/test_layer.py b/test/test_layer.py index e24117b4..c10e2133 100644 --- a/test/test_layer.py +++ b/test/test_layer.py @@ -69,32 +69,32 @@ class TestBaseMap(unittest.TestCase): # ensure correct BaseMap urls are created # See URLs here: https://carto.com/location-data-services/basemaps/ self.assertEqual(self.dark_map_all.url, - 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/' + 'https://{s}.basemaps.cartocdn.com/rastertiles/' 'dark_all/{z}/{x}/{y}.png') self.assertEqual(self.light_map_all.url, - 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/' + 'https://{s}.basemaps.cartocdn.com/rastertiles/' 'light_all/{z}/{x}/{y}.png') self.assertEqual(self.voyager_labels_under.url, - 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/' - 'rastertiles/voyager_labels_under/{z}/{x}/{y}.png') + 'https://{s}.basemaps.cartocdn.com/rastertiles/' + 'voyager_labels_under/{z}/{x}/{y}.png') self.assertEqual(self.dark_map_no_labels.url, - 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/' + 'https://{s}.basemaps.cartocdn.com/rastertiles/' 'dark_nolabels/{z}/{x}/{y}.png') self.assertEqual(self.light_map_no_labels.url, - 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/' + 'https://{s}.basemaps.cartocdn.com/rastertiles/' 'light_nolabels/{z}/{x}/{y}.png') self.assertEqual(self.voyager_map_no_labels.url, - 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/' - 'rastertiles/voyager_nolabels/{z}/{x}/{y}.png') + 'https://{s}.basemaps.cartocdn.com/rastertiles/' + 'voyager_nolabels/{z}/{x}/{y}.png') self.assertEqual(self.light_only_labels.url, - 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/' + 'https://{s}.basemaps.cartocdn.com/rastertiles/' 'light_only_labels/{z}/{x}/{y}.png') self.assertEqual(self.dark_only_labels.url, - 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/' + 'https://{s}.basemaps.cartocdn.com/rastertiles/' 'dark_only_labels/{z}/{x}/{y}.png') self.assertEqual(self.voyager_only_labels.url, - 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/' - 'rastertiles/voyager_only_labels/{z}/{x}/{y}.png') + 'https://{s}.basemaps.cartocdn.com/rastertiles/' + 'voyager_only_labels/{z}/{x}/{y}.png') # ensure self.is_basic() works as intended self.assertTrue(self.light_map_all.is_basic(), diff --git a/test/test_maps.py b/test/test_maps.py index 1e3819ce..429feac2 100644 --- a/test/test_maps.py +++ b/test/test_maps.py @@ -62,111 +62,110 @@ class TestMaps(unittest.TestCase): map_name = get_map_name(self.layers, has_zoom=False) self.assertEqual( - map_name, - 'cartoframes_ver20170406_layers2_time0_baseid1_labels0_zoom0') + map_name, + 'cartoframes_ver20170406_layers2_time0_baseid1_labels0_zoom0') self.assertEqual( - get_map_name(self.layers, has_zoom=True), - 'cartoframes_ver20170406_layers2_time0_baseid1_labels0_zoom1') + get_map_name(self.layers, has_zoom=True), + 'cartoframes_ver20170406_layers2_time0_baseid1_labels0_zoom1') self.assertEqual( - get_map_name(self.layers_w_time, has_zoom=False), - 'cartoframes_ver20170406_layers3_time1_baseid1_labels1_zoom0') + get_map_name(self.layers_w_time, has_zoom=False), + 'cartoframes_ver20170406_layers3_time1_baseid1_labels1_zoom0') def test_map_template(self): """maps.map_template_dict""" map_template = get_map_template(self.layers, has_zoom=False) - js = { - "placeholders": { - "north": { - "default": 45, - "type": "number" - }, - "cartocss_1": { - "default": ("#layer { " - "marker-fill: red; " - "marker-width: 5; " - "marker-allow-overlap: true; " - "marker-line-color: #000; " - "}"), - "type": "sql_ident" - }, - "cartocss_0": { - 
"default": ("#layer { " - "marker-fill: red; " - "marker-width: 5; " - "marker-allow-overlap: true; " - "marker-line-color: #000; }"), - "type": "sql_ident" - }, - "west": { - "default": -45, - "type": "number" - }, - "east": { - "default": 45, - "type": "number" - }, - "sql_0": { - "default": ("SELECT ST_PointFromText('POINT(0 0)', " - "4326) AS the_geom, 1 AS cartodb_id, " - "ST_PointFromText('Point(0 0)', 3857) AS " - "the_geom_webmercator"), - "type": "sql_ident" - }, - "sql_1": { - "default": ("SELECT ST_PointFromText('POINT(0 0)', " - "4326) AS the_geom, 1 AS cartodb_id, " - "ST_PointFromText('Point(0 0)', 3857) AS " - "the_geom_webmercator"), - "type": "sql_ident" - }, - "south": { - "default": -45, - "type": "number" + filledtemplate = { + "placeholders": { + "north": { + "default": 45, + "type": "number" + }, + "cartocss_1": { + "default": ("#layer { " + "marker-fill: red; " + "marker-width: 5; " + "marker-allow-overlap: true; " + "marker-line-color: #000; " + "}"), + "type": "sql_ident" + }, + "cartocss_0": { + "default": ("#layer { " + "marker-fill: red; " + "marker-width: 5; " + "marker-allow-overlap: true; " + "marker-line-color: #000; }"), + "type": "sql_ident" + }, + "west": { + "default": -45, + "type": "number" + }, + "east": { + "default": 45, + "type": "number" + }, + "sql_0": { + "default": ("SELECT ST_PointFromText('POINT(0 0)', " + "4326) AS the_geom, 1 AS cartodb_id, " + "ST_PointFromText('Point(0 0)', 3857) AS " + "the_geom_webmercator"), + "type": "sql_ident" + }, + "sql_1": { + "default": ("SELECT ST_PointFromText('POINT(0 0)', " + "4326) AS the_geom, 1 AS cartodb_id, " + "ST_PointFromText('Point(0 0)', 3857) AS " + "the_geom_webmercator"), + "type": "sql_ident" + }, + "south": { + "default": -45, + "type": "number" + } + }, + "version": "0.0.1", + "name": ("cartoframes_ver20170406_layers2_time0_baseid1_" + "labels0_zoom0"), + "layergroup": { + "layers": [ + { + "type": "http", + "options": { + "urlTemplate": ("https://{s}.basemaps." + "cartocdn.com/rastertiles" + "/dark_all/{z}/{x}/{y}." + "png"), + "subdomains": "abcd" } }, - "version": "0.0.1", - "name": ("cartoframes_ver20170406_layers2_time0_baseid1_" - "labels0_zoom0"), - "layergroup": { - "layers": [ - { - "type": "http", - "options": { - "urlTemplate": ("https://cartodb-basemaps-" - "{s}.global.ssl.fastly.net" - "/dark_all/{z}/{x}/{y}." - "png"), - "subdomains": "abcd" - } - }, - { - "type": "mapnik", - "options": { - "cartocss": "<%= cartocss_0 %>", - "sql": "<%= sql_0 %>", - "cartocss_version": "2.1.1" - } - }, - { - "type": "mapnik", - "options": { - "cartocss": "<%= cartocss_1 %>", - "sql": "<%= sql_1 %>", - "cartocss_version": "2.1.1" - } - } - ], - "version": "1.0.1" - }, - "view": { - "bounds": { - "west": "<%= west %>", - "east": "<%= east %>", - "north": "<%= north %>", - "south": "<%= south %>" - } + { + "type": "mapnik", + "options": { + "cartocss": "<%= cartocss_0 %>", + "sql": "<%= sql_0 %>", + "cartocss_version": "2.1.1" } + }, + { + "type": "mapnik", + "options": { + "cartocss": "<%= cartocss_1 %>", + "sql": "<%= sql_1 %>", + "cartocss_version": "2.1.1" + } + }], + "version": "1.0.1" + }, + "view": { + "bounds": { + "west": "<%= west %>", + "east": "<%= east %>", + "north": "<%= north %>", + "south": "<%= south %>" } + } + } map_template_dict = json.loads(map_template) - self.assertDictEqual(map_template_dict, js) + self.assertDictEqual(map_template_dict, filledtemplate)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "shapely", "coveralls" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 appdirs==1.4.4 attrs==22.2.0 Babel==2.11.0 backcall==0.2.0 carto==1.11.3 -e git+https://github.com/CartoDB/cartoframes.git@86069f44058986062a2af21ef1f6690864784596#egg=cartoframes certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 coveralls==3.3.1 decorator==5.1.1 docopt==0.6.2 docutils==0.18.1 future==1.0.0 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 ipython==7.16.3 ipython-genutils==0.2.0 jedi==0.17.2 Jinja2==3.0.3 MarkupSafe==2.0.1 numpy==1.19.5 packaging==21.3 pandas==1.1.5 parso==0.7.1 pexpect==4.9.0 pickleshare==0.7.5 pluggy==1.0.0 pockets==0.9.1 prompt-toolkit==3.0.36 ptyprocess==0.7.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pyrestcli==0.6.11 pytest==7.0.1 python-dateutil==2.9.0.post0 pytz==2025.2 requests==2.27.1 Shapely==1.8.5.post1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-napoleon==0.7 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 tqdm==4.64.1 traitlets==4.3.3 typing_extensions==4.1.1 urllib3==1.26.20 wcwidth==0.2.13 webcolors==1.7 zipp==3.6.0
name: cartoframes channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - appdirs==1.4.4 - attrs==22.2.0 - babel==2.11.0 - backcall==0.2.0 - carto==1.11.3 - charset-normalizer==2.0.12 - coverage==6.2 - coveralls==3.3.1 - decorator==5.1.1 - docopt==0.6.2 - docutils==0.18.1 - future==1.0.0 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - ipython==7.16.3 - ipython-genutils==0.2.0 - jedi==0.17.2 - jinja2==3.0.3 - markupsafe==2.0.1 - numpy==1.19.5 - packaging==21.3 - pandas==1.1.5 - parso==0.7.1 - pexpect==4.9.0 - pickleshare==0.7.5 - pluggy==1.0.0 - pockets==0.9.1 - prompt-toolkit==3.0.36 - ptyprocess==0.7.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pyrestcli==0.6.11 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - pytz==2025.2 - requests==2.27.1 - shapely==1.8.5.post1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-napoleon==0.7 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - tqdm==4.64.1 - traitlets==4.3.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - wcwidth==0.2.13 - webcolors==1.7 - zipp==3.6.0 prefix: /opt/conda/envs/cartoframes
[ "test/test_layer.py::TestBaseMap::test_basemap_source", "test/test_maps.py::TestMaps::test_map_template" ]
[]
[ "test/test_layer.py::TestAbstractLayer::test_class", "test/test_layer.py::TestLayer::test_layer_setup_dataframe", "test/test_layer.py::TestBaseMap::test_basemap_invalid", "test/test_layer.py::TestQueryLayer::test_querylayer_colors", "test/test_layer.py::TestQueryLayer::test_querylayer_get_cartocss", "test/test_layer.py::TestQueryLayer::test_querylayer_size_and_time", "test/test_layer.py::TestQueryLayer::test_querylayer_size_column_key", "test/test_layer.py::TestQueryLayer::test_querylayer_size_default", "test/test_layer.py::TestQueryLayer::test_querylayer_size_defaults", "test/test_layer.py::TestQueryLayer::test_querylayer_time_category", "test/test_layer.py::TestQueryLayer::test_querylayer_time_default", "test/test_layer.py::TestQueryLayer::test_querylayer_time_errors", "test/test_layer.py::TestQueryLayer::test_querylayer_time_numeric", "test/test_maps.py::TestMaps::test_get_map_name", "test/test_maps.py::TestMaps::test_has_time_layer", "test/test_maps.py::TestMaps::test_non_basemap_layers" ]
[]
BSD 3-Clause "New" or "Revised" License
2,151
[ "cartoframes/layer.py" ]
[ "cartoframes/layer.py" ]
pika__pika-955
9acd7684132d03581715518a72d027b70f3954ea
2018-02-13 20:37:37
7b6d7983db021ae4b84d08ea9cee4b8f960ada43
codecov[bot]: # [Codecov](https://codecov.io/gh/pika/pika/pull/955?src=pr&el=h1) Report > :exclamation: No coverage uploaded for pull request base (`master@9acd768`). [Click here to learn what that means](https://docs.codecov.io/docs/error-reference#section-missing-base-commit). > The diff coverage is `94.11%`. [![Impacted file tree graph](https://codecov.io/gh/pika/pika/pull/955/graphs/tree.svg?token=cJFWQg66l4&width=650&src=pr&height=150)](https://codecov.io/gh/pika/pika/pull/955?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #955 +/- ## ========================================= Coverage ? 82.35% ========================================= Files ? 19 Lines ? 3718 Branches ? 554 ========================================= Hits ? 3062 Misses ? 504 Partials ? 152 ``` | [Impacted Files](https://codecov.io/gh/pika/pika/pull/955?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [pika/adapters/blocking\_connection.py](https://codecov.io/gh/pika/pika/pull/955/diff?src=pr&el=tree#diff-cGlrYS9hZGFwdGVycy9ibG9ja2luZ19jb25uZWN0aW9uLnB5) | `84.96% <100%> (ø)` | | | [pika/channel.py](https://codecov.io/gh/pika/pika/pull/955/diff?src=pr&el=tree#diff-cGlrYS9jaGFubmVsLnB5) | `94.03% <81.81%> (ø)` | | ------ [Continue to review full report at Codecov](https://codecov.io/gh/pika/pika/pull/955?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/pika/pika/pull/955?src=pr&el=footer). Last update [9acd768...e54c68b](https://codecov.io/gh/pika/pika/pull/955?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). vitaly-krugl: @michaelklishin @lukebakken Please don't merge - I am going to provide feedback shortly lukebakken: My two cents - > I think that supporting the old argument is counter-productive It is a `1.0.0` release so not supporting the old argument is to be expected, I think. > Not to be disrespectful, but I question the value of this proposition. I haven't experienced confusion over the use of no-ack, which is at least consistent with the AMQP spec, and haven't encountered user confusion over it (nothing that really stood out, anyway), having thoroughly scrubbed pika issues a couple of years ago If I were a new user of this library, I would have to look up what `no_ack` or `auto_ack` means anyway. Maybe I could guess what the latter means from its name, but I would have to be familiar with how acknowledgements work with AMQP to begin with. > Is no_ack the only significant way that Pika args differ from the other AMQP clients under the RabbitMQ umbrella? Probably (?) I did a quick comparison for `basic.consume`, `basic.get` and `queue.declare` between Pika and the .NET client. vitaly-krugl: Okay, I would still prefer not supporting the old arg since we're moving to 1.0 and introduced other incompatibilities that will [break existing usage](https://github.com/pika/pika/pull/955#discussion_r168062473) of those same API methods. But feel free to override my recommendation so that we can move forward. Let's at least: 1. Check to make sure that nothing unexpected is in kwargs - [see my code example](https://github.com/pika/pika/pull/955#discussion_r168064170). 2. Use a shared function to process the kwargs, if practical. 3. Emit the deprecation and incompatibility warnings that @michaelklishin and I discussed. michaelklishin: FTR, @lukebakken convinced me to drop support for `no_ack`. 
Oh the thousands of code examples on the Web that will be copied only to immediately fail… vitaly-krugl: > Oh the thousands of code examples on the Web that will be copied only to immediately fail… School of hard knocks of life :) shinji-s: Would `assert 'no_ack' not in kwargs, "'no_ack' is obsolete. Use 'auto_ack' instead." ` be too much hand-holding? kmonson: > Would assert 'no_ack' not in kwargs, "'no_ack' is obsolete. Use 'auto_ack' instead." be too much hand-holding? Personally I love it when library developers do this kind of hand holding. It saves me time and saves them user help requests. lukebakken: I'm glad we received feedback from a couple users - thanks @kmonson and @shinji-s If we `assert` on `no_ack`, then we ought to do the same for other parameters that have changed. vitaly-krugl: Okay to merge after addressing feedback about redundant test and AMQP no-ack correlation in `auto_ack` arg documentation
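The guard shinji-s proposes in the discussion above could look like the following sketch; note that the maintainers ultimately chose to drop `no_ack` outright rather than ship such a check, so `reject_legacy_no_ack` is purely hypothetical:

```python
# Hypothetical guard in the spirit of the assert proposed above; pika
# itself dropped `no_ack` without shipping this kind of hand-holding.
def reject_legacy_no_ack(kwargs):
    if 'no_ack' in kwargs:
        raise TypeError(
            "'no_ack' was renamed to 'auto_ack' in pika 1.0; "
            "pass auto_ack instead.")
```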
diff --git a/.gitignore b/.gitignore index 9fdbeeb..d011fbd 100644 --- a/.gitignore +++ b/.gitignore @@ -16,5 +16,5 @@ build dist docs/_build *.conf.in -venvs/ +venv*/ env/ diff --git a/docs/examples/direct_reply_to.rst b/docs/examples/direct_reply_to.rst index 74072cf..ff552d3 100644 --- a/docs/examples/direct_reply_to.rst +++ b/docs/examples/direct_reply_to.rst @@ -46,12 +46,12 @@ direct_reply_to.py:: # Also, client must create the consumer *before* starting to publish the # RPC requests. # - # Client must create its consumer with no_ack=True, because the reply-to + # Client must create its consumer with auto_ack=True, because the reply-to # queue isn't real. channel.basic_consume('amq.rabbitmq.reply-to', on_client_rx_reply_from_server, - no_ack=True) + auto_ack=True) channel.basic_publish( exchange='', routing_key=SERVER_QUEUE, diff --git a/docs/examples/twisted_example.rst b/docs/examples/twisted_example.rst index b73650f..44251a8 100644 --- a/docs/examples/twisted_example.rst +++ b/docs/examples/twisted_example.rst @@ -23,7 +23,7 @@ Example of writing a consumer using the :py:class:`Twisted connection adapter <p yield channel.basic_qos(prefetch_count=1) - queue_object, consumer_tag = yield channel.basic_consume('hello', no_ack=False) + queue_object, consumer_tag = yield channel.basic_consume('hello', auto_ack=False) l = task.LoopingCall(read, queue_object) diff --git a/examples/consumer_queued.py b/examples/consumer_queued.py index 22c8d57..3afe192 100644 --- a/examples/consumer_queued.py +++ b/examples/consumer_queued.py @@ -53,7 +53,7 @@ def callback(ch, method, properties, body): process_buffer() -consumer_channel.basic_consume(queue, callback, no_ack=True) +consumer_channel.basic_consume(queue, callback, auto_ack=True) try: consumer_channel.start_consuming() diff --git a/examples/consumer_simple.py b/examples/consumer_simple.py index 863cbf4..f9aa780 100644 --- a/examples/consumer_simple.py +++ b/examples/consumer_simple.py @@ -44,7 +44,7 @@ def callback(ch, method, properties, body): import logging logging.basicConfig(level=logging.INFO) -consumer_channel.basic_consume(queue, callback, no_ack=True) +consumer_channel.basic_consume(queue, callback, auto_ack=True) try: consumer_channel.start_consuming() diff --git a/examples/direct_reply_to.py b/examples/direct_reply_to.py index 4f52c41..4fd202e 100644 --- a/examples/direct_reply_to.py +++ b/examples/direct_reply_to.py @@ -41,12 +41,12 @@ def main(): # Also, client must create the consumer *before* starting to publish the # RPC requests. # - # Client must create its consumer with no_ack=True, because the reply-to + # Client must create its consumer with auto_ack=True, because the reply-to # queue isn't real. 
channel.basic_consume('amq.rabbitmq.reply-to', on_client_rx_reply_from_server, - no_ack=True) + auto_ack=True) channel.basic_publish( exchange='', routing_key=SERVER_QUEUE, diff --git a/examples/twisted_service.py b/examples/twisted_service.py index 5d9afc1..ced89a0 100644 --- a/examples/twisted_service.py +++ b/examples/twisted_service.py @@ -94,7 +94,7 @@ class PikaProtocol(twisted_connection.TwistedProtocolConnection): self.channel.queue_declare(queue=routing_key, durable=True) - (queue, consumer_tag,) = yield self.channel.basic_consume(queue=routing_key, no_ack=False) + (queue, consumer_tag,) = yield self.channel.basic_consume(queue=routing_key, auto_ack=False) d = queue.get() d.addCallback(self._read_item, queue, callback) d.addErrback(self._read_item_err) diff --git a/pika/adapters/blocking_connection.py b/pika/adapters/blocking_connection.py index d021e94..8649d2e 100644 --- a/pika/adapters/blocking_connection.py +++ b/pika/adapters/blocking_connection.py @@ -955,7 +955,7 @@ class ReturnedMessage(object): class _ConsumerInfo(object): """Information about an active consumer""" - __slots__ = ('consumer_tag', 'no_ack', 'on_message_callback', + __slots__ = ('consumer_tag', 'auto_ack', 'on_message_callback', 'alternate_event_sink', 'state') # Consumer states @@ -964,13 +964,13 @@ class _ConsumerInfo(object): TEARING_DOWN = 3 CANCELLED_BY_BROKER = 4 - def __init__(self, consumer_tag, no_ack, on_message_callback=None, + def __init__(self, consumer_tag, auto_ack, on_message_callback=None, alternate_event_sink=None): """ NOTE: exactly one of callback/alternate_event_sink musts be non-None. :param str consumer_tag: - :param bool no_ack: the no-ack value for the consumer + :param bool auto_ack: the no-ack value for the consumer :param callable on_message_callback: The function for dispatching messages to user, having the signature: on_message_callback(channel, method, properties, body) @@ -988,7 +988,7 @@ class _ConsumerInfo(object): 'exactly one of on_message_callback/alternate_event_sink must be non-None', on_message_callback, alternate_event_sink) self.consumer_tag = consumer_tag - self.no_ack = no_ack + self.auto_ack = auto_ack self.on_message_callback = on_message_callback self.alternate_event_sink = alternate_event_sink self.state = self.SETTING_UP @@ -1020,7 +1020,7 @@ class _QueueConsumerGeneratorInfo(object): def __init__(self, params, consumer_tag): """ - :params tuple params: a three-tuple (queue, no_ack, exclusive) that were + :params tuple params: a three-tuple (queue, auto_ack, exclusive) that were used to create the queue consumer :param str consumer_tag: consumer tag """ @@ -1502,7 +1502,7 @@ class BlockingChannel(object): def basic_consume(self, queue, on_message_callback, - no_ack=False, + auto_ack=False, exclusive=False, consumer_tag=None, arguments=None): @@ -1528,8 +1528,10 @@ class BlockingChannel(object): method: spec.Basic.Deliver properties: spec.BasicProperties body: str or unicode - :param bool no_ack: Tell the broker to not expect a response (i.e., - no ack/nack) + :param bool auto_ack: if set to True, automatic acknowledgement mode will be used + (see http://www.rabbitmq.com/confirms.html). 
This corresponds + with the 'no_ack' parameter in the basic.consume AMQP 0.9.1 + method :param bool exclusive: Don't allow other consumers on the queue :param consumer_tag: You may specify your own consumer tag; if left empty, a consumer tag will be generated automatically @@ -1549,14 +1551,14 @@ class BlockingChannel(object): return self._basic_consume_impl( queue=queue, on_message_callback=on_message_callback, - no_ack=no_ack, + auto_ack=auto_ack, exclusive=exclusive, consumer_tag=consumer_tag, arguments=arguments) def _basic_consume_impl(self, queue, - no_ack, + auto_ack, exclusive, consumer_tag, arguments=None, @@ -1599,7 +1601,7 @@ class BlockingChannel(object): # Create new consumer self._consumer_infos[consumer_tag] = _ConsumerInfo( consumer_tag, - no_ack=no_ack, + auto_ack=auto_ack, on_message_callback=on_message_callback, alternate_event_sink=alternate_event_sink) @@ -1608,7 +1610,7 @@ class BlockingChannel(object): tag = self._impl.basic_consume( on_message_callback=self._on_consumer_message_delivery, queue=queue, - no_ack=no_ack, + auto_ack=auto_ack, exclusive=exclusive, consumer_tag=consumer_tag, arguments=arguments) @@ -1639,18 +1641,18 @@ class BlockingChannel(object): of messages in between sending the cancel method and receiving the cancel-ok reply. - NOTE: When cancelling a no_ack=False consumer, this implementation + NOTE: When cancelling an auto_ack=False consumer, this implementation automatically Nacks and suppresses any incoming messages that have not yet been dispatched to the consumer's callback. However, when cancelling - a no_ack=True consumer, this method will return any pending messages + a auto_ack=True consumer, this method will return any pending messages that arrived before broker confirmed the cancellation. :param str consumer_tag: Identifier for the consumer; the result of passing a consumer_tag that was created on another channel is undefined (bad things will happen) - :returns: (NEW IN pika 0.10.0) empty sequence for a no_ack=False - consumer; for a no_ack=True consumer, returns a (possibly empty) + :returns: (NEW IN pika 0.10.0) empty sequence for a auto_ack=False + consumer; for a auto_ack=True consumer, returns a (possibly empty) sequence of pending messages that arrived before broker confirmed the cancellation (this is done instead of via consumer's callback in order to prevent reentrancy/recursion. 
Each message is four-tuple: @@ -1678,13 +1680,13 @@ class BlockingChannel(object): assert (consumer_info.cancelled_by_broker or consumer_tag in self._impl._consumers), consumer_tag - no_ack = consumer_info.no_ack + auto_ack = consumer_info.auto_ack consumer_info.state = _ConsumerInfo.TEARING_DOWN with _CallbackResult() as cancel_ok_result: - # Nack pending messages for no_ack=False consumer - if not no_ack: + # Nack pending messages for auto_ack=False consumer + if not auto_ack: pending_messages = self._remove_pending_deliveries( consumer_tag) if pending_messages: @@ -1698,7 +1700,7 @@ class BlockingChannel(object): requeue=True) # Cancel the consumer; impl takes care of rejecting any - # additional deliveries that arrive for a no_ack=False + # additional deliveries that arrive for a auto_ack=False # consumer self._impl.basic_cancel( consumer_tag=consumer_tag, @@ -1710,8 +1712,8 @@ class BlockingChannel(object): cancel_ok_result.is_ready, lambda: consumer_tag not in self._impl._consumers) - if no_ack: - # Return pending messages for no_ack=True consumer + if auto_ack: + # Return pending messages for auto_ack=True consumer return [ (evt.method, evt.properties, evt.body) for evt in self._remove_pending_deliveries(consumer_tag)] @@ -1795,7 +1797,7 @@ class BlockingChannel(object): else: self._cancel_all_consumers() - def consume(self, queue, no_ack=False, + def consume(self, queue, auto_ack=False, exclusive=False, arguments=None, inactivity_timeout=None): """Blocking consumption of a queue instead of via a callback. This @@ -1813,13 +1815,13 @@ class BlockingChannel(object): generator loop. If you don't cancel this consumer, then next call on the same channel - to `consume()` with the exact same (queue, no_ack, exclusive) parameters + to `consume()` with the exact same (queue, auto_ack, exclusive) parameters will resume the existing consumer generator; however, calling with different parameters will result in an exception. :param queue: The queue name to consume :type queue: str or unicode - :param bool no_ack: Tell the broker to not expect a ack/nack response + :param bool auto_ack: Tell the broker to not expect a ack/nack response :param bool exclusive: Don't allow other consumers on the queue :param dict arguments: Custom key/value pair arguments for the consumer :param float inactivity_timeout: if a number is given (in @@ -1837,7 +1839,7 @@ class BlockingChannel(object): of the existing queue consumer generator, if any. NEW in pika 0.10.0 """ - params = (queue, no_ack, exclusive) + params = (queue, auto_ack, exclusive) if self._queue_consumer_generator is not None: if params != self._queue_consumer_generator.params: @@ -1846,7 +1848,7 @@ class BlockingChannel(object): 'queue consumer generator; previous params: %r; ' 'new params: %r' % (self._queue_consumer_generator.params, - (queue, no_ack, exclusive))) + (queue, auto_ack, exclusive))) else: LOGGER.debug('Creating new queue consumer generator; params: %r', params) @@ -1862,7 +1864,7 @@ class BlockingChannel(object): try: self._basic_consume_impl( queue=queue, - no_ack=no_ack, + auto_ack=auto_ack, exclusive=exclusive, consumer_tag=consumer_tag, arguments=arguments, @@ -1948,8 +1950,8 @@ class BlockingChannel(object): return 0 try: - _, no_ack, _ = self._queue_consumer_generator.params - if not no_ack: + _, auto_ack, _ = self._queue_consumer_generator.params + if not auto_ack: # Reject messages held by queue consumer generator; NOTE: we # can't use basic_nack with the multiple option to avoid nacking # messages already held by our client. 
@@ -1965,7 +1967,7 @@ class BlockingChannel(object): # Return 0 for compatibility with legacy implementation; the number of # nacked messages is not meaningful since only messages consumed with - # no_ack=False may be nacked, and those arriving after calling + # auto_ack=False may be nacked, and those arriving after calling # basic_cancel will be rejected automatically by impl channel, so we'll # never know how many of those were nacked. return 0 @@ -2013,13 +2015,13 @@ class BlockingChannel(object): requeue=requeue) self._flush_output() - def basic_get(self, queue, no_ack=False): + def basic_get(self, queue, auto_ack=False): """Get a single message from the AMQP broker. Returns a sequence with the method frame, message properties, and body. :param queue: Name of queue from which to get a message :type queue: str or unicode - :param bool no_ack: Tell the broker to not expect a reply + :param bool auto_ack: Tell the broker to not expect a reply :returns: a three-tuple; (None, None, None) if the queue was empty; otherwise (method, properties, body); NOTE: body may be None :rtype: (None, None, None)|(spec.Basic.GetOk, @@ -2027,11 +2029,12 @@ class BlockingChannel(object): str or unicode or None) """ assert not self._basic_getempty_result + # NOTE: nested with for python 2.6 compatibility with _CallbackResult(self._RxMessageArgs) as get_ok_result: with self._basic_getempty_result: self._impl.basic_get(queue=queue, - no_ack=no_ack, + auto_ack=auto_ack, callback=get_ok_result.set_value_once) self._flush_output(get_ok_result.is_ready, self._basic_getempty_result.is_ready) diff --git a/pika/channel.py b/pika/channel.py index 5299d15..335a1b9 100644 --- a/pika/channel.py +++ b/pika/channel.py @@ -263,7 +263,7 @@ class Channel(object): def basic_consume(self, queue, on_message_callback, - no_ack=False, + auto_ack=False, exclusive=False, consumer_tag=None, arguments=None, @@ -287,8 +287,10 @@ class Channel(object): method: pika.spec.Basic.Deliver properties: pika.spec.BasicProperties body: str, unicode, or bytes (python 3.x) - :param bool no_ack: if set to True, automatic acknowledgement mode will be used - (see http://www.rabbitmq.com/confirms.html) + :param bool auto_ack: if set to True, automatic acknowledgement mode will be used + (see http://www.rabbitmq.com/confirms.html). This corresponds + with the 'no_ack' parameter in the basic.consume AMQP 0.9.1 + method :param bool exclusive: Don't allow other consumers on the queue :param consumer_tag: Specify your own consumer tag :type consumer_tag: str or unicode @@ -310,7 +312,7 @@ class Channel(object): if consumer_tag in self._consumers or consumer_tag in self._cancelled: raise exceptions.DuplicateConsumerTag(consumer_tag) - if no_ack: + if auto_ack: self._consumers_with_noack.add(consumer_tag) self._consumers[consumer_tag] = on_message_callback @@ -319,11 +321,11 @@ class Channel(object): self._rpc(spec.Basic.Consume(queue=queue, consumer_tag=consumer_tag, - no_ack=no_ack, + no_ack=auto_ack, exclusive=exclusive, arguments=arguments or dict()), rpc_callback, [(spec.Basic.ConsumeOk, - {'consumer_tag': consumer_tag})]) + {'consumer_tag': consumer_tag})]) return consumer_tag @@ -339,7 +341,7 @@ class Channel(object): return 'ctag%i.%s' % (self.channel_number, uuid.uuid4().hex) - def basic_get(self, queue, callback, no_ack=False): + def basic_get(self, queue, callback, auto_ack=False): """Get a single message from the AMQP broker. 
If you want to be notified of Basic.GetEmpty, use the Channel.add_callback method adding your Basic.GetEmpty callback which should expect only one @@ -359,7 +361,7 @@ class Channel(object): method: pika.spec.Basic.GetOk properties: pika.spec.BasicProperties body: str, unicode, or bytes (python 3.x) - :param bool no_ack: Tell the broker to not expect a reply + :param bool auto_ack: Tell the broker to not expect a reply :raises ValueError: """ @@ -367,11 +369,12 @@ class Channel(object): if self._on_getok_callback is not None: raise exceptions.DuplicateGetOkCallback() self._on_getok_callback = callback + # pylint: disable=W0511 # TODO Strangely, not using _rpc for the synchronous Basic.Get. Would # need to extend _rpc to handle Basic.GetOk method, header, and body # frames (or similar) - self._send_method(spec.Basic.Get(queue=queue, no_ack=no_ack)) + self._send_method(spec.Basic.Get(queue=queue, no_ack=auto_ack)) def basic_nack(self, delivery_tag=None, multiple=False, requeue=True): """This method allows a client to reject one or more incoming messages. @@ -1421,6 +1424,12 @@ class Channel(object): 'Completion callback must be callable if not None') def _validate_zero_or_greater(self, name, value): + """Verify that value is zero or greater. If not, 'name' + will be used in error message + + :raises: ValueError + + """ if int(value) < 0: errmsg = '{} must be >= 0, but got {}'.format(name, value) raise ValueError(errmsg)
Rename the "no-ack" option of basic_consume to something clearer Per discussion with @lukebakken and https://github.com/pika/pika/pull/948#discussion_r167042045. “no-ack” is really confusing (yes, an unfortunate naming choice in the protocol) and should at least be “no manual ack”, which is the same as “auto-ack”. In some other clients (Bunny, Java, .NET) we renamed it to “manual ack” but "auto-ack" can work just as well, it only depends on what's the default used by a particular client. I think changing the default is probably not worth the upgrade pain.
pika/pika
diff --git a/tests/acceptance/async_adapter_tests.py b/tests/acceptance/async_adapter_tests.py index 81a8c3c..70220a8 100644 --- a/tests/acceptance/async_adapter_tests.py +++ b/tests/acceptance/async_adapter_tests.py @@ -94,7 +94,7 @@ class TestConsumeCancel(AsyncTestCase, AsyncAdapters): self.channel.basic_publish('', self.queue_name, msg_body) self.ctag = self.channel.basic_consume(self.queue_name, self.on_message, - no_ack=True) + auto_ack=True) def on_message(self, _channel, _frame, _header, body): self.channel.basic_cancel(self.ctag, callback=self.on_cancel) diff --git a/tests/acceptance/blocking_adapter_test.py b/tests/acceptance/blocking_adapter_test.py index 7628f89..c6e3125 100644 --- a/tests/acceptance/blocking_adapter_test.py +++ b/tests/acceptance/blocking_adapter_test.py @@ -208,8 +208,8 @@ class TestCreateAndCloseConnectionWithChannelAndConsumer(BlockingTestCaseBase): # Publish the message to the queue by way of default exchange ch.publish(exchange='', routing_key=q_name, body=body1) - # Create a non-ackable consumer - ch.basic_consume(q_name, lambda *x: None, no_ack=True, + # Create a consumer that uses automatic ack mode + ch.basic_consume(q_name, lambda *x: None, auto_ack=True, exclusive=False, arguments=None) connection.close() @@ -804,7 +804,7 @@ class TestBasicGet(BlockingTestCaseBase): LOGGER.info('%s DECLARED QUEUE (%s)', datetime.utcnow(), self) # Verify result of getting a message from an empty queue - msg = ch.basic_get(q_name, no_ack=False) + msg = ch.basic_get(q_name, auto_ack=False) self.assertTupleEqual(msg, (None, None, None)) LOGGER.info('%s GOT FROM EMPTY QUEUE (%s)', datetime.utcnow(), self) @@ -816,7 +816,7 @@ class TestBasicGet(BlockingTestCaseBase): LOGGER.info('%s PUBLISHED (%s)', datetime.utcnow(), self) # Get the message - (method, properties, body) = ch.basic_get(q_name, no_ack=False) + (method, properties, body) = ch.basic_get(q_name, auto_ack=False) LOGGER.info('%s GOT FROM NON-EMPTY QUEUE (%s)', datetime.utcnow(), self) self.assertIsInstance(method, pika.spec.Basic.GetOk) self.assertEqual(method.delivery_tag, 1) @@ -867,10 +867,10 @@ class TestBasicReject(BlockingTestCaseBase): mandatory=True) # Get the messages - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicReject1')) - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicReject2')) # Nack the second message @@ -881,7 +881,7 @@ class TestBasicReject(BlockingTestCaseBase): self._assert_exact_message_count_with_retries(channel=ch, queue=q_name, expected_count=1) - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicReject2')) @@ -913,11 +913,11 @@ class TestBasicRejectNoRequeue(BlockingTestCaseBase): mandatory=True) # Get the messages - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicRejectNoRequeue1')) - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicRejectNoRequeue2')) @@ -958,10 +958,10 @@ class TestBasicNack(BlockingTestCaseBase): mandatory=True) # Get the messages - (rx_method, _, rx_body) = 
ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicNack1')) - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicNack2')) # Nack the second message @@ -972,7 +972,7 @@ class TestBasicNack(BlockingTestCaseBase): self._assert_exact_message_count_with_retries(channel=ch, queue=q_name, expected_count=1) - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicNack2')) @@ -1004,11 +1004,11 @@ class TestBasicNackNoRequeue(BlockingTestCaseBase): mandatory=True) # Get the messages - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicNackNoRequeue1')) - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicNackNoRequeue2')) @@ -1049,11 +1049,11 @@ class TestBasicNackMultiple(BlockingTestCaseBase): mandatory=True) # Get the messages - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicNackMultiple1')) - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicNackMultiple2')) @@ -1064,10 +1064,10 @@ class TestBasicNackMultiple(BlockingTestCaseBase): self._assert_exact_message_count_with_retries(channel=ch, queue=q_name, expected_count=2) - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicNackMultiple1')) - (rx_method, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (rx_method, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestBasicNackMultiple2')) @@ -1106,7 +1106,7 @@ class TestBasicRecoverWithRequeue(BlockingTestCaseBase): rx_messages = [] num_messages = 0 - for msg in ch.consume(q_name, no_ack=False): + for msg in ch.consume(q_name, auto_ack=False): num_messages += 1 if num_messages == 2: @@ -1164,7 +1164,7 @@ class TestTxCommit(BlockingTestCaseBase): frame = ch.queue_declare(q_name, passive=True) self.assertEqual(frame.method.message_count, 1) - (_, _, rx_body) = ch.basic_get(q_name, no_ack=False) + (_, _, rx_body) = ch.basic_get(q_name, auto_ack=False) self.assertEqual(rx_body, as_bytes('TestTxCommit1')) @@ -1567,7 +1567,7 @@ class TestPublishAndConsumeWithPubacksAndQosOfOne(BlockingTestCaseBase): consumer_tag = ch.basic_consume( q_name, lambda *args: rx_messages.append(args), - no_ack=False, + auto_ack=False, exclusive=False, arguments=None) @@ -1695,7 +1695,7 @@ class TestTwoBasicConsumersOnSameChannel(BlockingTestCaseBase): q1_consumer_tag = ch.basic_consume( q1_name, lambda *args: q1_rx_messages.append(args), - no_ack=False, + auto_ack=False, exclusive=False, arguments=None) @@ -1703,7 +1703,7 @@ class TestTwoBasicConsumersOnSameChannel(BlockingTestCaseBase): q2_consumer_tag = ch.basic_consume( q2_name, lambda *args: q2_rx_messages.append(args), - no_ack=False, + auto_ack=False, exclusive=False, arguments=None) @@ -1852,7 +1852,7 @@ class 
TestBasicPublishWithoutPubacks(BlockingTestCaseBase): consumer_tag = ch.basic_consume( q_name, lambda *args: rx_messages.append(args), - no_ack=False, + auto_ack=False, exclusive=False, arguments=None) @@ -1959,12 +1959,12 @@ class TestPublishFromBasicConsumeCallback(BlockingTestCaseBase): ch.basic_consume(src_q_name, on_consume, - no_ack=False, + auto_ack=False, exclusive=False, arguments=None) # Consume from destination queue - for _, _, rx_body in ch.consume(dest_q_name, no_ack=True): + for _, _, rx_body in ch.consume(dest_q_name, auto_ack=True): self.assertEqual(rx_body, as_bytes('via-publish')) break else: @@ -2009,7 +2009,7 @@ class TestStopConsumingFromBasicConsumeCallback(BlockingTestCaseBase): ch.basic_consume(q_name, on_consume, - no_ack=False, + auto_ack=False, exclusive=False, arguments=None) @@ -2064,7 +2064,7 @@ class TestCloseChannelFromBasicConsumeCallback(BlockingTestCaseBase): ch.basic_consume(q_name, on_consume, - no_ack=False, + auto_ack=False, exclusive=False, arguments=None) @@ -2118,7 +2118,7 @@ class TestCloseConnectionFromBasicConsumeCallback(BlockingTestCaseBase): ch.basic_consume(q_name, on_consume, - no_ack=False, + auto_ack=False, exclusive=False, arguments=None) @@ -2156,7 +2156,7 @@ class TestNonPubAckPublishAndConsumeHugeMessage(BlockingTestCaseBase): LOGGER.info('Published message body size=%s', len(body)) # Consume the message - for rx_method, rx_props, rx_body in ch.consume(q_name, no_ack=False, + for rx_method, rx_props, rx_body in ch.consume(q_name, auto_ack=False, exclusive=False, arguments=None): self.assertIsInstance(rx_method, pika.spec.Basic.Deliver) @@ -2209,7 +2209,7 @@ class TestNonPubackPublishAndConsumeManyMessages(BlockingTestCaseBase): # Consume the messages num_consumed = 0 for rx_method, rx_props, rx_body in ch.consume(q_name, - no_ack=False, + auto_ack=False, exclusive=False, arguments=None): num_consumed += 1 @@ -2269,8 +2269,8 @@ class TestBasicCancelWithNonAckableConsumer(BlockingTestCaseBase): queue=q_name, expected_count=2) - # Create a non-ackable consumer - consumer_tag = ch.basic_consume(q_name, lambda *x: None, no_ack=True, + # Create a consumer that uses automatic ack mode + consumer_tag = ch.basic_consume(q_name, lambda *x: None, auto_ack=True, exclusive=False, arguments=None) # Wait for all messages to be sent by broker to client @@ -2327,7 +2327,7 @@ class TestBasicCancelWithAckableConsumer(BlockingTestCaseBase): expected_count=2) # Create an ackable consumer - consumer_tag = ch.basic_consume(q_name, lambda *x: None, no_ack=False, + consumer_tag = ch.basic_consume(q_name, lambda *x: None, auto_ack=False, exclusive=False, arguments=None) # Wait for all messages to be sent by broker to client @@ -2376,7 +2376,7 @@ class TestUnackedMessageAutoRestoredToQueueOnChannelClose(BlockingTestCaseBase): # Consume the events, but don't ack rx_messages = [] ch.basic_consume(q_name, lambda *args: rx_messages.append(args), - no_ack=False, exclusive=False, arguments=None) + auto_ack=False, exclusive=False, arguments=None) while len(rx_messages) != 2: connection.process_data_events(time_limit=None) @@ -2422,7 +2422,7 @@ class TestNoAckMessageNotRestoredToQueueOnChannelClose(BlockingTestCaseBase): # Consume, but don't ack num_messages = 0 - for rx_method, _, _ in ch.consume(q_name, no_ack=True, exclusive=False): + for rx_method, _, _ in ch.consume(q_name, auto_ack=True, exclusive=False): num_messages += 1 self.assertEqual(rx_method.delivery_tag, num_messages) diff --git a/tests/unit/channel_tests.py b/tests/unit/channel_tests.py index 
67e0325..0ba24cf 100644 --- a/tests/unit/channel_tests.py +++ b/tests/unit/channel_tests.py @@ -330,7 +330,7 @@ class ChannelTests(unittest.TestCase): expectation = spec.Basic.Consume( queue='test-queue', consumer_tag=consumer_tag, - no_ack=False, + auto_ack=False, exclusive=False) rpc.assert_called_once_with(expectation, self.obj._on_eventok, [(spec.Basic.ConsumeOk, { @@ -350,7 +350,7 @@ class ChannelTests(unittest.TestCase): expectation = spec.Basic.Consume( queue='test-queue', consumer_tag=consumer_tag, - no_ack=False, + auto_ack=False, exclusive=False) rpc.assert_called_once_with(expectation, mock_callback, [(spec.Basic.ConsumeOk, { @@ -378,7 +378,16 @@ class ChannelTests(unittest.TestCase): mock_callback = mock.Mock() self.obj.basic_get('test-queue', mock_callback) send_method.assert_called_once_with( - spec.Basic.Get(queue='test-queue', no_ack=False)) + spec.Basic.Get(queue='test-queue', auto_ack=False)) + + @mock.patch('pika.spec.Basic.Get') + @mock.patch('pika.channel.Channel._send_method') + def test_basic_get_send_method_called_auto_ack(self, send_method, _unused): + self.obj._set_state(self.obj.OPEN) + mock_callback = mock.Mock() + self.obj.basic_get('test-queue', mock_callback, auto_ack=True) + send_method.assert_called_once_with( + spec.Basic.Get(queue='test-queue', auto_ack=True)) def test_basic_nack_raises_channel_closed(self): self.assertRaises(exceptions.ChannelClosed, self.obj.basic_nack, 0,
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 9 }
0.11
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": [ "test-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 charset-normalizer==2.0.12 codecov==2.1.13 coverage==6.2 idna==3.10 importlib-metadata==4.8.3 iniconfig==1.1.1 mock==5.2.0 nose==1.3.7 packaging==21.3 -e git+https://github.com/pika/pika.git@9acd7684132d03581715518a72d027b70f3954ea#egg=pika pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 requests==2.27.1 tomli==1.2.3 tornado==6.1 Twisted==15.3.0 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0 zope.interface==5.5.2
name: pika channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - charset-normalizer==2.0.12 - codecov==2.1.13 - coverage==6.2 - idna==3.10 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - mock==5.2.0 - nose==1.3.7 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - requests==2.27.1 - tomli==1.2.3 - tornado==6.1 - twisted==15.3.0 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 - zope-interface==5.5.2 prefix: /opt/conda/envs/pika
[ "tests/unit/channel_tests.py::ChannelTests::test_basic_get_send_method_called_auto_ack" ]
[ "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestMultiCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesOriginalException::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesSystemException::test", "tests/acceptance/blocking_adapter_test.py::TestLostConnectionResultsInIsClosedConnectionAndChannel::test", "tests/acceptance/blocking_adapter_test.py::TestInvalidExchangeTypeRaisesConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnectionWithChannelAndConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestSuddenBrokerDisconnectBeforeChannel::test", "tests/acceptance/blocking_adapter_test.py::TestNoAccessToFileDescriptorAfterConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionStart::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionTune::test", "tests/acceptance/blocking_adapter_test.py::TestProcessDataEvents::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionRegisterForBlockAndUnblock::test", "tests/acceptance/blocking_adapter_test.py::TestBlockedConnectionTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestAddTimeoutRemoveTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestRemoveTimeoutFromTimeoutCallback::test", "tests/acceptance/blocking_adapter_test.py::TestSleep::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionProperties::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseChannel::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeBindAndUnbind::test", "tests/acceptance/blocking_adapter_test.py::TestQueueDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestPassiveQueueDeclareOfUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestQueueBindAndUnbindAndPurge::test", "tests/acceptance/blocking_adapter_test.py::TestBasicGet::test", "tests/acceptance/blocking_adapter_test.py::TestBasicReject::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRejectNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNack::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackMultiple::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRecoverWithRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestTxCommit::test", "tests/acceptance/blocking_adapter_test.py::TestTxRollback::test", "tests/acceptance/blocking_adapter_test.py::TestBasicConsumeFromUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestPublishAndBasicPublishWithPubacksUnroutable::test", "tests/acceptance/blocking_adapter_test.py::TestConfirmDeliveryAfterUnroutableMessage::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessagesReturnedInNonPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessageReturnedInPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishDeliveredWhenPendingUnroutable::test", 
"tests/acceptance/blocking_adapter_test.py::TestPublishAndConsumeWithPubacksAndQosOfOne::test", "tests/acceptance/blocking_adapter_test.py::TestTwoBasicConsumersOnSameChannel::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelPurgesPendingConsumerCancellationEvt::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishWithoutPubacks::test", "tests/acceptance/blocking_adapter_test.py::TestPublishFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestStopConsumingFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseChannelFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseConnectionFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubAckPublishAndConsumeHugeMessage::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubackPublishAndConsumeManyMessages::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithNonAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestUnackedMessageAutoRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestNoAckMessageNotRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeInactivityTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestChannelFlow::test" ]
[ "tests/acceptance/blocking_adapter_test.py::TestConnectWithDownedBroker::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionProtocol::test", "tests/unit/channel_tests.py::ChannelTests::test_add_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_callback_multiple_replies", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_cancel_empty_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_basic_get_empty_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_close_added", "tests/unit/channel_tests.py::ChannelTests::test_add_callbacks_channel_flow_added", "tests/unit/channel_tests.py::ChannelTests::test_add_on_cancel_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_close_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_flow_callback", "tests/unit/channel_tests.py::ChannelTests::test_add_on_return_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_ack_calls_send_method", "tests/unit/channel_tests.py::ChannelTests::test_basic_ack_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_asynch", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_asynch_with_user_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_calls_validate", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_synch", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_synch_no_user_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_then_close", "tests/unit/channel_tests.py::ChannelTests::test_basic_cancel_unknown_consumer_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_calls_validate", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_cancelled_full", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_in_consumers", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_no_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumer_tag_with_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_callback_value", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_rpc_with_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_consumers_rpc_with_no_completion_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_consume_duplicate_consumer_tag_raises", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_calls_require_callback", "tests/unit/channel_tests.py::ChannelTests::test_basic_get_send_method_called", "tests/unit/channel_tests.py::ChannelTests::test_basic_nack_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_nack_send_method_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_publish_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_publish_send_method_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_invalid_prefetch_count_raises_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_invalid_prefetch_size_raises_error", "tests/unit/channel_tests.py::ChannelTests::test_basic_qos_raises_channel_closed", 
"tests/unit/channel_tests.py::ChannelTests::test_basic_qos_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_recover_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_recover_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_int_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_send_method_request_with_long_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_int_tag", "tests/unit/channel_tests.py::ChannelTests::test_basic_reject_spec_with_long_tag", "tests/unit/channel_tests.py::ChannelTests::test_channel_open_add_callbacks_called", "tests/unit/channel_tests.py::ChannelTests::test_cleanup", "tests/unit/channel_tests.py::ChannelTests::test_close_basic_cancel_called", "tests/unit/channel_tests.py::ChannelTests::test_close_in_closed_state_raises_channel_error_and_stays_closed", "tests/unit/channel_tests.py::ChannelTests::test_close_in_closing_state_raises_already_closing", "tests/unit/channel_tests.py::ChannelTests::test_close_in_open_state_transitions_to_closing", "tests/unit/channel_tests.py::ChannelTests::test_close_in_opening_state", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_async", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_ack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_basic_nack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_without_nowait_selectok", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_yes_basic_ack_callback", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_callback_yes_basic_nack_callback", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_no_callback_callback_call_count", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_confirms", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_raises_method_not_implemented_for_nack", "tests/unit/channel_tests.py::ChannelTests::test_confirm_delivery_with_bad_callback_raises_value_error", "tests/unit/channel_tests.py::ChannelTests::test_consumer_tags", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_bind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_declare_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_delete_rpc_request_nowait", 
"tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_exchange_unbind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_flow_off_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_flow_on_flowok_callback", "tests/unit/channel_tests.py::ChannelTests::test_flow_on_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_flow_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_flow_raises_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_deliver_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_get_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_basic_return_called", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_method_returns_none", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_header_frame", "tests/unit/channel_tests.py::ChannelTests::test_handle_content_frame_sets_method_frame", "tests/unit/channel_tests.py::ChannelTests::test_has_content_false", "tests/unit/channel_tests.py::ChannelTests::test_has_content_true", "tests/unit/channel_tests.py::ChannelTests::test_immediate_called_logger_warning", "tests/unit/channel_tests.py::ChannelTests::test_init_blocked", "tests/unit/channel_tests.py::ChannelTests::test_init_blocking", "tests/unit/channel_tests.py::ChannelTests::test_init_callbacks", "tests/unit/channel_tests.py::ChannelTests::test_init_cancelled", "tests/unit/channel_tests.py::ChannelTests::test_init_channel_number", "tests/unit/channel_tests.py::ChannelTests::test_init_connection", "tests/unit/channel_tests.py::ChannelTests::test_init_consumers", "tests/unit/channel_tests.py::ChannelTests::test_init_content_frame_assembler", "tests/unit/channel_tests.py::ChannelTests::test_init_flow", "tests/unit/channel_tests.py::ChannelTests::test_init_has_on_flow_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_invalid_channel_number", "tests/unit/channel_tests.py::ChannelTests::test_init_on_flowok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_on_getok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_on_openok_callback", "tests/unit/channel_tests.py::ChannelTests::test_init_state", "tests/unit/channel_tests.py::ChannelTests::test_is_closed_false", "tests/unit/channel_tests.py::ChannelTests::test_is_closed_true", "tests/unit/channel_tests.py::ChannelTests::test_is_closing_false", "tests/unit/channel_tests.py::ChannelTests::test_is_closing_true", "tests/unit/channel_tests.py::ChannelTests::test_on_cancel_not_appended_cancelled", "tests/unit/channel_tests.py::ChannelTests::test_on_cancel_removed_consumer", "tests/unit/channel_tests.py::ChannelTests::test_on_cancelok_removed_consumer", "tests/unit/channel_tests.py::ChannelTests::test_on_close_in_closing_state", "tests/unit/channel_tests.py::ChannelTests::test_on_close_in_open_state", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_closed_state_is_suppressed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_closing_state_transitions_to_closed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_open_state_transitions_to_closed", 
"tests/unit/channel_tests.py::ChannelTests::test_on_close_meta_in_opening_state_transitions_to_closed", "tests/unit/channel_tests.py::ChannelTests::test_on_close_warning", "tests/unit/channel_tests.py::ChannelTests::test_on_closeok", "tests/unit/channel_tests.py::ChannelTests::test_on_closeok_following_close_from_broker", "tests/unit/channel_tests.py::ChannelTests::test_on_confirm_selectok", "tests/unit/channel_tests.py::ChannelTests::test_on_deliver_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_eventok", "tests/unit/channel_tests.py::ChannelTests::test_on_flow", "tests/unit/channel_tests.py::ChannelTests::test_on_flow_with_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok_callback_reset", "tests/unit/channel_tests.py::ChannelTests::test_on_flowok_calls_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_getempty", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_callback_reset", "tests/unit/channel_tests.py::ChannelTests::test_on_getok_no_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_openok_callback_called", "tests/unit/channel_tests.py::ChannelTests::test_on_openok_no_callback", "tests/unit/channel_tests.py::ChannelTests::test_on_synchronous_complete", "tests/unit/channel_tests.py::ChannelTests::test_onreturn", "tests/unit/channel_tests.py::ChannelTests::test_onreturn_warning", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_bind_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_declare_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_delete_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_queue_purge_rpc_request_nowait", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_raises_value_error_on_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_queue_unbind_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_repr", "tests/unit/channel_tests.py::ChannelTests::test_rpc_adds_callback", "tests/unit/channel_tests.py::ChannelTests::test_rpc_enters_blocking_and_adds_on_synchronous_complete", "tests/unit/channel_tests.py::ChannelTests::test_rpc_not_blocking_and_no_on_synchronous_complete_when_no_replies", 
"tests/unit/channel_tests.py::ChannelTests::test_rpc_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_type_error_with_invalid_callback", "tests/unit/channel_tests.py::ChannelTests::test_rpc_throws_value_error_with_unacceptable_replies", "tests/unit/channel_tests.py::ChannelTests::test_rpc_while_blocking_appends_blocked_collection", "tests/unit/channel_tests.py::ChannelTests::test_send_method", "tests/unit/channel_tests.py::ChannelTests::test_set_state", "tests/unit/channel_tests.py::ChannelTests::test_tx_commit_raises_channel_closed", "tests/unit/channel_tests.py::ChannelTests::test_tx_commit_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_tx_rollback_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_tx_select_rpc_request", "tests/unit/channel_tests.py::ChannelTests::test_validate_callback_raises_value_error_not_callable", "tests/unit/channel_tests.py::ChannelTests::test_validate_channel_raises_channel_closed" ]
[]
BSD 3-Clause "New" or "Revised" License
2,154
[ "pika/adapters/blocking_connection.py", "docs/examples/direct_reply_to.rst", "examples/consumer_queued.py", "docs/examples/twisted_example.rst", ".gitignore", "examples/direct_reply_to.py", "pika/channel.py", "examples/twisted_service.py", "examples/consumer_simple.py" ]
[ "pika/adapters/blocking_connection.py", "docs/examples/direct_reply_to.rst", "examples/consumer_queued.py", "docs/examples/twisted_example.rst", ".gitignore", "examples/direct_reply_to.py", "pika/channel.py", "examples/twisted_service.py", "examples/consumer_simple.py" ]
ELIFE-ASU__Neet-93
fbc1f937c407974c04c847d44517fea5ea76176f
2018-02-13 20:59:01
fbc1f937c407974c04c847d44517fea5ea76176f
diff --git a/neet/boolean/logicnetwork.py b/neet/boolean/logicnetwork.py index 69f44fc..9f1123b 100644 --- a/neet/boolean/logicnetwork.py +++ b/neet/boolean/logicnetwork.py @@ -2,6 +2,7 @@ # Use of this source code is governed by a MIT # license that can be found in the LICENSE file. import re +from neet.python3 import * from neet.statespace import StateSpace from neet.exceptions import FormatError @@ -88,7 +89,7 @@ class LogicNetwork(object): raise ValueError("Invalid table format") conditions = set() for condition in row[1]: - conditions.add(''.join([str(int(s)) for s in condition])) + conditions.add(''.join([str(long(s)) for s in condition])) self.table.append((row[0], conditions)) if reduced: @@ -105,15 +106,15 @@ class LogicNetwork(object): self._encoded_table = [] for indices, conditions in self.table: # Encode the mask. - mask_code = 0 + mask_code = long(0) for idx in indices: mask_code += 2 ** idx # Low order, low index. # Encode each condition of truth table. encoded_sub_table = set() for condition in conditions: - encoded_condition = 0 + encoded_condition = long(0) for idx, state in zip(indices, condition): - encoded_condition += 2 ** idx if int(state) else 0 + encoded_condition += 2 ** idx if long(state) else 0 encoded_sub_table.add(encoded_condition) self._encoded_table.append((mask_code, encoded_sub_table)) @@ -134,7 +135,7 @@ class LogicNetwork(object): for state in sub_table[1]: # State excluding source. state_sans_source = state[:i] + state[i + 1:] - if int(state[i]) == 1: + if long(state[i]) == 1: counter[state_sans_source] = counter.get( state_sans_source, 0) + 1 else: diff --git a/neet/python3.py b/neet/python3.py new file mode 100644 index 0000000..b3b118d --- /dev/null +++ b/neet/python3.py @@ -0,0 +1,9 @@ +# Copyright 2018 ELIFE. All rights reserved. +# Use of this source code is governed by a MIT +# license that can be found in the LICENSE file. + +import sys +if sys.version_info > (3,): + long = int + unicode = str + diff --git a/neet/statespace.py b/neet/statespace.py index fc8230b..609c2c8 100644 --- a/neet/statespace.py +++ b/neet/statespace.py @@ -1,6 +1,7 @@ # Copyright 2016-2017 ELIFE. All rights reserved. # Use of this source code is governed by a MIT # license that can be found in the LICENSE file. +from .python3 import * import numpy as np class StateSpace(object): @@ -201,7 +202,7 @@ class StateSpace(object): :returns: a unique integer encoding of the state :raises ValueError: if ``state`` has an incorrect length """ - encoded, place = 0, 1 + encoded, place = long(0), long(1) base = self.__base if self.is_uniform: @@ -213,7 +214,7 @@ class StateSpace(object): encoded += place * x place *= b - return encoded + return long(encoded) def encode(self, state): """
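The patch resolves the overflow by adding a small `neet/python3.py` compatibility module and routing every encoding accumulator through `long`. Here is a sketch of the same shim pattern, runnable on Python 3; the 68-node accumulation is illustrative arithmetic mirroring `StateSpace.encode`, not Neet code itself:

```python
# Compatibility-shim pattern from the patch's neet/python3.py: Python 3 has
# no separate `long` type, so the module aliases it (and `unicode`) and
# callers import the names so the same code runs on both majors.
import sys

if sys.version_info > (3,):
    long = int       # Python 3 ints are already arbitrary precision
    unicode = str

# Usage: accumulate an encoded state through `long` so the arithmetic stays
# in Python's arbitrary-precision integers instead of a fixed-width type.
encoded, place = long(0), long(1)
for x in [1] * 68:          # 68 binary nodes, more than 63 bits' worth
    encoded += place * x
    place *= 2
assert encoded == 2 ** 68 - 1
```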
logic network update function fails with a large number of nodes

Above about 62 nodes, the update function for logic networks fails due to an issue with encoding the states of the network.
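The 62-node figure follows from the width of a machine integer: each binary node contributes one bit to the encoded state, and a signed 64-bit integer holds at most 2**63 - 1. A hedged, standalone check of that arithmetic (requires NumPy; this is not Neet code):

```python
# Rough arithmetic behind the ~62-node threshold: n binary nodes need
# n-bit state codes, but a signed 64-bit integer tops out at 2**63 - 1.
import numpy as np

int64_max = np.iinfo(np.int64).max        # 2**63 - 1
print(2 ** 62 - 1 <= int64_max)           # True: 62 nodes still encodable
print(2 ** 64 - 1 <= int64_max)           # False: 64 nodes cannot fit
# Python's own int has no such bound, which is what the fix relies on:
print(len(bin(2 ** 100 - 1)) - 2)         # 100 bits, represented exactly
```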
ELIFE-ASU/Neet
diff --git a/test/test_asynchronous.py b/test/test_asynchronous.py index 2dce250..5b6b7b1 100644 --- a/test/test_asynchronous.py +++ b/test/test_asynchronous.py @@ -2,6 +2,7 @@ # Use of this source code is governed by a MIT # license that can be found in the LICENSE file. import unittest +from neet.python3 import * from neet.asynchronous import transitions from neet.automata import ECA from neet.boolean.examples import s_pombe, s_cerevisiae, c_elegans @@ -68,7 +69,7 @@ class TestAsync(unittest.TestCase): for net in [s_pombe, s_cerevisiae, c_elegans]: for states, _ in transitions(net, encoded=True): for state in states: - self.assertIsInstance(state, int) + self.assertIsInstance(state, (int, long)) for states, _ in transitions(net, encoded=False): for state in states: self.assertIsInstance(state, list) @@ -77,7 +78,7 @@ class TestAsync(unittest.TestCase): for size in [5, 8, 10]: for states, _ in transitions(net, size, encoded=True): for state in states: - self.assertIsInstance(state, int) + self.assertIsInstance(state, (int, long)) for states, _ in transitions(net, size, encoded=False): for state in states: self.assertIsInstance(state, list) diff --git a/test/test_statespace.py b/test/test_statespace.py index 813597d..80b77f3 100644 --- a/test/test_statespace.py +++ b/test/test_statespace.py @@ -2,10 +2,10 @@ # Use of this source code is governed by a MIT # license that can be found in the LICENSE file. import unittest +from neet.python3 import * from neet.statespace import StateSpace import numpy as np - class TestStateSpace(unittest.TestCase): def test_invalid_spec_type(self): with self.assertRaises(TypeError): @@ -277,3 +277,17 @@ class TestStateSpace(unittest.TestCase): self.assertFalse([0, 2, 1] not in StateSpace([2, 3, 2])) self.assertTrue([0, 1] not in StateSpace([2, 2, 3])) self.assertTrue([1, 1, 6] not in StateSpace([2, 3, 4])) + + def test_long_encoding(self): + state_space = StateSpace(10) + code = state_space.encode(np.ones(10, dtype=int)) + print(type(code)) + self.assertIsInstance(code, long) + + state_space = StateSpace(68) + code = state_space.encode(np.ones(68, dtype=int)) + self.assertIsInstance(code, long) + + state_space = StateSpace(100) + code = state_space.encode(np.ones(100, dtype=int)) + self.assertIsInstance(code, long)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_added_files", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 2 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.5", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 decorator==4.4.2 importlib-metadata==4.8.3 iniconfig==1.1.1 -e git+https://github.com/ELIFE-ASU/Neet.git@fbc1f937c407974c04c847d44517fea5ea76176f#egg=neet networkx==2.5.1 nose==1.3.7 numpy==1.19.5 packaging==21.3 pluggy==1.0.0 py==1.11.0 pyinform==0.2.0 pyparsing==3.1.4 pytest==7.0.1 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: Neet channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - decorator==4.4.2 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - networkx==2.5.1 - nose==1.3.7 - numpy==1.19.5 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pyinform==0.2.0 - pyparsing==3.1.4 - pytest==7.0.1 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/Neet
[ "test/test_asynchronous.py::TestAsync::test_canary", "test/test_asynchronous.py::TestAsync::test_transitions_encoded", "test/test_asynchronous.py::TestAsync::test_transitions_not_network", "test/test_asynchronous.py::TestAsync::test_transitions_require_update", "test/test_asynchronous.py::TestAsync::test_transitions_sum_to_one", "test/test_statespace.py::TestStateSpace::test_base_mismatch", "test/test_statespace.py::TestStateSpace::test_check_states_uniform", "test/test_statespace.py::TestStateSpace::test_check_states_varied", "test/test_statespace.py::TestStateSpace::test_decode_encode_nonuniform", "test/test_statespace.py::TestStateSpace::test_decode_encode_uniform", "test/test_statespace.py::TestStateSpace::test_decoding_nonuniform", "test/test_statespace.py::TestStateSpace::test_decoding_uniform", "test/test_statespace.py::TestStateSpace::test_encode_decode_nonuniform", "test/test_statespace.py::TestStateSpace::test_encode_decode_uniform", "test/test_statespace.py::TestStateSpace::test_encoding_error", "test/test_statespace.py::TestStateSpace::test_encoding_nonuniform", "test/test_statespace.py::TestStateSpace::test_encoding_uniform", "test/test_statespace.py::TestStateSpace::test_invalid_base_type", "test/test_statespace.py::TestStateSpace::test_invalid_base_value", "test/test_statespace.py::TestStateSpace::test_invalid_spec_type", "test/test_statespace.py::TestStateSpace::test_invalid_spec_value", "test/test_statespace.py::TestStateSpace::test_long_encoding", "test/test_statespace.py::TestStateSpace::test_nonuniform_bases", "test/test_statespace.py::TestStateSpace::test_states_boolean", "test/test_statespace.py::TestStateSpace::test_states_boolean_list", "test/test_statespace.py::TestStateSpace::test_states_count", "test/test_statespace.py::TestStateSpace::test_states_nonboolean", "test/test_statespace.py::TestStateSpace::test_states_nonboolean_list", "test/test_statespace.py::TestStateSpace::test_uniform_bases" ]
[]
[]
[]
MIT License
2,155
[ "neet/statespace.py", "neet/python3.py", "neet/boolean/logicnetwork.py" ]
[ "neet/statespace.py", "neet/python3.py", "neet/boolean/logicnetwork.py" ]
pika__pika-956
f3705381d710a1c6b78de6c5e4c5772516adbca0
2018-02-14 07:59:42
7b6d7983db021ae4b84d08ea9cee4b8f960ada43
diff --git a/docs/faq.rst b/docs/faq.rst index 132b495..f70ef55 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -3,7 +3,7 @@ Frequently Asked Questions - Is Pika thread safe? - Pika does not have any notion of threading in the code. If you want to use Pika with threading, make sure you have a Pika connection per thread, created in that thread. It is not safe to share one Pika connection across threads. + Pika does not have any notion of threading in the code. If you want to use Pika with threading, make sure you have a Pika connection per thread, created in that thread. It is not safe to share one Pika connection across threads, with one exception: you may call the connection method `add_callback_threadsafe` from another thread to schedule a callback within an active pika connection. - How do I report a bug with Pika? diff --git a/pika/adapters/asyncio_connection.py b/pika/adapters/asyncio_connection.py index cc38c3f..ebfd179 100644 --- a/pika/adapters/asyncio_connection.py +++ b/pika/adapters/asyncio_connection.py @@ -20,6 +20,17 @@ class IOLoopAdapter: self.readers = set() self.writers = set() + def close(self): + """Release ioloop's resources. + + This method is intended to be called by the application or test code + only after the ioloop's outermost `start()` call returns. After calling + `close()`, no other interaction with the closed instance of ioloop + should be performed. + + """ + self.loop.close() + def add_timeout(self, deadline, callback): """Add the callback to the EventLoop timer to fire after deadline seconds. Returns a Handle to the timeout. @@ -41,6 +52,28 @@ class IOLoopAdapter: """ return handle.cancel() + def add_callback_threadsafe(self, callback): + """Requests a call to the given function as soon as possible in the + context of this IOLoop's thread. + + NOTE: This is the only thread-safe method offered by the IOLoop adapter. + All other manipulations of the IOLoop adapter and its parent connection + must be performed from the connection's thread. + + For example, a thread may request a call to the + `channel.basic_ack` method of a connection that is running in a + different thread via + + ``` + connection.add_callback_threadsafe( + functools.partial(channel.basic_ack, delivery_tag=...)) + ``` + + :param method callback: The callback method; must be callable. + + """ + self.loop.call_soon_threadsafe(callback) + def add_handler(self, fd, cb, event_state): """ Registers the given handler to receive the given events for ``fd``. diff --git a/pika/adapters/base_connection.py b/pika/adapters/base_connection.py index fdc456b..ba814bd 100644 --- a/pika/adapters/base_connection.py +++ b/pika/adapters/base_connection.py @@ -127,6 +127,32 @@ class BaseConnection(connection.Connection): """ self.ioloop.remove_timeout(timeout_id) + def add_callback_threadsafe(self, callback): + """Requests a call to the given function as soon as possible in the + context of this connection's IOLoop thread. + + NOTE: This is the only thread-safe method offered by the connection. All + other manipulations of the connection must be performed from the + connection's thread. + + For example, a thread may request a call to the + `channel.basic_ack` method of a connection that is running in a + different thread via + + ``` + connection.add_callback_threadsafe( + functools.partial(channel.basic_ack, delivery_tag=...)) + ``` + + :param method callback: The callback method; must be callable. 
+ + """ + if not callable(callback): + raise TypeError( + 'callback must be a callable, but got %r' % (callback,)) + + self.ioloop.add_callback_threadsafe(callback) + def _adapter_connect(self): """Connect to the RabbitMQ broker, returning True if connected. diff --git a/pika/adapters/blocking_connection.py b/pika/adapters/blocking_connection.py index 2a1629f..3a845f8 100644 --- a/pika/adapters/blocking_connection.py +++ b/pika/adapters/blocking_connection.py @@ -10,6 +10,9 @@ and the :class:`~pika.adapters.blocking_connection.BlockingChannel` classes. """ +# Suppress too-many-lines +# pylint: disable=C0302 + # Disable "access to protected member warnings: this wrapper implementation is # a friend of those instances # pylint: disable=W0212 @@ -158,7 +161,7 @@ class _CallbackResult(object): with `append_element` """ assert self._ready, '_CallbackResult was not set' - assert isinstance(self._values, list) and len(self._values) > 0, ( + assert isinstance(self._values, list) and self._values, ( '_CallbackResult value is incompatible with append_element: %r' % (self._values,)) @@ -380,7 +383,7 @@ class BlockingConnection(object): def _cleanup(self): """Clean up members that might inhibit garbage collection""" - self._impl.ioloop.deactivate_poller() + self._impl.ioloop.close() self._ready_events.clear() self._opened_result.reset() self._open_error_result.reset() @@ -527,6 +530,18 @@ class BlockingConnection(object): """ self._ready_events.append(evt) + def _on_threadsafe_callback(self, user_callback): + """Handle callback that was registered via `add_callback_threadsafe`. + + :param user_callback: callback passed to `add_callback_threadsafe` by + the application. + + """ + # Turn it into a 0-delay timeout to take advantage of our existing logic + # that deals with reentrancy + self.add_timeout(0, user_callback) + + def _on_connection_blocked(self, user_callback, method_frame): """Handle Connection.Blocked notification from RabbitMQ broker @@ -622,9 +637,8 @@ class BlockingConnection(object): """ if not callable(callback): - raise ValueError( - 'callback parameter must be callable, but got %r' - % (callback,)) + raise TypeError( + 'callback must be a callable, but got %r' % (callback,)) evt = _TimerEvt(callback=callback) timer_id = self._impl.add_timeout( @@ -634,6 +648,29 @@ class BlockingConnection(object): return timer_id + def add_callback_threadsafe(self, callback): + """Requests a call to the given function as soon as possible in the + context of this connection's thread. + + NOTE: This is the only thread-safe method in `BlockingConnection`. All + other manipulations of `BlockingConnection` must be performed from the + connection's thread. + + For example, a thread may request a call to the + `BlockingChannel.basic_ack` method of a `BlockingConnection` that is + running in a different thread via + + ``` + connection.add_callback_threadsafe( + functools.partial(channel.basic_ack, delivery_tag=...)) + ``` + + :param method callback: The callback method; must be callable + + """ + self._impl.add_callback_threadsafe( + functools.partial(self._on_threadsafe_callback, callback)) + def remove_timeout(self, timeout_id): """Remove a timer if it's still in the timeout stack @@ -877,7 +914,7 @@ class _ConsumerCancellationEvt(_ChannelPendingEvt): `Basic.Cancel` """ - __slots__ = 'method_frame' + __slots__ = ('method_frame',) def __init__(self, method_frame): """ @@ -1802,7 +1839,8 @@ class BlockingChannel(object): """Blocking consumption of a queue instead of via a callback. 
This method is a generator that yields each message as a tuple of method, properties, and body. The active generator iterator terminates when the - consumer is cancelled by client or broker. + consumer is cancelled by client via `BlockingChannel.cancel()` or by + broker. Example: @@ -2397,7 +2435,8 @@ class BlockingChannel(object): :param queue: The queue name :type queue: str or unicode; if empty string, the broker will create a unique queue name; - :param bool passive: Only check to see if the queue exists + :param bool passive: Only check to see if the queue exists and raise + `ChannelClosed` if it doesn't; :param bool durable: Survive reboots of the broker :param bool exclusive: Only allow access by the current connection :param bool auto_delete: Delete after consumer cancels or disconnects diff --git a/pika/adapters/select_connection.py b/pika/adapters/select_connection.py index 3ae3eca..85922ce 100644 --- a/pika/adapters/select_connection.py +++ b/pika/adapters/select_connection.py @@ -3,6 +3,7 @@ platform pika is running on. """ import abc +import collections import errno import functools import heapq @@ -11,8 +12,6 @@ import select import time import threading -from collections import defaultdict - import pika.compat from pika.adapters.base_connection import BaseConnection @@ -164,6 +163,16 @@ class _Timer(object): # collection of canceled timeouts self._num_cancellations = 0 + def close(self): + """Release resources. Don't use the `_Timer` instance after closing + it + """ + # Eliminate potential reference cycles to aid garbage-collection + if self._timeout_heap is not None: + for timeout in self._timeout_heap: + timeout.callback = None + self._timeout_heap = None + def call_later(self, delay, callback): """Schedule a one-shot timeout given delay seconds. @@ -283,9 +292,28 @@ class IOLoop(object): def __init__(self): self._timer = _Timer() - self._poller = self._get_poller(self._timer.get_remaining_interval, + # Callbacks requested via `add_callback` + self._callbacks = collections.deque() + + # Identity of this IOLoop's thread + self._thread_id = None + + self._poller = self._get_poller(self._get_remaining_interval, self.process_timeouts) + def close(self): + """Release IOLoop's resources. + + `IOLoop.close` is intended to be called by the application or test code + only after `IOLoop.start()` returns. After calling `close()`, no other + interaction with the closed instance of `IOLoop` should be performed. + + """ + if self._callbacks is not None: + self._poller.close() + self._timer.close() + self._callbacks = None + @staticmethod def _get_poller(get_wait_seconds, process_timeouts): """Determine the best poller to use for this environment and instantiate @@ -347,13 +375,57 @@ class IOLoop(object): """ self._timer.remove_timeout(timeout_id) + def add_callback_threadsafe(self, callback): + """Requests a call to the given function as soon as possible in the + context of this IOLoop's thread. + + NOTE: This is the only thread-safe method in IOLoop. All other + manipulations of IOLoop must be performed from the IOLoop's thread. 
+ + For example, a thread may request a call to the `stop` method of an + ioloop that is running in a different thread via + `ioloop.add_callback_threadsafe(ioloop.stop)` + + :param method callback: The callback method + + """ + if not callable(callback): + raise TypeError( + 'callback must be a callable, but got %r' % (callback,)) + + # NOTE: `deque.append` is atomic + self._callbacks.append(callback) + if threading.current_thread().ident != self._thread_id: + # Wake up the IOLoop running in another thread + self._poller.wake_threadsafe() + + LOGGER.debug('add_callback_threadsafe: added callback=%r', callback) + def process_timeouts(self): - """[Extension] Process pending timeouts, invoking callbacks for those + """[Extension] Process pending callbacks and timeouts, invoking those whose time has come. Internal use only. """ + # Avoid I/O starvation by postponing new callbacks to the next iteration + for _ in pika.compat.xrange(len(self._callbacks)): + self._callbacks.popleft()() + self._timer.process_timeouts() + def _get_remaining_interval(self): + """Get the remaining interval to the next callback or timeout + expiration. + + :returns: non-negative number of seconds until next callback or timer + expiration; None if there are no callbacks and timers + :rtype: float + + """ + if self._callbacks: + return 0 + + return self._timer.get_remaining_interval() + def add_handler(self, fileno, handler, events): """[API] Add a new fileno to the set to be monitored @@ -386,20 +458,31 @@ class IOLoop(object): exit. See `IOLoop.stop`. """ + self._thread_id = threading.current_thread().ident self._poller.start() def stop(self): """[API] Request exit from the ioloop. The loop is NOT guaranteed to - stop before this method returns. This is the only method that may be - called from another thread. + stop before this method returns. + + To invoke `stop()` safely from a thread other than this IOLoop's thread, + call it via `add_callback_threadsafe`; e.g., + + `ioloop.add_callback_threadsafe(ioloop.stop)` """ + if (self._thread_id is not None and + threading.current_thread().ident != self._thread_id): + LOGGER.warning('Use add_callback_threadsafe to request ' + 'ioloop.stop() from another thread') + self._poller.stop() def activate_poller(self): """[Extension] Activate the poller """ + self._thread_id = threading.current_thread().ident self._poller.activate_poller() def deactivate_poller(self): @@ -443,6 +526,10 @@ class _PollerBase(_AbstractBase): # pylint: disable=R0902 self._get_wait_seconds = get_wait_seconds self._process_timeouts = process_timeouts + # We guard access to the waking file descriptors to avoid races from + # closing them while another thread is calling our `wake()` method. + self._waking_mutex = threading.Lock() + # fd-to-handler function mappings self._fd_handlers = dict() @@ -456,14 +543,60 @@ class _PollerBase(_AbstractBase): # pylint: disable=R0902 self._stopping = False - # Mutex for controlling critical sections where ioloop-interrupt sockets - # are created, used, and destroyed. Needed in case `stop()` is called - # from a thread. - self._mutex = threading.Lock() + # Create ioloop-interrupt socket pair and register read handler. + self._r_interrupt, self._w_interrupt = self._get_interrupt_pair() + self.add_handler(self._r_interrupt.fileno(), self._read_interrupt, READ) + + def close(self): + """Release poller's resources. + + `close()` is intended to be called after the poller's `start()` method + returns. 
After calling `close()`, no other interaction with the closed + poller instance should be performed. + + """ + # Unregister and close ioloop-interrupt socket pair; mutual exclusion is + # necessary to avoid race condition with `wake_threadsafe` executing in + # another thread's context + assert self._start_nesting_levels == 0, \ + 'Cannot call close() before start() unwinds.' + + with self._waking_mutex: + if self._w_interrupt is not None: + self.remove_handler(self._r_interrupt.fileno()) # pylint: disable=E1101 + self._r_interrupt.close() + self._r_interrupt = None + self._w_interrupt.close() + self._w_interrupt = None + + self.deactivate_poller() + + self._fd_handlers = None + self._fd_events = None + self._processing_fd_event_map = None + + def wake_threadsafe(self): + """Wake up the poller as soon as possible. As the name indicates, this + method is thread-safe. + + """ + with self._waking_mutex: + if self._w_interrupt is None: + return + + try: + # Send byte to interrupt the poll loop, use send() instead of + # os.write for Windows compatibility + self._w_interrupt.send(b'X') + except pika.compat.SOCKET_ERROR as err: + if err.errno != errno.EWOULDBLOCK: + raise + except Exception as err: + # There's nothing sensible to do here, we'll exit the interrupt + # loop after POLL_TIMEOUT secs in worst case anyway. + LOGGER.warning("Failed to send interrupt to poller: %s", err) + raise - # ioloop-interrupt socket pair; initialized in start() - self._r_interrupt = None - self._w_interrupt = None def _get_max_wait(self): """Get the interval to the next timeout event, or a default interval @@ -558,7 +691,7 @@ class _PollerBase(_AbstractBase): # pylint: disable=R0902 """ # Activate the underlying poller and register current events self._init_poller() - fd_to_events = defaultdict(int) + fd_to_events = collections.defaultdict(int) for event, file_descriptors in self._fd_events.items(): for fileno in file_descriptors: fd_to_events[fileno] |= event @@ -580,22 +713,10 @@ class _PollerBase(_AbstractBase): # pylint: disable=R0902 if self._start_nesting_levels == 1: LOGGER.debug('Entering IOLoop') - self._stopping = False # Activate the underlying poller and register current events self.activate_poller() - # Create ioloop-interrupt socket pair and register read handler. - # NOTE: we defer their creation because some users (e.g., - # BlockingConnection adapter) don't use the event loop and these - # sockets would get reported as leaks - with self._mutex: - assert self._r_interrupt is None - self._r_interrupt, self._w_interrupt = self._get_interrupt_pair( - ) - self.add_handler(self._r_interrupt.fileno(), - self._read_interrupt, READ) - else: LOGGER.debug('Reentering IOLoop at nesting level=%s', self._start_nesting_levels) @@ -610,46 +731,26 @@ class _PollerBase(_AbstractBase): # pylint: disable=R0902 self._start_nesting_levels -= 1 if self._start_nesting_levels == 0: - LOGGER.debug('Cleaning up IOLoop') - # Unregister and close ioloop-interrupt socket pair - with self._mutex: - self.remove_handler(self._r_interrupt.fileno()) - self._r_interrupt.close() - self._r_interrupt = None - self._w_interrupt.close() - self._w_interrupt = None - - # Deactivate the underlying poller - self.deactivate_poller() + try: + LOGGER.debug('Deactivating poller') + + # Deactivate the underlying poller + self.deactivate_poller() + finally: + self._stopping = False else: LOGGER.debug('Leaving IOLoop with %s nesting levels remaining', self._start_nesting_levels) def stop(self): """Request exit from the ioloop. 
The loop is NOT guaranteed to stop - before this method returns. This is the only method that may be called - from another thread. + before this method returns. """ LOGGER.debug('Stopping IOLoop') self._stopping = True - with self._mutex: - if self._w_interrupt is None: - return - - try: - # Send byte to interrupt the poll loop, use send() instead of - # os.write for Windows compatibility - self._w_interrupt.send(b'X') - except pika.compat.SOCKET_ERROR as err: - if err.errno != errno.EWOULDBLOCK: - raise - except Exception as err: - # There's nothing sensible to do here, we'll exit the interrupt - # loop after POLL_TIMEOUT secs in worst case anyway. - LOGGER.warning("Failed to send ioloop interrupt: %s", err) - raise + self.wake_threadsafe() @abc.abstractmethod def poll(self): @@ -797,7 +898,7 @@ class SelectPoller(_PollerBase): # Build an event bit mask for each fileno we've received an event for - fd_event_map = defaultdict(int) + fd_event_map = collections.defaultdict(int) for fd_set, evt in zip((read, write, error), (READ, WRITE, ERROR)): for fileno in fd_set: fd_event_map[fileno] |= evt @@ -856,9 +957,8 @@ class KQueuePoller(_PollerBase): def __init__(self, get_wait_seconds, process_timeouts): """Create an instance of the KQueuePoller """ - super(KQueuePoller, self).__init__(get_wait_seconds, process_timeouts) - self._kqueue = None + super(KQueuePoller, self).__init__(get_wait_seconds, process_timeouts) @staticmethod def _map_event(kevent): @@ -894,7 +994,7 @@ class KQueuePoller(_PollerBase): else: raise - fd_event_map = defaultdict(int) + fd_event_map = collections.defaultdict(int) for event in kevents: fd_event_map[event.ident] |= self._map_event(event) @@ -908,8 +1008,9 @@ class KQueuePoller(_PollerBase): def _uninit_poller(self): """Notify the implementation to release the poller resource""" - self._kqueue.close() - self._kqueue = None + if self._kqueue is not None: + self._kqueue.close() + self._kqueue = None def _register_fd(self, fileno, events): """The base class invokes this method to notify the implementation to @@ -1013,7 +1114,7 @@ class PollPoller(_PollerBase): else: raise - fd_event_map = defaultdict(int) + fd_event_map = collections.defaultdict(int) for fileno, event in events: fd_event_map[fileno] |= event @@ -1027,10 +1128,11 @@ class PollPoller(_PollerBase): def _uninit_poller(self): """Notify the implementation to release the poller resource""" - if hasattr(self._poll, "close"): - self._poll.close() + if self._poll is not None: + if hasattr(self._poll, "close"): + self._poll.close() - self._poll = None + self._poll = None def _register_fd(self, fileno, events): """The base class invokes this method to notify the implementation to diff --git a/pika/adapters/tornado_connection.py b/pika/adapters/tornado_connection.py index 3c3c736..c8369df 100644 --- a/pika/adapters/tornado_connection.py +++ b/pika/adapters/tornado_connection.py @@ -79,3 +79,29 @@ class TornadoConnection(base_connection.BaseConnection): """ return self.ioloop.remove_timeout(timeout_id) + + def add_callback_threadsafe(self, callback): + """Requests a call to the given function as soon as possible in the + context of this connection's IOLoop thread. + + NOTE: This is the only thread-safe method offered by the connection. All + other manipulations of the connection must be performed from the + connection's thread. 
+ + For example, a thread may request a call to the + `channel.basic_ack` method of a connection that is running in a + different thread via + + ``` + connection.add_callback_threadsafe( + functools.partial(channel.basic_ack, delivery_tag=...)) + ``` + + :param method callback: The callback method; must be callable. + + """ + if not callable(callback): + raise TypeError( + 'callback must be a callable, but got %r' % (callback,)) + + self.ioloop.add_callback(callback) diff --git a/pika/adapters/twisted_connection.py b/pika/adapters/twisted_connection.py index 72c7c2e..d48487a 100644 --- a/pika/adapters/twisted_connection.py +++ b/pika/adapters/twisted_connection.py @@ -225,6 +225,28 @@ class IOLoopReactorAdapter(object): """ call.cancel() + def add_callback_threadsafe(self, callback): + """Requests a call to the given function as soon as possible in the + context of this IOLoop's thread. + + NOTE: This is the only thread-safe method offered by the IOLoop adapter. + All other manipulations of the IOLoop adapter and its parent connection + must be performed from the connection's thread. + + For example, a thread may request a call to the + `channel.basic_ack` method of a connection that is running in a + different thread via + + ``` + connection.add_callback_threadsafe( + functools.partial(channel.basic_ack, delivery_tag=...)) + ``` + + :param method callback: The callback method; must be callable. + + """ + self.reactor.callFromThread(callback) + def stop(self): # Guard against stopping the reactor multiple times if not self.started: diff --git a/pika/connection.py b/pika/connection.py index 60d40ca..03bfb5e 100644 --- a/pika/connection.py +++ b/pika/connection.py @@ -1244,8 +1244,11 @@ class Connection(object): LOGGER.warning('Suppressing close request on %s', self) return + # NOTE The connection is either in opening or open state + # Initiate graceful closing of channels that are OPEN or OPENING - self._close_channels(reply_code, reply_text) + if self._channels: + self._close_channels(reply_code, reply_text) # Set our connection state self._set_connection_state(self.CONNECTION_CLOSING)
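The patch's docstrings name `ioloop.add_callback_threadsafe(ioloop.stop)` as the sanctioned way to stop an ioloop from another thread. Here is a minimal sketch of that pattern; the localhost parameters and the one-second timer are assumptions for illustration:

```python
# Hedged sketch: stopping a pika SelectConnection's ioloop from another
# thread, per the pattern named in the patch's docstrings.
import threading
import pika

def on_open(conn):
    # Ask a helper thread to stop us one second later; only the
    # add_callback_threadsafe call itself crosses threads.
    threading.Timer(
        1.0, lambda: conn.ioloop.add_callback_threadsafe(conn.ioloop.stop)
    ).start()

connection = pika.SelectConnection(
    pika.ConnectionParameters("localhost"), on_open_callback=on_open)
connection.ioloop.start()
```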
Question re. interrupting `BlockingChannel.consume` from another thread

In [Dramatiq][dramatiq] I've got an architecture where consumers live on their own thread and call `BlockingChannel`'s `consume` method with an inactivity timeout of `1s` by default. Whenever a consumer gets a message from Rabbit, it puts it on a shared in-memory work queue where it is grabbed and processed by any of `n` worker threads. Once the worker thread is done processing the message, it gets put on an "acks" queue that belongs to the consumer. When the consumer hits the inactivity timeout, it acks any pending messages off its queue and then waits for new messages from RMQ. Acking from the worker threads is not an option since `BlockingChannel`s are not thread safe.

When you set a QoS value, the fact that `consume` can't be interrupted from another thread really starts to hurt throughput, at which point you are forced to either:

1. use a tiny `inactivity_timeout` (<100ms), which "fixes" the throughput issue but is essentially a spinlock, so it's inefficient,
2. switch to `SelectConnection` and implement your own interrupt; this is probably what any sane person would do, but the problem is you'd end up implementing a lot of stuff that `BlockingChannel` already implements, or
3. hack your own interrupt by hooking into implementation details of `BlockingChannel`.

The latter is what I ended up doing ([here][c1], [here][c2] and [here][c3]) and my interrupt implementation is working well, but I don't feel too good about depending on `BlockingConnection`'s implementation details, so my "issue" is more of a question than anything: is there a better way I could be doing this and, if not, would this be a good addition to `BlockingConnection` (I'd be happy to contribute)?

[dramatiq]: http://dramatiq.io/
[c1]: https://github.com/Bogdanp/dramatiq/blob/master/dramatiq/brokers/rabbitmq.py#L359-L373
[c2]: https://github.com/Bogdanp/dramatiq/blob/master/dramatiq/brokers/rabbitmq.py#L422-L435
[c3]: https://github.com/Bogdanp/dramatiq/blob/master/dramatiq/brokers/rabbitmq.py#L439-L458
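The `add_callback_threadsafe` method introduced by the patch above addresses exactly this question: a worker thread hands the ack back to the connection's own thread instead of touching the channel directly. A hedged sketch of that pattern follows (broker address, queue name, and the thread-per-message dispatch are illustrative assumptions; `basic_consume`'s argument order and names vary across pika versions):

```python
# Hedged sketch of the pattern the patch enables: only the
# add_callback_threadsafe call crosses threads.
import functools
import threading

import pika

connection = pika.BlockingConnection(pika.ConnectionParameters("localhost"))
channel = connection.channel()
channel.basic_qos(prefetch_count=8)

def handle(delivery_tag, body):
    # ... real work happens on this worker thread ...
    # The one thread-safe call: schedule basic_ack to run back on the
    # connection's own thread.
    connection.add_callback_threadsafe(
        functools.partial(channel.basic_ack, delivery_tag=delivery_tag))

def on_message(ch, method, properties, body):
    # Thread-per-message is deliberately crude; a worker pool would do the
    # same hand-off.
    threading.Thread(target=handle, args=(method.delivery_tag, body)).start()

channel.basic_consume("work", on_message)  # argument order/names vary by version
channel.start_consuming()
```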
pika/pika
diff --git a/tests/acceptance/async_adapter_tests.py b/tests/acceptance/async_adapter_tests.py index 00b87ee..d392d04 100644 --- a/tests/acceptance/async_adapter_tests.py +++ b/tests/acceptance/async_adapter_tests.py @@ -11,6 +11,7 @@ # pylint: disable=W0613 import functools +import threading import time import uuid @@ -37,6 +38,7 @@ class TestConfirmSelect(AsyncTestCase, AsyncAdapters): channel.confirm_delivery(ack_nack_callback=self.ack_nack_callback, callback=self.on_complete) + @staticmethod def ack_nack_callback(frame): pass @@ -493,6 +495,114 @@ class TestBlockedConnectionUnblocks(AsyncTestCase, AsyncAdapters): # pylint: di reply_text) +class TestAddCallbackThreadsafeRequestBeforeIOLoopStarts(AsyncTestCase, AsyncAdapters): + DESCRIPTION = "Test add_callback_threadsafe request before ioloop starts." + + def _run_ioloop(self, *args, **kwargs): # pylint: disable=W0221 + """We intercept this method from AsyncTestCase in order to call + add_callback_threadsafe before AsyncTestCase starts the ioloop. + + """ + self.my_start_time = time.time() + # Request a callback from our current (ioloop's) thread + self.connection.add_callback_threadsafe(self.on_requested_callback) + + return super( + TestAddCallbackThreadsafeRequestBeforeIOLoopStarts, self)._run_ioloop( + *args, **kwargs) + + def start(self, *args, **kwargs): # pylint: disable=W0221 + self.loop_thread_ident = threading.current_thread().ident + self.my_start_time = None + self.got_callback = False + super(TestAddCallbackThreadsafeRequestBeforeIOLoopStarts, self).start(*args, **kwargs) + self.assertTrue(self.got_callback) + + def begin(self, channel): + self.stop() + + def on_requested_callback(self): + self.assertEqual(threading.current_thread().ident, + self.loop_thread_ident) + self.assertLess(time.time() - self.my_start_time, 0.25) + self.got_callback = True + + +class TestAddCallbackThreadsafeFromIOLoopThread(AsyncTestCase, AsyncAdapters): + DESCRIPTION = "Test add_callback_threadsafe request from same thread." + + def start(self, *args, **kwargs): + self.loop_thread_ident = threading.current_thread().ident + self.my_start_time = None + self.got_callback = False + super(TestAddCallbackThreadsafeFromIOLoopThread, self).start(*args, **kwargs) + self.assertTrue(self.got_callback) + + def begin(self, channel): + self.my_start_time = time.time() + # Request a callback from our current (ioloop's) thread + channel.connection.add_callback_threadsafe(self.on_requested_callback) + + def on_requested_callback(self): + self.assertEqual(threading.current_thread().ident, + self.loop_thread_ident) + self.assertLess(time.time() - self.my_start_time, 0.25) + self.got_callback = True + self.stop() + + +class TestAddCallbackThreadsafeFromAnotherThread(AsyncTestCase, AsyncAdapters): + DESCRIPTION = "Test add_callback_threadsafe request from another thread." 
+ + def start(self, *args, **kwargs): + self.loop_thread_ident = threading.current_thread().ident + self.my_start_time = None + self.got_callback = False + super(TestAddCallbackThreadsafeFromAnotherThread, self).start(*args, **kwargs) + self.assertTrue(self.got_callback) + + def begin(self, channel): + self.my_start_time = time.time() + # Request a callback from ioloop while executing in another thread + timer = threading.Timer( + 0, + lambda: channel.connection.add_callback_threadsafe( + self.on_requested_callback)) + self.addCleanup(timer.cancel) + timer.start() + + def on_requested_callback(self): + self.assertEqual(threading.current_thread().ident, + self.loop_thread_ident) + self.assertLess(time.time() - self.my_start_time, 0.25) + self.got_callback = True + self.stop() + + +class TestIOLoopStopBeforeIOLoopStarts(AsyncTestCase, AsyncAdapters): + DESCRIPTION = "Test ioloop.stop() before ioloop starts causes ioloop to exit quickly." + + def _run_ioloop(self, *args, **kwargs): # pylint: disable=W0221 + """We intercept this method from AsyncTestCase in order to call + ioloop.stop() before AsyncTestCase starts the ioloop. + """ + # Request ioloop to stop before it starts + self.my_start_time = time.time() + self.stop_ioloop_only() + + return super( + TestIOLoopStopBeforeIOLoopStarts, self)._run_ioloop(*args, **kwargs) + + def start(self, *args, **kwargs): # pylint: disable=W0221 + self.loop_thread_ident = threading.current_thread().ident + self.my_start_time = None + super(TestIOLoopStopBeforeIOLoopStarts, self).start(*args, **kwargs) + self.assertLess(time.time() - self.my_start_time, 0.25) + + def begin(self, channel): + pass + + class TestViabilityOfMultipleTimeoutsWithSameDeadlineAndCallback(AsyncTestCase, AsyncAdapters): # pylint: disable=C0103 DESCRIPTION = "Test viability of multiple timeouts with same deadline and callback" diff --git a/tests/acceptance/async_test_base.py b/tests/acceptance/async_test_base.py index ac017f9..db01e48 100644 --- a/tests/acceptance/async_test_base.py +++ b/tests/acceptance/async_test_base.py @@ -52,34 +52,55 @@ class AsyncTestCase(unittest.TestCase): """Extend to start the actual tests on the channel""" self.fail("AsyncTestCase.begin_test not extended") - def start(self, adapter=None): + def start(self, adapter, ioloop_factory): self.logger.info('start at %s', datetime.utcnow()) self.adapter = adapter or self.ADAPTER - self.connection = self.adapter(self.parameters, self.on_open, - self.on_open_error, self.on_closed) - self.timeout = self.connection.add_timeout(self.TIMEOUT, - self.on_timeout) - self.connection.ioloop.start() - self.assertFalse(self._timed_out) + self.connection = self.adapter(self.parameters, + self.on_open, + self.on_open_error, + self.on_closed, + custom_ioloop=ioloop_factory()) + try: + self.timeout = self.connection.add_timeout(self.TIMEOUT, + self.on_timeout) + self._run_ioloop() + self.assertFalse(self._timed_out) + finally: + self.connection.ioloop.close() + self.connection = None + + def stop_ioloop_only(self): + """Request stopping of the connection's ioloop to end the test without + closing the connection + """ + self._safe_remove_test_timeout() + self.connection.ioloop.stop() def stop(self): """close the connection and stop the ioloop""" self.logger.info("Stopping test") - if self.timeout is not None: - self.connection.remove_timeout(self.timeout) - self.timeout = None - self.connection.close() + self._safe_remove_test_timeout() + self.connection.close() # NOTE: on_closed() will stop the ioloop + + def _run_ioloop(self): + 
"""Some tests need to subclass this in order to bootstrap their test + logic after we instantiate the connection and assign it to + `self.connection`, but before we run the ioloop + """ + self.connection.ioloop.start() - def _stop(self): + def _safe_remove_test_timeout(self): if hasattr(self, 'timeout') and self.timeout is not None: self.logger.info("Removing timeout") self.connection.remove_timeout(self.timeout) self.timeout = None - if hasattr(self, 'connection') and self.connection: + + def _stop(self): + self._safe_remove_test_timeout() + if hasattr(self, 'connection') and self.connection is not None: self.logger.info("Stopping ioloop") self.connection.ioloop.stop() - self.connection = None def on_closed(self, connection, reply_code, reply_text): """called when the connection has finished closing""" @@ -99,7 +120,7 @@ class AsyncTestCase(unittest.TestCase): def on_timeout(self): """called when stuck waiting for connection to close""" self.logger.error('%s timed out; on_timeout called at %s', - self, datetime.utcnow()) + self, datetime.utcnow()) self.timeout = None # the dispatcher should have removed it self._timed_out = True # initiate cleanup @@ -108,12 +129,12 @@ class AsyncTestCase(unittest.TestCase): class BoundQueueTestCase(AsyncTestCase): - def start(self, adapter=None): + def start(self, adapter, ioloop_factory): # PY3 compat encoding self.exchange = 'e-' + self.__class__.__name__ + ':' + uuid.uuid1().hex self.queue = 'q-' + self.__class__.__name__ + ':' + uuid.uuid1().hex self.routing_key = self.__class__.__name__ - super(BoundQueueTestCase, self).start(adapter) + super(BoundQueueTestCase, self).start(adapter, ioloop_factory) def begin(self, channel): self.channel.exchange_declare(self.exchange, @@ -150,20 +171,31 @@ class BoundQueueTestCase(AsyncTestCase): class AsyncAdapters(object): - def start(self, adapter_class): + def start(self, adapter_class, ioloop_factory): + """ + + :param adapter_class: pika connection adapter class to test. + :param ioloop_factory: to be called without args to instantiate a + non-shared ioloop to be passed as the `custom_ioloop` arg to the + `adapter_class` constructor. This is needed because some of the + adapters default to using a singleton ioloop, which results in + tests errors after prior tests close the ioloop to release resources, + in order to eliminate ResourceWarning warnings concerning unclosed + sockets from our adapters. 
+ :return: + """ raise NotImplementedError def select_default_test(self): """SelectConnection:DefaultPoller""" - with mock.patch.multiple(select_connection, SELECT_TYPE=None): - self.start(adapters.SelectConnection) + self.start(adapters.SelectConnection, select_connection.IOLoop) def select_select_test(self): """SelectConnection:select""" with mock.patch.multiple(select_connection, SELECT_TYPE='select'): - self.start(adapters.SelectConnection) + self.start(adapters.SelectConnection, select_connection.IOLoop) @unittest.skipIf( not hasattr(select, 'poll') or @@ -172,27 +204,36 @@ class AsyncAdapters(object): """SelectConnection:poll""" with mock.patch.multiple(select_connection, SELECT_TYPE='poll'): - self.start(adapters.SelectConnection) + self.start(adapters.SelectConnection, select_connection.IOLoop) @unittest.skipIf(not hasattr(select, 'epoll'), "epoll not supported") def select_epoll_test(self): """SelectConnection:epoll""" with mock.patch.multiple(select_connection, SELECT_TYPE='epoll'): - self.start(adapters.SelectConnection) + self.start(adapters.SelectConnection, select_connection.IOLoop) @unittest.skipIf(not hasattr(select, 'kqueue'), "kqueue not supported") def select_kqueue_test(self): """SelectConnection:kqueue""" with mock.patch.multiple(select_connection, SELECT_TYPE='kqueue'): - self.start(adapters.SelectConnection) + self.start(adapters.SelectConnection, select_connection.IOLoop) def tornado_test(self): """TornadoConnection""" - self.start(adapters.TornadoConnection) + ioloop_factory = None + if adapters.TornadoConnection is not None: + import tornado.ioloop + ioloop_factory = tornado.ioloop.IOLoop + self.start(adapters.TornadoConnection, ioloop_factory) @unittest.skipIf(sys.version_info < (3, 4), "Asyncio available for Python 3.4+") def asyncio_test(self): """AsyncioConnection""" - self.start(adapters.AsyncioConnection) + ioloop_factory = None + if adapters.AsyncioConnection is not None: + import asyncio + ioloop_factory = asyncio.new_event_loop + + self.start(adapters.AsyncioConnection, ioloop_factory) diff --git a/tests/acceptance/blocking_adapter_test.py b/tests/acceptance/blocking_adapter_test.py index 287b486..d9db804 100644 --- a/tests/acceptance/blocking_adapter_test.py +++ b/tests/acceptance/blocking_adapter_test.py @@ -1,7 +1,9 @@ """blocking adapter test""" from datetime import datetime +import functools import logging import socket +import threading import time import unittest import uuid @@ -449,6 +451,48 @@ class TestBlockedConnectionTimeout(BlockingTestCaseBase): 'Blocked connection timeout expired')) +class TestAddCallbackThreadsafeFromSameThread(BlockingTestCaseBase): + + def test(self): + """BlockingConnection.add_callback_threadsafe from same thread""" + connection = self._connect() + + # Test timer completion + start_time = time.time() + rx_callback = [] + connection.add_callback_threadsafe( + lambda: rx_callback.append(time.time())) + while not rx_callback: + connection.process_data_events(time_limit=None) + + self.assertEqual(len(rx_callback), 1) + elapsed = time.time() - start_time + self.assertLess(elapsed, 0.25) + + +class TestAddCallbackThreadsafeFromAnotherThread(BlockingTestCaseBase): + + def test(self): + """BlockingConnection.add_callback_threadsafe from another thread""" + connection = self._connect() + + # Test timer completion + start_time = time.time() + rx_callback = [] + timer = threading.Timer( + 0, + functools.partial(connection.add_callback_threadsafe, + lambda: rx_callback.append(time.time()))) + self.addCleanup(timer.cancel) + 
timer.start() + while not rx_callback: + connection.process_data_events(time_limit=None) + + self.assertEqual(len(rx_callback), 1) + elapsed = time.time() - start_time + self.assertLess(elapsed, 0.25) + + class TestAddTimeoutRemoveTimeout(BlockingTestCaseBase): def test(self): @@ -1495,7 +1539,7 @@ class TestBasicPublishDeliveredWhenPendingUnroutable(BlockingTestCaseBase): mandatory=True) self.assertEqual(res, True) - # Flush channel to force Basic.Return + # Flush connection to force Basic.Return connection.channel().close() # Deposit a routable message in the queue @@ -1654,7 +1698,7 @@ class TestPublishAndConsumeWithPubacksAndQosOfOne(BlockingTestCaseBase): queue=q_name, expected_count=0) - # Attempt to cosume again with a short timeout + # Attempt to consume again with a short timeout connection.process_data_events(time_limit=0.005) self.assertEqual(len(rx_messages), 2) @@ -1669,6 +1713,197 @@ class TestPublishAndConsumeWithPubacksAndQosOfOne(BlockingTestCaseBase): self.assertEqual(frame.method.consumer_tag, consumer_tag) +class TestBasicConsumeWithAckFromAnotherThread(BlockingTestCaseBase): + + def test(self): # pylint: disable=R0914,R0915 + """BlockingChannel.basic_consume with ack from another thread and \ + requesting basic_ack via add_callback_threadsafe + """ + # This test simulates processing of a message on another thread and + # then requesting an ACK to be dispatched on the connection's thread + # via BlockingConnection.add_callback_threadsafe + + connection = self._connect() + + ch = connection.channel() + + q_name = 'TestBasicConsumeWithAckFromAnotherThread_q' + uuid.uuid1().hex + exg_name = ('TestBasicConsumeWithAckFromAnotherThread_exg' + + uuid.uuid1().hex) + routing_key = 'TestBasicConsumeWithAckFromAnotherThread' + + # Place channel in publisher-acknowledgments mode so that publishing + # with mandatory=True will be synchronous (for convenience) + res = ch.confirm_delivery() + self.assertIsNone(res) + + # Declare a new exchange + ch.exchange_declare(exg_name, exchange_type='direct') + self.addCleanup(connection.channel().exchange_delete, exg_name) + + # Declare a new queue + ch.queue_declare(q_name, auto_delete=True) + self.addCleanup(self._connect().channel().queue_delete, q_name) + + # Bind the queue to the exchange using routing key + ch.queue_bind(q_name, exchange=exg_name, routing_key=routing_key) + + # Publish 2 messages with mandatory=True for synchronous processing + ch.publish(exg_name, routing_key, body='msg1', mandatory=True) + ch.publish(exg_name, routing_key, body='last-msg', mandatory=True) + + # Configure QoS for one message so that the 2nd message will be + # delivered only after the 1st one is ACKed + ch.basic_qos(prefetch_size=0, prefetch_count=1, all_channels=False) + + # Create a consumer + rx_messages = [] + def ackAndEnqueueMessageViaAnotherThread(rx_ch, + rx_method, + rx_properties, # pylint: disable=W0613 + rx_body): + LOGGER.debug( + '%s: Got message body=%r; delivery-tag=%r', + datetime.now(), rx_body, rx_method.delivery_tag) + + # Request ACK dispatch via add_callback_threadsafe from other + # thread; if last message, cancel consumer so that start_consuming + # can return + + def processOnConnectionThread(): + LOGGER.debug('%s: ACKing message body=%r; delivery-tag=%r', + datetime.now(), + rx_body, + rx_method.delivery_tag) + ch.basic_ack(delivery_tag=rx_method.delivery_tag, + multiple=False) + rx_messages.append(rx_body) + + # NOTE on python3, `b'last-msg' != 'last-msg'` + if rx_body == b'last-msg': + LOGGER.debug('%s: Canceling consumer 
consumer-tag=%r', + datetime.now(), + rx_method.consumer_tag) + rx_ch.basic_cancel(rx_method.consumer_tag) + + # Spawn a thread to initiate ACKing + timer = threading.Timer(0, + lambda: connection.add_callback_threadsafe( + processOnConnectionThread)) + self.addCleanup(timer.cancel) + timer.start() + + consumer_tag = ch.basic_consume( + q_name, + ackAndEnqueueMessageViaAnotherThread, + auto_ack=False, + exclusive=False, + arguments=None) + + # Wait for both messages + LOGGER.debug('%s: calling start_consuming(); consumer tag=%r', + datetime.now(), + consumer_tag) + ch.start_consuming() + LOGGER.debug('%s: Returned from start_consuming(); consumer tag=%r', + datetime.now(), + consumer_tag) + + self.assertEqual(len(rx_messages), 2) + self.assertEqual(rx_messages[0], b'msg1') + self.assertEqual(rx_messages[1], b'last-msg') + + +class TestConsumeGeneratorWithAckFromAnotherThread(BlockingTestCaseBase): + + def test(self): # pylint: disable=R0914,R0915 + """BlockingChannel.consume and requesting basic_ack from another \ + thread via add_callback_threadsafe + """ + connection = self._connect() + + ch = connection.channel() + + q_name = ('TestConsumeGeneratorWithAckFromAnotherThread_q' + + uuid.uuid1().hex) + exg_name = ('TestConsumeGeneratorWithAckFromAnotherThread_exg' + + uuid.uuid1().hex) + routing_key = 'TestConsumeGeneratorWithAckFromAnotherThread' + + # Place channel in publisher-acknowledgments mode so that publishing + # with mandatory=True will be synchronous (for convenience) + res = ch.confirm_delivery() + self.assertIsNone(res) + + # Declare a new exchange + ch.exchange_declare(exg_name, exchange_type='direct') + self.addCleanup(connection.channel().exchange_delete, exg_name) + + # Declare a new queue + ch.queue_declare(q_name, auto_delete=True) + self.addCleanup(self._connect().channel().queue_delete, q_name) + + # Bind the queue to the exchange using routing key + ch.queue_bind(q_name, exchange=exg_name, routing_key=routing_key) + + # Publish 2 messages with mandatory=True for synchronous processing + ch.publish(exg_name, routing_key, body='msg1', mandatory=True) + ch.publish(exg_name, routing_key, body='last-msg', mandatory=True) + + # Configure QoS for one message so that the 2nd message will be + # delivered only after the 1st one is ACKed + ch.basic_qos(prefetch_size=0, prefetch_count=1, all_channels=False) + + # Create a consumer + rx_messages = [] + def ackAndEnqueueMessageViaAnotherThread(rx_ch, + rx_method, + rx_properties, # pylint: disable=W0613 + rx_body): + LOGGER.debug( + '%s: Got message body=%r; delivery-tag=%r', + datetime.now(), rx_body, rx_method.delivery_tag) + + # Request ACK dispatch via add_callback_threadsafe from other + # thread; if last message, cancel consumer so that consumer + # generator completes + + def processOnConnectionThread(): + LOGGER.debug('%s: ACKing message body=%r; delivery-tag=%r', + datetime.now(), + rx_body, + rx_method.delivery_tag) + ch.basic_ack(delivery_tag=rx_method.delivery_tag, + multiple=False) + rx_messages.append(rx_body) + + # NOTE on python3, `b'last-msg' != 'last-msg'` + if rx_body == b'last-msg': + LOGGER.debug('%s: Canceling consumer consumer-tag=%r', + datetime.now(), + rx_method.consumer_tag) + # NOTE Need to use cancel() for the consumer generator + # instead of basic_cancel() + rx_ch.cancel() + + # Spawn a thread to initiate ACKing + timer = threading.Timer(0, + lambda: connection.add_callback_threadsafe( + processOnConnectionThread)) + self.addCleanup(timer.cancel) + timer.start() + + for method, properties, body in 
ch.consume(q_name, auto_ack=False): + ackAndEnqueueMessageViaAnotherThread(rx_ch=ch, + rx_method=method, + rx_properties=properties, + rx_body=body) + + self.assertEqual(len(rx_messages), 2) + self.assertEqual(rx_messages[0], b'msg1') + self.assertEqual(rx_messages[1], b'last-msg') + + class TestTwoBasicConsumersOnSameChannel(BlockingTestCaseBase): def test(self): # pylint: disable=R0914 @@ -1939,7 +2174,7 @@ class TestBasicPublishWithoutPubacks(BlockingTestCaseBase): queue=q_name, expected_count=0) - # Attempt to cosume again with a short timeout + # Attempt to consume again with a short timeout connection.process_data_events(time_limit=0.005) self.assertEqual(len(rx_messages), 2) diff --git a/tests/unit/blocking_connection_tests.py b/tests/unit/blocking_connection_tests.py index 69a80e5..f303023 100644 --- a/tests/unit/blocking_connection_tests.py +++ b/tests/unit/blocking_connection_tests.py @@ -125,8 +125,7 @@ class BlockingConnectionTests(unittest.TestCase): connection._flush_output(lambda: False, lambda: True) self.assertEqual(connection._impl.ioloop.activate_poller.call_count, 1) - self.assertEqual(connection._impl.ioloop.deactivate_poller.call_count, - 1) + self.assertEqual(connection._impl.ioloop.close.call_count, 1) @patch.object( blocking_connection, @@ -151,8 +150,7 @@ class BlockingConnectionTests(unittest.TestCase): self.assertSequenceEqual(cm.exception.args, (404, 'not found')) self.assertEqual(connection._impl.ioloop.activate_poller.call_count, 1) - self.assertEqual(connection._impl.ioloop.deactivate_poller.call_count, - 1) + self.assertEqual(connection._impl.ioloop.close.call_count, 1) @patch.object( blocking_connection, @@ -177,8 +175,7 @@ class BlockingConnectionTests(unittest.TestCase): self.assertSequenceEqual(cm.exception.args, (200, 'ok')) self.assertEqual(connection._impl.ioloop.activate_poller.call_count, 1) - self.assertEqual(connection._impl.ioloop.deactivate_poller.call_count, - 1) + self.assertEqual(connection._impl.ioloop.close.call_count, 1) @patch.object( blocking_connection, diff --git a/tests/unit/connection_timeout_tests.py b/tests/unit/connection_timeout_tests.py index b44161b..3118fab 100644 --- a/tests/unit/connection_timeout_tests.py +++ b/tests/unit/connection_timeout_tests.py @@ -49,8 +49,13 @@ class ConnectionTests(unittest.TestCase): connect=mock.Mock(side_effect=mock_timeout)) ) as create_sock_mock: params = pika.ConnectionParameters(socket_timeout=2.0) - conn = asyncio_connection.AsyncioConnection(params) + ioloop = asyncio_connection.asyncio.new_event_loop() + self.addCleanup(ioloop.close) + conn = asyncio_connection.AsyncioConnection( + params, + custom_ioloop=ioloop) conn._on_connect_timer() + create_sock_mock.return_value.settimeout.assert_called_with(2.0) self.assertIn('timeout', str(err_ctx.exception)) @@ -99,6 +104,7 @@ class ConnectionTests(unittest.TestCase): side_effect=mock_timeout))) as create_sock_mock: params = pika.ConnectionParameters(socket_timeout=2.0) conn = select_connection.SelectConnection(params) + self.addCleanup(conn.ioloop.close) conn._on_connect_timer() create_sock_mock.return_value.settimeout.assert_called_with(2.0) self.assertIn('timeout', str(err_ctx.exception)) @@ -113,7 +119,11 @@ class ConnectionTests(unittest.TestCase): connect=mock.Mock( side_effect=mock_timeout))) as create_sock_mock: params = pika.ConnectionParameters(socket_timeout=2.0) - conn = tornado_connection.TornadoConnection(params) + ioloop = tornado_connection.ioloop.IOLoop() + self.addCleanup(ioloop.close) + conn = 
tornado_connection.TornadoConnection( + params, + custom_ioloop=ioloop) conn._on_connect_timer() create_sock_mock.return_value.settimeout.assert_called_with(2.0) self.assertIn('timeout', str(err_ctx.exception)) diff --git a/tests/unit/select_connection_ioloop_tests.py b/tests/unit/select_connection_ioloop_tests.py index 62bf3f1..3f40222 100644 --- a/tests/unit/select_connection_ioloop_tests.py +++ b/tests/unit/select_connection_ioloop_tests.py @@ -19,6 +19,12 @@ import pika from pika import compat from pika.adapters import select_connection +# protected-access +# pylint: disable=W0212 +# missing-docstring +# pylint: disable=C0111 + + EPOLL_SUPPORTED = hasattr(select, 'epoll') POLL_SUPPORTED = hasattr(select, 'poll') and hasattr(select.poll(), 'modify') KQUEUE_SUPPORTED = hasattr(select, 'kqueue') @@ -36,6 +42,7 @@ class IOLoopBaseTest(unittest.TestCase): self.ioloop = select_connection.IOLoop() self.addCleanup(setattr, self, 'ioloop', None) + self.addCleanup(self.ioloop.close) activate_poller_patch = mock.patch.object( self.ioloop._poller, @@ -73,16 +80,79 @@ class IOLoopBaseTest(unittest.TestCase): self.fail('Test timed out') +class IOLoopCloseClosesSubordinateObjectsTestSelect(IOLoopBaseTest): + """ Test ioloop being closed """ + SELECT_POLLER = 'select' + + def start_test(self): + with mock.patch.multiple(self.ioloop, + _timer=mock.DEFAULT, + _poller=mock.DEFAULT, + _callbacks=mock.DEFAULT) as mocks: + self.ioloop.close() + mocks['_timer'].close.assert_called_once() + mocks['_poller'].close.assert_called_once() + self.assertIsNone(self.ioloop._callbacks) + + +class IOLoopCloseAfterStartReturnsTestSelect(IOLoopBaseTest): + """ Test IOLoop.close() after normal return from start(). """ + SELECT_POLLER = 'select' + + def start_test(self): + self.ioloop.stop() # so start will terminate quickly + self.start() + self.ioloop.close() + self.assertIsNone(self.ioloop._callbacks) + + +class IOLoopCloseBeforeStartReturnsTestSelect(IOLoopBaseTest): + """ Test calling IOLoop.close() before return from start() raises exception. """ + SELECT_POLLER = 'select' + + def start_test(self): + callback_completed = [] + + def call_close_from_callback(): + with self.assertRaises(AssertionError) as cm: + self.ioloop.close() + + self.assertEqual(cm.exception.args[0], + 'Cannot call close() before start() unwinds.') + self.ioloop.stop() + callback_completed.append(1) + + self.ioloop.add_callback_threadsafe(call_close_from_callback) + self.start() + self.assertEqual(callback_completed, [1]) + + class IOLoopThreadStopTestSelect(IOLoopBaseTest): """ Test ioloop being stopped by another Thread. """ SELECT_POLLER = 'select' def start_test(self): """Starts a thread that stops ioloop after a while and start polling""" - timer = threading.Timer(0.1, self.ioloop.stop) + timer = threading.Timer( + 0.1, + lambda: self.ioloop.add_callback_threadsafe(self.ioloop.stop)) self.addCleanup(timer.cancel) timer.start() - self.start() + self.start() # NOTE: Normal return from `start()` constitutes success + + +class IOLoopThreadStopTestSelect(IOLoopBaseTest): + """ Test ioloop being stopped by another Thread. 
""" + SELECT_POLLER = 'select' + + def start_test(self): + """Starts a thread that stops ioloop after a while and start polling""" + timer = threading.Timer( + 0.1, + lambda: self.ioloop.add_callback_threadsafe(self.ioloop.stop)) + self.addCleanup(timer.cancel) + timer.start() + self.start() # NOTE: Normal return from `start()` constitutes success @unittest.skipIf(not POLL_SUPPORTED, 'poll not supported') @@ -438,6 +508,7 @@ class IOLoopEintrTestCaseSelect(IOLoopBaseTest): timer = select_connection._Timer() self.poller = self.ioloop._get_poller(timer.get_remaining_interval, timer.process_timeouts) + self.addCleanup(self.poller.close) sockpair = self.poller._get_interrupt_pair() self.addCleanup(sockpair[0].close) @@ -493,6 +564,7 @@ class SelectPollerTestPollWithoutSockets(unittest.TestCase): poller = select_connection.SelectPoller( get_wait_seconds=timer.get_remaining_interval, process_timeouts=timer.process_timeouts) + self.addCleanup(poller.close) timer_call_container = [] timer.call_later(0.00001, lambda: timer_call_container.append(1)) @@ -514,3 +586,46 @@ class SelectPollerTestPollWithoutSockets(unittest.TestCase): break self.assertEqual(timer_call_container, [1]) + + +class PollerTestCaseSelect(unittest.TestCase): + SELECT_POLLER = 'select' + + def setUp(self): + select_type_patch = mock.patch.multiple( + select_connection, SELECT_TYPE=self.SELECT_POLLER) + select_type_patch.start() + self.addCleanup(select_type_patch.stop) + + timer = select_connection._Timer() + self.addCleanup(timer.close) + self.poller = select_connection.IOLoop._get_poller( + timer.get_remaining_interval, + timer.process_timeouts) + self.addCleanup(self.poller.close) + + def test_poller_close(self): + self.poller.close() + self.assertIsNone(self.poller._r_interrupt) + self.assertIsNone(self.poller._w_interrupt) + self.assertIsNone(self.poller._fd_handlers) + self.assertIsNone(self.poller._fd_events) + self.assertIsNone(self.poller._processing_fd_event_map) + + [email protected](not POLL_SUPPORTED, 'poll not supported') +class PollerTestCasePoll(PollerTestCaseSelect): + """Same as PollerTestCaseSelect but uses poll syscall""" + SELECT_POLLER = 'poll' + + [email protected](not EPOLL_SUPPORTED, 'epoll not supported') +class PollerTestCaseEPoll(PollerTestCaseSelect): + """Same as PollerTestCaseSelect but uses epoll syscall""" + SELECT_POLLER = 'epoll' + + [email protected](not KQUEUE_SUPPORTED, 'kqueue not supported') +class PollerTestCaseKqueue(PollerTestCaseSelect): + """Same as PollerTestCaseSelect but uses kqueue syscall""" + SELECT_POLLER = 'kqueue' diff --git a/tests/unit/select_connection_timer_tests.py b/tests/unit/select_connection_timer_tests.py index 1017e7e..72e3db5 100644 --- a/tests/unit/select_connection_timer_tests.py +++ b/tests/unit/select_connection_timer_tests.py @@ -91,6 +91,20 @@ class TimeoutClassTests(unittest.TestCase): class TimerClassTests(unittest.TestCase): """Test select_connection._Timer class""" + def test_close_empty(self): + timer = select_connection._Timer() + timer.close() + self.assertIsNone(timer._timeout_heap) + + def test_close_non_empty(self): + timer = select_connection._Timer() + t1 = timer.call_later(10, lambda: 10) + t2 = timer.call_later(20, lambda: 20) + timer.close() + self.assertIsNone(timer._timeout_heap) + self.assertIsNone(t1.callback) + self.assertIsNone(t2.callback) + def test_no_timeouts_remaining_interval_is_none(self): timer = select_connection._Timer() self.assertIsNone(timer.get_remaining_interval())
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 8 }
0.11
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y rabbitmq-server" ], "python": "3.9", "reqs_path": [ "test-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 codecov==2.1.13 coverage==7.8.0 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 mock==5.2.0 nose==1.3.7 packaging==24.2 -e git+https://github.com/pika/pika.git@f3705381d710a1c6b78de6c5e4c5772516adbca0#egg=pika pluggy==1.5.0 pytest==8.3.5 requests==2.32.3 tomli==2.2.1 tornado==6.4.2 Twisted==15.3.0 urllib3==2.3.0 zope.interface==7.2
name: pika channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - codecov==2.1.13 - coverage==7.8.0 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - mock==5.2.0 - nose==1.3.7 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - requests==2.32.3 - tomli==2.2.1 - tornado==6.4.2 - twisted==15.3.0 - urllib3==2.3.0 - zope-interface==7.2 prefix: /opt/conda/envs/pika
[ "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_server_initiated_error_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_server_initiated_no_error_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_user_initiated_close", "tests/unit/connection_timeout_tests.py::ConnectionTests::test_select_connection_timeout", "tests/unit/select_connection_ioloop_tests.py::IOLoopTimerTestSelect::test_normal", "tests/unit/select_connection_ioloop_tests.py::IOLoopTimerTestSelect::test_timer_delete_another", "tests/unit/select_connection_ioloop_tests.py::IOLoopTimerTestSelect::test_timer_for_deleting_itself", "tests/unit/select_connection_ioloop_tests.py::IOLoopTimerTestPoll::test_normal", "tests/unit/select_connection_ioloop_tests.py::IOLoopTimerTestPoll::test_timer_delete_another", "tests/unit/select_connection_ioloop_tests.py::IOLoopTimerTestPoll::test_timer_for_deleting_itself", "tests/unit/select_connection_ioloop_tests.py::IOLoopTimerTestEPoll::test_normal", "tests/unit/select_connection_ioloop_tests.py::IOLoopTimerTestEPoll::test_timer_delete_another", "tests/unit/select_connection_ioloop_tests.py::IOLoopTimerTestEPoll::test_timer_for_deleting_itself", "tests/unit/select_connection_ioloop_tests.py::IOLoopSleepTimerTestSelect::test_normal", "tests/unit/select_connection_ioloop_tests.py::IOLoopSleepTimerTestSelect::test_timer_delete_another", "tests/unit/select_connection_ioloop_tests.py::IOLoopSleepTimerTestSelect::test_timer_for_deleting_itself", "tests/unit/select_connection_ioloop_tests.py::IOLoopSleepTimerTestPoll::test_normal", "tests/unit/select_connection_ioloop_tests.py::IOLoopSleepTimerTestPoll::test_timer_delete_another", "tests/unit/select_connection_ioloop_tests.py::IOLoopSleepTimerTestPoll::test_timer_for_deleting_itself", "tests/unit/select_connection_ioloop_tests.py::IOLoopSleepTimerTestEPoll::test_normal", "tests/unit/select_connection_ioloop_tests.py::IOLoopSleepTimerTestEPoll::test_timer_delete_another", "tests/unit/select_connection_ioloop_tests.py::IOLoopSleepTimerTestEPoll::test_timer_for_deleting_itself", "tests/unit/select_connection_ioloop_tests.py::IOLoopEintrTestCaseSelect::test_eintr", "tests/unit/select_connection_ioloop_tests.py::IOLoopEintrTestCasePoll::test_eintr", "tests/unit/select_connection_ioloop_tests.py::IOLoopEintrTestCaseEPoll::test_eintr", "tests/unit/select_connection_ioloop_tests.py::PollerTestCaseSelect::test_poller_close", "tests/unit/select_connection_ioloop_tests.py::PollerTestCasePoll::test_poller_close", "tests/unit/select_connection_ioloop_tests.py::PollerTestCaseEPoll::test_poller_close", "tests/unit/select_connection_timer_tests.py::TimerClassTests::test_close_empty", "tests/unit/select_connection_timer_tests.py::TimerClassTests::test_close_non_empty" ]
[ "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestMultiCloseConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnection::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesOriginalException::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionContextManagerClosesConnectionAndPassesSystemException::test", "tests/acceptance/blocking_adapter_test.py::TestLostConnectionResultsInIsClosedConnectionAndChannel::test", "tests/acceptance/blocking_adapter_test.py::TestInvalidExchangeTypeRaisesConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseConnectionWithChannelAndConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestSuddenBrokerDisconnectBeforeChannel::test", "tests/acceptance/blocking_adapter_test.py::TestNoAccessToFileDescriptorAfterConnectionClosed::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionStart::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionTune::test", "tests/acceptance/blocking_adapter_test.py::TestProcessDataEvents::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionRegisterForBlockAndUnblock::test", "tests/acceptance/blocking_adapter_test.py::TestBlockedConnectionTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestAddCallbackThreadsafeFromSameThread::test", "tests/acceptance/blocking_adapter_test.py::TestAddCallbackThreadsafeFromAnotherThread::test", "tests/acceptance/blocking_adapter_test.py::TestAddTimeoutRemoveTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestViabilityOfMultipleTimeoutsWithSameDeadlineAndCallback::test", "tests/acceptance/blocking_adapter_test.py::TestRemoveTimeoutFromTimeoutCallback::test", "tests/acceptance/blocking_adapter_test.py::TestSleep::test", "tests/acceptance/blocking_adapter_test.py::TestConnectionProperties::test", "tests/acceptance/blocking_adapter_test.py::TestCreateAndCloseChannel::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestExchangeBindAndUnbind::test", "tests/acceptance/blocking_adapter_test.py::TestQueueDeclareAndDelete::test", "tests/acceptance/blocking_adapter_test.py::TestPassiveQueueDeclareOfUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestQueueBindAndUnbindAndPurge::test", "tests/acceptance/blocking_adapter_test.py::TestBasicGet::test", "tests/acceptance/blocking_adapter_test.py::TestBasicReject::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRejectNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNack::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackNoRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestBasicNackMultiple::test", "tests/acceptance/blocking_adapter_test.py::TestBasicRecoverWithRequeue::test", "tests/acceptance/blocking_adapter_test.py::TestTxCommit::test", "tests/acceptance/blocking_adapter_test.py::TestTxRollback::test", "tests/acceptance/blocking_adapter_test.py::TestBasicConsumeFromUnknownQueueRaisesChannelClosed::test", "tests/acceptance/blocking_adapter_test.py::TestPublishAndBasicPublishWithPubacksUnroutable::test", "tests/acceptance/blocking_adapter_test.py::TestConfirmDeliveryAfterUnroutableMessage::test", 
"tests/acceptance/blocking_adapter_test.py::TestUnroutableMessagesReturnedInNonPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestUnroutableMessageReturnedInPubackMode::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishDeliveredWhenPendingUnroutable::test", "tests/acceptance/blocking_adapter_test.py::TestPublishAndConsumeWithPubacksAndQosOfOne::test", "tests/acceptance/blocking_adapter_test.py::TestBasicConsumeWithAckFromAnotherThread::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeGeneratorWithAckFromAnotherThread::test", "tests/acceptance/blocking_adapter_test.py::TestTwoBasicConsumersOnSameChannel::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelPurgesPendingConsumerCancellationEvt::test", "tests/acceptance/blocking_adapter_test.py::TestBasicPublishWithoutPubacks::test", "tests/acceptance/blocking_adapter_test.py::TestPublishFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestStopConsumingFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseChannelFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestCloseConnectionFromBasicConsumeCallback::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubAckPublishAndConsumeHugeMessage::test", "tests/acceptance/blocking_adapter_test.py::TestNonPubackPublishAndConsumeManyMessages::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithNonAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestBasicCancelWithAckableConsumer::test", "tests/acceptance/blocking_adapter_test.py::TestUnackedMessageAutoRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestNoAckMessageNotRestoredToQueueOnChannelClose::test", "tests/acceptance/blocking_adapter_test.py::TestConsumeInactivityTimeout::test", "tests/acceptance/blocking_adapter_test.py::TestChannelFlow::test" ]
[ "tests/acceptance/blocking_adapter_test.py::TestConnectWithDownedBroker::test", "tests/acceptance/blocking_adapter_test.py::TestDisconnectDuringConnectionProtocol::test", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_channel", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_close_with_channel_closed_exception", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_connection_attempts_with_timeout", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_constructor", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_process_io_for_connection_setup", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_process_io_for_connection_setup_fails_with_open_error", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_sleep", "tests/unit/connection_timeout_tests.py::ConnectionTests::test_asyncio_connection_timeout", "tests/unit/connection_timeout_tests.py::ConnectionTests::test_base_connection_timeout", "tests/unit/connection_timeout_tests.py::ConnectionTests::test_blocking_connection_timeout", "tests/unit/connection_timeout_tests.py::ConnectionTests::test_parameters", "tests/unit/connection_timeout_tests.py::ConnectionTests::test_tornado_connection_timeout", "tests/unit/connection_timeout_tests.py::ConnectionTests::test_twisted_connection_timeout", "tests/unit/select_connection_timer_tests.py::TimeoutClassTests::test_eq_operator", "tests/unit/select_connection_timer_tests.py::TimeoutClassTests::test_le_operator", "tests/unit/select_connection_timer_tests.py::TimeoutClassTests::test_lt_operator", "tests/unit/select_connection_timer_tests.py::TimeoutClassTests::test_non_callable_callback_raises", "tests/unit/select_connection_timer_tests.py::TimeoutClassTests::test_non_negative_deadline", "tests/unit/select_connection_timer_tests.py::TimeoutClassTests::test_properties", "tests/unit/select_connection_timer_tests.py::TimerClassTests::test_add_and_remove_timeout", "tests/unit/select_connection_timer_tests.py::TimerClassTests::test_add_timeout_from_another_timeout", "tests/unit/select_connection_timer_tests.py::TimerClassTests::test_call_later_multiple_timers", "tests/unit/select_connection_timer_tests.py::TimerClassTests::test_call_later_non_negative_delay_check", "tests/unit/select_connection_timer_tests.py::TimerClassTests::test_call_later_single_timer_expires", "tests/unit/select_connection_timer_tests.py::TimerClassTests::test_cancel_expired_timeout_from_another_timeout", "tests/unit/select_connection_timer_tests.py::TimerClassTests::test_cancel_unexpired_timeout_from_another_timeout", "tests/unit/select_connection_timer_tests.py::TimerClassTests::test_gc_of_unexpired_timeouts", "tests/unit/select_connection_timer_tests.py::TimerClassTests::test_no_timeouts_remaining_interval_is_none" ]
[]
BSD 3-Clause "New" or "Revised" License
2156
[ "pika/adapters/blocking_connection.py", "pika/adapters/asyncio_connection.py", "pika/adapters/select_connection.py", "pika/adapters/tornado_connection.py", "docs/faq.rst", "pika/adapters/twisted_connection.py", "pika/connection.py", "pika/adapters/base_connection.py" ]
[ "pika/adapters/blocking_connection.py", "pika/adapters/asyncio_connection.py", "pika/adapters/select_connection.py", "pika/adapters/tornado_connection.py", "docs/faq.rst", "pika/adapters/twisted_connection.py", "pika/connection.py", "pika/adapters/base_connection.py" ]
innolitics__hdat-14
f0a6bdf05137ae12dfcfa4f1ac9053224f905e71
2018-02-14 22:53:31
f0a6bdf05137ae12dfcfa4f1ac9053224f905e71
diff --git a/README.md b/README.md index 5714ec4..b296333 100644 --- a/README.md +++ b/README.md @@ -103,9 +103,9 @@ A casespec is a string that selects one or more test cases. A casespec may spec Here are several casespecs along with the test cases they would select: -- `` Selects all test cases in all suites. -- `a` Selects all cases in the test suite with id "a". -- `a/b` Selects test case with id "b" in the suite with id "b". +"" - Selects all test cases in all suites. +"a" - Selects all cases in the test suite with id "a". +"a/b" - Selects test case with id "b" in the suite with id "b". # Resultspecs @@ -113,10 +113,10 @@ A resultspec is a string that selects one or more test results. A result spec m Here are several resultspec along with the test cases they would select: -- `` Selects the most recent result for every test case in every test suite. -- `a` Selects the most recent results for every test case in the test suite with id "a". -- `a/b` Selects the most recent result for the test case with id "b" in the test suite with id "a". -- `a/b/c` Selects the test result with id "c" for the test case with id "b" in the test suite with id "a". -- `a/b/~0` Selects the most recent result for the test case with id "b" in the test suite with id "a". -- `a/b/~1` Selects the previous result for the test case with id "b" in the test suite with id "a". -- `a/b/~4` Selects the 4 test older than the previous result for the test case with id "b" in the test suite with id "a". +"" - Selects the most recent result for every test case in every test suite. +"a" - Selects the most recent results for every test case in the test suite with id "a". +"a/b" - Selects the most recent result for the test case with id "b" in the test suite with id "a". +"a/b/c" - Selects the test result with id "c" for the test case with id "b" in the test suite with id "a". +"a/b/~0" - Selects the most recent result for the test case with id "b" in the test suite with id "a". +"a/b/~1" - Selects the previous result for the test case with id "b" in the test suite with id "a". +"a/b/~4" - Selects the 4 test older than the previous result for the test case with id "b" in the test suite with id "a". 
diff --git a/hdat/hdat_cli.py b/hdat/hdat_cli.py new file mode 100644 index 0000000..a995af4 --- /dev/null +++ b/hdat/hdat_cli.py @@ -0,0 +1,111 @@ +import argparse +import traceback + +from .resultspec import resolve_resultspecs, print_resultspec +from .casespec import resolve_casespecs, select_suite +from .runner import run_cases +from .util import AbortError + + +def parse_arguments(arguments): + parser = argparse.ArgumentParser(prog='hdat') + subparsers = parser.add_subparsers(dest='command', metavar='<command>') + + run_help = 'run cases, store results in archive, compare against goldens' + run_parser = subparsers.add_parser('run', help=run_help) + run_parser.add_argument('casespecs', nargs='*', default=[''], metavar='<case>') + run_parser.add_argument('--collect-only', default=False, action='store_true') + + show_help = 'visualize a single result' + show_parser = subparsers.add_parser('show', help=show_help) + show_result_help = 'result specifier to show' + show_parser.add_argument('resultspec', nargs="?", default='', metavar='<result>', help=show_result_help) + + runshow_help = 'run then visualize a single result' + runshow_parser = subparsers.add_parser('runshow', help=runshow_help) + runshow_parser.add_argument('casespecs', nargs=1, default='', metavar='<result>') + + diff_help = 'compare two results' + diff_parser = subparsers.add_parser('diff', help=diff_help) + diff_result_help = 'results being compared' + diff_parser.add_argument('resultspec', nargs=2, metavar='<result>', help=diff_result_help) + + verify_help = 'move result metrics from archive to the golden store' + verify_parser = subparsers.add_parser('verify', help=verify_help) + verify_result_help = 'results to be stripped and moved into the golden store' + verify_parser.add_argument('resultspec', nargs='?', default='', metavar='<result>', help=verify_result_help) + + return parser.parse_args(arguments) + + +def _format_cases_status(cases_status): + return 'PASS: {}, FAIL: {}, UNKNOWN: {}, ERROR: {}'.format( + cases_status['pass'], + cases_status['fail'], + cases_status['unknown'], + cases_status['error'], + ) + + +def hdat_cli(arguments, suites, golden_store, archive, git_info): + args = parse_arguments(arguments) + + if args.command is None: + parse_arguments(['-h']) + + if args.command == 'run' and args.collect_only: + cases = resolve_casespecs(suites, args.casespecs) + print("\n".join(['{}/{}'.format(suite_id, case_id) for suite_id, case_id in cases])) + elif args.command == 'run': + cases = resolve_casespecs(suites, args.casespecs) + cases_status = run_cases(suites, golden_store, archive, git_info, cases) + if cases_status['pass'] < len(cases): + raise AbortError(_format_cases_status(cases_status)) + elif args.command == 'show': + results = resolve_resultspecs(archive, args.resultspec) + for result in results: + show_result(suites, result) + elif args.command == 'runshow': + cases = resolve_casespecs(suites, args.casespecs) + cases_status = run_cases(suites, golden_store, archive, git_info, cases) + if cases_status['error'] > 0: + raise AbortError(_format_cases_status(cases_status)) + results = resolve_resultspecs(archive, args.casespecs) + for result in results: + show_result(suites, result) + elif args.command == 'diff': + golden_results = resolve_resultspecs(archive, args.resultspec[0]) + results = resolve_resultspecs(archive, args.resultspec[1]) + for golden_result, result in zip(golden_results, results): + diff_results(suites, golden_result, result) + elif args.command == 'verify': + results = 
resolve_resultspecs(archive, args.resultspec) + for result in results: + golden_store.insert(result) + + +def show_result(suites, result): + suite = select_suite(suites, result['suite_id']) + try: + suite.show(result) + except Exception as e: + traceback.print_exc() + msg = 'Error when attempting to show "{}": {}' + raise AbortError(msg.format(print_resultspec(result), e)) + + +def diff_results(suites, golden_result, result): + suite_id = result['suite_id'] + golden_suite_id = golden_result['suite_id'] + + if golden_suite_id != suite_id: + msg = 'Can not diff results from different suites "{}" and "{}"' + raise AbortError(msg.format(golden_suite_id, suite_id)) + + suite = select_suite(suites, suite_id) + try: + suite.diff(golden_result, result) + except Exception as e: + traceback.print_exc() + msg = 'Error when attempting to show "{}": {}' + raise AbortError(msg.format(print_resultspec(result), e)) diff --git a/hdat/main.py b/hdat/main.py old mode 100644 new mode 100755 index 2e763c6..b35ba0b --- a/hdat/main.py +++ b/hdat/main.py @@ -1,132 +1,41 @@ -import argparse -import traceback -from collections import OrderedDict +#!/usr/bin/env python3 +import sys +import os -import tabulate +from hdat.hdat_cli import hdat_cli +from hdat.suite import collect_suites +from hdat.source_control import git_info_from_directory +from hdat.util import repository_root, print_error, AbortError +from hdat.store import Archive, GoldenStore -from .resultspec import resolve_resultspec, print_resultspec -from .casespec import resolve_casespecs, select_suite -from .runner import run_cases -from .util import AbortError +def main(): + cwd = os.getcwd() -def parse_arguments(arguments): - parser = argparse.ArgumentParser(prog='hdatt') - subparsers = parser.add_subparsers(dest='command', metavar='<command>') - - run_help = 'run cases, store results in archive, compare against goldens' - run_parser = subparsers.add_parser('run', help=run_help) - run_parser.add_argument('casespecs', nargs='*', default=[''], metavar='<case>') - run_parser.add_argument('--collect-only', default=False, action='store_true') - - show_help = 'visualize a single result' - show_parser = subparsers.add_parser('show', help=show_help) - show_result_help = 'result specifier to show' - show_parser.add_argument('resultspec', nargs="?", default='', metavar='<result>', help=show_result_help) - - runshow_help = 'run then visualize a single result' - runshow_parser = subparsers.add_parser('runshow', help=runshow_help) - runshow_parser.add_argument('casespecs', nargs=1, default='', metavar='<result>') - - diff_help = 'compare two results' - diff_parser = subparsers.add_parser('diff', help=diff_help) - diff_golden_help = 'result to compare to (defaults to current golden for the case)' - diff_parser.add_argument('goldenspec', nargs='?', default=None, metavar='<golden>', help=diff_golden_help) - diff_result_help = 'result being compared' - diff_parser.add_argument('resultspec', nargs=1, metavar='<result>', help=diff_result_help) - - verify_help = 'move result metrics from archive to the golden store' - verify_parser = subparsers.add_parser('verify', help=verify_help) - verify_result_help = 'results to be stripped and moved into the golden store' - verify_parser.add_argument('resultspec', nargs='?', default='', metavar='<result>', help=verify_result_help) - - report_help = 'Print a report from the most recent results for a given suite.' 
- report_parser = subparsers.add_parser('report', help=report_help) - report_help = 'suite to generate and print out a report for.' - report_parser.add_argument('suitespec', default='', metavar='<suite>', help=report_help) - - return parser.parse_args(arguments) - - -def _format_cases_status(cases_status): - return 'PASS: {}, FAIL: {}, UNKNOWN: {}, ERROR: {}'.format( - cases_status['pass'], - cases_status['fail'], - cases_status['unknown'], - cases_status['error'], - ) - - -def main(arguments, suites, golden_store, archive, git_info): - args = parse_arguments(arguments) + try: + git_info = git_info_from_directory(cwd) + repo_directory = repository_root(cwd) - if args.command is None: - parse_arguments(['-h']) + if 'HDATT_ARCHIVE' in os.environ: + archive_location = os.environ['HDATT_ARCHIVE'] + else: + archive_location = os.path.join(repo_directory, '.hdattarchive') + archive = Archive(archive_location) - if args.command == 'run' and args.collect_only: - cases = resolve_casespecs(suites, args.casespecs) - print("\n".join(['{}/{}'.format(s, c) for s, c in cases])) - elif args.command == 'run': - cases = resolve_casespecs(suites, args.casespecs) - cases_status = run_cases(suites, golden_store, archive, git_info, cases) - if cases_status['pass'] < len(cases): - raise AbortError(_format_cases_status(cases_status)) - elif args.command == 'show': - result = resolve_resultspec(archive, args.resultspec) - show_result(suites, result) - elif args.command == 'runshow': - cases = resolve_casespecs(suites, args.casespecs) - cases_status = run_cases(suites, golden_store, archive, git_info, cases) - if cases_status['error'] > 0: - raise AbortError(_format_cases_status(cases_status)) - result = resolve_resultspec(archive, args.casespecs[0]) - show_result(suites, result) - elif args.command == 'diff': - golden_result = resolve_resultspec(archive, args.goldenspec) - result = resolve_resultspec(archive, args.resultspec) - diff_results(suites, golden_result, result) - elif args.command == 'verify': - result = resolve_resultspec(archive, args.resultspec) - golden_store.insert(result) - elif args.command == 'report': - suite = select_suite(suites, args.suitespec) - result_specs = ['{}/{}'.format(args.suitespec, case_id) for case_id in suite.collect()] - results = [resolve_resultspec(archive, result_spec) for result_spec in result_specs] - table = [] - for result in results: - row = OrderedDict({'case id': result['case_id']}) - row['TPF'] = result['metrics']['TPF'] - row['FPF'] = result['metrics']['FPF'] - table.append(row) + golden_store_location = os.path.join(repo_directory, 'golden_results') + golden_store = GoldenStore(golden_store_location) - print(tabulate.tabulate(table, headers="keys", tablefmt="psql", floatfmt=".5f")) - # TODO: Find better way to print results table. Currently it only works - # with feature detection hdat. e.g. allow the user to specify two - # metrics (e.g. 
using jsonrefs) for the x and y axis of the table + suites = collect_suites(cwd) + hdat_cli(sys.argv[1:], suites, golden_store, archive, git_info) -def show_result(suites, result): - suite = select_suite(suites, result['suite_id']) - try: - suite.show(result) - except Exception as e: - traceback.print_exc() - msg = 'Error when attempting to show "{}": {}' - raise AbortError(msg.format(print_resultspec(result), e)) + sys.exit(0) + except AbortError as e: + print_error(e) -def diff_results(suites, golden_result, result): - suite_id = result['suite_id'] - golden_suite_id = golden_result['result_id'] + sys.exit(1) - if golden_suite_id != suite_id: - msg = 'Can not diff results from different suites "{}" and "{}"' - raise AbortError(msg.format(golden_suite_id, suite_id)) - suite = select_suite(suites, suite_id) - try: - suite.diff(golden_result, result) - except Exception as e: - traceback.print_exc() - msg = 'Error when attempting to show "{}": {}' - raise AbortError(msg.format(print_resultspec(result), e)) +if __name__ == '__main__': + main() diff --git a/hdat/resultspec.py b/hdat/resultspec.py index afcf739..9851c8a 100644 --- a/hdat/resultspec.py +++ b/hdat/resultspec.py @@ -4,10 +4,10 @@ import traceback from .util import AbortError -def resolve_resultspec(archive, resultspec): +def resolve_resultspecs(archive, resultspec): if os.path.isfile(resultspec): try: - return archive.read_result(resultspec) + return [archive.read_result(resultspec)] except Exception: traceback.print_exc() msg = 'Unable to read resultspec "{}"' @@ -18,12 +18,14 @@ def resolve_resultspec(archive, resultspec): else: resultspec_parts = resultspec.split('/') - pick_latest = len(resultspec_parts) == 2 + pick_recents_all_suites = len(resultspec_parts) == 0 + pick_recents_one_suite = len(resultspec_parts) == 1 + pick_recent = len(resultspec_parts) == 2 pick_by_index = len(resultspec_parts) == 3 and resultspec_parts[2].startswith('~') pick_by_result_id = len(resultspec_parts) == 3 and not pick_by_index - if pick_latest or pick_by_index: - if pick_latest: + if pick_recent or pick_by_index: + if pick_recent: i = -1 else: try: @@ -32,27 +34,23 @@ def resolve_resultspec(archive, resultspec): msg = 'Invalid resultspec "{}"; the third part must be a valid result_id' + \ 'or a tilde followed by an integer, but not "{}"' raise AbortError(msg.format(resultspec, resultspec_parts[2][1])) - - # TODO: make this faster in the event there are many results - results = archive.select_all(*resultspec_parts[:2]) - if len(results) == 0: - msg = 'Unable to locate any results matching "{}" in the archive at "{}"' - raise AbortError(msg.format(resultspec, archive.root)) - else: - try: - return results[i] - except IndexError: - msg = 'Unable to locate any results matching "{}", there are only {} results present.' - raise AbortError(msg.format(resultspec, i)) - + try: + result = archive.select_recent(i, *resultspec_parts[:2]) + return [result] + except IndexError: + msg = 'Unable to locate any results matching "{}", there are more than {} results present.' + raise AbortError(msg.format(resultspec, i)) elif pick_by_result_id: result = archive.select(*resultspec_parts) if result is None: msg = 'Unable to locate result "{}" in the archive at "{}"' raise AbortError(msg.format(resultspec, archive.root)) else: - return result - + return [result] + elif pick_recents_all_suites: + return archive.select_recents_all() + elif pick_recents_one_suite: + return archive.select_recents_suite(resultspec_parts[0]) else: msg = 'Invalid result spec "{}". 
' + \ 'Resultspecs must point to a result file, ' + \ diff --git a/hdat/runner.py b/hdat/runner.py index e57ba50..db8e82b 100644 --- a/hdat/runner.py +++ b/hdat/runner.py @@ -6,7 +6,7 @@ from .casespec import print_casespec def run_cases(suites, golden_store, archive, git_info, cases): ''' - Run a list of cases, store results in the archive, and verify against + Run a list of cases, store results in the archive, and check against results in the golden_store. Cases are specified by a list of tuples of the form `(suite_id, case_id)`. @@ -47,7 +47,7 @@ def run_case(suite, golden_store, archive, git_info, case_id): if golden_result is None: status, comments = 'unknown', 'No golden result present' else: - passed, comments = suite.verify(golden_result['metrics'], metrics) + passed, comments = suite.check(golden_result['metrics'], metrics) status = 'pass' if passed else 'fail' result = build_result(suite, git_info, case_id, case_input, metrics, context, status) @@ -61,6 +61,10 @@ def validate_result(run_result): raise ValueError(msg.format(repr(run_result))) +def build_result_id(result): + return '{}_{}'.format(result['ran_on'], result['commit']) + + def build_result(suite, git_info, case_id, case_input, metrics, context, status): run_datetime = datetime.datetime.utcnow() result = { @@ -75,5 +79,5 @@ def build_result(suite, git_info, case_id, case_input, metrics, context, status) 'status': status, } - result['result_id'] = suite.build_result_id(result) + result['result_id'] = build_result_id(result) return result diff --git a/hdat/store.py b/hdat/store.py index 5f29d80..e1b890c 100644 --- a/hdat/store.py +++ b/hdat/store.py @@ -2,6 +2,9 @@ import os import json import copy import pickle +from collections import namedtuple + +from .util import AbortError class Archive: @@ -17,21 +20,50 @@ class Archive: else: return self.read_result(result_filename) - def select_all(self, *args): + def select_recent(self, i, *args): top_directory = os.path.join(self.root, *args) - - result_filenames = [] - for dirpath, _, filenames in os.walk(top_directory): - result_filenames.extend([os.path.join(dirpath, p) for p in filenames]) + if not os.path.isdir(top_directory): + msg = "Selected case directory {} does not exist or is not a directory" + raise AbortError(msg.format(top_directory)) results = [] - for filename in result_filenames: - result = self.read_result(filename) - results.append(result) - - results_sorted = sorted(results, key=lambda r: r['ran_on']) + ResultDesc = namedtuple('ResultDesc', ('id', 'ran_on')) + id_to_full_result = dict() + + for entry in os.listdir(top_directory): + if not entry.startswith('.') and os.path.isfile(os.path.join(top_directory, entry)): + # check for ran_on timestamp as part of <timestamp>_<commit_id> result ID format + try: + ran_on = float(entry.split('_')[0]) + results.append(ResultDesc(entry, ran_on)) + except ValueError: + result = self.read_result(os.path.join(top_directory, entry)) + ran_on = result['ran_on'] + id_to_full_result[entry] = result + results.append(ResultDesc(entry, ran_on)) + + results_sorted = sorted(results, key=lambda r: r.ran_on) + recent_id = results_sorted[i].id + + if recent_id in id_to_full_result: + return id_to_full_result[recent_id] + else: + return self.read_result(os.path.join(top_directory, recent_id)) - return results_sorted + def select_recents_suite(self, *args): + top_directory = os.path.join(self.root, *args) + if not os.path.isdir(top_directory): + msg = "Selected suite directory {} does not exist or is not a directory" + raise 
AbortError(msg.format(top_directory)) + + for entry in os.listdir(top_directory): + if not entry.startswith('.') and os.path.isdir(os.path.join(top_directory, entry)): + yield self.select_recent(-1, *(args+(entry,))) + + def select_recents_all(self): + for entry in os.listdir(self.root): + if not entry.startswith('.') and os.path.isdir(os.path.join(self.root, entry)): + yield from self.select_recents_suite(entry) def insert(self, result): suite_id = result['suite_id'] diff --git a/hdat/suite.py b/hdat/suite.py index b730350..a45c80c 100644 --- a/hdat/suite.py +++ b/hdat/suite.py @@ -7,7 +7,7 @@ class Suite: ''' Base class for a suite of algorithm test cases. - Is responsible for collecting, running, verifying, and visualizing the + Is responsible for collecting, running, checking, and visualizing the results of running the algorithm against its test cases. ''' def cases(self): @@ -18,9 +18,9 @@ class Suite: ''' raise NotImplementedError() - def verify(self, golden_metrics, metrics): + def check(self, golden_metrics, metrics): ''' - Given two result comparable outputs, verify if the second result passes based on + Given two result comparable outputs, check if the second result passes based on the first result. Should return a tuple with a boolean and a string with any comments. ''' @@ -47,9 +47,6 @@ class Suite: def id(self): return type(self).__name__ - def build_result_id(self, result): - return '{}_{}'.format(result['ran_on'], result['commit']) - def collect_suites(directory): suite_classes = _collect_suite_classes(directory) diff --git a/hdat_main_script b/hdat_main_script deleted file mode 100755 index 9e40c89..0000000 --- a/hdat_main_script +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python -import sys -import os - -from hdatt.main import main -from hdatt.suite import collect_suites -from hdatt.source_control import git_info_from_directory -from hdatt.util import repository_root, print_error, AbortError -from hdatt.store import Archive, GoldenStore - - -if __name__ == '__main__': - cwd = os.getcwd() - - try: - git_info = git_info_from_directory(cwd) - repo_directory = repository_root(cwd) - - if 'HDATT_ARCHIVE' in os.environ: - archive_location = os.environ['HDATT_ARCHIVE'] - else: - archive_location = os.path.join(repo_directory, '.hdattarchive') - archive = Archive(archive_location) - - golden_store_location = os.path.join(repo_directory, 'golden_results') - golden_store = GoldenStore(golden_store_location) - - suites = collect_suites(cwd) - - main(sys.argv[1:], suites, golden_store, archive, git_info) - - sys.exit(0) - - except AbortError as e: - print_error(e) - - sys.exit(1) diff --git a/setup.py b/setup.py index acce29c..2621d2f 100644 --- a/setup.py +++ b/setup.py @@ -38,7 +38,7 @@ setup( packages=find_packages(exclude=['contrib', 'docs', 'tests']), - install_requires=['gitpython', 'tabulate'], + install_requires=['gitpython'], extras_require={ 'dev': ['check-manifest', 'sphinx', 'sphinx-autobuild', 'mock'], @@ -47,5 +47,9 @@ setup( package_data={}, data_files=[], - entry_points={}, + entry_points={ + 'console_scripts': [ + 'hdat = hdat.main:main' + ] + }, )
Hook up hdat_main_script in setup.py After running `pip install hdat` we should have the `hdat_main_script` available as `hdat` on the shell PATH. I am not sure how to do this, but we will probably need to update our `setup.py` and hook up either a "script" or an "entrypoint".
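For reference, a minimal sketch of the `console_scripts` wiring this issue asks about — the resolution patch above adds exactly this entry point (`hdat = hdat.main:main`); the package name and excludes here are illustrative only:

```python
# Minimal setup.py sketch; mirrors the entry point the patch above adds.
# Package metadata here is illustrative, not the project's full setup.py.
from setuptools import setup, find_packages

setup(
    name="hdat",
    packages=find_packages(exclude=["tests"]),
    entry_points={
        "console_scripts": [
            # On `pip install`, setuptools generates an `hdat`
            # executable on PATH that invokes hdat.main:main().
            "hdat = hdat.main:main",
        ],
    },
)
```

Compared with shipping a standalone file via `scripts=`, an entry point lets setuptools generate a platform-appropriate wrapper (including `.exe` shims on Windows), which is why the patch deletes `hdat_main_script` entirely.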
innolitics/hdat
diff --git a/tests/conftest.py b/tests/conftest.py index 7de6259..8374aa9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -23,7 +23,7 @@ def tmp_archive(): class BaseSuite(Suite): - def verify(self, old, new): + def check(self, old, new): return old == new, 'Looks good!' def run(self, case_input): @@ -81,3 +81,40 @@ def mock_suites(basic_suite_a, basic_suite_b): 'a': basic_suite_a(), 'b': basic_suite_b(), } + + [email protected] +def mock_results(): + return [ + { + 'suite_id': 'a', + 'case_id': '1', + 'result_id': 'r1', + 'ran_on': 100, + }, + { + 'suite_id': 'a', + 'case_id': '1', + 'result_id': '101_r2', + 'ran_on': 101, + }, + { + 'suite_id': 'a', + 'case_id': '2', + 'result_id': '103_r3', + 'ran_on': 103, + }, + { + 'suite_id': 'b', + 'case_id': '1', + 'result_id': '103_r4', + 'ran_on': 104, + }, + ] + + [email protected] +def archive(tmp_archive, mock_results): + for result in mock_results: + tmp_archive.insert(result) + return tmp_archive diff --git a/tests/main_test.py b/tests/main_test.py index 52d4e81..399dd97 100644 --- a/tests/main_test.py +++ b/tests/main_test.py @@ -1,37 +1,43 @@ import pytest -from hdat.main import main +from hdat.hdat_cli import hdat_cli from hdat.util import AbortError @pytest.fixture -def main_with_mocks(mock_suites, tmp_golden_store, tmp_archive, mock_git_info): - return lambda args: main(args, mock_suites, tmp_golden_store, tmp_archive, mock_git_info) +def hdat_cli_with_mocks(mock_suites, tmp_golden_store, tmp_archive, mock_git_info): + return lambda args: hdat_cli(args, mock_suites, tmp_golden_store, tmp_archive, mock_git_info) class TestMainRun: - def test_run_all_verify_all_rerun(self, main_with_mocks): + def test_run_all_verify_all_rerun(self, hdat_cli_with_mocks): with pytest.raises(AbortError) as e: - main_with_mocks(['run']) + hdat_cli_with_mocks(['run']) assert 'UNKNOWN: 3' in str(e) - main_with_mocks(['verify', 'a/1']) + hdat_cli_with_mocks(['verify', 'a/1']) with pytest.raises(AbortError) as e: - main_with_mocks(['run']) + hdat_cli_with_mocks(['run']) assert 'UNKNOWN: 2' in str(e) assert 'PASS: 1' in str(e) - main_with_mocks(['verify', 'a/2']) - main_with_mocks(['verify', 'b/3']) + hdat_cli_with_mocks(['verify', 'a/2']) + hdat_cli_with_mocks(['verify', 'b/3']) - main_with_mocks(['run']) + hdat_cli_with_mocks(['run']) - def test_show_most_recent(self, main_with_mocks): + def test_show_most_recent(self, hdat_cli_with_mocks): with pytest.raises(AbortError) as e: - main_with_mocks(['run', 'a/1']) + hdat_cli_with_mocks(['run', 'a/1']) with pytest.raises(AbortError) as e: - main_with_mocks(['show', 'a/1']) + hdat_cli_with_mocks(['show', 'a/1']) assert 'showing "a/1' in str(e) + + def test_diff(self, hdat_cli_with_mocks, archive, mock_results): + with pytest.raises(AbortError) as e: + hdat_cli_with_mocks(['diff', 'a/1/r1', 'a/1/101_r2']) + + assert 'diffing "a/1/r1" and "a/1/101_r2"' in str(e) diff --git a/tests/resultspec_test.py b/tests/resultspec_test.py index 27de696..1505f1b 100644 --- a/tests/resultspec_test.py +++ b/tests/resultspec_test.py @@ -1,50 +1,13 @@ import pytest -from hdat.resultspec import resolve_resultspec +from hdat.resultspec import resolve_resultspecs from hdat.util import AbortError [email protected] -def mock_results(): - return [ - { - 'suite_id': 'a', - 'case_id': '1', - 'result_id': 'r1', - 'ran_on': 100, - }, - { - 'suite_id': 'a', - 'case_id': '1', - 'result_id': 'r2', - 'ran_on': 101, - }, - { - 'suite_id': 'a', - 'case_id': '2', - 'result_id': 'r3', - 'ran_on': 103, - }, - { - 'suite_id': 'b', - 'case_id': 
'1', - 'result_id': 'r4', - 'ran_on': 104, - }, - ] - - [email protected] -def archive(tmp_archive, mock_results): - for result in mock_results: - tmp_archive.insert(result) - return tmp_archive - - class TestResolveResultSpec: def test_existing_file(self, archive, mock_results): resultspec = archive._result_filename('a', '1', 'r1') - assert resolve_resultspec(archive, resultspec) == mock_results[0] + assert list(resolve_resultspecs(archive, resultspec)) == [mock_results[0]] @pytest.mark.skip def test_existing_file_invalid(self, archive): @@ -54,32 +17,43 @@ class TestResolveResultSpec: def test_nonexistant_file(self, archive): resultspec = archive._result_filename('a', '1', 'huh') with pytest.raises(AbortError): - resolve_resultspec(archive, resultspec) + resolve_resultspecs(archive, resultspec) def test_fully_qualified(self, archive, mock_results): - resultspec = 'a/1/r2' - assert resolve_resultspec(archive, resultspec) == mock_results[1] + resultspec = 'a/1/101_r2' + assert list(resolve_resultspecs(archive, resultspec)) == [mock_results[1]] + + def test_relative_index_1(self, archive, mock_results): + resultspec = 'a/1/~0' + assert list(resolve_resultspecs(archive, resultspec)) == [mock_results[1]] + + def test_relative_index_2(self, archive, mock_results): + resultspec = 'a/1/~1' + assert list(resolve_resultspecs(archive, resultspec)) == [mock_results[0]] + + def test_relative_index_3(self, archive, mock_results): + resultspec = 'a/1/~2' + with pytest.raises(AbortError): + assert resolve_resultspecs(archive, resultspec) def test_fully_qualified_missing(self, archive): resultspec = 'a/1/huh' with pytest.raises(AbortError): - resolve_resultspec(archive, resultspec) + resolve_resultspecs(archive, resultspec) def test_most_recent_in_case(self, archive, mock_results): resultspec = 'a/1' - assert resolve_resultspec(archive, resultspec) == mock_results[1] + assert list(resolve_resultspecs(archive, resultspec)) == [mock_results[1]] - @pytest.mark.xfail # TODO not implemented def test_most_recent_in_suite(self, archive, mock_results): resultspec = 'a' - assert resolve_resultspec(archive, resultspec) == mock_results[2] + assert list(resolve_resultspecs(archive, resultspec)) == [mock_results[1], mock_results[2]] - @pytest.mark.xfail # TODO not implemented - def test_most_recent_overall(self, archive, mock_results): + def test_most_recent_all(self, archive, mock_results): resultspec = '' - assert resolve_resultspec(archive, resultspec) == mock_results[3] + assert list(resolve_resultspecs(archive, resultspec)) == [mock_results[1], mock_results[2], mock_results[3]] def test_most_recent_missing(self, archive): resultspec = 'a/huh' with pytest.raises(AbortError): - resolve_resultspec(archive, resultspec) + resolve_resultspecs(archive, resultspec)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_added_files", "has_removed_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 7 }
unknown
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "readme_renderer", "flake8", "pytest" ], "pre_install": [], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work Babel==2.11.0 bleach==4.1.0 build==0.9.0 certifi==2021.5.30 charset-normalizer==2.0.12 check-manifest==0.48 colorama==0.4.5 docutils==0.18.1 flake8==5.0.4 gitdb==4.0.9 GitPython==3.1.18 -e git+https://github.com/innolitics/hdat.git@f0a6bdf05137ae12dfcfa4f1ac9053224f905e71#egg=hdat idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work Jinja2==3.0.3 livereload==2.6.3 MarkupSafe==2.0.1 mccabe==0.7.0 mock==5.2.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pep517==0.13.1 pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pycodestyle==2.9.1 pyflakes==2.5.0 Pygments==2.14.0 pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 pytz==2025.2 readme-renderer==34.0 requests==2.27.1 six==1.17.0 smmap==5.0.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-autobuild==2021.3.14 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tabulate==0.8.10 toml @ file:///tmp/build/80754af9/toml_1616166611790/work tomli==1.2.3 tornado==6.1 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work urllib3==1.26.20 webencodings==0.5.1 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: hdat channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - babel==2.11.0 - bleach==4.1.0 - build==0.9.0 - charset-normalizer==2.0.12 - check-manifest==0.48 - colorama==0.4.5 - docutils==0.18.1 - flake8==5.0.4 - gitdb==4.0.9 - gitpython==3.1.18 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - jinja2==3.0.3 - livereload==2.6.3 - markupsafe==2.0.1 - mccabe==0.7.0 - mock==5.2.0 - pep517==0.13.1 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pygments==2.14.0 - pytz==2025.2 - readme-renderer==34.0 - requests==2.27.1 - six==1.17.0 - smmap==5.0.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-autobuild==2021.3.14 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tabulate==0.8.10 - tomli==1.2.3 - tornado==6.1 - urllib3==1.26.20 - webencodings==0.5.1 prefix: /opt/conda/envs/hdat
[ "tests/main_test.py::TestMainRun::test_run_all_verify_all_rerun", "tests/main_test.py::TestMainRun::test_show_most_recent", "tests/main_test.py::TestMainRun::test_diff", "tests/resultspec_test.py::TestResolveResultSpec::test_existing_file", "tests/resultspec_test.py::TestResolveResultSpec::test_nonexistant_file", "tests/resultspec_test.py::TestResolveResultSpec::test_fully_qualified", "tests/resultspec_test.py::TestResolveResultSpec::test_relative_index_1", "tests/resultspec_test.py::TestResolveResultSpec::test_relative_index_2", "tests/resultspec_test.py::TestResolveResultSpec::test_relative_index_3", "tests/resultspec_test.py::TestResolveResultSpec::test_fully_qualified_missing", "tests/resultspec_test.py::TestResolveResultSpec::test_most_recent_in_case", "tests/resultspec_test.py::TestResolveResultSpec::test_most_recent_missing" ]
[ "tests/resultspec_test.py::TestResolveResultSpec::test_most_recent_in_suite", "tests/resultspec_test.py::TestResolveResultSpec::test_most_recent_all" ]
[]
[]
MIT License
2,157
[ "hdat/resultspec.py", "setup.py", "hdat/main.py", "hdat_main_script", "hdat/store.py", "README.md", "hdat/hdat_cli.py", "hdat/runner.py", "hdat/suite.py" ]
[ "hdat/resultspec.py", "setup.py", "hdat/main.py", "hdat_main_script", "hdat/store.py", "README.md", "hdat/hdat_cli.py", "hdat/runner.py", "hdat/suite.py" ]
ARMmbed__greentea-263
68508c5f4d7cf0635c75399d0ff7cfa896fdf2cc
2018-02-15 17:29:56
68508c5f4d7cf0635c75399d0ff7cfa896fdf2cc
diff --git a/mbed_greentea/mbed_greentea_cli.py b/mbed_greentea/mbed_greentea_cli.py index f6a13c4..446b965 100644 --- a/mbed_greentea/mbed_greentea_cli.py +++ b/mbed_greentea/mbed_greentea_cli.py @@ -23,6 +23,7 @@ import os import sys import random import optparse +import fnmatch from time import time try: from Queue import Queue @@ -119,18 +120,6 @@ def create_filtered_test_list(ctest_test_list, test_by_names, skip_test, test_sp @return """ - def filter_names_by_prefix(test_case_name_list, prefix_name): - """! - @param test_case_name_list List of all test cases - @param prefix_name Prefix of test name we are looking for - @result Set with names of test names starting with 'prefix_name' - """ - result = list() - for test_name in test_case_name_list: - if test_name.startswith(prefix_name): - result.append(test_name) - return sorted(result) - filtered_ctest_test_list = ctest_test_list test_list = None invalid_test_names = [] @@ -143,17 +132,15 @@ def create_filtered_test_list(ctest_test_list, test_by_names, skip_test, test_sp gt_logger.gt_log("test case filter (specified with -n option)") for test_name in set(test_list): - if test_name.endswith('*'): - # This 'star-sufix' filter allows users to filter tests with fixed prefixes - # Example: -n 'TESTS-mbed_drivers* will filter all test cases with name starting with 'TESTS-mbed_drivers' - for test_name_filtered in filter_names_by_prefix(ctest_test_list.keys(), test_name[:-1]): - gt_logger.gt_log_tab("test filtered in '%s'"% gt_logger.gt_bright(test_name_filtered)) - filtered_ctest_test_list[test_name_filtered] = ctest_test_list[test_name_filtered] - elif test_name not in ctest_test_list: - invalid_test_names.append(test_name) + gt_logger.gt_log_tab(test_name) + matches = [test for test in ctest_test_list.keys() if fnmatch.fnmatch(test, test_name)] + gt_logger.gt_log_tab(str(ctest_test_list)) + if matches: + for match in matches: + gt_logger.gt_log_tab("test filtered in '%s'"% gt_logger.gt_bright(match)) + filtered_ctest_test_list[match] = ctest_test_list[match] else: - gt_logger.gt_log_tab("test filtered in '%s'"% gt_logger.gt_bright(test_name)) - filtered_ctest_test_list[test_name] = ctest_test_list[test_name] + invalid_test_names.append(test_name) if skip_test: test_list = skip_test.split(',')
Test names are not correctly globbed Test name matching only honors a wildcard placed at the end of the string, e.g. "mbed-os-*"; a wildcard anywhere else, e.g. "*-timer", is ignored.

The build tools accept these wildcards, so greentea should as well.

This is the line responsible: https://github.com/ARMmbed/greentea/blob/32b95b44be653c3db527c02e1c5e1ffdc7d37f6f/mbed_greentea/mbed_greentea_cli.py#L146

It should be switched to `fnmatch`.

(This is mostly a note to myself to fix it)
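A quick illustration (with hypothetical test names) of why `fnmatch` fixes this — the old prefix-based filter only handles a trailing `*`, while `fnmatch` matches the wildcard anywhere in the pattern:

```python
# Illustration only: prefix matching vs. fnmatch globbing.
import fnmatch

tests = ["mbed-os-timer", "mbed-os-ticker", "features-timer"]

# Old behaviour: effectively a prefix match, so only 'name*' works.
print([t for t in tests if t.startswith("mbed-os-")])
# -> ['mbed-os-timer', 'mbed-os-ticker']

# fnmatch honors the wildcard anywhere: '*-timer', 'mbed*s', '*-os-*', ...
print(fnmatch.filter(tests, "*-timer"))
# -> ['mbed-os-timer', 'features-timer']
```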
ARMmbed/greentea
diff --git a/test/mbed_gt_cli.py b/test/mbed_gt_cli.py index 0646c20..8f4a1eb 100644 --- a/test/mbed_gt_cli.py +++ b/test/mbed_gt_cli.py @@ -21,6 +21,36 @@ import sys import unittest from mbed_greentea import mbed_greentea_cli +from mbed_greentea.tests_spec import TestSpec + +test_spec_def = { + "builds": { + "K64F-ARM": { + "platform": "K64F", + "toolchain": "ARM", + "base_path": "./.build/K64F/ARM", + "baud_rate": 115200, + "tests": { + "mbed-drivers-test-generic_tests":{ + "binaries":[ + { + "binary_type": "bootable", + "path": "./.build/K64F/ARM/mbed-drivers-test-generic_tests.bin" + } + ] + }, + "mbed-drivers-test-c_strings":{ + "binaries":[ + { + "binary_type": "bootable", + "path": "./.build/K64F/ARM/mbed-drivers-test-c_strings.bin" + } + ] + } + } + } + } +} class GreenteaCliFunctionality(unittest.TestCase): @@ -86,5 +116,36 @@ class GreenteaCliFunctionality(unittest.TestCase): os.chdir(curr_dir) shutil.rmtree(test1_dir) + def test_create_filtered_test_list(self): + test_spec = TestSpec() + test_spec.parse(test_spec_def) + test_build = test_spec.get_test_builds()[0] + + test_list = mbed_greentea_cli.create_filtered_test_list(test_build.get_tests(), + 'mbed-drivers-test-generic_*', + None, + test_spec=test_spec) + self.assertEqual(set(test_list.keys()), set(['mbed-drivers-test-generic_tests'])) + + test_list = mbed_greentea_cli.create_filtered_test_list(test_build.get_tests(), + '*_strings', + None, + test_spec=test_spec) + self.assertEqual(set(test_list.keys()), set(['mbed-drivers-test-c_strings'])) + + test_list = mbed_greentea_cli.create_filtered_test_list(test_build.get_tests(), + 'mbed*s', + None, + test_spec=test_spec) + expected = set(['mbed-drivers-test-c_strings', 'mbed-drivers-test-generic_tests']) + self.assertEqual(set(test_list.keys()), expected) + + test_list = mbed_greentea_cli.create_filtered_test_list(test_build.get_tests(), + '*-drivers-*', + None, + test_spec=test_spec) + expected = set(['mbed-drivers-test-c_strings', 'mbed-drivers-test-generic_tests']) + self.assertEqual(set(test_list.keys()), expected) + if __name__ == '__main__': unittest.main() diff --git a/test/mbed_gt_target_info.py b/test/mbed_gt_target_info.py index e630e7b..a12ba09 100644 --- a/test/mbed_gt_target_info.py +++ b/test/mbed_gt_target_info.py @@ -416,7 +416,7 @@ mbed-gcc 1.1.0 with patch("mbed_greentea.mbed_target_info.walk") as _walk: _walk.return_value = iter([("", ["foo"], []), ("foo", [], ["targets.json"])]) result = list(mbed_target_info._find_targets_json("bogus_path")) - self.assertEqual(result, ["foo/targets.json"]) + self.assertEqual(result, [os.path.join("foo", "targets.json")]) def test_find_targets_json_ignored(self): with patch("mbed_greentea.mbed_target_info.walk") as _walk:
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
1.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
appdirs==1.4.4 beautifulsoup4==4.13.3 certifi==2025.1.31 charset-normalizer==3.4.1 colorama==0.3.9 exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work fasteners==0.19 future==1.0.0 idna==3.10 iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work intelhex==2.3.0 junit-xml==1.9 lockfile==0.12.2 -e git+https://github.com/ARMmbed/greentea.git@68508c5f4d7cf0635c75399d0ff7cfa896fdf2cc#egg=mbed_greentea mbed-host-tests==1.8.15 mbed-ls==1.8.15 mbed-os-tools==1.8.15 mock==5.2.0 packaging @ file:///croot/packaging_1734472117206/work pluggy @ file:///croot/pluggy_1733169602837/work prettytable==2.5.0 pyserial==3.5 pytest @ file:///croot/pytest_1738938843180/work requests==2.32.3 six==1.17.0 soupsieve==2.6 tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work typing_extensions==4.13.0 urllib3==2.3.0 wcwidth==0.2.13
name: greentea channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - exceptiongroup=1.2.0=py39h06a4308_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - packaging=24.2=py39h06a4308_0 - pip=25.0=py39h06a4308_0 - pluggy=1.5.0=py39h06a4308_0 - pytest=8.3.4=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tomli=2.0.1=py39h06a4308_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - appdirs==1.4.4 - beautifulsoup4==4.13.3 - certifi==2025.1.31 - charset-normalizer==3.4.1 - colorama==0.3.9 - fasteners==0.19 - future==1.0.0 - idna==3.10 - intelhex==2.3.0 - junit-xml==1.9 - lockfile==0.12.2 - mbed-host-tests==1.8.15 - mbed-ls==1.8.15 - mbed-os-tools==1.8.15 - mock==5.2.0 - prettytable==2.5.0 - pyserial==3.5 - requests==2.32.3 - six==1.17.0 - soupsieve==2.6 - typing-extensions==4.13.0 - urllib3==2.3.0 - wcwidth==0.2.13 prefix: /opt/conda/envs/greentea
[ "test/mbed_gt_cli.py::GreenteaCliFunctionality::test_create_filtered_test_list" ]
[]
[ "test/mbed_gt_cli.py::GreenteaCliFunctionality::test_get_greentea_version", "test/mbed_gt_cli.py::GreenteaCliFunctionality::test_get_hello_string", "test/mbed_gt_cli.py::GreenteaCliFunctionality::test_get_local_host_tests_dir_default_path", "test/mbed_gt_cli.py::GreenteaCliFunctionality::test_get_local_host_tests_dir_invalid_path", "test/mbed_gt_cli.py::GreenteaCliFunctionality::test_get_local_host_tests_dir_valid_path", "test/mbed_gt_cli.py::GreenteaCliFunctionality::test_print_version", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_find_targets_json", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_find_targets_json_ignored", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_mbed_target_from_current_dir_ok", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_mbed_targets_from_yotta_local_module_invalid_path", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_mbed_targets_from_yotta_local_module_invalid_target", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_mbed_targets_from_yotta_local_module_valid", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_platform_property_from_targets_empty_json", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_platform_property_from_targets_in_json", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_platform_property_from_targets_invalid_json", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_platform_property_from_targets_no_file", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_platform_property_from_targets_no_json", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_platform_property_from_targets_no_value", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_yotta_target_from_local_config_failed_open", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_yotta_target_from_local_config_invalid_path", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_get_yotta_target_from_local_config_valid_path", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_add_target_info_mapping", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_mbed_target_from_target_json", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_mbed_target_from_target_json_missing_json_data", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_mbed_target_from_target_json_missing_keywords", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_mbed_target_from_target_json_missing_name", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_mbed_target_from_target_json_missing_target", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_mbed_target_from_target_json_multiple", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_mbed_target_from_target_json_no_keywords", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_mbed_target_from_target_json_no_name", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_json_for_build_name", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_search_cmd_output", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_search_cmd_output_new_style", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_search_cmd_output_new_style_text", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_search_cmd_output_new_style_text_2", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_search_cmd_output_text", 
"test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_search_cmd_output_with_ssl_errors", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_target_cmd_output", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_target_cmd_output_fail", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_target_cmd_output_mixed_chars", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_target_cmd_output_mixed_nl", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_target_cmd_output_mixed_nl_whitechars", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_target_cmd_output_mixed_rcnl", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_target_cmd_output_mixed_rcnl_whitechars", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_target_cmd_output_mixed_version", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_parse_yotta_target_cmd_output_mixed_whitechars", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_platform_property", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_platform_property_from_default", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_platform_property_from_default_missing", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_platform_property_from_info_mapping", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_platform_property_from_info_mapping_bad_platform", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_platform_property_from_info_mapping_missing", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_platform_property_from_targets_json_base_target", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_platform_property_from_targets_json_empty", "test/mbed_gt_target_info.py::GreenteaTargetInfo::test_platform_property_from_targets_json_inherits" ]
[]
Apache License 2.0
2,159
[ "mbed_greentea/mbed_greentea_cli.py" ]
[ "mbed_greentea/mbed_greentea_cli.py" ]
ucfopen__canvasapi-149
65b75497665ca966c9046f2efde1273697c09e3b
2018-02-15 17:48:04
c69f6a9801ac275fdad46d97fa95c77c25d6f953
diff --git a/canvasapi/account.py b/canvasapi/account.py index 63f2ca2..3a03734 100644 --- a/canvasapi/account.py +++ b/canvasapi/account.py @@ -663,6 +663,7 @@ class Account(CanvasObject): 'GET', 'accounts/{}/terms'.format(self.id), {'account_id': self.id}, + _root='enrollment_terms', _kwargs=combine_kwargs(**kwargs) ) diff --git a/canvasapi/paginated_list.py b/canvasapi/paginated_list.py index 8717902..56732fb 100644 --- a/canvasapi/paginated_list.py +++ b/canvasapi/paginated_list.py @@ -10,30 +10,31 @@ class PaginatedList(object): def __init__( self, content_class, requester, request_method, first_url, extra_attribs=None, - **kwargs): + _root=None, **kwargs): - self.__elements = list() + self._elements = list() - self.__requester = requester - self.__content_class = content_class - self.__first_url = first_url - self.__first_params = kwargs or {} - self.__first_params['per_page'] = kwargs.get('per_page', 100) - self.__next_url = first_url - self.__next_params = self.__first_params - self.__extra_attribs = extra_attribs or {} - self.__request_method = request_method + self._requester = requester + self._content_class = content_class + self._first_url = first_url + self._first_params = kwargs or {} + self._first_params['per_page'] = kwargs.get('per_page', 100) + self._next_url = first_url + self._next_params = self._first_params + self._extra_attribs = extra_attribs or {} + self._request_method = request_method + self._root = _root def __getitem__(self, index): assert isinstance(index, (int, slice)) if isinstance(index, int): - self.__get_up_to_index(index) - return self.__elements[index] + self._get_up_to_index(index) + return self._elements[index] else: return self._Slice(self, index) def __iter__(self): - for element in self.__elements: + for element in self._elements: yield element while self._has_next(): new_elements = self._grow() @@ -41,60 +42,68 @@ class PaginatedList(object): yield element def __repr__(self): - return "<PaginatedList of type {}>".format(self.__content_class.__name__) + return "<PaginatedList of type {}>".format(self._content_class.__name__) def _is_larger_than(self, index): - return len(self.__elements) > index or self._has_next() + return len(self._elements) > index or self._has_next() - def __get_up_to_index(self, index): - while len(self.__elements) <= index and self._has_next(): + def _get_up_to_index(self, index): + while len(self._elements) <= index and self._has_next(): self._grow() def _grow(self): new_elements = self._get_next_page() - self.__elements += new_elements + self._elements += new_elements return new_elements def _has_next(self): - return self.__next_url is not None + return self._next_url is not None def _get_next_page(self): - response = self.__requester.request( - self.__request_method, - self.__next_url, - **self.__next_params + response = self._requester.request( + self._request_method, + self._next_url, + **self._next_params ) data = response.json() - self.__next_url = None + self._next_url = None next_link = response.links.get('next') - regex = r'{}(.*)'.format(re.escape(self.__requester.base_url)) + regex = r'{}(.*)'.format(re.escape(self._requester.base_url)) - self.__next_url = re.search(regex, next_link['url']).group(1) if next_link else None + self._next_url = re.search(regex, next_link['url']).group(1) if next_link else None - self.__next_params = {} + self._next_params = {} content = [] + + if self._root: + try: + data = data[self._root] + except KeyError: + # TODO: Fix this message to make more sense to an end user. 
+ raise ValueError("Invalid root value specified.") + for element in data: if element is not None: - element.update(self.__extra_attribs) - content.append(self.__content_class(self.__requester, element)) + element.update(self._extra_attribs) + content.append(self._content_class(self._requester, element)) return content class _Slice(object): def __init__(self, the_list, the_slice): - self.__list = the_list - self.__start = the_slice.start or 0 - self.__stop = the_slice.stop - self.__step = the_slice.step or 1 + self._list = the_list + self._start = the_slice.start or 0 + self._stop = the_slice.stop + self._step = the_slice.step or 1 def __iter__(self): - index = self.__start - while not self.__finished(index): - if self.__list._is_larger_than(index): - yield self.__list[index] - index += self.__step - - def __finished(self, index): - return self.__stop is not None and index >= self.__stop + index = self._start + while not self._finished(index): + if self._list._is_larger_than(index): + yield self._list[index] + index += self._step + + def _finished(self, index): + return self._stop is not None and index >= self._stop
Unable to list_enrollment_terms() on 0.8.2 It seems Enrollment Terms in the Canvas API aren't structured like most other Paginated Lists. Specifically, in the example response at https://canvas.instructure.com/doc/api/enrollment_terms.html#method.terms_api.index (and my own testing), the list of term objects are nested under a top-level string key `enrollment_terms` in the JSON response. It seems like other Paginated Lists return a JSON payload where the list object of requested items is the top-level element. This difference seems to cause `paginated_list.py` to fail to parse the Enrollment Terms response in the loop over the JSON payload at https://github.com/ucfopen/canvasapi/blob/v0.8.2/canvasapi/paginated_list.py#L78-L81, since on the first iteration `element` will be the string `enrollment_terms`, which does not have an update() member function. I'm sorry if I missed something obvious that lets me specify what I want the "root element" of the payload to be when parsing a Paginated List. I poked around a little bit but didn't see any way to mess with the data variable in _get_next_page().
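A sketch (not the library's actual code) of the unwrapping step that the fix above adds to `PaginatedList._get_next_page()` via the `_root` parameter, using the response shape of the Enrollment Terms endpoint:

```python
# Sketch: descend into a nested root key before building objects,
# mirroring the `_root` handling added by the patch above.
data = {
    "enrollment_terms": [
        {"id": 1, "name": "Enrollment Term 1"},
        {"id": 2, "name": "Enrollment Term 2"},
    ]
}

root = "enrollment_terms"
if root is not None:
    try:
        data = data[root]          # unwrap the nested list
    except KeyError:
        raise ValueError("Invalid root value specified.")

for element in data:               # each element is now a term dict
    print(element["id"], element["name"])
```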
ucfopen/canvasapi
diff --git a/tests/fixtures/account.json b/tests/fixtures/account.json index bf80693..e12a3e6 100644 --- a/tests/fixtures/account.json +++ b/tests/fixtures/account.json @@ -291,7 +291,8 @@ "list_enrollment_terms": { "method": "GET", "endpoint": "accounts/1/terms", - "data": [ + "data": { + "enrollment_terms": [ { "id": 1, "name": "Enrollment Term 1" @@ -299,8 +300,8 @@ { "id": 2, "name": "Enrollment Term 2" - } - ], + }] + }, "status_code": 200 }, "list_groups_context": { diff --git a/tests/test_paginated_list.py b/tests/test_paginated_list.py index b7c2a1d..8fd6521 100644 --- a/tests/test_paginated_list.py +++ b/tests/test_paginated_list.py @@ -4,6 +4,7 @@ import unittest import requests_mock from canvasapi import Canvas +from canvasapi.enrollment_term import EnrollmentTerm from canvasapi.paginated_list import PaginatedList from canvasapi.user import User from tests import settings @@ -205,3 +206,30 @@ class TestPaginatedList(unittest.TestCase): 'six_objects_three_pages' ) self.assertEqual(pag_list.__repr__(), '<PaginatedList of type User>') + + def test_root_element_incorrect(self, m): + register_uris({'account': ['list_enrollment_terms']}, m) + + pag_list = PaginatedList( + EnrollmentTerm, + self.requester, + 'GET', + 'accounts/1/terms', + _root='wrong' + ) + + with self.assertRaises(ValueError): + pag_list[0] + + def test_root_element(self, m): + register_uris({'account': ['list_enrollment_terms']}, m) + + pag_list = PaginatedList( + EnrollmentTerm, + self.requester, + 'GET', + 'accounts/1/terms', + _root='enrollment_terms' + ) + + self.assertIsInstance(pag_list[0], EnrollmentTerm)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
0.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "flake8", "coverage" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt", "dev_requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 -e git+https://github.com/ucfopen/canvasapi.git@65b75497665ca966c9046f2efde1273697c09e3b#egg=canvasapi certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 docutils==0.17.1 flake8==5.0.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.2.0 iniconfig==1.1.1 Jinja2==3.0.3 MarkupSafe==2.0.1 mccabe==0.7.0 packaging==21.3 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytz==2025.2 requests==2.27.1 requests-mock==1.12.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==4.3.2 sphinx-rtd-theme==1.3.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: canvasapi channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - docutils==0.17.1 - flake8==5.0.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.2.0 - iniconfig==1.1.1 - jinja2==3.0.3 - markupsafe==2.0.1 - mccabe==0.7.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytz==2025.2 - requests==2.27.1 - requests-mock==1.12.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==4.3.2 - sphinx-rtd-theme==1.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/canvasapi
[ "tests/test_paginated_list.py::TestPaginatedList::test_root_element", "tests/test_paginated_list.py::TestPaginatedList::test_root_element_incorrect" ]
[]
[ "tests/test_paginated_list.py::TestPaginatedList::test_getitem_first", "tests/test_paginated_list.py::TestPaginatedList::test_getitem_second_page", "tests/test_paginated_list.py::TestPaginatedList::test_iterator", "tests/test_paginated_list.py::TestPaginatedList::test_paginated_list_empty", "tests/test_paginated_list.py::TestPaginatedList::test_paginated_list_four_two_pages", "tests/test_paginated_list.py::TestPaginatedList::test_paginated_list_single", "tests/test_paginated_list.py::TestPaginatedList::test_paginated_list_six_three_pages", "tests/test_paginated_list.py::TestPaginatedList::test_paginated_list_two_one_page", "tests/test_paginated_list.py::TestPaginatedList::test_repr", "tests/test_paginated_list.py::TestPaginatedList::test_slice_beginning", "tests/test_paginated_list.py::TestPaginatedList::test_slice_end", "tests/test_paginated_list.py::TestPaginatedList::test_slice_middle" ]
[]
MIT License
2,160
[ "canvasapi/account.py", "canvasapi/paginated_list.py" ]
[ "canvasapi/account.py", "canvasapi/paginated_list.py" ]
ucfopen__canvasapi-151
fbe18169649f8e9dce1c85c5ef11dbc84641f935
2018-02-15 18:54:35
c69f6a9801ac275fdad46d97fa95c77c25d6f953
diff --git a/canvasapi/requester.py b/canvasapi/requester.py index f979866..e989962 100644 --- a/canvasapi/requester.py +++ b/canvasapi/requester.py @@ -68,8 +68,12 @@ class Requester(object): for i, kwarg in enumerate(_kwargs): kw, arg = kwarg + # Convert boolean objects to a lowercase string. + if isinstance(arg, bool): + _kwargs[i] = (kw, str(arg).lower()) + # Convert any datetime objects into ISO 8601 formatted strings. - if isinstance(arg, datetime): + elif isinstance(arg, datetime): _kwargs[i] = (kw, arg.isoformat()) # Determine the appropriate request method.
Automatically lowercase boolean strings before sending to Canvas This will resolve any future weirdness like #64.
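For context, Python's `str(True)` is `'True'`, while Canvas expects lowercase `'true'`; the patch above normalizes booleans before the request goes out. A minimal illustration of that conversion (the helper name is hypothetical):

```python
# Illustration of the conversion the fix applies to each kwarg:
# booleans become lowercase strings before hitting the Canvas API.
def normalize(value):
    if isinstance(value, bool):
        return str(value).lower()  # True -> 'true', False -> 'false'
    return value

print(normalize(True), normalize(False))  # -> true false
```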
ucfopen/canvasapi
diff --git a/tests/test_requester.py b/tests/test_requester.py index fb582a7..93e87a0 100644 --- a/tests/test_requester.py +++ b/tests/test_requester.py @@ -94,6 +94,24 @@ class TestRequester(unittest.TestCase): self.assertLessEqual(len(self.requester._cache), 5) self.assertEqual(response, self.requester._cache[0]) + def test_request_lowercase_boolean(self, m): + def custom_matcher(request): + match_text = 'test=true&test2=false' + if request.text == match_text: + resp = requests.Response() + resp.status_code = 200 + return resp + + m.add_matcher(custom_matcher) + + response = self.requester.request( + 'POST', + 'test', + test=True, + test2=False + ) + self.assertEqual(response.status_code, 200) + def test_request_400(self, m): register_uris({'requests': ['400']}, m)
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_issue_reference" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 1 }
0.8
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "flake8", "coverage", "pycodestyle", "pyflakes", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt", "dev_requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 -e git+https://github.com/ucfopen/canvasapi.git@fbe18169649f8e9dce1c85c5ef11dbc84641f935#egg=canvasapi certifi==2021.5.30 charset-normalizer==2.0.12 coverage==6.2 docutils==0.17.1 flake8==5.0.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.2.0 iniconfig==1.1.1 Jinja2==3.0.3 MarkupSafe==2.0.1 mccabe==0.7.0 packaging==21.3 pluggy==1.0.0 py==1.11.0 pycodestyle==2.9.1 pyflakes==2.5.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytz==2025.2 requests==2.27.1 requests-mock==1.12.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==4.3.2 sphinx-rtd-theme==1.3.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: canvasapi channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - charset-normalizer==2.0.12 - coverage==6.2 - docutils==0.17.1 - flake8==5.0.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.2.0 - iniconfig==1.1.1 - jinja2==3.0.3 - markupsafe==2.0.1 - mccabe==0.7.0 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytz==2025.2 - requests==2.27.1 - requests-mock==1.12.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==4.3.2 - sphinx-rtd-theme==1.3.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/canvasapi
[ "tests/test_requester.py::TestRequester::test_request_lowercase_boolean" ]
[]
[ "tests/test_requester.py::TestRequester::test_request_400", "tests/test_requester.py::TestRequester::test_request_401_InvalidAccessToken", "tests/test_requester.py::TestRequester::test_request_401_Unauthorized", "tests/test_requester.py::TestRequester::test_request_404", "tests/test_requester.py::TestRequester::test_request_500", "tests/test_requester.py::TestRequester::test_request_cache", "tests/test_requester.py::TestRequester::test_request_cache_clear_after_5", "tests/test_requester.py::TestRequester::test_request_delete", "tests/test_requester.py::TestRequester::test_request_get", "tests/test_requester.py::TestRequester::test_request_get_datetime", "tests/test_requester.py::TestRequester::test_request_post", "tests/test_requester.py::TestRequester::test_request_post_datetime", "tests/test_requester.py::TestRequester::test_request_put" ]
[]
MIT License
2,162
[ "canvasapi/requester.py" ]
[ "canvasapi/requester.py" ]
briancurtin__deprecation-12
70bb27fef996b214d7b698ed3124fb598220eea9
2018-02-16 13:15:01
70bb27fef996b214d7b698ed3124fb598220eea9
codecov-io: # [Codecov](https://codecov.io/gh/briancurtin/deprecation/pull/12?src=pr&el=h1) Report > Merging [#12](https://codecov.io/gh/briancurtin/deprecation/pull/12?src=pr&el=desc) into [master](https://codecov.io/gh/briancurtin/deprecation/commit/70bb27fef996b214d7b698ed3124fb598220eea9?src=pr&el=desc) will **not change** coverage. > The diff coverage is `100%`. [![Impacted file tree graph](https://codecov.io/gh/briancurtin/deprecation/pull/12/graphs/tree.svg?width=650&height=150&token=6w8zqu2CEQ&src=pr)](https://codecov.io/gh/briancurtin/deprecation/pull/12?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #12 +/- ## ===================================== Coverage 100% 100% ===================================== Files 1 1 Lines 56 64 +8 Branches 9 10 +1 ===================================== + Hits 56 64 +8 ``` | [Impacted Files](https://codecov.io/gh/briancurtin/deprecation/pull/12?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [deprecation.py](https://codecov.io/gh/briancurtin/deprecation/pull/12/diff?src=pr&el=tree#diff-ZGVwcmVjYXRpb24ucHk=) | `100% <100%> (ø)` | :arrow_up: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/briancurtin/deprecation/pull/12?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/briancurtin/deprecation/pull/12?src=pr&el=footer). Last update [70bb27f...adfcd2e](https://codecov.io/gh/briancurtin/deprecation/pull/12?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
diff --git a/deprecation.py b/deprecation.py index 28a21ee..00598db 100644 --- a/deprecation.py +++ b/deprecation.py @@ -10,6 +10,7 @@ # License for the specific language governing permissions and limitations # under the License. import functools +import textwrap import warnings from packaging import version @@ -167,8 +168,31 @@ def deprecated(deprecated_in=None, removed_in=None, current_version=None, deprecation_note = ("*Deprecated{deprecated_in}{removed_in}" "{period}{details}*".format(**parts)) - function.__doc__ = "\n\n".join([existing_docstring, + pos = existing_docstring.find("\n") + + if pos != -1: + # With a multi-line docstring, when we modify + # existing_docstring to add our deprecation_note, + # if we're not careful we'll interfere with the + # indentation levels of the contents below the + # first line, or as PEP 257 calls it, the summary + # line. Since the summary line can start on the + # same line as the """, dedenting the whole thing + # won't help. Split the summary and contents up, + # dedent the contents independently, then join + # summary, dedent'ed contents, and our + # deprecation_note. + + summary = existing_docstring[:pos] + contents = existing_docstring[pos:] + + function.__doc__ = "".join([summary, + textwrap.dedent(contents), + "\n\n", deprecation_note]) + else: + function.__doc__ = "\n\n".join([existing_docstring, + deprecation_note]) @functools.wraps(function) def _inner(*args, **kwargs): diff --git a/sample.py b/sample.py index be00938..9373753 100644 --- a/sample.py +++ b/sample.py @@ -14,6 +14,19 @@ def won(): return 1 [email protected](deprecated_in="1.0", removed_in="2.0", + current_version=__version__, + details="Use the ``one`` function instead") +def uno(): + """Esta función regresa 1 + + This is Spanish for 'This function returns 1' + + This is also here to show that multiline docstrings work + """ + return 1 + + def one(): """This function returns 1""" return 1 diff --git a/setup.py b/setup.py index ffe453a..3da8a87 100644 --- a/setup.py +++ b/setup.py @@ -28,6 +28,7 @@ setup(name="deprecation", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: Libraries :: Python Modules"] diff --git a/tox.ini b/tox.ini index 3458187..026704c 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py35,py34,py27,pypy,flake8 +envlist = py37,py36,py35,py34,py27,pypy3,pypy,flake8 skipsdist = True [testenv]
deprecation_note breaks docstring processing While joining the `function.__doc__`-string with the `deprecation_note`-string, the indentation isn't handled properly. Let's look at this example:

```python
@deprecated(deprecated_in="0.0.3", removed_in="0.1.0",
            current_version="0.0.4",
            details="Do not use this function any more.")
def testfunc():
    """Test function.

    Returns
    -------
    name : str
        Some string
    """
    return "Teststring"


def newfunc():
    """Test function.

    Returns
    -------
    name : str
        Some string
    """
    return "Teststring"
```

Output of `help(testfunc)` (broken):

```
testfunc()
    Test function.

        Returns
        -------
        name : str
            Some string

    *Deprecated in 0.0.3, to be removed in 0.1.0. Do not use this function any more.*
```

Output of `help(newfunc)` (correct):

```
newfunc()
    Test function.

    Returns
    -------
    name : str
        Some string
```

The indentation of the function (numpy-style) docstring has been broken by the addition of the `deprecation_note`. While this might seem like a minor cosmetic issue, this breaks proper docstring processing in e.g. `sphinx`.
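A standalone sketch of the indentation-safe join the fix above performs in `deprecation.py`: split off the PEP 257 summary line, dedent the remainder with `textwrap.dedent`, then append the note at a consistent level (the docstring and note text here are illustrative):

```python
# Sketch of the fix: dedent everything after the summary line so the
# appended note doesn't skew pydoc/Sphinx indentation handling.
import textwrap

existing = """Test function.

    Returns
    -------
    name : str
        Some string
    """
note = "*Deprecated in 0.0.3, to be removed in 0.1.0.*"

pos = existing.find("\n")
summary, contents = existing[:pos], existing[pos:]
doc = "".join([summary, textwrap.dedent(contents), "\n\n", note])
print(doc)
```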
briancurtin/deprecation
diff --git a/tests/test_deprecation.py b/tests/test_deprecation.py index 3364f9c..cf9d420 100644 --- a/tests/test_deprecation.py +++ b/tests/test_deprecation.py @@ -43,6 +43,30 @@ class Test_deprecated(unittest2.TestCase): self.assertEqual(fn.__doc__, test["__doc__"]) + def test_multiline_docstring(self): + docstring = "summary line\n\ndetails\nand more details\n" + for test in [{"args": {}, + "__doc__": "%s\n\n*Deprecated*"}, + {"args": {"deprecated_in": "1.0"}, + "__doc__": "%s\n\n*Deprecated in 1.0.*"}, + {"args": {"deprecated_in": "1.0", "removed_in": "2.0"}, + "__doc__": "%s\n\n*Deprecated in 1.0, " + "to be removed in 2.0.*"}, + {"args": {"deprecated_in": "1.0", "removed_in": "2.0", + "details": "some details"}, + "__doc__": "%s\n\n*Deprecated in 1.0, " + "to be removed in 2.0. some details*"}]: + with self.subTest(**test): + @deprecation.deprecated(**test["args"]) + def fn(): + """summary line + + details + and more details + """ + + self.assertEqual(fn.__doc__, test["__doc__"] % docstring) + def test_warning_raised(self): ret_val = "lololol"
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 4 }
1.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "coverage", "codecov", "flake8", "pytest" ], "pre_install": [], "python": "3.7", "reqs_path": [ "test-requirements.txt", "docs-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi charset-normalizer==3.4.1 codecov==2.1.13 coverage==7.2.7 -e git+https://github.com/briancurtin/deprecation.git@70bb27fef996b214d7b698ed3124fb598220eea9#egg=deprecation exceptiongroup==1.2.2 flake8==5.0.4 idna==3.10 importlib-metadata==4.2.0 iniconfig==2.0.0 linecache2==1.0.0 mccabe==0.7.0 packaging==24.0 pluggy==1.2.0 pycodestyle==2.9.1 pyflakes==2.5.0 pytest==7.4.4 requests==2.31.0 six==1.17.0 tomli==2.0.1 traceback2==1.4.0 typing_extensions==4.7.1 unittest2==1.1.0 urllib3==2.0.7 zipp==3.15.0
name: deprecation channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - argparse==1.4.0 - charset-normalizer==3.4.1 - codecov==2.1.13 - coverage==7.2.7 - exceptiongroup==1.2.2 - flake8==5.0.4 - idna==3.10 - importlib-metadata==4.2.0 - iniconfig==2.0.0 - linecache2==1.0.0 - mccabe==0.7.0 - packaging==24.0 - pluggy==1.2.0 - pycodestyle==2.9.1 - pyflakes==2.5.0 - pytest==7.4.4 - requests==2.31.0 - six==1.17.0 - tomli==2.0.1 - traceback2==1.4.0 - typing-extensions==4.7.1 - unittest2==1.1.0 - urllib3==2.0.7 - zipp==3.15.0 prefix: /opt/conda/envs/deprecation
[ "tests/test_deprecation.py::Test_deprecated::test_multiline_docstring" ]
[]
[ "tests/test_deprecation.py::Test_deprecated::test_DeprecatedWarning_not_raised", "tests/test_deprecation.py::Test_deprecated::test_docstring", "tests/test_deprecation.py::Test_deprecated::test_removing_without_deprecating", "tests/test_deprecation.py::Test_deprecated::test_warning_raised", "tests/test_deprecation.py::Test_fail_if_not_removed::test_DeprecatedWarning_doesnt_fail", "tests/test_deprecation.py::Test_fail_if_not_removed::test_UnsupportedWarning_causes_failure", "tests/test_deprecation.py::Test_fail_if_not_removed::test_literal_DeprecatedWarning" ]
[]
Apache License 2.0
2,163
[ "setup.py", "sample.py", "tox.ini", "deprecation.py" ]
[ "setup.py", "sample.py", "tox.ini", "deprecation.py" ]
piotrmaslanka__satella-11
5fa0a67e4d35431f2f54740ba8fcfbd7f6d8bc59
2018-02-16 16:59:11
aa8d49a9754c89eb3dcdafcf32d98797b3264908
diff --git a/satella/coding/recast_exceptions.py b/satella/coding/recast_exceptions.py index bf164db4..32e4e1c8 100644 --- a/satella/coding/recast_exceptions.py +++ b/satella/coding/recast_exceptions.py @@ -45,7 +45,8 @@ class rethrow_as(object): """ # You can also provide just two exceptions - if len(pairs) == 2 and all(issubclass(p, BaseException) for p in pairs): + if len(pairs) == 2 and not isinstance(pairs[1], (tuple, list)) \ + and all(issubclass(p, BaseException) for p in pairs): self.mapping = {pairs[0]: pairs[1]} else: self.mapping = dict(pairs)
Bad rethrow_as

```python
@rethrow_as((UnicodeDecodeError, ConfigurationMalformed),
            (json.JSONDecodeError, ConfigurationMalformed))
@rethrow_as(ValueError, ConfigurationMalformed)
@rethrow_as(binascii.Error, ConfigurationMalformed)
@rethrow_as(TypeError, ConfigurationError)
def provide(self):
    return json.loads(self.root, encoding=self.encoding)
```

This breaks, because the decorator treats the first two (exception, replacement) pairs the wrong way.
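For context, a condensed sketch of the ambiguity and the disambiguation check the fix adds (simplified from the patch): `rethrow_as(A, B)` is the two-argument shortcut meaning "map A to B", while `rethrow_as((A, B), (C, D))` is a sequence of pairs, so the shortcut must only fire when the second argument is not itself a pair.

```python
class rethrow_as:
    """Simplified sketch of the pair-parsing logic after the fix."""

    def __init__(self, *pairs):
        # Two bare exception classes -> a single {from: to} mapping,
        # but only if the second argument is not itself a pair;
        # otherwise `pairs` is a sequence of (from, to) tuples.
        if (len(pairs) == 2
                and not isinstance(pairs[1], (tuple, list))
                and all(issubclass(p, BaseException) for p in pairs)):
            self.mapping = {pairs[0]: pairs[1]}
        else:
            self.mapping = dict(pairs)
```

Without the `isinstance` guard, two `(exception, replacement)` tuples both happen to have length 2, so the shortcut branch misfires exactly as the issue describes.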
piotrmaslanka/satella
diff --git a/tests/test_coding/test_rethrow.py b/tests/test_coding/test_rethrow.py index ce17f722..80dd8ec6 100644 --- a/tests/test_coding/test_rethrow.py +++ b/tests/test_coding/test_rethrow.py @@ -39,4 +39,17 @@ class TestStuff(unittest.TestCase): def lol(): raise ValueError() - self.assertRaises(NameError, lol) \ No newline at end of file + self.assertRaises(NameError, lol) + + def test_issue_10(self): + + class WTFException1(Exception): pass + class WTFException2(Exception): pass + + @rethrow_as((NameError, WTFException1), + (TypeError, WTFException2)) + def provide(exc): + raise exc() + + self.assertRaises(WTFException1, lambda: provide(NameError)) + self.assertRaises(WTFException2, lambda: provide(TypeError))
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 1 }
2.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "coverage" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 exceptiongroup==1.2.2 iniconfig==2.1.0 monotonic==1.6 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 -e git+https://github.com/piotrmaslanka/satella.git@5fa0a67e4d35431f2f54740ba8fcfbd7f6d8bc59#egg=satella six==1.17.0 tomli==2.2.1 typing==3.7.4.3
name: satella channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - exceptiongroup==1.2.2 - iniconfig==2.1.0 - monotonic==1.6 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - six==1.17.0 - tomli==2.2.1 - typing==3.7.4.3 prefix: /opt/conda/envs/satella
[ "tests/test_coding/test_rethrow.py::TestStuff::test_issue_10" ]
[]
[ "tests/test_coding/test_rethrow.py::TestStuff::test_rethrow", "tests/test_coding/test_rethrow.py::TestStuff::test_rethrow_2", "tests/test_coding/test_rethrow.py::TestStuff::test_rethrow_3", "tests/test_coding/test_rethrow.py::TestStuff::test_silencer", "tests/test_coding/test_rethrow.py::TestStuff::test_silencer_2" ]
[]
MIT License
2,165
[ "satella/coding/recast_exceptions.py" ]
[ "satella/coding/recast_exceptions.py" ]
piotrmaslanka__satella-17
a11e207b41c0037c201704c243ce3c01879b3127
2018-02-16 19:32:27
aa8d49a9754c89eb3dcdafcf32d98797b3264908
diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a55c8e3..2f6f6c64 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ -## v2.0.22 +## v2.0.22rc1 -* ... +* fixes #16 ## v2.0.21 diff --git a/satella/coding/typecheck.py b/satella/coding/typecheck.py index 84bd8097..010121d5 100644 --- a/satella/coding/typecheck.py +++ b/satella/coding/typecheck.py @@ -293,19 +293,21 @@ def istype(var, type_): return any(istype(var, subtype) for subtype in type_) try: + if type_ in (Callable, Iterable, Sequence, Mapping): + raise TypeError() + if isinstance(var, type_): return True except TypeError as e: # must be a typing.* annotation - try: - return all(hasattr(var, n) for n in { - Iterable: ('__iter__',), - Sequence: ('__iter__', '__getattr__', '__len__'), - Callable: ('__call__', ), - Mapping: ('__getitem__', ), - }[type(var)]) - except KeyError: - pass + if type_ == Callable: + return hasattr(var, '__call__') + elif type_ == Iterable: + return hasattr(var, '__iter__') + elif type_ == Sequence: + return hasattr(var, '__iter__') and hasattr(var, '__getattr__') and hasattr(var, '__len__') + elif type_ == Mapping: + return hasattr(var, '__getitem__') return type(var) == type_ diff --git a/setup.cfg b/setup.cfg index 5a0931c8..63ace994 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = satella -version = 2.0.22a1 +version = 2.0.22rc1 description-file = README.md author = Piotr Maślanka author_email = [email protected]
@typed(Callable) does not accept lambdas
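A hedged sketch of the duck-typing idea behind the fix (simplified; the attribute sets here are illustrative and differ slightly from the patch): instead of relying on `isinstance` with `typing.*` annotations, which raised `TypeError` on some of the Python versions satella targeted, check for the defining dunder methods directly.

```python
from typing import Callable, Iterable, Mapping, Sequence

# Dunder methods that stand in for each typing.* annotation.
_DUCK_CHECKS = {
    Callable: ('__call__',),
    Iterable: ('__iter__',),
    Sequence: ('__iter__', '__getitem__', '__len__'),
    Mapping: ('__getitem__',),
}


def istype(var, type_):
    """Simplified sketch: duck-type typing.* annotations, isinstance otherwise."""
    if type_ in _DUCK_CHECKS:
        return all(hasattr(var, attr) for attr in _DUCK_CHECKS[type_])
    return isinstance(var, type_)


assert istype(lambda: None, Callable)  # lambdas now pass the Callable check
```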
piotrmaslanka/satella
diff --git a/tests/test_coding/test_debug.py b/tests/test_coding/test_debug.py index 2256def1..504638ae 100644 --- a/tests/test_coding/test_debug.py +++ b/tests/test_coding/test_debug.py @@ -48,6 +48,13 @@ class TestTypecheck(unittest.TestCase): p(None) self.assertRaises(TypeError, lambda: p(5.0)) + def test_lambda(self): + @typed(Callable) + def q(p): + pass + + q(lambda: None) + def test_forarg(self): @for_argument(int) def testa(a):
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 0, "issue_text_score": 3, "test_score": 0 }, "num_modified_files": 3 }
2.0
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "nose", "mock", "coverage", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.7", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi @ file:///croot/certifi_1671487769961/work/certifi coverage==7.2.7 exceptiongroup==1.2.2 importlib-metadata==6.7.0 iniconfig==2.0.0 mock==5.2.0 monotonic==1.6 nose==1.3.7 packaging==24.0 pluggy==1.2.0 pytest==7.4.4 -e git+https://github.com/piotrmaslanka/satella.git@a11e207b41c0037c201704c243ce3c01879b3127#egg=satella six==1.17.0 tomli==2.0.1 typing==3.7.4.3 typing_extensions==4.7.1 zipp==3.15.0
name: satella channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2022.12.7=py37h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=22.3.1=py37h06a4308_0 - python=3.7.16=h7a1cb2a_0 - readline=8.2=h5eee18b_0 - setuptools=65.6.3=py37h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.38.4=py37h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.2.7 - exceptiongroup==1.2.2 - importlib-metadata==6.7.0 - iniconfig==2.0.0 - mock==5.2.0 - monotonic==1.6 - nose==1.3.7 - packaging==24.0 - pluggy==1.2.0 - pytest==7.4.4 - six==1.17.0 - tomli==2.0.1 - typing==3.7.4.3 - typing-extensions==4.7.1 - zipp==3.15.0 prefix: /opt/conda/envs/satella
[ "tests/test_coding/test_debug.py::TestTypecheck::test_lambda" ]
[]
[ "tests/test_coding/test_debug.py::TestTypecheck::test_T2", "tests/test_coding/test_debug.py::TestTypecheck::test_T2a", "tests/test_coding/test_debug.py::TestTypecheck::test_che_co2", "tests/test_coding/test_debug.py::TestTypecheck::test_checked_coerce", "tests/test_coding/test_debug.py::TestTypecheck::test_cls", "tests/test_coding/test_debug.py::TestTypecheck::test_cls_test", "tests/test_coding/test_debug.py::TestTypecheck::test_coerce", "tests/test_coding/test_debug.py::TestTypecheck::test_coerce_result", "tests/test_coding/test_debug.py::TestTypecheck::test_forarg", "tests/test_coding/test_debug.py::TestTypecheck::test_precondition", "tests/test_coding/test_debug.py::TestTypecheck::test_self", "tests/test_coding/test_debug.py::TestTypecheck::test_shorter_coerces", "tests/test_coding/test_debug.py::TestTypecheck::test_t1", "tests/test_coding/test_debug.py::TestTypecheck::test_t2", "tests/test_coding/test_debug.py::TestTypecheck::test_t3", "tests/test_coding/test_debug.py::TestTypecheck::test_ta", "tests/test_coding/test_debug.py::TestTypecheck::test_tma" ]
[]
MIT License
2,166
[ "setup.cfg", "CHANGELOG.md", "satella/coding/typecheck.py" ]
[ "setup.cfg", "CHANGELOG.md", "satella/coding/typecheck.py" ]
streamlink__streamlink-1486
db9f6640df5bf2cdb8d2acdf2960fe1fc96acfec
2018-02-17 02:09:27
3b7dae75d924caa94d5d2e023b85959f0ac8ef39
back-to: you should use their api and not hardcode all the urls; this would most likely clean up the code.

---

**LIVE** from api

http://www.ruv.is/sites/all/themes/at_ruv/scripts/ruv-stream.php?channel=ruv&format=json
http://www.ruv.is/sites/all/themes/at_ruv/scripts/ruv-stream.php?channel=ruv2&format=json
http://www.ruv.is/sites/all/themes/at_ruv/scripts/ruv-stream.php?channel=ras1&format=json
http://www.ruv.is/sites/all/themes/at_ruv/scripts/ruv-stream.php?channel=ras2&format=json
http://www.ruv.is/sites/all/themes/at_ruv/scripts/ruv-stream.php?channel=ras3&format=json

**VOD** from website

http://www.ruv.is/spila/ruv/ol-2018-listhlaup-karla/20180217

```js
theoplayer.onReady = function () {
    var video = document.createElement('video');
    video.controls = "controls";
    video.poster = "http://cdn-img.ruv.is/sites/default/files/styles/1200x750/public/kringlumyndir/26243-75-1_0.jpg?";
    video.src = "http://sip-ruv-vod.dcp.adaptive.level3.net/lokad/4953386R12.mp4.m3u8";
    video.className = "player col12";
    video.preload = "none";
    video.autoplay = "autoplay";
    video.style = "width:100%;";
    video.type = "application/x-mpegURL";
    document.getElementById("sarpur-player").appendChild(video);
    var player = theoplayer(video);
```

hannespetur: > you should use their api and not hardcode all the urls; this would most likely clean up the code.

I am not seeing a way to get the correct HLS URLs with the api you sent. Can you elaborate?

Best, Hannes

back-to: for http://www.ruv.is/sites/all/themes/at_ruv/scripts/ruv-stream.php?channel=ruv2&format=json you will get

```json
{
    "result": [
        "rtmp:\/\/ruvruverllivefs.fplive.net\/ruvruverllive-live\/",
        "https:\/\/ruvruverl-live-hls.secure.footprint.net\/ID-TIME\/ruv\/ruv2erl\/index.m3u8"
    ],
    "ipinfo": ["0.0.0.0", "ABC"],
    "user_agent": ["web"]
}
```

---

`https://ruvruverl-live-hls.secure.footprint.net/...-.../ruv/ruv2erl/index.m3u8` is the m3u8 url

```
#EXTM3U
#EXT-X-VERSION:5
#EXT-X-STREAM-INF:BANDWIDTH=950400,CODECS="avc1.77.30,mp4a.40.2",RESOLUTION=640x360
index/stream1.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=1390400,CODECS="avc1.77.30,mp4a.40.2",RESOLUTION=852x480
index/stream2.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=2710400,CODECS="avc1.4d401f,mp4a.40.2",RESOLUTION=1280x720
index/stream3.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=4030400,CODECS="avc1.4d4028,mp4a.40.2",RESOLUTION=1920x1080
index/stream4.m3u8
#EXT-X-STREAM-INF:BANDWIDTH=5570400,CODECS="avc1.4d4028,mp4a.40.2",RESOLUTION=1920x1080
index/stream5.m3u8
```

```
$ streamlink "https://ruvruverl-live-hls.secure.footprint.net/...-.../ruv/ruv2erl/index.m3u8"
[cli][info] Found matching plugin hls for URL https://ruvruverl-live-hls.secure.footprint.net/...-.../ruv/ruv2erl/index.m3u8
[plugin.hls][debug] URL=https://ruvruverl-live-hls.secure.footprint.net/...-.../ruv/ruv2erl/index.m3u8; params={}
[cli][info] Available streams: 360p (worst), 480p, 720p, 1080p_alt, 1080p (best)
[cli][info] Opening stream: 1080p (hls)
[stream.hls][debug] Reloading playlist
[stream.hls][debug] First Sequence: 98252; Last Sequence: 99051
[stream.hls][debug] Start offset: 0; Duration: None; Start Sequence: 99049; End Sequence: None
[stream.hls][debug] Adding segment 99049 to queue
[cli][debug] Pre-buffering 8192 bytes
[stream.hls][debug] Adding segment 99050 to queue
[stream.hls][debug] Adding segment 99051 to queue
[stream.hls][debug] Download of segment 99049 complete
[cli][info] Starting player: /usr/bin/mpv
[cli][debug] Writing stream to output
[stream.hls][debug] Download of segment 99050 complete
[stream.hls][debug] Download of segment 99051 complete
[stream.hls][debug] Reloading playlist
[stream.hls][debug] Adding segment 99052 to queue
[stream.hls][debug] Download of segment 99052 complete
[stream.hls][debug] Reloading playlist
[stream.hls][debug] Adding segment 99053 to queue
[stream.hls][debug] Download of segment 99053 complete
```

I can't test all of them because of geoblocking, but they should work as well.

hannespetur: Amazing find. The API has a 1080p live stream which is not available at the old hard-coded URL! Thank you very much. The latest changes replace the hard-coded URLs with the URLs provided by the API.

Best, Hannes

gravyboat: Good stuff, thanks @hannespetur and @back-to!
diff --git a/src/streamlink/plugins/ruv.py b/src/streamlink/plugins/ruv.py index 09db4860..5873d330 100644 --- a/src/streamlink/plugins/ruv.py +++ b/src/streamlink/plugins/ruv.py @@ -3,40 +3,34 @@ import re from streamlink.plugin import Plugin -from streamlink.stream import RTMPStream, HLSStream +from streamlink.stream import HLSStream from streamlink.plugin.api import http +from streamlink.plugin.api import validate -RTMP_LIVE_URL = "rtmp://ruv{0}livefs.fplive.net/ruv{0}live-live/stream{1}" -RTMP_SARPURINN_URL = "rtmp://sipvodfs.fplive.net/sipvod/{0}/{1}{2}.{3}" - -HLS_RUV_LIVE_URL = "http://ruvruv-live.hls.adaptive.level3.net/ruv/ruv/index/stream{0}.m3u8" -HLS_RADIO_LIVE_URL = "http://sip-live.hds.adaptive.level3.net/hls-live/ruv-{0}/_definst_/live/stream1.m3u8" -HLS_SARPURINN_URL = "http://sip-ruv-vod.dcp.adaptive.level3.net/{0}/{1}{2}.{3}.m3u8" +# URL to the RUV LIVE API +RUV_LIVE_API = """http://www.ruv.is/sites/all/themes/at_ruv/scripts/\ +ruv-stream.php?channel={0}&format=json""" _live_url_re = re.compile(r"""^(?:https?://)?(?:www\.)?ruv\.is/ - (?P<channel_path> - ruv| - ras1| - ras-1| - ras2| - ras-2| - rondo + (?P<stream_id> + ruv/?$| + ruv2/?$| + ruv-2/?$| + ras1/?$| + ras2/?$| + rondo/?$ ) /? """, re.VERBOSE) -_sarpurinn_url_re = re.compile(r"""^(?:https?://)?(?:www\.)?ruv\.is/sarpurinn/ - (?: +_sarpurinn_url_re = re.compile(r"""^(?:https?://)?(?:www\.)?ruv\.is/spila/ + (?P<stream_id> ruv| ruv2| ruv-2| ruv-aukaras| - ras1| - ras-1| - ras2| - ras-2 ) / [a-zA-Z0-9_-]+ @@ -45,37 +39,26 @@ _sarpurinn_url_re = re.compile(r"""^(?:https?://)?(?:www\.)?ruv\.is/sarpurinn/ /? """, re.VERBOSE) -_rtmp_url_re = re.compile(r"""rtmp://sipvodfs\.fplive.net/sipvod/ - (?P<status> - lokad| - opid - ) - / - (?P<date>[0-9]+/[0-9][0-9]/[0-9][0-9]/)? - (?P<id>[A-Z0-9\$_]+) - \. - (?P<ext> - mp4| - mp3 - )""", re.VERBOSE) - -_id_map = { - "ruv": "ruv", - "ras1": "ras1", - "ras-1": "ras1", - "ras2": "ras2", - "ras-2": "ras2", - "rondo": "ras3" -} +_single_re = re.compile(r"""(?P<url>http://[0-9a-zA-Z\-\.]*/ + (lokad|opid) + / + ([0-9]+/[0-9][0-9]/[0-9][0-9]/)? 
+ ([A-Z0-9\$_]+\.mp4\.m3u8) + ) + """, re.VERBOSE) + +_multi_re = re.compile(r"""(?P<base_url>http://[0-9a-zA-Z\-\.]*/ + (lokad|opid) + /) + manifest.m3u8\?tlm=hls&streams= + (?P<streams>[0-9a-zA-Z\/\.\,:]+) + """, re.VERBOSE) class Ruv(Plugin): @classmethod def can_handle_url(cls, url): - if _live_url_re.match(url): - return _live_url_re.match(url) - else: - return _sarpurinn_url_re.match(url) + return _live_url_re.match(url) or _sarpurinn_url_re.match(url) def __init__(self, url): Plugin.__init__(self, url) @@ -83,75 +66,77 @@ class Ruv(Plugin): if live_match: self.live = True - self.channel_path = live_match.group("channel_path") + self.stream_id = live_match.group("stream_id") + + # Remove slashes + self.stream_id.replace("/", "") + + # Remove dashes + self.stream_id.replace("-", "") + + # Rondo is identified as ras3 + if self.stream_id == "rondo": + self.stream_id = "ras3" else: self.live = False def _get_live_streams(self): - stream_id = _id_map[self.channel_path] + # Get JSON API + res = http.get(RUV_LIVE_API.format(self.stream_id)) - if stream_id == "ruv": - qualities_rtmp = ["720p", "480p", "360p", "240p"] - - for i, quality in enumerate(qualities_rtmp): - yield quality, RTMPStream( - self.session, - { - "rtmp": RTMP_LIVE_URL.format(stream_id, i + 1), - "pageUrl": self.url, - "live": True - } - ) + # Parse the JSON API + json_res = http.json(res) - qualities_hls = ["240p", "360p", "480p", "720p"] - for i, quality_hls in enumerate(qualities_hls): - yield quality_hls, HLSStream( - self.session, - HLS_RUV_LIVE_URL.format(i + 1) - ) + for url in json_res["result"]: + if url.startswith("rtmp:"): + continue - else: - yield "audio", RTMPStream(self.session, { - "rtmp": RTMP_LIVE_URL.format(stream_id, 1), - "pageUrl": self.url, - "live": True - }) + # Get available streams + streams = HLSStream.parse_variant_playlist(self.session, url) - yield "audio", HLSStream( - self.session, - HLS_RADIO_LIVE_URL.format(stream_id) - ) + for quality, hls in streams.items(): + yield quality, hls def _get_sarpurinn_streams(self): - res = http.get(self.url) - match = _rtmp_url_re.search(res.text) - - if not match: - yield - - token = match.group("id") - status = match.group("status") - extension = match.group("ext") - date = match.group("date") - if not date: - date = "" + # Get HTML page + res = http.get(self.url).text + lines = "\n".join([l for l in res.split("\n") if "video.src" in l]) + multi_stream_match = _multi_re.search(lines) + + if multi_stream_match and multi_stream_match.group("streams"): + base_url = multi_stream_match.group("base_url") + streams = multi_stream_match.group("streams").split(",") + + for stream in streams: + if stream.count(":") != 1: + continue + + [token, quality] = stream.split(":") + quality = int(quality) + key = "" + + if quality <= 500: + key = "240p" + elif quality <= 800: + key = "360p" + elif quality <= 1200: + key = "480p" + elif quality <= 2400: + key = "720p" + else: + key = "1080p" + + yield key, HLSStream( + self.session, + base_url + token + ) - if extension == "mp3": - key = "audio" else: - key = "576p" - - # HLS on Sarpurinn is currently only available on videos - yield key, HLSStream( - self.session, - HLS_SARPURINN_URL.format(status, date, token, extension) - ) - - yield key, RTMPStream(self.session, { - "rtmp": RTMP_SARPURINN_URL.format(status, date, token, extension), - "pageUrl": self.url, - "live": True - }) + single_stream_match = _single_re.search(lines) + + if single_stream_match: + url = single_stream_match.group("url") + yield "576p", 
HLSStream(self.session, url) def _get_streams(self): if self.live:
RUV Iceland : plugin partially outdated

### Checklist

- [x] This is a bug report.
- [ ] This is a plugin request.
- [ ] This is a feature request.
- [ ] I used the search function to find already opened/closed issues or pull requests.

### Description

RUV is the Icelandic broadcasting corporation, consisting of three radio and three TV channels:

1. Radio channels
------------------
- RAS 1 : `http://ruv.is/nolayout/popup/ras1`
- RAS 2 : `http://ruv.is/nolayout/popup/ras2`
- Rondo : `http://ruv.is/nolayout/popup/rondo`

2. TV
-----
- RUV : `http://ruv.is/ruv`
- RUV 2 : `http://ruv.is/ruv-2`
- Krakkaruv : `http://krakkaruv.is/hlusta/spila`

### Expected / Actual behavior

The channels use HLS for broadcasting. In the past they used rtmp too; I am not sure if that is still the case.

### Reproduction steps / Stream URLs to test

Radio
------

```
streamlink -l debug "http://ruv.is/nolayout/popup/ras1"
error: No plugin can handle URL: http://ruv.is/nolayout/popup/ras1
```

TV
---

```
streamlink -l debug --http-proxy "82.221.48.137:8080" "http://ruv.is/ruv" best
[cli][info] Found matching plugin ruv for URL http://ruv.is/ruv
[cli][info] Available streams: 720p_hls, 480p_hls, 240p_hls, 360p_hls, 240p (worst), 360p, 480p, 720p (best)
[cli][info] Opening stream: 720p (rtmp)
[stream.rtmp][debug] Spawning command: C:\Users\Ddr\Downloads\streamlink\\rtmpdump\rtmpdump.exe --flv - --live --pageUrl http://ruv.is/ruv --rtmp rtmp://ruvruvlivefs.fplive.net/ruvruvlive-live/stream1
[cli][error] Could not open stream: Error while executing subprocess
```

### Environment details (operating system, python version, etc.)

W7 PRO / streamlink portable

### Comments, logs, screenshots, etc.

TV channels are geolocked, unlike the radio channels.
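A minimal sketch of the API-based approach the fix adopts (the endpoint is taken from the maintainer hints above; plain `requests` is used here instead of streamlink's internal HTTP session, and `live_hls_urls` is an illustrative helper, not part of the plugin):

```python
import requests

RUV_LIVE_API = ("http://www.ruv.is/sites/all/themes/at_ruv/scripts/"
                "ruv-stream.php?channel={0}&format=json")


def live_hls_urls(channel_id):
    """Yield the HLS playlist URLs the RUV API returns for a channel,
    skipping the legacy rtmp entries."""
    data = requests.get(RUV_LIVE_API.format(channel_id)).json()
    for url in data["result"]:
        if url.startswith("rtmp:"):
            continue
        yield url


# e.g. live_hls_urls("ruv2") yields the index.m3u8 variant playlist
# shown in the hints, which the plugin then hands to HLSStream.
```

This is why the rewritten plugin no longer needs any hard-coded stream URLs or RTMP quality tables.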
streamlink/streamlink
diff --git a/tests/test_plugin_ruv.py b/tests/test_plugin_ruv.py new file mode 100644 index 00000000..fc7d5194 --- /dev/null +++ b/tests/test_plugin_ruv.py @@ -0,0 +1,28 @@ +import unittest + +from streamlink.plugins.ruv import Ruv + + +class TestPluginRuv(unittest.TestCase): + def test_can_handle_url(self): + # should match + self.assertTrue(Ruv.can_handle_url("ruv.is/ruv")) + self.assertTrue(Ruv.can_handle_url("http://ruv.is/ruv")) + self.assertTrue(Ruv.can_handle_url("http://ruv.is/ruv/")) + self.assertTrue(Ruv.can_handle_url("https://ruv.is/ruv/")) + self.assertTrue(Ruv.can_handle_url("http://www.ruv.is/ruv")) + self.assertTrue(Ruv.can_handle_url("http://www.ruv.is/ruv/")) + self.assertTrue(Ruv.can_handle_url("ruv.is/ruv2")) + self.assertTrue(Ruv.can_handle_url("ruv.is/ras1")) + self.assertTrue(Ruv.can_handle_url("ruv.is/ras2")) + self.assertTrue(Ruv.can_handle_url("ruv.is/rondo")) + self.assertTrue(Ruv.can_handle_url("http://www.ruv.is/spila/ruv/ol-2018-ishokki-karla/20180217")) + self.assertTrue(Ruv.can_handle_url("http://www.ruv.is/spila/ruv/frettir/20180217")) + + # shouldn't match + self.assertFalse(Ruv.can_handle_url("rruv.is/ruv")) + self.assertFalse(Ruv.can_handle_url("ruv.is/ruvnew")) + self.assertFalse(Ruv.can_handle_url("https://www.bloomberg.com/live/")) + self.assertFalse(Ruv.can_handle_url("https://www.bloomberg.com/politics/articles/2017-04-17/french-race-up-for-grabs-days-before-voters-cast-first-ballots")) + self.assertFalse(Ruv.can_handle_url("http://www.tvcatchup.com/")) + self.assertFalse(Ruv.can_handle_url("http://www.youtube.com/"))
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 1 }
0.10
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "codecov", "coverage", "mock", "requests-mock" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.9", "reqs_path": [ "dev-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 codecov==2.1.13 coverage==7.8.0 distlib==0.3.9 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 iso-639==0.4.5 iso3166==2.1.1 Jinja2==3.1.6 MarkupSafe==3.0.2 mock==5.2.0 packaging==24.2 pluggy==1.5.0 pycryptodome==3.22.0 pynsist==2.8 PySocks==1.7.1 pytest==8.3.5 pytest-cov==6.0.0 requests==2.32.3 requests-mock==1.12.1 requests_download==0.1.2 -e git+https://github.com/streamlink/streamlink.git@db9f6640df5bf2cdb8d2acdf2960fe1fc96acfec#egg=streamlink tomli==2.2.1 urllib3==2.3.0 websocket-client==1.8.0 yarg==0.1.10
name: streamlink channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - codecov==2.1.13 - coverage==7.8.0 - distlib==0.3.9 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - iso-639==0.4.5 - iso3166==2.1.1 - jinja2==3.1.6 - markupsafe==3.0.2 - mock==5.2.0 - packaging==24.2 - pluggy==1.5.0 - pycryptodome==3.22.0 - pynsist==2.8 - pysocks==1.7.1 - pytest==8.3.5 - pytest-cov==6.0.0 - requests==2.32.3 - requests-download==0.1.2 - requests-mock==1.12.1 - tomli==2.2.1 - urllib3==2.3.0 - websocket-client==1.8.0 - yarg==0.1.10 prefix: /opt/conda/envs/streamlink
[ "tests/test_plugin_ruv.py::TestPluginRuv::test_can_handle_url" ]
[]
[]
[]
BSD 2-Clause "Simplified" License
2,168
[ "src/streamlink/plugins/ruv.py" ]
[ "src/streamlink/plugins/ruv.py" ]
pika__pika-964
0e13941b26bfa2f5a074b6ee968b180bb5a13724
2018-02-17 08:56:59
7b6d7983db021ae4b84d08ea9cee4b8f960ada43
diff --git a/docs/examples/asynchronous_consumer_example.rst b/docs/examples/asynchronous_consumer_example.rst index 2b6f9a7..88cdf5d 100644 --- a/docs/examples/asynchronous_consumer_example.rst +++ b/docs/examples/asynchronous_consumer_example.rst @@ -55,8 +55,7 @@ consumer.py:: """ LOGGER.info('Connecting to %s', self._url) return pika.SelectConnection(pika.URLParameters(self._url), - self.on_connection_open, - stop_ioloop_on_close=False) + self.on_connection_open) def on_connection_open(self, unused_connection): """This method is called by pika once the connection to RabbitMQ has diff --git a/docs/examples/asynchronous_publisher_example.rst b/docs/examples/asynchronous_publisher_example.rst index b310146..dac92e6 100644 --- a/docs/examples/asynchronous_publisher_example.rst +++ b/docs/examples/asynchronous_publisher_example.rst @@ -55,9 +55,7 @@ publisher.py:: def connect(self): """This method connects to RabbitMQ, returning the connection handle. When the connection is established, the on_connection_open method - will be invoked by pika. If you want the reconnection to work, make - sure you set stop_ioloop_on_close to False, which is not the default - behavior of this adapter. + will be invoked by pika. :rtype: pika.SelectConnection @@ -65,8 +63,7 @@ publisher.py:: LOGGER.info('Connecting to %s', self._url) return pika.SelectConnection(pika.URLParameters(self._url), on_open_callback=self.on_connection_open, - on_close_callback=self.on_connection_closed, - stop_ioloop_on_close=False) + on_close_callback=self.on_connection_closed) def on_connection_open(self, unused_connection): """This method is called by pika once the connection to RabbitMQ has diff --git a/examples/asynchronous_consumer_example.py b/examples/asynchronous_consumer_example.py index 67f16bd..1744788 100644 --- a/examples/asynchronous_consumer_example.py +++ b/examples/asynchronous_consumer_example.py @@ -52,8 +52,7 @@ class ExampleConsumer(object): return pika.SelectConnection(parameters=pika.URLParameters(self._url), on_open_callback=self.on_connection_open, on_open_error_callback=self.on_connection_open_error, - on_close_callback=self.on_connection_closed, - stop_ioloop_on_close=False) + on_close_callback=self.on_connection_closed) def on_connection_open(self, unused_connection): """This method is called by pika once the connection to RabbitMQ has diff --git a/examples/asynchronous_publisher_example.py b/examples/asynchronous_publisher_example.py index 75143ce..d92e511 100644 --- a/examples/asynchronous_publisher_example.py +++ b/examples/asynchronous_publisher_example.py @@ -50,9 +50,7 @@ class ExamplePublisher(object): def connect(self): """This method connects to RabbitMQ, returning the connection handle. When the connection is established, the on_connection_open method - will be invoked by pika. If you want the reconnection to work, make - sure you set stop_ioloop_on_close to False, which is not the default - behavior of this adapter. + will be invoked by pika. 
:rtype: pika.SelectConnection @@ -61,8 +59,7 @@ class ExamplePublisher(object): return pika.SelectConnection(pika.URLParameters(self._url), on_open_callback=self.on_connection_open, on_open_error_callback=self.on_connection_open_error, - on_close_callback=self.on_connection_closed, - stop_ioloop_on_close=False) + on_close_callback=self.on_connection_closed) def on_connection_open(self, unused_connection): """This method is called by pika once the connection to RabbitMQ has diff --git a/pika/adapters/asyncio_connection.py b/pika/adapters/asyncio_connection.py index dff7656..cc38c3f 100644 --- a/pika/adapters/asyncio_connection.py +++ b/pika/adapters/asyncio_connection.py @@ -159,7 +159,6 @@ class AsyncioConnection(base_connection.BaseConnection): on_open_callback=None, on_open_error_callback=None, on_close_callback=None, - stop_ioloop_on_close=False, custom_ioloop=None): """ Create a new instance of the AsyncioConnection class, connecting to RabbitMQ automatically @@ -169,7 +168,7 @@ class AsyncioConnection(base_connection.BaseConnection): :type on_open_callback: method :param on_open_error_callback: Method to call if the connection cant be opened :type on_open_error_callback: method - :param asyncio.AbstractEventLoop loop: By default asyncio.get_event_loop() + :param asyncio.AbstractEventLoop custom_ioloop: By default asyncio.get_event_loop() """ self.sleep_counter = 0 @@ -177,10 +176,11 @@ class AsyncioConnection(base_connection.BaseConnection): self.ioloop = IOLoopAdapter(self.loop) super().__init__( - parameters, on_open_callback, + parameters, + on_open_callback, on_open_error_callback, - on_close_callback, self.ioloop, - stop_ioloop_on_close=stop_ioloop_on_close, + on_close_callback, + self.ioloop ) def _adapter_connect(self): diff --git a/pika/adapters/base_connection.py b/pika/adapters/base_connection.py index 37dcc43..fdc456b 100644 --- a/pika/adapters/base_connection.py +++ b/pika/adapters/base_connection.py @@ -37,8 +37,7 @@ class BaseConnection(connection.Connection): on_open_callback=None, on_open_error_callback=None, on_close_callback=None, - ioloop=None, - stop_ioloop_on_close=True): + ioloop=None): """Create a new instance of the Connection object. 
:param pika.connection.Parameters parameters: Connection parameters @@ -48,7 +47,6 @@ class BaseConnection(connection.Connection): :param method on_close_callback: Called when the connection is closed: on_close_callback(connection, reason_code, reason_text) :param object ioloop: IOLoop object to use - :param bool stop_ioloop_on_close: Call ioloop.stop() if disconnected :raises: RuntimeError :raises: ValueError @@ -64,7 +62,6 @@ class BaseConnection(connection.Connection): self.event_state = self.base_events self.ioloop = ioloop self.socket = None - self.stop_ioloop_on_close = stop_ioloop_on_close self.write_buffer = None super(BaseConnection, self).__init__(parameters, on_open_callback, @@ -119,11 +116,7 @@ class BaseConnection(connection.Connection): :param str reply_text: The text reason for the close """ - try: - super(BaseConnection, self).close(reply_code, reply_text) - finally: - if self.is_closed: - self._handle_ioloop_stop() + super(BaseConnection, self).close(reply_code, reply_text) def remove_timeout(self, timeout_id): """Remove the timeout from the IOLoop by the ID returned from @@ -170,10 +163,7 @@ class BaseConnection(connection.Connection): def _adapter_disconnect(self): """Invoked if the connection is being told to disconnect""" - try: - self._cleanup_socket() - finally: - self._handle_ioloop_stop() + self._cleanup_socket() def _cleanup_socket(self): """Close the socket cleanly""" @@ -299,14 +289,6 @@ class BaseConnection(connection.Connection): # called), etc., etc., etc. self._manage_event_state() - def _handle_ioloop_stop(self): - """Invoked when the connection is closed to determine if the IOLoop - should be stopped or not. - - """ - if self.stop_ioloop_on_close and self.ioloop: - self.ioloop.stop() - def _handle_error(self, error_value): """Internal error handling method. Here we expect a socket error coming in and will handle different socket errors differently. diff --git a/pika/adapters/blocking_connection.py b/pika/adapters/blocking_connection.py index 8649d2e..2a1629f 100644 --- a/pika/adapters/blocking_connection.py +++ b/pika/adapters/blocking_connection.py @@ -369,8 +369,7 @@ class BlockingConnection(object): parameters=parameters, on_open_callback=self._opened_result.set_value_once, on_open_error_callback=self._open_error_result.set_value_once, - on_close_callback=self._closed_result.set_value_once, - stop_ioloop_on_close=False) + on_close_callback=self._closed_result.set_value_once) self._impl.ioloop.activate_poller() diff --git a/pika/adapters/select_connection.py b/pika/adapters/select_connection.py index f6e6f3e..3ae3eca 100644 --- a/pika/adapters/select_connection.py +++ b/pika/adapters/select_connection.py @@ -71,7 +71,6 @@ class SelectConnection(BaseConnection): on_open_callback=None, on_open_error_callback=None, on_close_callback=None, - stop_ioloop_on_close=True, custom_ioloop=None): """Create a new instance of the Connection object. 
@@ -81,15 +80,17 @@ class SelectConnection(BaseConnection): be established: on_open_error_callback(connection, str|exception) :param method on_close_callback: Called when the connection is closed: on_close_callback(connection, reason_code, reason_text) - :param bool stop_ioloop_on_close: Call ioloop.stop() if disconnected :param custom_ioloop: Override using the global IOLoop in Tornado :raises: RuntimeError """ ioloop = custom_ioloop or IOLoop() super(SelectConnection, self).__init__( - parameters, on_open_callback, on_open_error_callback, - on_close_callback, ioloop, stop_ioloop_on_close) + parameters, + on_open_callback, + on_open_error_callback, + on_close_callback, + ioloop) def _adapter_connect(self): """Connect to the RabbitMQ broker, returning True on success, False diff --git a/pika/adapters/tornado_connection.py b/pika/adapters/tornado_connection.py index 309fb77..3c3c736 100644 --- a/pika/adapters/tornado_connection.py +++ b/pika/adapters/tornado_connection.py @@ -9,20 +9,7 @@ LOGGER = logging.getLogger(__name__) class TornadoConnection(base_connection.BaseConnection): - """The TornadoConnection runs on the Tornado IOLoop. If you're running the - connection in a web app, make sure you set stop_ioloop_on_close to False, - which is the default behavior for this adapter, otherwise the web app - will stop taking requests. - - :param pika.connection.Parameters parameters: Connection parameters - :param on_open_callback: The method to call when the connection is open - :type on_open_callback: method - :param on_open_error_callback: Method to call if the connection cant - be opened - :type on_open_error_callback: method - :param bool stop_ioloop_on_close: Call ioloop.stop() if disconnected - :param custom_ioloop: Override using the global IOLoop in Tornado - + """The TornadoConnection runs on the Tornado IOLoop. 
""" def __init__(self, @@ -30,7 +17,6 @@ class TornadoConnection(base_connection.BaseConnection): on_open_callback=None, on_open_error_callback=None, on_close_callback=None, - stop_ioloop_on_close=False, custom_ioloop=None): """Create a new instance of the TornadoConnection class, connecting to RabbitMQ automatically @@ -42,16 +28,16 @@ class TornadoConnection(base_connection.BaseConnection): be established: on_open_error_callback(connection, str|exception) :param method on_close_callback: Called when the connection is closed: on_close_callback(connection, reason_code, reason_text) - :param bool stop_ioloop_on_close: Call ioloop.stop() if disconnected :param custom_ioloop: Override using the global IOLoop in Tornado """ self.sleep_counter = 0 self.ioloop = custom_ioloop or ioloop.IOLoop.instance() - super(TornadoConnection, self).__init__(parameters, on_open_callback, + super(TornadoConnection, self).__init__(parameters, + on_open_callback, on_open_error_callback, - on_close_callback, self.ioloop, - stop_ioloop_on_close) + on_close_callback, + self.ioloop) def _adapter_connect(self): """Connect to the remote socket, adding the socket to the IOLoop if diff --git a/pika/adapters/twisted_connection.py b/pika/adapters/twisted_connection.py index 3ce458b..72c7c2e 100644 --- a/pika/adapters/twisted_connection.py +++ b/pika/adapters/twisted_connection.py @@ -282,15 +282,13 @@ class TwistedConnection(base_connection.BaseConnection): parameters=None, on_open_callback=None, on_open_error_callback=None, - on_close_callback=None, - stop_ioloop_on_close=False): + on_close_callback=None): super(TwistedConnection, self).__init__( parameters=parameters, on_open_callback=on_open_callback, on_open_error_callback=on_open_error_callback, on_close_callback=on_close_callback, - ioloop=IOLoopReactorAdapter(self, reactor), - stop_ioloop_on_close=stop_ioloop_on_close) + ioloop=IOLoopReactorAdapter(self, reactor)) def _adapter_connect(self): """Connect to the RabbitMQ broker""" @@ -374,8 +372,7 @@ class TwistedProtocolConnection(base_connection.BaseConnection): on_open_callback=self.connectionReady, on_open_error_callback=self.connectionFailed, on_close_callback=on_close_callback, - ioloop=IOLoopReactorAdapter(self, reactor), - stop_ioloop_on_close=False) + ioloop=IOLoopReactorAdapter(self, reactor)) def connect(self): # The connection is open asynchronously by Twisted, so skip the whole
Remove stop_ioloop_on_close arg from adapter constructors for the 1.0 release

Some time ago, the `stop_ioloop_on_close=True` constructor arg slipped into all the asynchronous adapters. I think that the presence of this arg is overly opinionated. An asynchronous app is likely to have other logic besides a single pika connection running on the same IOLoop (tornado, asyncio, etc.), so having ioloop-stopping logic in pika adapters is too pika-centric, unnecessary, non-orthogonal, and likely error-prone, especially when it's brain-dead easy for the app to pass `on_close_callback` (which has the signature `on_close_callback(connection, reason_code, reason_text)`) to the adapter constructor and call `connection.ioloop.stop()` itself if needed.

I think we should remove the `stop_ioloop_on_close` args everywhere in pika, as well as any logic dealing with stopping the ioloop in pika adapters.
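A minimal sketch of the app-side replacement the issue describes, assuming the post-change `SelectConnection` signature from the patch above:

```python
import pika


def on_close(connection, reason_code, reason_text):
    # The app, not the adapter, decides when the ioloop stops.
    print("connection closed: %s %s" % (reason_code, reason_text))
    connection.ioloop.stop()


connection = pika.SelectConnection(
    parameters=pika.ConnectionParameters("localhost"),
    on_close_callback=on_close)

connection.ioloop.start()  # runs until on_close stops it
```

This keeps the loop-lifetime decision in one place in the application, which matters when other tornado/asyncio work shares the same loop.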
pika/pika
diff --git a/tests/unit/blocking_connection_tests.py b/tests/unit/blocking_connection_tests.py index 118aa2d..69a80e5 100644 --- a/tests/unit/blocking_connection_tests.py +++ b/tests/unit/blocking_connection_tests.py @@ -44,8 +44,7 @@ class BlockingConnectionTests(unittest.TestCase): parameters='params', on_open_callback=mock.ANY, on_open_error_callback=mock.ANY, - on_close_callback=mock.ANY, - stop_ioloop_on_close=mock.ANY) + on_close_callback=mock.ANY) self.assertEqual(connection._impl.ioloop.activate_poller.call_count, 1) diff --git a/tests/unit/tornado_tests.py b/tests/unit/tornado_tests.py index de78c60..7c36346 100644 --- a/tests/unit/tornado_tests.py +++ b/tests/unit/tornado_tests.py @@ -15,4 +15,4 @@ class TornadoConnectionTests(unittest.TestCase): obj = tornado_connection.TornadoConnection() mock_init.assert_called_once_with( None, None, None, None, - tornado_connection.ioloop.IOLoop.instance(), False) + tornado_connection.ioloop.IOLoop.instance())
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 2 }, "num_modified_files": 10 }
0.11
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-asyncio" ], "pre_install": [ "apt-get update", "apt-get install -y gcc", "apt-get install -y rabbitmq-server" ], "python": "3.9", "reqs_path": [ "test-requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
certifi==2025.1.31 charset-normalizer==3.4.1 codecov==2.1.13 coverage==7.8.0 exceptiongroup==1.2.2 idna==3.10 iniconfig==2.1.0 mock==5.2.0 nose==1.3.7 packaging==24.2 -e git+https://github.com/pika/pika.git@0e13941b26bfa2f5a074b6ee968b180bb5a13724#egg=pika pluggy==1.5.0 pytest==8.3.5 pytest-asyncio==0.26.0 requests==2.32.3 tomli==2.2.1 tornado==6.4.2 Twisted==15.3.0 typing_extensions==4.13.0 urllib3==2.3.0 zope.interface==7.2
name: pika channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - certifi==2025.1.31 - charset-normalizer==3.4.1 - codecov==2.1.13 - coverage==7.8.0 - exceptiongroup==1.2.2 - idna==3.10 - iniconfig==2.1.0 - mock==5.2.0 - nose==1.3.7 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - pytest-asyncio==0.26.0 - requests==2.32.3 - tomli==2.2.1 - tornado==6.4.2 - twisted==15.3.0 - typing-extensions==4.13.0 - urllib3==2.3.0 - zope-interface==7.2 prefix: /opt/conda/envs/pika
[ "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_constructor", "tests/unit/tornado_tests.py::TornadoConnectionTests::test_tornado_connection_call_parent" ]
[]
[ "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_channel", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_close_with_channel_closed_exception", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_connection_attempts_with_timeout", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_server_initiated_error_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_server_initiated_no_error_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_flush_output_user_initiated_close", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_process_io_for_connection_setup", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_process_io_for_connection_setup_fails_with_open_error", "tests/unit/blocking_connection_tests.py::BlockingConnectionTests::test_sleep" ]
[]
BSD 3-Clause "New" or "Revised" License
2,169
[ "pika/adapters/blocking_connection.py", "pika/adapters/asyncio_connection.py", "pika/adapters/select_connection.py", "pika/adapters/tornado_connection.py", "pika/adapters/twisted_connection.py", "examples/asynchronous_publisher_example.py", "examples/asynchronous_consumer_example.py", "docs/examples/asynchronous_publisher_example.rst", "pika/adapters/base_connection.py", "docs/examples/asynchronous_consumer_example.rst" ]
[ "pika/adapters/blocking_connection.py", "pika/adapters/asyncio_connection.py", "pika/adapters/select_connection.py", "pika/adapters/tornado_connection.py", "pika/adapters/twisted_connection.py", "examples/asynchronous_publisher_example.py", "examples/asynchronous_consumer_example.py", "docs/examples/asynchronous_publisher_example.rst", "pika/adapters/base_connection.py", "docs/examples/asynchronous_consumer_example.rst" ]
jupyter__nbgrader-932
97909fa3e4f358aa3b79e2a079d8a8a595d51119
2018-02-17 20:02:54
5bc6f37c39c8b10b8f60440b2e6d9487e63ef3f1
diff --git a/nbgrader/coursedir.py b/nbgrader/coursedir.py index 26207215..978afb3f 100644 --- a/nbgrader/coursedir.py +++ b/nbgrader/coursedir.py @@ -4,7 +4,7 @@ import re from textwrap import dedent from traitlets.config import LoggingConfigurable -from traitlets import Unicode, List, default +from traitlets import Unicode, List, default, validate, TraitError from .utils import full_split, parse_utc @@ -42,6 +42,12 @@ class CourseDirectory(LoggingConfigurable): ) ).tag(config=True) + @validate('assignment_id') + def _validate_assignment_id(self, proposal): + if '+' in proposal['value']: + raise TraitError('Assignment names should not contain the following characters: +') + return proposal['value'] + notebook_id = Unicode( "*", help=dedent( diff --git a/nbgrader/server_extensions/formgrader/static/js/manage_assignments.js b/nbgrader/server_extensions/formgrader/static/js/manage_assignments.js index e8e68b1f..dd2757e3 100644 --- a/nbgrader/server_extensions/formgrader/static/js/manage_assignments.js +++ b/nbgrader/server_extensions/formgrader/static/js/manage_assignments.js @@ -395,6 +395,15 @@ var createAssignmentModal = function () { modal.modal('hide'); return; } + if (name.indexOf("+") != -1) { + var err = $("#create-error"); + err.text("Assignment names may not include the '+' character."); + err.show(); + return; + } else { + var err = $("#create-error"); + err.hide(); + } var model = new Assignment({ "name": name, @@ -417,19 +426,22 @@ var createAssignmentModal = function () { modal.modal('hide'); }; - var body = $("<table/>").addClass("table table-striped form-table"); + var body = $("<p/>") + body.append($("<p id='create-error' class='alert alert-danger' style='display: none'/>")); + var table = $("<table/>").addClass("table table-striped form-table"); + body.append(table) var name = $("<tr/>"); - body.append(name); + table.append(name); name.append($("<td/>").addClass("align-middle").text("Name")); name.append($("<td/>").append($("<input/>").addClass("name").attr("type", "text").attr("size", "31"))); var duedate = $("<tr/>"); - body.append(duedate); + table.append(duedate); duedate.append($("<td/>").addClass("align-middle").text("Due date (optional)")); duedate.append($("<td/>").append($("<input/>").addClass("duedate").attr("type", "datetime-local"))); var timezone = $("<tr/>"); - body.append(timezone); + table.append(timezone); timezone.append($("<td/>").addClass("align-middle").text("Timezone as UTC offset (optional)")); timezone.append($("<td/>").append($("<input/>").addClass("timezone").attr("type", "text")));
Assignment names with a '+' character break autograding

I (perhaps foolishly) created an assignment with a '+' character in its name (the web interface did not complain). Now nbgrader will not autograde submitted assignments. From the log file, I gather that the + character is used as a separator between student id and assignment name.

Does anyone know of a workaround that lets me autograde assignments that have already been submitted?

Regards, Axel

### Operating system

Kubuntu Linux 16.04.3 LTS

### `nbgrader --version`

Python version 3.5.2 (default, Nov 23 2017, 16:37:01) [GCC 5.4.0 20160609]
nbgrader version 0.5.4

### `jupyterhub --version` (if used with JupyterHub)

0.8.1

### `jupyter notebook --version`

5.4.0

### Expected behavior

`nbgrader autograde PHY332-08-Charge+Plate_Free_Fall` should autograde submitted assignments.

### Actual behavior

Autograding fails; .nbgrader.log says

```
[AutogradeApp | INFO] Copying /home/axel/PHY332/submitted/doe1j+PHY332-08-Charge/PHY332-08-Charge+Plate_Free_Fall/timestamp.txt -> /home/axel/PHY332/autograded/doe1j+PHY332-08-Charge/PHY332-08-Charge+Plate_Free_Fall/timestamp.txt
[AutogradeApp | INFO] Copying /home/axel/PHY332/submitted/doe1j+PHY332-08-Charge/PHY332-08-Charge+Plate_Free_Fall/image_charge.png -> /home/axel/PHY332/autograded/doe1j+PHY332-08-Charge/PHY332-08-Charge+Plate_Free_Fall/image_charge.png
[AutogradeApp | ERROR] No student with ID 'doe1j+PHY332-08-Charge' exists in the database
[AutogradeApp | ERROR] There was an error processing assignment: /home/axel/PHY332/submitted/doe1j+PHY332-08-Charge/PHY332-08-Charge+Plate_Free_Fall
[AutogradeApp | ERROR] Traceback (most recent call last):
  File "/usr/local/lib/python3.5/dist-packages/nbgrader/api.py", line 1122, in find_student
    .filter(Student.id == student_id)\
  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/query.py", line 2843, in one
    raise orm_exc.NoResultFound("No row was found for one()")
sqlalchemy.orm.exc.NoResultFound: No row was found for one()

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.5/dist-packages/nbgrader/converters/autograde.py", line 78, in init_assignment
    gb.find_student(student_id)
  File "/usr/local/lib/python3.5/dist-packages/nbgrader/api.py", line 1125, in find_student
    raise MissingEntry("No such student: {}".format(student_id))
nbgrader.api.MissingEntry: No such student: doe1j+PHY332-08-Charge

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.5/dist-packages/nbgrader/converters/base.py", line 289, in convert_notebooks
    self.init_assignment(gd['assignment_id'], gd['student_id'])
  File "/usr/local/lib/python3.5/dist-packages/nbgrader/converters/autograde.py", line 82, in init_assignment
    raise NbGraderException(msg)
nbgrader.converters.base.NbGraderException: No student with ID 'doe1j+PHY332-08-Charge' exists in the database
```

### Steps to reproduce the behavior

1. Create an assignment with `+` in its name, e.g. `PHY332-08-Charge+Plate_Free_Fall`
2. Collect submitted assignments using `nbgrader collect PHY332-08-Charge+Plate_Free_Fall --update`
3. Attempt to autograde with `nbgrader autograde PHY332-08-Charge+Plate_Free_Fall`
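The fix rejects such names up front with a traitlets validator; a condensed sketch of that approach, taken from the patch above:

```python
from traitlets import TraitError, Unicode, validate
from traitlets.config import LoggingConfigurable


class CourseDirectory(LoggingConfigurable):
    assignment_id = Unicode("*").tag(config=True)

    @validate('assignment_id')
    def _validate_assignment_id(self, proposal):
        # '+' is reserved as the student-id/assignment-name separator
        # in exchange paths, so reject it before any files move.
        if '+' in proposal['value']:
            raise TraitError(
                'Assignment names should not contain the following characters: +')
        return proposal['value']
```

The companion JavaScript change applies the same check in the formgrader's create-assignment modal, so the web interface now complains too.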
jupyter/nbgrader
diff --git a/nbgrader/tests/apps/test_nbgrader_assign.py b/nbgrader/tests/apps/test_nbgrader_assign.py index 73575ef2..c39d91db 100644 --- a/nbgrader/tests/apps/test_nbgrader_assign.py +++ b/nbgrader/tests/apps/test_nbgrader_assign.py @@ -1,6 +1,7 @@ import os import sys import pytest +import traitlets from os.path import join from sqlalchemy.exc import InvalidRequestError @@ -44,6 +45,13 @@ class TestNbGraderAssign(BaseTestApp): run_nbgrader(["assign", "ps1"]) assert os.path.isfile(join(course_dir, "release", "ps1", "foo.ipynb")) + def test_single_file_bad_assignment_name(self, course_dir, temp_cwd): + """Test that an error is thrown when the assignment name is invalid.""" + self._empty_notebook(join(course_dir, 'source', 'foo+bar', 'foo.ipynb')) + with pytest.raises(traitlets.TraitError): + run_nbgrader(["assign", "foo+bar", "--create"]) + assert not os.path.isfile(join(course_dir, "release", "foo+bar", "foo.ipynb")) + def test_multiple_files(self, course_dir): """Can multiple files be assigned?""" self._empty_notebook(join(course_dir, 'source', 'ps1', 'foo.ipynb')) diff --git a/nbgrader/tests/nbextensions/test_formgrader.py b/nbgrader/tests/nbextensions/test_formgrader.py index 5fcabca8..772a252f 100644 --- a/nbgrader/tests/nbextensions/test_formgrader.py +++ b/nbgrader/tests/nbextensions/test_formgrader.py @@ -825,7 +825,7 @@ def test_add_new_assignment(browser, port, gradebook): # set the name and dudedate elem = browser.find_element_by_css_selector("#add-assignment-modal .name") elem.click() - elem.send_keys("ps2") + elem.send_keys("ps2+a") elem = browser.find_element_by_css_selector("#add-assignment-modal .duedate") elem.click() elem.send_keys("2017-07-05T17:00") @@ -833,6 +833,16 @@ def test_add_new_assignment(browser, port, gradebook): elem.click() elem.send_keys("UTC") + # click save and wait for the error message to appear + utils._click_element(browser, "#add-assignment-modal .save") + WebDriverWait(browser, 10).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#create-error"))) + + # set a valid name + elem = browser.find_element_by_css_selector("#add-assignment-modal .name") + elem.clear() + elem.click() + elem.send_keys("ps2") + # click save and wait for the modal to close utils._click_element(browser, "#add-assignment-modal .save") modal_not_present = lambda browser: browser.execute_script("""return $("#add-assignment-modal").length === 0;""")
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_media", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 1, "test_score": 2 }, "num_modified_files": 2 }
0.5
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest pytest-cov pytest-rerunfailures coverage selenium invoke sphinx codecov cov-core", "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "dev-requirements.txt", "dev-requirements-windows.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 alembic==1.7.7 anyio==3.6.2 argon2-cffi==21.3.0 argon2-cffi-bindings==21.2.0 async-generator==1.10 attrs==22.2.0 Babel==2.11.0 backcall==0.2.0 bleach==4.1.0 certifi==2021.5.30 cffi==1.15.1 charset-normalizer==2.0.12 codecov==2.1.13 comm==0.1.4 contextvars==2.4 cov-core==1.15.0 coverage==6.2 dataclasses==0.8 decorator==5.1.1 defusedxml==0.7.1 docutils==0.18.1 entrypoints==0.4 greenlet==2.0.2 idna==3.10 imagesize==1.4.1 immutables==0.19 importlib-metadata==4.8.3 importlib-resources==5.4.0 iniconfig==1.1.1 invoke==2.2.0 ipykernel==5.5.6 ipython==7.16.3 ipython-genutils==0.2.0 ipywidgets==7.8.5 jedi==0.17.2 Jinja2==3.0.3 json5==0.9.16 jsonschema==3.2.0 jupyter==1.1.1 jupyter-client==7.1.2 jupyter-console==6.4.3 jupyter-core==4.9.2 jupyter-server==1.13.1 jupyterlab==3.2.9 jupyterlab-pygments==0.1.2 jupyterlab-server==2.10.3 jupyterlab_widgets==1.1.11 Mako==1.1.6 MarkupSafe==2.0.1 mistune==0.8.4 nbclassic==0.3.5 nbclient==0.5.9 nbconvert==6.0.7 nbformat==5.1.3 -e git+https://github.com/jupyter/nbgrader.git@97909fa3e4f358aa3b79e2a079d8a8a595d51119#egg=nbgrader nbval==0.10.0 nest-asyncio==1.6.0 notebook==6.4.10 packaging==21.3 pandocfilters==1.5.1 parso==0.7.1 pexpect==4.9.0 pickleshare==0.7.5 pluggy==1.0.0 prometheus-client==0.17.1 prompt-toolkit==3.0.36 ptyprocess==0.7.0 py==1.11.0 pycparser==2.21 pyenchant==3.2.2 Pygments==2.14.0 pyparsing==3.1.4 pyrsistent==0.18.0 pytest==7.0.1 pytest-cov==4.0.0 pytest-rerunfailures==10.3 python-dateutil==2.9.0.post0 pytz==2025.2 pyzmq==25.1.2 requests==2.27.1 selenium==3.141.0 Send2Trash==1.8.3 six==1.17.0 sniffio==1.2.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-rtd-theme==2.0.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jquery==4.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 sphinxcontrib-spelling==7.7.0 SQLAlchemy==1.4.54 terminado==0.12.1 testpath==0.6.0 tomli==1.2.3 tornado==6.1 traitlets==4.3.3 typing_extensions==4.1.1 urllib3==1.26.20 wcwidth==0.2.13 webencodings==0.5.1 websocket-client==1.3.1 widgetsnbextension==3.6.10 zipp==3.6.0
name: nbgrader channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - alembic==1.7.7 - anyio==3.6.2 - argon2-cffi==21.3.0 - argon2-cffi-bindings==21.2.0 - async-generator==1.10 - attrs==22.2.0 - babel==2.11.0 - backcall==0.2.0 - bleach==4.1.0 - cffi==1.15.1 - charset-normalizer==2.0.12 - codecov==2.1.13 - comm==0.1.4 - contextvars==2.4 - cov-core==1.15.0 - coverage==6.2 - dataclasses==0.8 - decorator==5.1.1 - defusedxml==0.7.1 - docutils==0.18.1 - entrypoints==0.4 - greenlet==2.0.2 - idna==3.10 - imagesize==1.4.1 - immutables==0.19 - importlib-metadata==4.8.3 - importlib-resources==5.4.0 - iniconfig==1.1.1 - invoke==2.2.0 - ipykernel==5.5.6 - ipython==7.16.3 - ipython-genutils==0.2.0 - ipywidgets==7.8.5 - jedi==0.17.2 - jinja2==3.0.3 - json5==0.9.16 - jsonschema==3.2.0 - jupyter==1.1.1 - jupyter-client==7.1.2 - jupyter-console==6.4.3 - jupyter-core==4.9.2 - jupyter-server==1.13.1 - jupyterlab==3.2.9 - jupyterlab-pygments==0.1.2 - jupyterlab-server==2.10.3 - jupyterlab-widgets==1.1.11 - mako==1.1.6 - markupsafe==2.0.1 - mistune==0.8.4 - nbclassic==0.3.5 - nbclient==0.5.9 - nbconvert==6.0.7 - nbformat==5.1.3 - nbval==0.10.0 - nest-asyncio==1.6.0 - notebook==6.4.10 - packaging==21.3 - pandocfilters==1.5.1 - parso==0.7.1 - pexpect==4.9.0 - pickleshare==0.7.5 - pluggy==1.0.0 - prometheus-client==0.17.1 - prompt-toolkit==3.0.36 - ptyprocess==0.7.0 - py==1.11.0 - pycparser==2.21 - pyenchant==3.2.2 - pygments==2.14.0 - pyparsing==3.1.4 - pyrsistent==0.18.0 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-rerunfailures==10.3 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyzmq==25.1.2 - requests==2.27.1 - selenium==3.141.0 - send2trash==1.8.3 - six==1.17.0 - sniffio==1.2.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-rtd-theme==2.0.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jquery==4.1 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - sphinxcontrib-spelling==7.7.0 - sqlalchemy==1.4.54 - terminado==0.12.1 - testpath==0.6.0 - tomli==1.2.3 - tornado==6.1 - traitlets==4.3.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - wcwidth==0.2.13 - webencodings==0.5.1 - websocket-client==1.3.1 - widgetsnbextension==3.6.10 - zipp==3.6.0 prefix: /opt/conda/envs/nbgrader
[ "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_single_file_bad_assignment_name" ]
[]
[ "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_help", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_no_args", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_conflicting_args", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_multiple_args", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_no_assignment", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_single_file", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_multiple_files", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_dependent_files", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_save_cells", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_force", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_permissions", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_custom_permissions", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_add_remove_extra_notebooks", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_add_extra_notebooks_with_submissions", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_remove_extra_notebooks_with_submissions", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_same_notebooks_with_submissions", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_force_single_notebook", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_fail_no_notebooks", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_no_metadata", "nbgrader/tests/apps/test_nbgrader_assign.py::TestNbGraderAssign::test_header" ]
[]
BSD 3-Clause "New" or "Revised" License
2,171
[ "nbgrader/coursedir.py", "nbgrader/server_extensions/formgrader/static/js/manage_assignments.js" ]
[ "nbgrader/coursedir.py", "nbgrader/server_extensions/formgrader/static/js/manage_assignments.js" ]
MasoniteFramework__core-16
3f87388490aa65aa2906f9f3e6727d70f16c5509
2018-02-18 02:32:53
416a7a42f83d4212557f9f7edf4412fc4259eb13
diff --git a/masonite/drivers/UploadDiskDriver.py b/masonite/drivers/UploadDiskDriver.py
index 410f732..e3721bc 100644
--- a/masonite/drivers/UploadDiskDriver.py
+++ b/masonite/drivers/UploadDiskDriver.py
@@ -12,6 +12,8 @@ class UploadDiskDriver(object):
         if not location:
             location = self.config.DRIVERS['disk']['location']
 
+        location += '/'
+
         open(location + filename, 'wb').write(fileitem.file.read())
 
         return location + filename
diff --git a/masonite/request.py b/masonite/request.py
index 8036a6f..7586018 100644
--- a/masonite/request.py
+++ b/masonite/request.py
@@ -196,11 +196,18 @@ class Request(object):
             # if the url contains a parameter variable like @id:int
             if '@' in url:
                 url = url.replace('@', '').replace(':int', '').replace(':string', '')
-                print('url after @', url)
-                compiled_url += '/' + str(self.param(url)) + '/'
+                compiled_url += str(self.param(url)) + '/'
             else:
-                compiled_url += url
-        print('compiled_url:', compiled_url)
+                compiled_url += url + '/'
+
+        # The loop isn't perfect and may have an unwanted trailing slash
+        if compiled_url.endswith('/') and not self.redirect_url.endswith('/'):
+            compiled_url = compiled_url[:-1]
+
+        # The loop isn't perfect and may have 2 slashes next to eachother
+        if '//' in compiled_url:
+            compiled_url = compiled_url.replace('//', '/')
+
         return compiled_url
 
     def has_subdomain(self):
diff --git a/setup.py b/setup.py
index 8e9a335..668e018 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@ setup(
         'masonite.testsuite',
         'masonite.queues',
     ],
-    version='1.3.1',
+    version='1.3.2',
     install_requires=[
         'validator.py==1.2.5',
         'cryptography==2.1.4',
Redirect does not compile URL correctly

A redirect URL of '/url/here' compiles into "urlhere" for some reason. Check the tests, because I thought this was fixed.
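For context, a standalone sketch of the compilation bug and the cleanup the patch applies — the function below is an illustrative reimplementation, not Masonite's exact `Request.compile_route_to_url`, and it omits the pass-through of absolute `http(s)` URLs that the tests also cover:

```python
# Illustrative sketch: joining split('/') segments without re-adding a
# separator is what yields 'urlhere'; appending '/' per segment and then
# cleaning up the result mirrors the fix in the patch above.
def compile_url(redirect_url, params=None):
    params = params or {}
    compiled = '/'
    for segment in redirect_url.split('/'):
        if segment.startswith('@'):
            # resolve a parameter segment like @id or @id:int
            key = segment.lstrip('@').replace(':int', '').replace(':string', '')
            compiled += str(params[key]) + '/'
        else:
            compiled += segment + '/'  # the buggy version appended bare segments
    # trim the trailing slash the loop adds unless the caller asked for one
    if compiled.endswith('/') and not redirect_url.endswith('/'):
        compiled = compiled[:-1]
    # collapse accidental double slashes from empty segments
    while '//' in compiled:
        compiled = compiled.replace('//', '/')
    return compiled

assert compile_url('test/url/here') == '/test/url/here'
assert compile_url('test/url/here/') == '/test/url/here/'
assert compile_url('test/@id/@test', {'id': '1', 'test': 'user'}) == '/test/1/user'
```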
MasoniteFramework/core
diff --git a/tests/test_requests.py b/tests/test_requests.py
index d68c2d8..f9b640f 100644
--- a/tests/test_requests.py
+++ b/tests/test_requests.py
@@ -149,3 +149,58 @@ def test_request_has_subdomain_returns_bool():
     request.environ['HTTP_HOST'] = 'test.localhost.com'
 
     assert request.has_subdomain() is True
+
+def test_redirect_compiles_url():
+    app = App()
+    app.bind('Request', REQUEST)
+    request = app.make('Request').load_app(app)
+
+    request.redirect('test/url')
+
+    assert request.compile_route_to_url() == '/test/url'
+
+def test_redirect_compiles_url_with_multiple_slashes():
+    app = App()
+    app.bind('Request', REQUEST)
+    request = app.make('Request').load_app(app)
+
+    request.redirect('test/url/here')
+
+    assert request.compile_route_to_url() == '/test/url/here'
+
+def test_redirect_compiles_url_with_trailing_slash():
+    app = App()
+    app.bind('Request', REQUEST)
+    request = app.make('Request').load_app(app)
+
+    request.redirect('test/url/here/')
+
+    assert request.compile_route_to_url() == '/test/url/here/'
+
+def test_redirect_compiles_url_with_parameters():
+    app = App()
+    app.bind('Request', REQUEST)
+    request = app.make('Request').load_app(app)
+
+    request.redirect('test/@id').send({'id': '1'})
+
+    assert request.compile_route_to_url() == '/test/1'
+
+def test_redirect_compiles_url_with_multiple_parameters():
+    app = App()
+    app.bind('Request', REQUEST)
+    request = app.make('Request').load_app(app)
+
+    request.redirect('test/@id/@test').send({'id': '1', 'test': 'user'})
+
+    assert request.compile_route_to_url() == '/test/1/user'
+
+def test_redirect_compiles_url_with_http():
+    app = App()
+    app.bind('Request', REQUEST)
+    request = app.make('Request').load_app(app)
+
+    request.redirect('http://google.com')
+
+    assert request.compile_route_to_url() == 'http://google.com'
+
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 2 }, "num_modified_files": 3 }
1.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist" ], "pre_install": [], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
asn1crypto==1.5.1 attrs==22.2.0 bcrypt==3.1.4 boto3==1.5.24 botocore==1.8.50 certifi==2021.5.30 cffi==1.15.1 chardet==3.0.4 coverage==6.2 cryptography==2.1.4 docutils==0.18.1 execnet==1.9.0 idna==2.6 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==3.0.3 jmespath==0.10.0 libsass==0.22.0 MarkupSafe==2.0.1 -e git+https://github.com/MasoniteFramework/core.git@3f87388490aa65aa2906f9f3e6727d70f16c5509#egg=masonite packaging==21.3 pluggy==1.0.0 py==1.11.0 pycparser==2.21 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 pytest-xdist==3.0.2 python-dateutil==2.9.0.post0 python-dotenv==0.20.0 requests==2.18.4 requests-file==2.1.0 s3transfer==0.1.13 six==1.17.0 tldextract==2.2.0 tomli==1.2.3 typing_extensions==4.1.1 urllib3==1.22 validator.py==1.2.5 whitenoise==5.3.0 zipp==3.6.0
name: core channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - asn1crypto==1.5.1 - attrs==22.2.0 - bcrypt==3.1.4 - boto3==1.5.24 - botocore==1.8.50 - cffi==1.15.1 - chardet==3.0.4 - coverage==6.2 - cryptography==2.1.4 - docutils==0.18.1 - execnet==1.9.0 - idna==2.6 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==3.0.3 - jmespath==0.10.0 - libsass==0.22.0 - markupsafe==2.0.1 - packaging==21.3 - pluggy==1.0.0 - py==1.11.0 - pycparser==2.21 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - pytest-xdist==3.0.2 - python-dateutil==2.9.0.post0 - python-dotenv==0.20.0 - requests==2.18.4 - requests-file==2.1.0 - s3transfer==0.1.13 - six==1.17.0 - tldextract==2.2.0 - tomli==1.2.3 - typing-extensions==4.1.1 - urllib3==1.22 - validator-py==1.2.5 - whitenoise==5.3.0 - zipp==3.6.0 prefix: /opt/conda/envs/core
[ "tests/test_requests.py::test_redirect_compiles_url", "tests/test_requests.py::test_redirect_compiles_url_with_multiple_slashes", "tests/test_requests.py::test_redirect_compiles_url_with_trailing_slash", "tests/test_requests.py::test_redirect_compiles_url_with_parameters", "tests/test_requests.py::test_redirect_compiles_url_with_multiple_parameters" ]
[]
[ "tests/test_requests.py::test_request_is_callable", "tests/test_requests.py::test_request_input_should_return_input_on_get_request", "tests/test_requests.py::test_request_all_should_return_params", "tests/test_requests.py::test_request_has_should_return_bool", "tests/test_requests.py::test_request_set_params_should_return_self", "tests/test_requests.py::test_request_param_returns_parameter_set_or_false", "tests/test_requests.py::test_request_appends_cookie", "tests/test_requests.py::test_request_sets_and_gets_cookies", "tests/test_requests.py::test_redirect_returns_request", "tests/test_requests.py::test_redirectTo_returns_request", "tests/test_requests.py::test_request_no_input_returns_false", "tests/test_requests.py::test_request_get_cookies_returns_cookies", "tests/test_requests.py::test_request_set_user_sets_object", "tests/test_requests.py::test_request_loads_app", "tests/test_requests.py::test_request_gets_input_from_container", "tests/test_requests.py::test_redirections_reset", "tests/test_requests.py::test_request_has_subdomain_returns_bool", "tests/test_requests.py::test_redirect_compiles_url_with_http" ]
[]
MIT License
2,173
[ "setup.py", "masonite/request.py", "masonite/drivers/UploadDiskDriver.py" ]
[ "setup.py", "masonite/request.py", "masonite/drivers/UploadDiskDriver.py" ]
grabbles__grabbit-48
c1a811a2a41153afec970f73923c0a53c66ef694
2018-02-18 04:25:46
5a588731d1a4a42a6b67f09ede110d7770845ed0
diff --git a/grabbit/__init__.py b/grabbit/__init__.py
index 35f4778..4d08018 100644
--- a/grabbit/__init__.py
+++ b/grabbit/__init__.py
@@ -1,10 +1,12 @@
-from .core import File, Entity, Layout, merge_layouts
+from .core import File, Entity, Layout, Tag, Domain, merge_layouts
 from .extensions import (replace_entities, build_path, write_contents_to_file)
 
 __all__ = [
     'File',
     'Entity',
     'Layout',
+    'Tag',
+    'Domain',
     'replace_entities',
     'build_path',
     'write_contents_to_file',
diff --git a/grabbit/core.py b/grabbit/core.py
index cc828e0..a84092b 100644
--- a/grabbit/core.py
+++ b/grabbit/core.py
@@ -5,9 +5,10 @@ from collections import defaultdict, OrderedDict, namedtuple
 from grabbit.external import six, inflect
 from grabbit.utils import natural_sort, listify
 from grabbit.extensions.writable import build_path, write_contents_to_file
-from os.path import join, basename, dirname, abspath, split
+from os.path import (join, basename, dirname, abspath, split, isabs, exists)
 from functools import partial
 from copy import deepcopy
+import warnings
 
 __all__ = ['File', 'Entity', 'Layout']
 
@@ -22,15 +23,26 @@ class File(object):
         self.path = filename
         self.filename = basename(self.path)
         self.dirname = dirname(self.path)
-        self.entities = {}
+        self.tags = {}
+
+    @property
+    def entities(self):
+        return {k: v.value for k, v in self.tags.items()}
+
+    @property
+    def domains(self):
+        return tuple(set([t.entity.domain.name for t in self.tags.values()]))
 
-    def _matches(self, entities=None, extensions=None, regex_search=False):
+    def _matches(self, entities=None, extensions=None, domains=None,
+                 regex_search=False):
         """
         Checks whether the file matches all of the passed entities and
        extensions.
+
         Args:
             entities (dict): A dictionary of entity names -> regex patterns.
             extensions (str, list): One or more file extensions to allow.
+            domains (str, list): One or more domains the file must match.
             regex_search (bool): Whether to require exact match (False) or
                 regex search (True) when comparing the query string to each
                 entity.
@@ -44,11 +56,16 @@ class File(object):
             if re.search(extensions, self.path) is None:
                 return False
 
+        if domains is not None:
+            domains = listify(domains)
+            if not set(self.domains) & set(domains):
+                return False
+
         if entities is not None:
 
             for name, val in entities.items():
 
-                if name not in self.entities:
+                if name not in self.tags:
                     return False
 
                 def make_patt(x):
@@ -64,7 +81,7 @@ class File(object):
                 ent_patts = [make_patt(x) for x in listify(val)]
                 patt = '|'.join(ent_patts)
 
-                if re.search(patt, str(self.entities[name])) is None:
+                if re.search(patt, str(self.tags[name].value)) is None:
                     return False
 
         return True
@@ -73,9 +90,9 @@ class File(object):
         Returns the File as a named tuple. The full path plus all entity
         key/value pairs are returned as attributes.
         """
-        _File = namedtuple('File', 'filename ' +
-                           ' '.join(self.entities.keys()))
-        return _File(filename=self.path, **self.entities)
+        entities = self.entities
+        _File = namedtuple('File', 'filename ' + ' '.join(entities.keys()))
+        return _File(filename=self.path, **entities)
 
     def copy(self, path_patterns, symbolic_link=False, root=None,
              conflicts='fail'):
@@ -102,12 +119,45 @@ class File(object):
                                conflicts=conflicts)
 
 
+class Domain(object):
+
+    def __init__(self, name, config, root):
+
+        self.name = name
+        self.config = config
+        self.root = root
+        self.entities = {}
+        self.files = []
+        self.filtering_regex = {}
+        self.path_patterns = []
+
+        if 'index' in config:
+            self.filtering_regex = config['index']
+            if self.filtering_regex.get('include') and \
+               self.filtering_regex.get('exclude'):
+                raise ValueError("You can only define either include or "
+                                 "exclude regex, not both.")
+
+        if 'default_path_patterns' in config:
+            self.path_patterns += listify(config['default_path_patterns'])
+
+    def add_entity(self, ent):
+        self.entities[ent.name] = ent
+
+    def add_file(self, file):
+        self.files.append(file)
+
+
+Tag = namedtuple('Tag', ['entity', 'value'])
+
+
 class Entity(object):
 
-    def __init__(self, name, pattern=None, mandatory=False, directory=None,
-                 map_func=None, **kwargs):
+    def __init__(self, name, pattern=None, domain=None, mandatory=False,
+                 directory=None, map_func=None, **kwargs):
         """
         Represents a single entity defined in the JSON config.
+
         Args:
             name (str): The name of the entity (e.g., 'subject', 'run', etc.)
             pattern (str): A regex pattern used to match against file names.
@@ -119,6 +169,7 @@ class Entity(object):
             map_func (callable): Optional callable used to extract the
                 Entity's value from the passed string (instead of trying to
                 match on the defined .pattern).
+            domain (Domain): The Domain the Entity belongs to.
             kwargs (dict): Additional keyword arguments.
         """
         if pattern is None and map_func is None:
@@ -128,12 +179,14 @@ class Entity(object):
                              "set." % name)
         self.name = name
         self.pattern = pattern
+        self.domain = domain
         self.mandatory = mandatory
         self.directory = directory
         self.map_func = map_func
         self.files = {}
         self.regex = re.compile(pattern) if pattern is not None else None
         self.kwargs = kwargs
+        self.id = '.'.join([getattr(domain, 'name', ''), name])
 
     def __iter__(self):
         for i in self.unique():
@@ -150,20 +203,29 @@ class Entity(object):
             setattr(result, k, new_val)
         return result
 
-    def matches(self, f):
+    def matches(self, f, update_file=False):
         """
         Determine whether the passed file matches the Entity and update the
         Entity/File mappings.
+
         Args:
             f (File): The File instance to match against.
+            update_file (bool): If True, the file's tag list is updated to
+                include the current Entity.
         """
         if self.map_func is not None:
-            f.entities[self.name] = self.map_func(f)
+            val = self.map_func(f)
         else:
             m = self.regex.search(f.path)
-            if m is not None:
-                val = m.group(1)
-                f.entities[self.name] = val
+            val = m.group(1) if m is not None else None
+
+        if val is None:
+            return False
+
+        if update_file:
+            f.tags[self.name] = Tag(self, val)
+
+        return True
 
     def add_file(self, filename, value):
         """ Adds the specified filename to tracking. """
@@ -175,6 +237,7 @@ class Entity(object):
 
     def count(self, files=False):
         """ Returns a count of unique values or files.
+
         Args:
             files (bool): When True, counts all files mapped to the Entity.
                 When False, counts all unique values.
@@ -203,7 +266,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
 
     def __init__(self, path, config=None, index=None, dynamic_getters=False,
                  absolute_paths=True, regex_search=False, entity_mapper=None,
-                 path_patterns=None):
+                 path_patterns=None, config_filename='layout.json'):
         """
         A container for all the files and metadata found at the specified
         path.
@@ -245,6 +308,10 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
             path_patterns (str, list): One or more filename patterns to use
                 as a default path pattern for this layout's files. Can also
                 be specified in the config file.
+            config_filename (str): The name of directory-specific config files.
+                Every directory will be scanned for this file, and if found,
+                the config file will be read in and added to the list of
+                configs.
         """
 
         self.root = abspath(path) if absolute_paths else path
@@ -253,62 +320,106 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
         self.mandatory = set()
        self.dynamic_getters = dynamic_getters
        self.regex_search = regex_search
-        self.filtering_regex = {}
         self.entity_mapper = self if entity_mapper == 'self' else entity_mapper
         self.path_patterns = path_patterns if path_patterns else []
+        self.config_filename = config_filename
+        self.domains = OrderedDict()
 
         if config is not None:
-            self._load_config(config)
+            for c in listify(config):
+                self._load_domain(c)
 
         if index is None:
             self.index()
         else:
             self.load_index(index)
 
-    def _load_config(self, config):
+    def _load_domain(self, config, root=None):
+
         if isinstance(config, six.string_types):
             config = json.load(open(config, 'r'))
-        elif isinstance(config, list):
-            merged = {}
-            for c in config:
-                if isinstance(c, six.string_types):
-                    c = json.load(open(c, 'r'))
-                merged.update(c)
-            config = merged
 
-        for e in config['entities']:
-            self.add_entity(**e)
-
-        if 'index' in config:
-            self.filtering_regex = config['index']
-            if self.filtering_regex.get('include') and \
-               self.filtering_regex.get('exclude'):
-                raise ValueError("You can only define either include or "
-                                 "exclude regex, not both.")
-
-        if 'default_path_patterns' in config:
-            self.path_patterns += listify(config['default_path_patterns'])
+        if 'name' not in config:
+            raise ValueError("Config file missing 'name' attribute.")
+        if config['name'] in self.domains:
+            raise ValueError("Config with name '%s' already exists in "
+                             "Layout. Name of each config file must be "
+                             "unique across entire Layout.")
+        if root is not None:
+            config['root'] = root
+
+        if 'root' not in config:
+            warnings.warn("No valid root directory found for domain '%s'."
+                          " Falling back on the Layout's root directory. "
+                          "If this isn't the intended behavior, make sure "
+                          "the config file for this domain includes a "
+                          "'root' key." % config['name'])
+            config['root'] = self.root
+        elif not isabs(config['root']):
+            _root = config['root']
+            config['root'] = abspath(join(self.root, config['root']))
+            if not exists(config['root']):
+                msg = ("Relative path '%s' for domain '%s' interpreted as '%s'"
+                       ", but this directory doesn't exist. Either specify the"
+                       " domain root as an absolute path, or make sure it "
+                       "points to a valid directory when appended to the "
+                       "Layout's root (%s)." % (_root, config['name'],
+                                                config['root'], self.root))
+                raise ValueError(msg)
+
+        # Load entities
+        domain = Domain(config['name'], config, config['root'])
+        for e in config.get('entities', []):
+            self.add_entity(domain=domain, **e)
+
+        self.domains[domain.name] = domain
+
+    def get_domain_entities(self, domains=None, file=None):
+        # Get all Entities included in the specified Domains, in the same
+        # order as Domains in the list. Alternatively, if a file is passed,
+        # identify its domains and then return the entities.
+
+        if file is None:
+            if domains is None:
+                domains = list(self.domains.keys())
+        else:
+            domains = self._get_domains_for_file(file)
 
-        return config
+        ents = {}
+        for d in domains:
+            ents.update(self.domains[d].entities)
+        return ents
 
-    def _check_inclusions(self, f):
+    def _check_inclusions(self, f, domains=None):
         ''' Check file or directory against regexes in config to determine if
             it should be included in the index '''
+
         filename = f if isinstance(f, six.string_types) else f.path
 
-        # If file matches any include regex, then True
-        include_regex = self.filtering_regex.get('include', [])
-        if include_regex:
-            for regex in include_regex:
-                if re.match(regex, filename):
-                    break
-            else:
-                return False
-        else:
-            # If file matches any excldue regex, then false
-            for regex in self.filtering_regex.get('exclude', []):
-                if re.match(regex, filename, flags=re.UNICODE):
-                    return False
+        if os.path.isabs(filename) and filename.startswith(self.root + os.path.sep):
+            # for filenames under the root - analyze relative path to avoid
+            # bringing injustice to the grandkids of some unfortunately named
+            # root directories.
+            filename = os.path.relpath(filename, self.root)
+
+        if domains is None:
+            domains = list(self.domains.keys())
+
+        for dom in domains:
+            dom = self.domains[dom]
+            # If file matches any include regex, then True
+            include_regex = dom.filtering_regex.get('include', [])
+            if include_regex:
+                for regex in include_regex:
+                    if re.match(regex, filename):
+                        break
+                else:
+                    return False
+            else:
+                # If file matches any excldue regex, then false
+                for regex in dom.filtering_regex.get('exclude', []):
+                    if re.match(regex, filename, flags=re.UNICODE):
+                        return False
 
         return True
 
@@ -342,25 +453,59 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
         for ent in self.entities.values():
             ent.files = {}
 
-    def _index_file(self, root, f):
+    def _get_domains_for_file(self, f):
+        if isinstance(f, File):
+            return f.domains
+        return [d.name for d in self.domains.values() if f.startswith(d.root)]
 
+    def _index_file(self, root, f, domains=None):
+
+        # If domains aren't explicitly passed, figure out what applies
+        if domains is None:
+            domains = self._get_domains_for_file(root)
+
+        # Create the file object--allows for subclassing
         f = self._make_file_object(root, f)
 
         if not (self._check_inclusions(f) and self._validate_file(f)):
             return
 
-        for e in self.entities.values():
-            e.matches(f)
+        for d in domains:
+            self.domains[d].add_file(f)
+
+        entities = self.get_domain_entities(domains)
+
+        if entities:
+            self.files[f.path] = f
+
+        for e in entities.values():
+            e.matches(f, update_file=True)
 
-        fe = f.entities.keys()
+        file_ents = f.tags.keys()
 
         # Only keep Files that match at least one Entity, and all
         # mandatory Entities
-        if fe and not (self.mandatory - set(fe)):
+        if file_ents and not (self.mandatory - set(file_ents)):
             self.files[f.path] = f
             # Bind the File to all of the matching entities
-            for ent, val in f.entities.items():
-                self.entities[ent].add_file(f.path, val)
+            for name, tag in f.tags.items():
+                ent_id = tag.entity.id
+                self.entities[ent_id].add_file(f.path, tag.value)
+
+    def _find_entity(self, entity):
+        ''' Find an Entity instance by name. Checks both name and id fields.'''
+        if entity in self.entities:
+            return self.entities[entity]
+        _ent = [e for e in self.entities.values() if e.name == entity]
+        if len(_ent) > 1:
+            raise ValueError("Entity name '%s' matches %d entities. To "
+                             "avoid ambiguity, please prefix the entity "
+                             "name with its domain (e.g., 'bids.%s'." %
+                             (entity, len(_ent), entity))
+        if _ent:
+            return _ent[0]
+
+        raise ValueError("No entity '%s' found." % entity)
 
     def index(self):
 
@@ -371,28 +516,63 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
 
         # Loop over all files
         for root, directories, filenames in dataset:
+
+            # Determine which Domains apply to the current directory
+            domains = self._get_domains_for_file(root)
+
             # Exclude directories that match exclude regex from further search
             full_dirs = [os.path.join(root, d) for d in directories]
-            full_dirs = filter(self._check_inclusions, full_dirs)
-            directories[:] = [split(d)[1] for d in
-                              filter(self._validate_dir, full_dirs)]
 
-            # self._index_filenames(filenames)
+            def check_incl(directory):
+                return self._check_inclusions(directory, domains)
+
+            full_dirs = filter(check_incl, full_dirs)
+            full_dirs = filter(self._validate_dir, full_dirs)
+            directories[:] = [split(d)[1] for d in full_dirs]
+
+            if self.config_filename in filenames:
+                config_path = os.path.join(root, self.config_filename)
+                config = json.load(open(config_path, 'r'))
+                self._load_domain(config)
+
+                # Filter Domains if current dir's config file has an
+                # include directive
+                if 'include' in config:
+                    missing = set(config['include']) - set(domains)
+                    if missing:
+                        msg = ("Missing configs '%s' specified in include "
+                               "directive of config '%s'. Please make sure "
+                               "these config files are accessible from the "
+                               "directory %s.") % (missing, config['name'],
+                                                   root)
+                        raise ValueError(msg)
+                    domains = config['include']
+                domains.append(config['name'])
+
+                filenames.remove(self.config_filename)
 
             for f in filenames:
-                self._index_file(root, f)
+                self._index_file(root, f, domains)
 
     def save_index(self, filename):
         ''' Save the current Layout's index to a .json file.
+
         Args:
             filename (str): Filename to write to.
+
+        Note: At the moment, this won't serialize directory-specific config
+            files. This means reconstructed indexes will only work properly in
+            cases where there aren't multiple layout specs within a project.
         '''
-        data = {f.path: f.entities for f in self.files.values()}
+        data = {}
+        for f in self.files.values():
+            entities = {v.entity.id: v.value for k, v in f.tags.items()}
+            data[f.path] = entities
         with open(filename, 'w') as outfile:
             json.dump(data, outfile)
 
     def load_index(self, filename, reindex=False):
         ''' Load the Layout's index from a plaintext file.
+
         Args:
             filename (str): Path to the plaintext index file.
             reindex (bool): If True, discards entity values provided in the
@@ -401,24 +581,33 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
             False, in which case it is assumed that all entity definitions
             in the loaded index are correct and do not need any further
             validation.
+
+        Note: At the moment, directory-specific config files aren't serialized.
+            This means reconstructed indexes will only work properly in cases
+            where there aren't multiple layout specs within a project.
         '''
         self._reset_index()
         data = json.load(open(filename, 'r'))
 
         for path, ents in data.items():
+
+            # If file path isn't absolute, assume it's relative to layout root
+            if not isabs(path):
+                path = join(self.root, path)
+
             root, f = dirname(path), basename(path)
             if reindex:
                 self._index_file(root, f)
             else:
                 f = self._make_file_object(root, f)
-                f.entities = ents
+                tags = {k: Tag(self.entities[k], v) for k, v in ents.items()}
+                f.tags = tags
                 self.files[f.path] = f
                 for ent, val in f.entities.items():
                     self.entities[ent].add_file(f.path, val)
 
-    def add_entity(self, **kwargs):
+    def add_entity(self, domain, **kwargs):
         ''' Add a new Entity to tracking. '''
 
         # Set the entity's mapping func if one was specified
@@ -433,12 +622,14 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
             map_func = getattr(self.entity_mapper, kwargs['map_func'])
             kwargs['map_func'] = map_func
 
-        ent = Entity(**kwargs)
+        ent = Entity(domain=domain, **kwargs)
+        domain.add_entity(ent)
+
         if ent.mandatory:
-            self.mandatory.add(ent.name)
+            self.mandatory.add(ent.id)
         if ent.directory is not None:
             ent.directory = ent.directory.replace('{{root}}', self.root)
-        self.entities[ent.name] = ent
+        self.entities[ent.id] = ent
         if self.dynamic_getters:
             func = partial(getattr(self, 'get'),
                            target=ent.name, return_type='id')
@@ -446,9 +637,10 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
             setattr(self, 'get_%s' % func_name, func)
 
     def get(self, return_type='tuple', target=None, extensions=None,
-            regex_search=None, **kwargs):
+            domains=None, regex_search=None, **kwargs):
         """
         Retrieve files and/or metadata from the current Layout.
+
         Args:
             return_type (str): Type of result to return. Valid values:
                 'tuple': returns a list of namedtuples containing file name as
@@ -462,6 +654,8 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
                 (if return_type is 'dir' or 'id').
             extensions (str, list): One or more file extensions to filter on.
                 Files with any other extensions will be excluded.
+            domains (list): Optional list of domain names to scan for files.
+                If None, all available domains are scanned.
             regex_search (bool or None): Whether to require exact matching
                 (False) or regex search (True) when comparing the query string
                 to each entity. If None (default), uses the value found in
@@ -481,7 +675,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
         filters = {}
         filters.update(kwargs)
         for filename, file in self.files.items():
-            if not file._matches(filters, extensions, regex_search):
+            if not file._matches(filters, extensions, domains, regex_search):
                 continue
             result.append(file)
 
@@ -496,6 +690,8 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
             return result
 
         else:
+            valid_entities = self.get_domain_entities(domains)
+
             if target is None:
                 raise ValueError('If return_type is "id" or "dir", a valid '
                                  'target entity must also be specified.')
@@ -506,7 +702,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
                 return natural_sort(result)
 
             elif return_type == 'dir':
-                template = self.entities[target].directory
+                template = valid_entities[target].directory
                 if template is None:
                     raise ValueError('Return type set to directory, but no '
                                      'directory template is defined for the '
@@ -514,7 +710,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
                 # Construct regex search pattern from target directory template
                 to_rep = re.findall('\{(.*?)\}', template)
                 for ent in to_rep:
-                    patt = self.entities[ent].pattern
+                    patt = valid_entities[ent].pattern
                     template = template.replace('{%s}' % ent, patt)
                 template += '[^\%s]*$' % os.path.sep
                 matches = [f.dirname for f in self.files.values()
@@ -528,27 +724,30 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
 
     def unique(self, entity):
         """ Return a list of unique values for the named entity.
+
         Args:
             entity (str): The name of the entity to retrieve unique values of.
         """
-        return self.entities[entity].unique()
+        return self._find_entity(entity).unique()
 
     def count(self, entity, files=False):
         """ Return the count of unique values or files for the named entity.
+
         Args:
             entity (str): The name of the entity.
             files (bool): If True, counts the number of filenames that contain
                 at least one value of the entity, rather than the number of
                 unique values of the entity.
         """
-        return self.entities[entity].count(files)
+        return self._find_entity(entity).count(files)
 
     def as_data_frame(self, **kwargs):
         """ Return information for all Files tracked in the Layout as a pandas
         DataFrame.
-        args:
+
+        Args:
            kwargs: Optional keyword arguments passed on to get(). This allows
                one to easily select only a subset of files for export.
         Returns:
@@ -574,6 +773,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
                        ignore_strict_entities=None, **kwargs):
         ''' Walk up the file tree from the specified path and return the
            nearest matching file(s).
+
         Args:
             path (str): The file to search from.
             return_type (str): What to return; must be one of 'file' (default)
@@ -593,10 +793,10 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
         '''
 
         entities = {}
-        for name, ent in self.entities.items():
+        for ent in self.entities.values():
            m = ent.regex.search(path)
            if m:
-                entities[name] = m.group(1)
+                entities[ent.name] = m.group(1)
 
         # Remove any entities we want to ignore when strict matching is on
         if strict and ignore_strict_entities is not None:
@@ -612,9 +812,10 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
             folders[f.dirname].append(f)
 
         def count_matches(f):
-            keys = set(entities.keys()) & set(f.entities.keys())
+            f_ents = f.entities
+            keys = set(entities.keys()) & set(f_ents.keys())
             shared = len(keys)
-            return [shared, sum([entities[k] == f.entities[k] for k in keys])]
+            return [shared, sum([entities[k] == f_ents[k] for k in keys])]
 
         matches = []
 
@@ -715,7 +916,7 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
     def write_contents_to_file(self, entities, path_patterns=None,
                                contents=None, link_to=None,
                                content_mode='text', conflicts='fail',
-                               strict=False):
+                               strict=False, domains=None):
         """
         Write arbitrary data to a file defined by the passed entities and
        path patterns.
@@ -737,10 +938,17 @@ class Layout(six.with_metaclass(LayoutMetaclass, object)):
             strict (bool): If True, all entities must be matched inside a
                 pattern in order to be a valid match. If False, extra entities
                 will be ignored so long as all mandatory entities are found.
+            domains (list): List of Domains to scan for path_patterns. Order
+                determines precedence (i.e., earlier Domains will be scanned
+                first). If None, all available domains are included.
         """
 
         if not path_patterns:
             path_patterns = self.path_patterns
+            if domains is None:
+                domains = list(self.domains.keys())
+            for dom in domains:
+                path_patterns.extend(self.domains[dom].path_patterns)
         path = build_path(entities, path_patterns, strict)
         write_contents_to_file(path, contents=contents, link_to=link_to,
                                content_mode=content_mode, conflicts=conflicts,
@@ -766,6 +974,7 @@ def merge_layouts(layouts):
 
     for l in layouts[1:]:
         layout.files.update(l.files)
+        layout.domains.update(l.domains)
 
         for k, v in l.entities.items():
             if k not in layout.entities:
Fails to work on BIDS datasets which have a directory starting with '.' somewhere in their paths

Originally (https://github.com/datalad/datalad/issues/2150) I thought the issue was a symlinked directory somewhere in the path, so I was trying to figure out where path `realpath`-ing happens... Long story short: the problem with my `TMPDIR=/home/yoh/.tmp` (which is a symlink to `/tmp`) is that it puts `.tmp/` in the path, and at the moment https://github.com/INCF/pybids/blob/master/bids/grabbids/config/bids.json#L4 leads to exclusion of any path that matches the filtering regex anywhere within its full path. I wondered: maybe the relative path under the dataset root could be considered instead of the full path? (I will submit a tentative fix in this vein.)
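The core of the fix the patch applies in `_check_inclusions` can be sketched in isolation — the helper below is a simplified stand-in for grabbit's full logic, with names borrowed from the `test_excludes` test in the test patch below:

```python
import os
import re

# Minimal sketch of relative-path filtering: include/exclude regexes are
# matched against the path relative to the layout root, so directories
# ABOVE the root (e.g. a TMPDIR of /home/yoh/.tmp) can no longer trip an
# exclude pattern. Simplified; not grabbit's complete _check_inclusions.
def is_included(filename, root, exclude_patterns):
    if os.path.isabs(filename) and filename.startswith(root + os.path.sep):
        # judge files under the root by their relative path only
        filename = os.path.relpath(filename, root)
    return not any(re.match(p, filename) for p in exclude_patterns)

root = '/home/yoh/.tmp/ohmyderivatives/ds'
excludes = ['.*derivatives.*']
# The dataset root itself contains "derivatives", but files under it are
# matched on their relative paths and stay included:
assert is_included(root + '/ohmyimportantfile', root, excludes)
assert not is_included(root + '/badbadderivatives', root, excludes)
```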
grabbles/grabbit
diff --git a/grabbit/tests/data/valuable_stamps/USA/dir_config.json b/grabbit/tests/data/valuable_stamps/USA/dir_config.json new file mode 100644 index 0000000..0563cca --- /dev/null +++ b/grabbit/tests/data/valuable_stamps/USA/dir_config.json @@ -0,0 +1,14 @@ +{ + "name": "usa_stamps", + "root": ".", + "entities": [ + { + "name": "name", + "pattern": "name=(.*?)\\#" + }, + { + "name": "value", + "pattern": "value=([a-z0-9]+)\\.txt" + } + ] +} \ No newline at end of file diff --git a/grabbit/tests/data/valuable_stamps/USA/name=1c_Washington_Irving#value=35cents.txt b/grabbit/tests/data/valuable_stamps/USA/name=1c_Washington_Irving#value=35cents.txt new file mode 100644 index 0000000..e69de29 diff --git a/grabbit/tests/data/valuable_stamps/USA/name=5c_Francis_E_Willard#value=1dollar.txt b/grabbit/tests/data/valuable_stamps/USA/name=5c_Francis_E_Willard#value=1dollar.txt new file mode 100644 index 0000000..e69de29 diff --git a/grabbit/tests/data/valuable_stamps/USA/name=5c_Walt_Whitman#value=80cents.txt b/grabbit/tests/data/valuable_stamps/USA/name=5c_Walt_Whitman#value=80cents.txt new file mode 100644 index 0000000..e69de29 diff --git a/grabbit/tests/misc/index.json b/grabbit/tests/misc/index.json index a1394b3..c3bbf1d 100644 --- a/grabbit/tests/misc/index.json +++ b/grabbit/tests/misc/index.json @@ -1,130 +1,130 @@ { - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/dataset_description.json": { - "type": "description" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/participants.tsv": { - "type": "trt/participants" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/task-rest_acq-fullbrain_bold.json": { - "type": "bold", - "task": "rest_acq" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/task-rest_acq-fullbrain_run-1_physio.json": { - "run": "1", - "type": "physio", - "task": "rest_acq", - "acquisition": "fullbrain_run" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/task-rest_acq-fullbrain_run-2_physio.json": { - "run": "2", - "type": "physio", - "task": "rest_acq", - "acquisition": "fullbrain_run" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/task-rest_acq-prefrontal_bold.json": { - "type": "bold", - "task": "rest_acq" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/task-rest_acq-prefrontal_physio.json": { - "type": "physio", - "task": "rest_acq" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/test.bval": { - "type": "trt/test", - "bval": "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/test.bval" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/models/excluded_model.json": { - "type": "model" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/sub-01_sessions.tsv": { - "subject": "01", - "type": "sessions" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/sub-01_ses-1_scans.tsv": { - "subject": "01", - "session": "1", - "type": "scans" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/anat/sub-01_ses-1_T1map.nii.gz": { - "subject": "01", - "session": "1", - "type": "T1map" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/anat/sub-01_ses-1_T1w.nii.gz": { - "subject": "01", - "session": "1", - "type": "T1w" - }, - 
"/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-1_magnitude1.nii.gz": { - "subject": "01", - "session": "1", - "run": "1", - "type": "magnitude1" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-1_magnitude2.nii.gz": { - "subject": "01", - "session": "1", - "run": "1", - "type": "magnitude2" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-1_phasediff.json": { - "subject": "01", - "session": "1", - "run": "1", - "type": "phasediff" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-1_phasediff.nii.gz": { - "subject": "01", - "session": "1", - "run": "1", - "type": "phasediff" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-2_magnitude1.nii.gz": { - "subject": "01", - "session": "1", - "run": "2", - "type": "magnitude1" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-2_magnitude2.nii.gz": { - "subject": "01", - "session": "1", - "run": "2", - "type": "magnitude2" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-2_phasediff.json": { - "subject": "01", - "session": "1", - "run": "2", - "type": "phasediff" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/fmap/sub-01_ses-1_run-2_phasediff.nii.gz": { - "subject": "01", - "session": "1", - "run": "2", - "type": "phasediff" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-1_bold.nii.gz": { - "subject": "01", - "session": "1", - "run": "1", - "type": "bold", - "task": "rest_acq", - "acquisition": "fullbrain_run" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-1_physio.tsv.gz": { - "subject": "01", - "session": "1", - "run": "1", - "type": "physio", - "task": "rest_acq", - "acquisition": "fullbrain_run" - }, - "/mnt/c/Users/tyark/Dropbox/Code/grabbit/grabbit/tests/data/7t_trt/sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-2_bold.nii.gz": { - "subject": "01", - "session": "1", - "run": "2", - "type": "bold", - "task": "rest_acq", - "acquisition": "fullbrain_run" + "dataset_description.json": { + "test.type": "description" + }, + "participants.tsv": { + "test.type": "trt/participants" + }, + "task-rest_acq-fullbrain_bold.json": { + "test.type": "bold", + "test.task": "rest_acq" + }, + "task-rest_acq-fullbrain_run-1_physio.json": { + "test.run": "1", + "test.type": "physio", + "test.task": "rest_acq", + "test.acquisition": "fullbrain_run" + }, + "task-rest_acq-fullbrain_run-2_physio.json": { + "test.run": "2", + "test.type": "physio", + "test.task": "rest_acq", + "test.acquisition": "fullbrain_run" + }, + "task-rest_acq-prefrontal_bold.json": { + "test.type": "bold", + "test.task": "rest_acq" + }, + "task-rest_acq-prefrontal_physio.json": { + "test.type": "physio", + "test.task": "rest_acq" + }, + "test.bval": { + "test.type": "trt/test", + "test.bval": "test.bval" + }, + "models/excluded_model.json": { + "test.type": "model" + }, + "sub-01/sub-01_sessions.tsv": { + "test.subject": "01", + "test.type": "sessions" + }, + "sub-01/ses-1/sub-01_ses-1_scans.tsv": { + "test.subject": "01", + "test.session": "1", + "test.type": "scans" + 
}, + "sub-01/ses-1/anat/sub-01_ses-1_T1map.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.type": "T1map" + }, + "sub-01/ses-1/anat/sub-01_ses-1_T1w.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.type": "T1w" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-1_magnitude1.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "magnitude1" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-1_magnitude2.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "magnitude2" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-1_phasediff.json": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "phasediff" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-1_phasediff.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "phasediff" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-2_magnitude1.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "2", + "test.type": "magnitude1" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-2_magnitude2.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "2", + "test.type": "magnitude2" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-2_phasediff.json": { + "test.subject": "01", + "test.session": "1", + "test.run": "2", + "test.type": "phasediff" + }, + "sub-01/ses-1/fmap/sub-01_ses-1_run-2_phasediff.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "2", + "test.type": "phasediff" + }, + "sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-1_bold.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "bold", + "test.task": "rest_acq", + "test.acquisition": "fullbrain_run" + }, + "sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-1_physio.tsv.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "1", + "test.type": "physio", + "test.task": "rest_acq", + "test.acquisition": "fullbrain_run" + }, + "sub-01/ses-1/func/sub-01_ses-1_task-rest_acq-fullbrain_run-2_bold.nii.gz": { + "test.subject": "01", + "test.session": "1", + "test.run": "2", + "test.type": "bold", + "test.task": "rest_acq", + "test.acquisition": "fullbrain_run" } } diff --git a/grabbit/tests/specs/stamps.json b/grabbit/tests/specs/stamps.json index d5eb2d3..37ce56d 100644 --- a/grabbit/tests/specs/stamps.json +++ b/grabbit/tests/specs/stamps.json @@ -1,4 +1,5 @@ { + "name": "stamps", "entities": [ { "name": "name", diff --git a/grabbit/tests/specs/test.json b/grabbit/tests/specs/test.json index a1dd0d3..d93c986 100644 --- a/grabbit/tests/specs/test.json +++ b/grabbit/tests/specs/test.json @@ -1,4 +1,5 @@ { + "name": "test", "index" : { "exclude" : [".*derivatives.*"] }, @@ -25,11 +26,11 @@ }, { "name": "task", - "pattern": "task-(.*?)-" + "pattern": "task-(.*?)_" }, { "name": "acquisition", - "pattern": "acq-(.*?)-" + "pattern": "acq-(.*?)_" }, { "name": "bval", diff --git a/grabbit/tests/specs/test_include.json b/grabbit/tests/specs/test_include.json index 2529bd5..f3a87ef 100644 --- a/grabbit/tests/specs/test_include.json +++ b/grabbit/tests/specs/test_include.json @@ -1,4 +1,5 @@ { + "name": "test_with_includes", "index" : { "include" : ["sub-(\\d+)", "ses-.*", "func", "fmap", ".*\\..*"] }, @@ -25,11 +26,11 @@ }, { "name": "task", - "pattern": "task-(.*?)-" + "pattern": "task-(.*?)_" }, { "name": "acquisition", - "pattern": "acq-(.*?)-" + "pattern": "acq-(.*?)_" }, { "name": "bval", diff --git a/grabbit/tests/specs/test_with_mapper.json 
b/grabbit/tests/specs/test_with_mapper.json index 063aa0d..84c3d28 100644 --- a/grabbit/tests/specs/test_with_mapper.json +++ b/grabbit/tests/specs/test_with_mapper.json @@ -1,4 +1,5 @@ { + "name": "test_with_mapper", "index" : { "exclude" : [".*derivatives.*"] }, @@ -25,11 +26,11 @@ }, { "name": "task", - "pattern": "task-(.*?)-" + "pattern": "task-(.*?)_" }, { "name": "acquisition", - "pattern": "acq-(.*?)-" + "pattern": "acq-(.*?)_" }, { "name": "bval", diff --git a/grabbit/tests/test_core.py b/grabbit/tests/test_core.py index a98e3bd..02eb754 100644 --- a/grabbit/tests/test_core.py +++ b/grabbit/tests/test_core.py @@ -1,5 +1,5 @@ import pytest -from grabbit import File, Entity, Layout, merge_layouts +from grabbit import File, Entity, Layout, Tag, merge_layouts import os import posixpath as psp import tempfile @@ -30,15 +30,18 @@ def bids_layout(request): hdfs = pytest.importorskip("hdfs") from grabbit.extensions import HDFSLayout client = hdfs.Config().get_client() - root = psp.join('hdfs://localhost:9000{0}'.format(client.root), 'data', '7t_trt') - config = psp.join('hdfs://localhost:9000{0}'.format(client.root), 'specs', 'test.json') + root = psp.join('hdfs://localhost:9000{0}'.format( + client.root), 'data', '7t_trt') + config = psp.join('hdfs://localhost:9000{0}'.format( + client.root), 'specs', 'test.json') return HDFSLayout(root, config, regex_search=True) + @pytest.fixture(scope='module') def stamp_layout(): root = os.path.join(DIRNAME, 'data', 'valuable_stamps') config = os.path.join(DIRNAME, 'specs', 'stamps.json') - return Layout(root, config) + return Layout(root, config, config_filename='dir_config.json') @pytest.fixture(scope='module') @@ -63,7 +66,7 @@ class TestFile: assert file._matches() assert file._matches(extensions='nii.gz') assert not file._matches(extensions=['.txt', '.rtf']) - file.entities = {'task': 'rest', 'run': '2'} + file.tags = {'task': Tag(None, 'rest'), 'run': Tag(None, '2')} assert file._matches(entities={'task': 'rest', 'run': 2}) assert not file._matches(entities={'task': 'rest', 'run': 4}) assert not file._matches(entities={'task': 'st'}) @@ -75,7 +78,7 @@ class TestFile: regex_search=True) def test_named_tuple(self, file): - file.entities = {'attrA': 'apple', 'attrB': 'banana'} + file.tags = {'attrA': Tag(None, 'apple'), 'attrB': Tag(None, 'banana')} tup = file.as_named_tuple() assert(tup.filename == file.path) assert isinstance(tup, tuple) @@ -98,7 +101,7 @@ class TestEntity: tmpdir.mkdir("tmp").join(filename).write("###") f = File(os.path.join(str(tmpdir), filename)) e = Entity('avaricious', 'aardvark-(\d+)') - e.matches(f) + e.matches(f, update_file=True) assert 'avaricious' in f.entities assert f.entities['avaricious'] == '4' @@ -261,7 +264,7 @@ class TestLayout: assert targ not in layout_include.files with pytest.raises(ValueError): - layout_include._load_config({'entities': [], + layout_include._load_domain({'entities': [], 'index': {'include': 'test', 'exclude': 'test'}}) @@ -273,9 +276,11 @@ class TestLayout: index = json.load(infile) assert len(index) == len(bids_layout.files) # Check that entities for first 10 files match + files = list(bids_layout.files.values()) for i in range(10): - f = list(bids_layout.files.values())[i] - assert f.entities == index[f.path] + f = files[i] + entities = {v.entity.id: v.value for v in f.tags.values()} + assert entities == index[f.path] os.unlink(tmp) def test_load_index(self, bids_layout): @@ -324,22 +329,59 @@ class TestLayout: def test_clone(self, bids_layout): lc = bids_layout.clone() attrs = ['root', 
'mandatory', 'dynamic_getters', 'regex_search', - 'filtering_regex', 'entity_mapper'] + 'entity_mapper'] for a in attrs: assert getattr(bids_layout, a) == getattr(lc, a) assert set(bids_layout.files.keys()) == set(lc.files.keys()) assert set(bids_layout.entities.keys()) == set(lc.entities.keys()) + def test_excludes(self, tmpdir): + root = tmpdir.mkdir("ohmyderivatives").mkdir("ds") + config = os.path.join(DIRNAME, 'specs', 'test.json') + layout = Layout(str(root), config, regex_search=True) + assert layout._check_inclusions(str(root.join("ohmyimportantfile"))) + assert not layout._check_inclusions(str(root.join("badbadderivatives"))) + + def test_multiple_domains(self, stamp_layout): + layout = stamp_layout.clone() + assert {'stamps', 'usa_stamps'} == set(layout.domains.keys()) + usa = layout.domains['usa_stamps'] + general = layout.domains['stamps'] + assert len(usa.files) == 3 + assert len(layout.files) == len(general.files) + assert not set(usa.files) - set(general.files) + assert layout.entities['usa_stamps.name'] == usa.entities['name'] + assert layout.entities['stamps.name'] == general.entities['name'] + assert usa.entities['name'] != general.entities['name'] + f = layout.get(name='5c_Francis_E_Willard', return_type='obj')[0] + assert f.entities == {'name': '5c_Francis_E_Willard', + 'value': '1dollar'} + + def test_get_by_domain(self, stamp_layout): + files = stamp_layout.get(domains='usa_stamps') + assert len(files) == 3 + files = stamp_layout.get(domains=['nonexistent', 'doms']) + assert not files + files = stamp_layout.get(domains='usa_stamps', value='35', + regex_search=True) + assert len(files) == 1 + files = stamp_layout.get(value='35', regex_search=True) + assert len(files) == 2 + def test_merge_layouts(bids_layout, stamp_layout): layout = merge_layouts([bids_layout, stamp_layout]) assert len(layout.files) == len(bids_layout.files) + \ len(stamp_layout.files) - assert 'country' in layout.entities - assert 'subject' in layout.entities + assert 'stamps.country' in layout.entities + assert 'test.subject' in layout.entities + dom = layout.domains['stamps'] + assert 'country' in dom.entities + dom = layout.domains['test'] + assert 'subject' in dom.entities # Make sure first Layout was cloned and not passed by reference - patt = layout.entities['subject'].pattern - assert patt == bids_layout.entities['subject'].pattern - bids_layout.entities['subject'].pattern = "meh" + patt = layout.entities['test.subject'].pattern + assert patt == bids_layout.entities['test.subject'].pattern + bids_layout.entities['test.subject'].pattern = "meh" assert patt != "meh" diff --git a/grabbit/tests/test_writable.py b/grabbit/tests/test_writable.py index 22c295a..68b4442 100644 --- a/grabbit/tests/test_writable.py +++ b/grabbit/tests/test_writable.py @@ -1,5 +1,5 @@ import pytest -from grabbit import Layout, File +from grabbit import Layout, File, Tag from grabbit.extensions.writable import build_path import os import shutil @@ -13,6 +13,7 @@ def writable_file(tmpdir): fn.write('###') return File(os.path.join(str(fn))) + @pytest.fixture def layout(): data_dir = join(dirname(__file__), 'data', '7t_trt') @@ -20,15 +21,20 @@ def layout(): layout = Layout(data_dir, config=config) return layout + class TestWritableFile: def test_build_path(self, writable_file): - writable_file.entities = {'task': 'rest', 'run': '2', 'subject': '3'} + writable_file.tags = { + 'task': Tag(None, 'rest'), 'run': Tag(None, '2'), + 'subject': Tag(None, '3') + } # Single simple pattern with pytest.raises(TypeError): 
build_path(writable_file.entities) - pat = join(writable_file.dirname, '{task}/sub-{subject}/run-{run}.nii.gz') + pat = join(writable_file.dirname, + '{task}/sub-{subject}/run-{run}.nii.gz') target = join(writable_file.dirname, 'rest/sub-3/run-2.nii.gz') assert build_path(writable_file.entities, pat) == target @@ -79,11 +85,13 @@ class TestWritableFile: assert not build_path(entities, pats, True) def test_build_file(self, writable_file, tmpdir, caplog): - writable_file.entities = {'task': 'rest', 'run': '2', 'subject': '3'} + writable_file.tags = {'task': Tag(None, 'rest'), 'run': Tag(None, '2'), + 'subject': Tag(None, '3')} # Simple write out new_dir = join(writable_file.dirname, 'rest') - pat = join(writable_file.dirname, '{task}/sub-{subject}/run-{run}.nii.gz') + pat = join(writable_file.dirname, + '{task}/sub-{subject}/run-{run}.nii.gz') target = join(writable_file.dirname, 'rest/sub-3/run-2.nii.gz') writable_file.copy(pat) assert exists(target) @@ -98,7 +106,8 @@ class TestWritableFile: assert log_message == 'A file at path {} already exists, ' \ 'skipping writing file.'.format(target) writable_file.copy(pat, conflicts='append') - append_target = join(writable_file.dirname, 'rest/sub-3/run-2_1.nii.gz') + append_target = join(writable_file.dirname, + 'rest/sub-3/run-2_1.nii.gz') assert exists(append_target) writable_file.copy(pat, conflicts='overwrite') assert exists(target) @@ -138,12 +147,13 @@ class TestWritableLayout: '/sess-2' '/r-1' '/type-bold' - '/task-rest_acq.nii.gz') + '/task-rest.nii.gz') example_file2 = join(str(tmpdir), 'sub-04' '/sess-2' '/r-1' '/type-bold' - '/task-rest_acq.nii.gz') + '/task-rest.nii.gz') + assert exists(example_file) assert not exists(example_file2) @@ -157,7 +167,7 @@ class TestWritableLayout: '/sess-2' '/r-1' '/type-bold' - '/task-rest_acq.nii.gz') + '/task-rest.nii.gz') assert exists(example_file) assert exists(example_file2) @@ -181,6 +191,7 @@ class TestWritableLayout: data_dir = join(dirname(__file__), 'data', '7t_trt') config = join(dirname(__file__), 'specs', 'test.json') layout = Layout(data_dir, config=[config, { + 'name': "test_writable", 'default_path_patterns': ['sub-{subject}/ses-{session}/{subject}' '{session}{run}{type}{task}{acquisition}' '{bval}'] @@ -200,8 +211,8 @@ class TestWritableLayout: def test_build_file_from_layout(self, tmpdir, layout): entities = {'subject': 'Bob', 'session': '01', 'run': '1'} pat = join(str(tmpdir), 'sub-{subject}' - '/sess-{session}' - '/r-{run}.nii.gz') + '/sess-{session}' + '/r-{run}.nii.gz') path = layout.build_path(entities, path_patterns=pat) assert path == join(str(tmpdir), 'sub-Bob/sess-01/r-1.nii.gz')
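The grabbit test_patch above centers on build_path, which fills entity placeholders such as {task} and {subject} in a path pattern to produce paths like 'rest/sub-3/run-2.nii.gz'. A minimal sketch of that substitution idea follows; it is illustrative only, since the real build_path tested above also accepts lists of candidate patterns and a strict mode, and the sketch's name and behavior beyond simple substitution are assumptions.

import re

def build_path_sketch(entities, pattern, strict=False):
    # Fill {name} placeholders in `pattern` from the `entities` dict.
    # Illustrative sketch only; not grabbit's actual implementation.
    names = re.findall(r'\{(\w+)\}', pattern)
    if strict and not all(n in entities for n in names):
        return None  # strict mode: every placeholder must be satisfied
    filled = {n: str(entities.get(n, '')) for n in names}
    return pattern.format(**filled)

# Mirrors the expectation exercised in the test above:
# build_path_sketch({'task': 'rest', 'run': '2', 'subject': '3'},
#                   '{task}/sub-{subject}/run-{run}.nii.gz')
# -> 'rest/sub-3/run-2.nii.gz'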
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_hyperlinks", "has_many_modified_files", "has_many_hunks", "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 2 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "runipy" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirement.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
async-generator==1.10 attrs==22.2.0 backcall==0.2.0 bleach==4.1.0 certifi==2021.5.30 decorator==5.1.1 defusedxml==0.7.1 entrypoints==0.4 -e git+https://github.com/grabbles/grabbit.git@c1a811a2a41153afec970f73923c0a53c66ef694#egg=grabbit importlib-metadata==4.8.3 iniconfig==1.1.1 ipykernel==5.5.6 ipython==7.16.3 ipython-genutils==0.2.0 jedi==0.17.2 Jinja2==3.0.3 jsonschema==3.2.0 jupyter-client==7.1.2 jupyter-core==4.9.2 jupyterlab-pygments==0.1.2 MarkupSafe==2.0.1 mistune==0.8.4 nbclient==0.5.9 nbconvert==6.0.7 nbformat==5.1.3 nest-asyncio==1.6.0 packaging==21.3 pandocfilters==1.5.1 parso==0.7.1 pexpect==4.9.0 pickleshare==0.7.5 pluggy==1.0.0 prompt-toolkit==3.0.36 ptyprocess==0.7.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pyrsistent==0.18.0 pytest==7.0.1 python-dateutil==2.9.0.post0 pyzmq==25.1.2 runipy==0.1.5 six==1.17.0 testpath==0.6.0 tomli==1.2.3 tornado==6.1 traitlets==4.3.3 typing_extensions==4.1.1 wcwidth==0.2.13 webencodings==0.5.1 zipp==3.6.0
name: grabbit channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - async-generator==1.10 - attrs==22.2.0 - backcall==0.2.0 - bleach==4.1.0 - decorator==5.1.1 - defusedxml==0.7.1 - entrypoints==0.4 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - ipykernel==5.5.6 - ipython==7.16.3 - ipython-genutils==0.2.0 - jedi==0.17.2 - jinja2==3.0.3 - jsonschema==3.2.0 - jupyter-client==7.1.2 - jupyter-core==4.9.2 - jupyterlab-pygments==0.1.2 - markupsafe==2.0.1 - mistune==0.8.4 - nbclient==0.5.9 - nbconvert==6.0.7 - nbformat==5.1.3 - nest-asyncio==1.6.0 - packaging==21.3 - pandocfilters==1.5.1 - parso==0.7.1 - pexpect==4.9.0 - pickleshare==0.7.5 - pluggy==1.0.0 - prompt-toolkit==3.0.36 - ptyprocess==0.7.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pyrsistent==0.18.0 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - pyzmq==25.1.2 - runipy==0.1.5 - six==1.17.0 - testpath==0.6.0 - tomli==1.2.3 - tornado==6.1 - traitlets==4.3.3 - typing-extensions==4.1.1 - wcwidth==0.2.13 - webencodings==0.5.1 - zipp==3.6.0 prefix: /opt/conda/envs/grabbit
[ "grabbit/tests/test_core.py::TestFile::test_init", "grabbit/tests/test_core.py::TestFile::test_matches", "grabbit/tests/test_core.py::TestFile::test_named_tuple", "grabbit/tests/test_core.py::TestEntity::test_init", "grabbit/tests/test_core.py::TestEntity::test_matches", "grabbit/tests/test_core.py::TestEntity::test_unique_and_count", "grabbit/tests/test_core.py::TestEntity::test_add_file", "grabbit/tests/test_core.py::TestLayout::test_init[local]", "grabbit/tests/test_core.py::TestLayout::test_absolute_paths[local]", "grabbit/tests/test_core.py::TestLayout::test_querying[local]", "grabbit/tests/test_core.py::TestLayout::test_natsort[local]", "grabbit/tests/test_core.py::TestLayout::test_unique_and_count[local]", "grabbit/tests/test_core.py::TestLayout::test_get_nearest[local]", "grabbit/tests/test_core.py::TestLayout::test_index_regex[local]", "grabbit/tests/test_core.py::TestLayout::test_save_index[local]", "grabbit/tests/test_core.py::TestLayout::test_load_index[local]", "grabbit/tests/test_core.py::TestLayout::test_clone[local]", "grabbit/tests/test_core.py::test_merge_layouts[local]", "grabbit/tests/test_core.py::TestLayout::test_dynamic_getters[/grabbit/grabbit/tests/data/7t_trt-/grabbit/grabbit/tests/specs/test.json]", "grabbit/tests/test_core.py::TestLayout::test_entity_mapper", "grabbit/tests/test_core.py::TestLayout::test_excludes", "grabbit/tests/test_core.py::TestLayout::test_multiple_domains", "grabbit/tests/test_core.py::TestLayout::test_get_by_domain", "grabbit/tests/test_writable.py::TestWritableFile::test_build_path", "grabbit/tests/test_writable.py::TestWritableFile::test_strict_build_path", "grabbit/tests/test_writable.py::TestWritableFile::test_build_file", "grabbit/tests/test_writable.py::TestWritableLayout::test_write_files", "grabbit/tests/test_writable.py::TestWritableLayout::test_write_contents_to_file", "grabbit/tests/test_writable.py::TestWritableLayout::test_write_contents_to_file_defaults", "grabbit/tests/test_writable.py::TestWritableLayout::test_build_file_from_layout" ]
[]
[]
[]
MIT License
2,175
[ "grabbit/__init__.py", "grabbit/core.py" ]
[ "grabbit/__init__.py", "grabbit/core.py" ]
regro__regolith-97
f5a3d3b860a7c03f800e8e8d2fc9daf843de4ad8
2018-02-18 23:20:31
3ff1b5be8d5625572421f7b340ef577f6b7d574d
diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a5d66376..ce3426be 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,17 +4,6 @@ Regolith Change Log .. current developments -v0.1.9 -==================== - -**Fixed:** - -* ``all_documents`` now returns the values of an empty dict if the collection - doesn't exist - - - - v0.1.8 ==================== diff --git a/news/fix_abstract_error.rst b/news/fix_abstract_error.rst new file mode 100644 index 00000000..a9120cdb --- /dev/null +++ b/news/fix_abstract_error.rst @@ -0,0 +1,14 @@ +**Added:** None + +**Changed:** None + +**Deprecated:** None + +**Removed:** None + +**Fixed:** + +* ``all_documents`` now returns the values of an empty dict if the collection + doesn't exist + +**Security:** None diff --git a/news/fix_get_author.rst b/news/fix_get_author.rst new file mode 100644 index 00000000..2114a39b --- /dev/null +++ b/news/fix_get_author.rst @@ -0,0 +1,13 @@ +**Added:** None + +**Changed:** None + +**Deprecated:** None + +**Removed:** None + +**Fixed:** + +* Use get syntax with ``filter_publications`` in case author not in dict + +**Security:** None diff --git a/regolith/__init__.py b/regolith/__init__.py index 1c98a23a..c3bb2961 100644 --- a/regolith/__init__.py +++ b/regolith/__init__.py @@ -1,1 +1,1 @@ -__version__ = '0.1.9' +__version__ = '0.1.8' diff --git a/regolith/tools.py b/regolith/tools.py index 46e1e680..076b25df 100644 --- a/regolith/tools.py +++ b/regolith/tools.py @@ -100,7 +100,7 @@ def month_and_year(m=None, y=None): def filter_publications(citations, authors, reverse=False, bold=True): - """Filter publications by the author(s) + """Filter publications by the author(s)/editor(s) Parameters ---------- @@ -115,7 +115,9 @@ def filter_publications(citations, authors, reverse=False, bold=True): """ pubs = [] for pub in citations: - if len(set(pub['author']) & authors) == 0: + if len(set(pub.get('author', [])) + & set(pub.get('editor', [])) + & authors) == 0: continue pub = deepcopy(pub) if bold: diff --git a/setup.py b/setup.py index 3cdd619e..b6f62850 100755 --- a/setup.py +++ b/setup.py @@ -28,7 +28,7 @@ def main(): description='A research group content management system', long_description=readme, license='CC0', - version='0.1.9', + version='0.1.8', author='Anthony Scopatz', maintainer='Anthony Scopatz', author_email='[email protected]',
use get for all dict access in builders

see https://travis-ci.org/Billingegroup/rg-db-public/jobs/343122765#L1681
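The fix in the regolith patch above replaces pub['author'] with pub.get('author', []) so that citation dicts lacking an 'author' key (for example editor-only entries) no longer raise KeyError. A small self-contained illustration of that dict.get pattern, with hypothetical data; note the sketch unions the author and editor name sets for readability, whereas the committed patch intersects them, so treat this purely as an illustration of tolerant key access, not of the filter's exact logic.

def names_for(pub):
    # dict.get with a default avoids KeyError when a key is absent,
    # which is the point of the fix above.
    return set(pub.get('author', [])) | set(pub.get('editor', []))

citations = [{'author': ['CJ', 'SJLB']}, {'editor': ['SJLB']}]  # hypothetical
wanted = {'SJLB'}
matching = [p for p in citations if names_for(p) & wanted]
assert len(matching) == 2  # neither entry raises, both match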
regro/regolith
diff --git a/tests/test_tools.py b/tests/test_tools.py new file mode 100644 index 00000000..0b4e9060 --- /dev/null +++ b/tests/test_tools.py @@ -0,0 +1,6 @@ +from regolith.tools import filter_publications + + +def test_author_publications(): + citations = [{'author': ['CJ', 'SJLB']}, {'editor': 'SJLB'}] + filter_publications(citations, {'SJLB'})
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 0 }, "num_modified_files": 4 }
0.1
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest_v2", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": null, "python": "3.9", "reqs_path": null, "test_cmd": "pytest -vvs ." }
exceptiongroup==1.2.2 iniconfig==2.1.0 packaging==24.2 pluggy==1.5.0 pytest==8.3.5 -e git+https://github.com/regro/regolith.git@f5a3d3b860a7c03f800e8e8d2fc9daf843de4ad8#egg=regolith tomli==2.2.1
name: regolith channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - exceptiongroup==1.2.2 - iniconfig==2.1.0 - packaging==24.2 - pluggy==1.5.0 - pytest==8.3.5 - tomli==2.2.1 prefix: /opt/conda/envs/regolith
[ "tests/test_tools.py::test_author_publications" ]
[]
[]
[]
Creative Commons Zero v1.0 Universal license (CC0 1.0)
2,176
[ "regolith/__init__.py", "setup.py", "news/fix_get_author.rst", "CHANGELOG.rst", "news/fix_abstract_error.rst", "regolith/tools.py" ]
[ "regolith/__init__.py", "setup.py", "news/fix_get_author.rst", "CHANGELOG.rst", "news/fix_abstract_error.rst", "regolith/tools.py" ]
imageio__imageio-310
b122f914d2b8f6971f958d925b79261fc8df51f8
2018-02-19 16:32:53
a22145a9ebbd3aa3bc742911e052ed1903381486
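The imageio patch that follows teaches the Request parser to accept pathlib.Path objects, using a guarded import so that Python 2 (which lacks pathlib) keeps working: the diff binds Path = None on ImportError and later checks isinstance(uri, Path) only when Path is available. A minimal sketch of that optional-import pattern; normalize_uri is a hypothetical helper name, not a function from imageio.

try:
    from pathlib import Path  # available on Python 3.4+
except ImportError:
    Path = None  # Python 2: no pathlib, fall back to string handling

def normalize_uri(uri):
    # Hypothetical helper: coerce a pathlib.Path to str, pass others through.
    if Path is not None and isinstance(uri, Path):
        return str(uri)
    return uri

assert normalize_uri('cat.png') == 'cat.png'
if Path is not None:
    assert normalize_uri(Path('cat.png')) == 'cat.png'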
diff --git a/.travis.yml b/.travis.yml index 99368c9..11e3828 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,7 +21,7 @@ matrix: env: TEST_UNIT=0 TEST_STYLE=1 # - python: "2.7" - env: TEST_UNIT=1 TEST_INSTALL=1 + env: TEST_UNIT=1 TEST_INSTALL=1 IS_LEGACY=1 - python: "3.4" env: TEST_UNIT=1 TEST_FULL=1 - python: "3.5" @@ -91,6 +91,11 @@ install: easy_install -q simpleitk; true; conda install -y astropy; fi; + # Install more deps on legacy py + - if [ "${IS_LEGACY}" == "1" ]; then + pip install enum34 futures; + fi; + # Install imageio, use installed version on only one machine - if [ "${TEST_INSTALL}" == "1" ]; then python setup.py build_with_fi install > ${REDIRECT_TO}; diff --git a/imageio/core/functions.py b/imageio/core/functions.py index 2771fb3..581bc9e 100644 --- a/imageio/core/functions.py +++ b/imageio/core/functions.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # imageio is distributed under the terms of the (new) BSD License. -""" +""" These functions represent imageio's main interface for the user. They provide a common API to read and write image data for a large variety of formats. All read and write functions accept keyword @@ -70,10 +70,10 @@ from .. import formats def help(name=None): """ help(name=None) - + Print the documentation of the format specified by name, or a list - of supported formats if name is omitted. - + of supported formats if name is omitted. + Parameters ---------- name : str @@ -91,14 +91,15 @@ def help(name=None): def get_reader(uri, format=None, mode='?', **kwargs): """ get_reader(uri, format=None, mode='?', **kwargs) - + Returns a :class:`.Reader` object which can be used to read data and meta data from the specified file. - + Parameters ---------- - uri : {str, bytes, file} - The resource to load the image from, e.g. a filename, http address or + uri : {str, pathlib.Path, bytes, file} + The resource to load the image from, e.g. a filename, pathlib.Path, + http address or file object, see the docs for more info. format : str The format to use to read the file. By default imageio selects @@ -110,11 +111,11 @@ def get_reader(uri, format=None, mode='?', **kwargs): kwargs : ... Further keyword arguments are passed to the reader. See :func:`.help` to see what arguments are available for a particular format. - """ - + """ + # Create request object request = Request(uri, 'r' + mode, **kwargs) - + # Get format if format is not None: format = formats[format] @@ -123,21 +124,22 @@ def get_reader(uri, format=None, mode='?', **kwargs): if format is None: raise ValueError('Could not find a format to read the specified file ' 'in mode %r' % mode) - + # Return its reader object return format.get_reader(request) def get_writer(uri, format=None, mode='?', **kwargs): """ get_writer(uri, format=None, mode='?', **kwargs) - + Returns a :class:`.Writer` object which can be used to write data and meta data to the specified file. - + Parameters ---------- - uri : {str, file} - The resource to write the image to, e.g. a filename or file object, + uri : {str, pathlib.Path, file} + The resource to write the image to, e.g. a filename, pathlib.Path + or file object, see the docs for more info. format : str The format to use to write the file. By default imageio selects @@ -149,15 +151,15 @@ def get_writer(uri, format=None, mode='?', **kwargs): kwargs : ... Further keyword arguments are passed to the writer. See :func:`.help` to see what arguments are available for a particular format. - """ - + """ + # Signal extension when returning as bytes, needed by e.g. 
ffmpeg if uri == RETURN_BYTES and isinstance(format, str): uri = RETURN_BYTES + '.' + format.strip('. ') - + # Create request object request = Request(uri, 'w' + mode, **kwargs) - + # Get format if format is not None: format = formats[format] @@ -166,7 +168,7 @@ def get_writer(uri, format=None, mode='?', **kwargs): if format is None: raise ValueError('Could not find a format to write the specified file ' 'in mode %r' % mode) - + # Return its writer object return format.get_writer(request) @@ -175,17 +177,18 @@ def get_writer(uri, format=None, mode='?', **kwargs): def imread(uri, format=None, **kwargs): """ imread(uri, format=None, **kwargs) - + Reads an image from the specified file. Returns a numpy array, which comes with a dict of meta data at its 'meta' attribute. - + Note that the image data is returned as-is, and may not always have a dtype of uint8 (and thus may differ from what e.g. PIL returns). - + Parameters ---------- - uri : {str, bytes, file} - The resource to load the image from, e.g. a filename, http address or + uri : {str, pathlib.Path, bytes, file} + The resource to load the image from, e.g. a filename, pathlib.Path, + http address or file object, see the docs for more info. format : str The format to use to read the file. By default imageio selects @@ -193,8 +196,8 @@ def imread(uri, format=None, **kwargs): kwargs : ... Further keyword arguments are passed to the reader. See :func:`.help` to see what arguments are available for a particular format. - """ - + """ + # Get reader and read first reader = read(uri, format, 'i', **kwargs) with reader: @@ -203,13 +206,14 @@ def imread(uri, format=None, **kwargs): def imwrite(uri, im, format=None, **kwargs): """ imwrite(uri, im, format=None, **kwargs) - + Write an image to the specified file. - + Parameters ---------- - uri : {str, file} - The resource to write the image to, e.g. a filename or file object, + uri : {str, pathlib.Path, file} + The resource to write the image to, e.g. a filename, pathlib.Path + or file object, see the docs for more info. im : numpy.ndarray The image data. Must be NxM, NxMx3 or NxMx4. @@ -219,8 +223,8 @@ def imwrite(uri, im, format=None, **kwargs): kwargs : ... Further keyword arguments are passed to the writer. See :func:`.help` to see what arguments are available for a particular format. - """ - + """ + # Test image if isinstance(im, np.ndarray): if im.ndim == 2: @@ -231,12 +235,12 @@ def imwrite(uri, im, format=None, **kwargs): raise ValueError('Image must be 2D (grayscale, RGB, or RGBA).') else: raise ValueError('Image must be a numpy array.') - + # Get writer and write first writer = get_writer(uri, format, 'i', **kwargs) with writer: writer.append_data(im) - + # Return a result if there is any return writer.request.get_result() @@ -245,14 +249,15 @@ def imwrite(uri, im, format=None, **kwargs): def mimread(uri, format=None, memtest=True, **kwargs): """ mimread(uri, format=None, memtest=True, **kwargs) - + Reads multiple images from the specified file. Returns a list of numpy arrays, each with a dict of meta data at its 'meta' attribute. - + Parameters ---------- - uri : {str, bytes, file} - The resource to load the images from, e.g. a filename, http address or + uri : {str, pathlib.Path, bytes, file} + The resource to load the images from, e.g. a filename,pathlib.Path, + http address or file object, see the docs for more info. format : str The format to use to read the file. By default imageio selects @@ -266,11 +271,11 @@ def mimread(uri, format=None, memtest=True, **kwargs): kwargs : ... 
Further keyword arguments are passed to the reader. See :func:`.help` to see what arguments are available for a particular format. - """ - + """ + # Get reader reader = read(uri, format, 'I', **kwargs) - + # Read ims = [] nbytes = 0 @@ -283,19 +288,20 @@ def mimread(uri, format=None, memtest=True, **kwargs): raise RuntimeError('imageio.mimread() has read over 256 MiB of ' 'image data.\nStopped to avoid memory problems.' ' Use imageio.get_reader() or memtest=False.') - + return ims def mimwrite(uri, ims, format=None, **kwargs): """ mimwrite(uri, ims, format=None, **kwargs) - + Write multiple images to the specified file. - + Parameters ---------- - uri : {str, file} - The resource to write the images to, e.g. a filename or file object, + uri : {str, pathlib.Path, file} + The resource to write the images to, e.g. a filename, pathlib.Path + or file object, see the docs for more info. ims : sequence of numpy arrays The image data. Each array must be NxM, NxMx3 or NxMx4. @@ -305,17 +311,16 @@ def mimwrite(uri, ims, format=None, **kwargs): kwargs : ... Further keyword arguments are passed to the writer. See :func:`.help` to see what arguments are available for a particular format. - """ - + """ # Get writer writer = get_writer(uri, format, 'I', **kwargs) written = 0 with writer: - + # Iterate over images (ims may be a generator) for im in ims: - + # Test image if isinstance(im, np.ndarray): if im.ndim == 2: @@ -327,7 +332,7 @@ def mimwrite(uri, ims, format=None, **kwargs): '(grayscale, RGB, or RGBA).') else: raise ValueError('Image must be a numpy array.') - + # Add image writer.append_data(im) written += 1 @@ -345,14 +350,15 @@ def mimwrite(uri, ims, format=None, **kwargs): def volread(uri, format=None, **kwargs): """ volread(uri, format=None, **kwargs) - + Reads a volume from the specified file. Returns a numpy array, which comes with a dict of meta data at its 'meta' attribute. - + Parameters ---------- - uri : {str, bytes, file} - The resource to load the volume from, e.g. a filename, http address or + uri : {str, pathlib.Path, bytes, file} + The resource to load the volume from, e.g. a filename, pathlib.Path, + http address or file object, see the docs for more info. format : str The format to use to read the file. By default imageio selects @@ -360,8 +366,8 @@ def volread(uri, format=None, **kwargs): kwargs : ... Further keyword arguments are passed to the reader. See :func:`.help` to see what arguments are available for a particular format. - """ - + """ + # Get reader and read first reader = read(uri, format, 'v', **kwargs) with reader: @@ -370,13 +376,14 @@ def volread(uri, format=None, **kwargs): def volwrite(uri, im, format=None, **kwargs): """ volwrite(uri, vol, format=None, **kwargs) - + Write a volume to the specified file. - + Parameters ---------- - uri : {str, file} - The resource to write the image to, e.g. a filename or file object, + uri : {str, pathlib.Path, file} + The resource to write the image to, e.g. a filename, pathlib.Path + or file object, see the docs for more info. vol : numpy.ndarray The image data. Must be NxMxL (or NxMxLxK if each voxel is a tuple). @@ -386,8 +393,8 @@ def volwrite(uri, im, format=None, **kwargs): kwargs : ... Further keyword arguments are passed to the writer. See :func:`.help` to see what arguments are available for a particular format. 
- """ - + """ + # Test image if isinstance(im, np.ndarray): if im.ndim == 3: @@ -399,12 +406,12 @@ def volwrite(uri, im, format=None, **kwargs): 'a tuple.') else: raise ValueError('Image must be a numpy array.') - + # Get writer and write first writer = get_writer(uri, format, 'v', **kwargs) with writer: writer.append_data(im) - + # Return a result if there is any return writer.request.get_result() @@ -413,14 +420,15 @@ def volwrite(uri, im, format=None, **kwargs): def mvolread(uri, format=None, memtest=True, **kwargs): """ mvolread(uri, format=None, memtest=True, **kwargs) - + Reads multiple volumes from the specified file. Returns a list of numpy arrays, each with a dict of meta data at its 'meta' attribute. - + Parameters ---------- - uri : {str, bytes, file} - The resource to load the volumes from, e.g. a filename, http address or + uri : {str, pathlib.Path, bytes, file} + The resource to load the volumes from, e.g. a filename, pathlib.Path, + http address or file object, see the docs for more info. format : str The format to use to read the file. By default imageio selects @@ -434,11 +442,11 @@ def mvolread(uri, format=None, memtest=True, **kwargs): kwargs : ... Further keyword arguments are passed to the reader. See :func:`.help` to see what arguments are available for a particular format. - """ - + """ + # Get reader and read all reader = read(uri, format, 'V', **kwargs) - + ims = [] nbytes = 0 for im in reader: @@ -450,19 +458,20 @@ def mvolread(uri, format=None, memtest=True, **kwargs): raise RuntimeError('imageio.mvolread() has read over 1 GiB of ' 'image data.\nStopped to avoid memory problems.' ' Use imageio.get_reader() or memtest=False.') - + return ims def mvolwrite(uri, ims, format=None, **kwargs): """ mvolwrite(uri, vols, format=None, **kwargs) - + Write multiple volumes to the specified file. - + Parameters ---------- - uri : {str, file} - The resource to write the volumes to, e.g. a filename or file object, + uri : {str, pathlib.Path, file} + The resource to write the volumes to, e.g. a filename, pathlib.Path + or file object, see the docs for more info. ims : sequence of numpy arrays The image data. Each array must be NxMxL (or NxMxLxK if each @@ -473,17 +482,17 @@ def mvolwrite(uri, ims, format=None, **kwargs): kwargs : ... Further keyword arguments are passed to the writer. See :func:`.help` to see what arguments are available for a particular format. - """ - + """ + # Get writer writer = get_writer(uri, format, 'V', **kwargs) written = 0 with writer: - + # Iterate over images (ims may be a generator) for im in ims: - + # Test image if isinstance(im, np.ndarray): if im.ndim == 3: @@ -495,7 +504,7 @@ def mvolwrite(uri, ims, format=None, **kwargs): 'a tuple.') else: raise ValueError('Image must be a numpy array.') - + # Add image writer.append_data(im) written += 1 diff --git a/imageio/core/request.py b/imageio/core/request.py index e7cf533..d8e2221 100644 --- a/imageio/core/request.py +++ b/imageio/core/request.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # imageio is distributed under the terms of the (new) BSD License. -""" +""" Definition of the Request object, which acts as a kind of bridge between what the user wants and what the plugins can. 
""" @@ -17,6 +17,13 @@ import shutil from ..core import string_types, binary_type, urlopen, get_remote_file +if sys.version_info < (3,): + FileNotFoundError = OSError +try: + from pathlib import Path +except ImportError: + Path = None + # URI types URI_BYTES = 1 URI_FILE = 2 @@ -37,10 +44,10 @@ EXAMPLE_IMAGES = { 'checkerboard.png': 'Black and white image of a chekerboard', 'clock.png': 'Photo of a clock with motion blur (Stefan van der Walt)', 'coffee.png': 'Image of a cup of coffee (Rachel Michetti)', - + 'chelsea.png': 'Image of Stefan\'s cat', 'wikkie.png': 'Image of Almar\'s cat', - + 'coins.png': 'Image showing greek coins from Pompeii', 'horse.png': 'Image showing the silhouette of a horse (Andreas Preuss)', 'hubble_deep_field.png': 'Photograph taken by Hubble telescope (NASA)', @@ -48,7 +55,7 @@ EXAMPLE_IMAGES = { 'moon.png': 'Image showing a portion of the surface of the moon', 'page.png': 'A scanned page of text', 'text.png': 'A photograph of handdrawn text', - + 'chelsea.zip': 'The chelsea.png in a zipfile (for testing)', 'newtonscradle.gif': 'Animated GIF of a newton\'s cradle', 'cockatoo.mp4': 'Video file of a cockatoo', @@ -58,14 +65,14 @@ EXAMPLE_IMAGES = { class Request(object): """ Request(uri, mode, **kwargs) - + Represents a request for reading or saving an image resource. This object wraps information to that request and acts as an interface for the plugins to several resources; it allows the user to read from filenames, files, http, zipfiles, raw bytes, etc., but offer a simple interface to the plugins via ``get_file()`` and ``get_local_filename()``. - + For each read/write operation a single Request instance is used and passed to the can_read/can_write method of a format, and subsequently to the Reader/Writer class. This allows rudimentary passing of @@ -82,29 +89,29 @@ class Request(object): "i" for an image, "I" for multiple images, "v" for a volume, "V" for multiple volumes, "?" for don't care. 
""" - + def __init__(self, uri, mode, **kwargs): - - # General + + # General self._uri_type = None self._filename = None self._extension = None self._kwargs = kwargs self._result = None # Some write actions may have a result - + # To handle the user-side self._filename_zip = None # not None if a zipfile is used self._bytes = None # Incoming bytes self._zipfile = None # To store a zipfile instance (if used) - + # To handle the plugin side self._file = None # To store the file instance self._filename_local = None # not None if using tempfile on this FS self._firstbytes = None # For easy header parsing - + # To store formats that may be able to fulfil this request #self._potential_formats = [] - + # Check mode self._mode = mode if not isinstance(mode, string_types): @@ -115,7 +122,7 @@ class Request(object): raise ValueError('Request requires mode[0] to be "r" or "w"') if mode[1] not in 'iIvV?': raise ValueError('Request requires mode[1] to be in "iIvV?"') - + # Parse what was given self._parse_uri(uri) @@ -133,7 +140,7 @@ class Request(object): py3k = sys.version_info[0] == 3 is_read_request = self.mode[0] == 'r' is_write_request = self.mode[0] == 'w' - + if isinstance(uri, string_types): # Explicit if uri.startswith('imageio:'): @@ -190,6 +197,9 @@ class Request(object): self._uri_type = URI_BYTES self._filename = '<bytes>' self._bytes = uri + elif Path is not None and isinstance(uri, Path): + self._uri_type = URI_FILENAME + self._filename = str(uri) # Files elif is_read_request: if hasattr(uri, 'read') and hasattr(uri, 'close'): @@ -201,35 +211,35 @@ class Request(object): self._uri_type = URI_FILE self._filename = '<file>' self._file = uri - + # Expand user dir if self._uri_type == URI_FILENAME and self._filename.startswith('~'): self._filename = os.path.expanduser(self._filename) - + # Check if a zipfile if self._uri_type == URI_FILENAME: # Search for zip extension followed by a path separater for needle in ['.zip/', '.zip\\']: zip_i = self._filename.lower().find(needle) - if zip_i > 0: + if zip_i > 0: zip_i += 4 self._uri_type = URI_ZIPPED - self._filename_zip = (self._filename[:zip_i], + self._filename_zip = (self._filename[:zip_i], self._filename[zip_i:].lstrip('/\\')) break - + # Check if we could read it if self._uri_type is None: uri_r = repr(uri) if len(uri_r) > 60: uri_r = uri_r[:57] + '...' raise IOError("Cannot understand given URI: %s." % uri_r) - + # Check if this is supported noWriting = [URI_HTTP, URI_FTP] if is_write_request and self._uri_type in noWriting: raise IOError('imageio does not support writing to http/ftp.') - + # Deprecated way to load standard images, give a sensible error message if is_read_request and self._uri_type in [URI_FILENAME, URI_ZIPPED]: fn = self._filename @@ -239,16 +249,16 @@ class Request(object): raise IOError('No such file: %r. This file looks like one of ' 'the standard images, but from imageio 2.1, ' 'standard images have to be specified using ' - '"imageio:%s".' % (fn, fn)) - - # Make filename absolute + '"imageio:%s".' 
% (fn, fn)) + + # Make filename absolute if self._uri_type in [URI_FILENAME, URI_ZIPPED]: if self._filename_zip: self._filename_zip = (os.path.abspath(self._filename_zip[0]), self._filename_zip[1]) else: self._filename = os.path.abspath(self._filename) - + # Check whether file name is valid if self._uri_type in [URI_FILENAME, URI_ZIPPED]: fn = self._filename @@ -257,13 +267,14 @@ class Request(object): if is_read_request: # Reading: check that the file exists (but is allowed a dir) if not os.path.exists(fn): - raise IOError("No such file: '%s'" % fn) + raise FileNotFoundError("No such file: '%s'" % fn) else: # Writing: check that the directory to write to does exist dn = os.path.dirname(fn) if not os.path.exists(dn): - raise IOError("The directory %r does not exist" % dn) - + raise FileNotFoundError("The directory %r does not exist" + % dn) + @property def filename(self): """ The uri for which reading/saving was requested. This @@ -272,7 +283,7 @@ class Request(object): but use ``get_file()`` or ``get_local_filename()`` instead. """ return self._filename - + @property def extension(self): """ The (lowercase) extension of the requested filename. @@ -290,46 +301,46 @@ class Request(object): "V" for multiple volumes, "?" for don't care. """ return self._mode - + @property def kwargs(self): """ The dict of keyword arguments supplied by the user. """ return self._kwargs - + ## For obtaining data - + def get_file(self): """ get_file() Get a file object for the resource associated with this request. If this is a reading request, the file is in read mode, otherwise in write mode. This method is not thread safe. Plugins do not need to close the file when done. - + This is the preferred way to read/write the data. But if a format cannot handle file-like objects, they should use ``get_local_filename()``. """ want_to_write = self.mode[0] == 'w' - + # Is there already a file? - # Either _uri_type == URI_FILE, or we already opened the file, + # Either _uri_type == URI_FILE, or we already opened the file, # e.g. by using firstbytes if self._file is not None: return self._file - + if self._uri_type == URI_BYTES: - if want_to_write: + if want_to_write: self._file = BytesIO() else: self._file = BytesIO(self._bytes) - + elif self._uri_type == URI_FILENAME: if want_to_write: self._file = open(self.filename, 'wb') else: self._file = open(self.filename, 'rb') - + elif self._uri_type == URI_ZIPPED: # Get the correct filename filename, name = self._filename_zip @@ -340,21 +351,21 @@ class Request(object): # Open zipfile and open new file object for specific file self._zipfile = zipfile.ZipFile(filename, 'r') self._file = self._zipfile.open(name, 'r') - + elif self._uri_type in [URI_HTTP or URI_FTP]: assert not want_to_write # This should have been tested in init self._file = urlopen(self.filename, timeout=5) fix_HTTPResponse(self._file) - + return self._file - + def get_local_filename(self): """ get_local_filename() If the filename is an existing file on this filesystem, return that. Otherwise a temporary file is created on the local file system which can be used by the format to read from or write to. """ - + if self._uri_type == URI_FILENAME: return self._filename else: @@ -369,27 +380,27 @@ class Request(object): with open(self._filename_local, 'wb') as file: shutil.copyfileobj(self.get_file(), file) return self._filename_local - + def finish(self): """ finish() For internal use (called when the context of the reader/writer exits). Finishes this request. Close open files and process results. 
""" - + # Init bytes = None - + # Collect bytes from temp file if self.mode[0] == 'w' and self._filename_local: with open(self._filename_local, 'rb') as file: bytes = file.read() - + # Collect bytes from BytesIO file object. written = (self.mode[0] == 'w') and self._file if written and self._uri_type in [URI_BYTES, URI_ZIPPED]: bytes = self._file.getvalue() - + # Close open files that we know of (and are responsible for) if self._file and self._uri_type != URI_FILE: self._file.close() @@ -404,7 +415,7 @@ class Request(object): except Exception: # pragma: no cover pass self._filename_local = None - + # Handle bytes that we collected if bytes is not None: if self._uri_type == URI_BYTES: @@ -413,26 +424,26 @@ class Request(object): zf = zipfile.ZipFile(self._filename_zip[0], 'a') zf.writestr(self._filename_zip[1], bytes) zf.close() - + # Detach so gc can clean even if a reference of self lingers self._bytes = None - + def get_result(self): """ For internal use. In some situations a write action can have a result (bytes data). That is obtained with this function. """ self._result, res = None, self._result return res - + @property def firstbytes(self): - """ The first 256 bytes of the file. These can be used to + """ The first 256 bytes of the file. These can be used to parse the header to determine the file-format. """ if self._firstbytes is None: self._read_first_bytes() return self._firstbytes - + def _read_first_bytes(self, N=256): if self._bytes is not None: self._firstbytes = self._bytes[:N] @@ -466,7 +477,7 @@ class Request(object): def read_n_bytes(f, N): """ read_n_bytes(file, n) - + Read n bytes from the given file, or less if the file has less bytes. Returns zero bytes if the file is closed. """ @@ -485,27 +496,27 @@ def fix_HTTPResponse(f): to use the file object. """ count = [0] - + def read(n=None): res = ori_read(n) count[0] += len(res) return res - + def tell(): return count[0] - + def seek(i, mode=0): if not (mode == 0 and i == count[0]): ori_seek(i, mode) - + def fail_seek(i, mode=0): raise RuntimeError('No seeking allowed!') - + # Note, there is currently no protection from wrapping an object more than # once, it will (probably) work though, because closures. ori_read = f.read ori_seek = f.seek if hasattr(f, 'seek') else fail_seek - + f.read = read f.tell = tell f.seek = seek diff --git a/imageio/plugins/_tifffile.py b/imageio/plugins/_tifffile.py index e868b18..c827205 100644 --- a/imageio/plugins/_tifffile.py +++ b/imageio/plugins/_tifffile.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#! /usr/bin/python3 # -*- coding: utf-8 -*- # tifffile.py @@ -31,26 +31,27 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -"""Read image and meta data from (bio)TIFF files. Save numpy arrays as TIFF. +"""Read image and meta data from (bio) TIFF® files. Save numpy arrays as TIFF. Image and metadata can be read from TIFF, BigTIFF, OME-TIFF, STK, LSM, NIH, SGI, ImageJ, MicroManager, FluoView, SEQ and GEL files. -Only a subset of the TIFF specification is supported, mainly uncompressed -and losslessly compressed 2**(0 to 6) bit integer, 16, 32 and 64-bit float, -grayscale and RGB(A) images, which are commonly used in bio-scientific imaging. -Specifically, reading JPEG and CCITT compressed image data, chroma subsampling, -or EXIF, IPTC, GPS, and XMP metadata is not implemented. Only primary info -records are read for STK, FluoView, MicroManager, and NIH Image formats. 
- -TIFF, the Tagged Image File Format aka Thousands of Incompatible File Formats, -is under the control of Adobe Systems. BigTIFF allows for files greater than -4 GB. STK, LSM, FluoView, SGI, SEQ, GEL, and OME-TIFF, are custom extensions + +Tifffile is not a general purpose TIFF library. Only a subset of the TIFF +specification is supported, mainly uncompressed and losslessly compressed +2**(0 to 6) bit integer, 16, 32 and 64-bit float, grayscale and RGB(A) images, +which are commonly used in bio-scientific imaging. Specifically, reading image +trees defined via SubIFDs, JPEG and CCITT compression, chroma subsampling, +or IPTC and XMP metadata are not implemented. + +TIFF®, the tagged Image File Format, is a trademark and under control of +Adobe Systems Incorporated. BigTIFF allows for files greater than 4 GB. +STK, LSM, FluoView, SGI, SEQ, GEL, and OME-TIFF, are custom extensions defined by Molecular Devices (Universal Imaging Corporation), Carl Zeiss MicroImaging, Olympus, Silicon Graphics International, Media Cybernetics, Molecular Dynamics, and the Open Microscopy Environment consortium respectively. -For command line usage run `python tifffile.py --help` +For command line usage run C{python -m tifffile --help} :Author: `Christoph Gohlke <http://www.lfd.uci.edu/~gohlke/>`_ @@ -58,18 +59,74 @@ For command line usage run `python tifffile.py --help` :Organization: Laboratory for Fluorescence Dynamics, University of California, Irvine -:Version: 2017.01.12 +:Version: 2017.09.29 Requirements ------------ -* `CPython 2.7 or 3.5 <http://www.python.org>`_ (64-bit recommended) -* `Numpy 1.11 <http://www.numpy.org>`_ -* `Matplotlib 1.5 <http://www.matplotlib.org>`_ (optional for plotting) +* `CPython 3.6 64-bit <http://www.python.org>`_ +* `Numpy 1.13 <http://www.numpy.org>`_ +* `Matplotlib 2.0 <http://www.matplotlib.org>`_ (optional for plotting) * `Tifffile.c 2017.01.10 <http://www.lfd.uci.edu/~gohlke/>`_ (recommended for faster decoding of PackBits and LZW encoded strings) Revisions --------- +2017.09.29 (tentative) + Many backwards incompatible changes improving speed and resource usage: + Pass 2268 tests. + Add detail argument to __str__ function. Remove info functions. + Fix potential issue correcting offsets of large LSM files with positions. + Remove TiffFile iterator interface; use TiffFile.pages instead. + Do not make tag values available as TiffPage attributes. + Use str (not bytes) type for tag and metadata strings (WIP). + Use documented standard tag and value names (WIP). + Use enums for some documented TIFF tag values. + Remove 'memmap' and 'tmpfile' options; use out='memmap' instead. + Add option to specify output in asarray functions. + Add option to concurrently decode image strips or tiles using threads. + Add TiffPage.asrgb function (WIP). + Do not apply colormap in asarray. + Remove 'colormapped', 'rgbonly', and 'scale_mdgel' options from asarray. + Consolidate metadata in TiffFile _metadata functions. + Remove non-tag metadata properties from TiffPage. + Add function to convert LSM to tiled BIN files. + Align image data in file. + Make TiffPage.dtype a numpy.dtype. + Add 'ndim' and 'size' properties to TiffPage and TiffPageSeries. + Allow imsave to write non-BigTIFF files up to ~4 GB. + Only read one page for shaped series if possible. + Add memmap function to create memory-mapped array stored in TIFF file. + Add option to save empty arrays to TIFF files. + Add option to save truncated TIFF files. + Allow single tile images to be saved contiguously. 
+ Add optional movie mode for files with uniform pages. + Lazy load pages. + Use lightweight TiffFrame for IFDs sharing properties with key TiffPage. + Move module constants to 'TIFF' namespace (speed up module import). + Remove 'fastij' option from TiffFile. + Remove 'pages' parameter from TiffFile. + Remove TIFFfile alias. + Deprecate Python 2. + Require enum34 and futures packages on Python 2.7. + Remove Record class and return all metadata as dict instead. + Add functions to parse STK, MetaSeries, ScanImage, SVS, Pilatus metadata. + Read tags from EXIF and GPS IFDs. + Use pformat for tag and metadata values. + Fix reading some UIC tags (bug fix). + Do not modify input array in imshow (bug fix). + Fix Python implementation of unpack_ints. +2017.05.23 + Pass 1961 tests. + Write correct number of sample_format values (bug fix). + Use Adobe deflate code to write ZIP compressed files. + Add option to pass tag values as packed binary data for writing. + Defer tag validation to attribute access. + Use property instead of lazyattr decorator for simple expressions. +2017.03.17 + Write IFDs and tag values on word boundaries. + Read ScanImage metadata. + Remove is_rgb and is_indexed attributes from TiffFile. + Create files used by doctests. 2017.01.12 Read Zeiss SEM metadata. Read OME-TIFF with invalid references to external files. @@ -122,14 +179,14 @@ Revisions Do not color-map ImageJ hyperstacks (backwards incompatible). Towards supporting Leica SCN. 2015.09.25 - Read images with reversed bit order (fill_order is lsb2msb). + Read images with reversed bit order (FillOrder is LSB2MSB). 2015.09.21 Read RGB OME-TIFF. Warn about malformed OME-XML. 2015.09.16 Detect some corrupted ImageJ metadata. Better axes labels for 'shaped' files. - Do not create TiffTags for default values. + Do not create TiffTag for default values. Chroma subsampling is not supported. Memory-map data in TiffPageSeries if possible (optional). 2015.08.17 @@ -138,7 +195,7 @@ Revisions Read and write LZMA compressed data. Specify datetime when saving (optional). Save tiled and color-mapped images (optional). - Ignore void byte_counts and offsets if possible. + Ignore void bytecounts and offsets if possible. Ignore bogus image_depth tag created by ISS Vista software. Decode floating point horizontal differencing (not tiled). Save image data contiguously if possible. @@ -148,7 +205,7 @@ Revisions Try to read incomplete tiles. Open file dialog if no filename is passed on command line. Ignore errors when decoding OME-XML. - Rename decoder functions (backwards incompatible) + Rename decoder functions (backwards incompatible). 2014.08.24 TiffWriter class for incremental writing images. Simplify examples. @@ -218,44 +275,49 @@ Other Python packages and modules for reading bio-scientific TIFF files: Acknowledgements ---------------- -* Egor Zindy, University of Manchester, for cz_lsm_scan_info specifics. -* Wim Lewis for a bug fix and some read_cz_lsm functions. +* Egor Zindy, University of Manchester, for lsm_scan_info specifics. +* Wim Lewis for a bug fix and some LSM functions. * Hadrien Mary for help on reading MicroManager files. * Christian Kliche for help writing tiled and color-mapped files. References ---------- -(1) TIFF 6.0 Specification and Supplements. Adobe Systems Incorporated. +1) TIFF 6.0 Specification and Supplements. Adobe Systems Incorporated. http://partners.adobe.com/public/developer/tiff/ -(2) TIFF File Format FAQ. http://www.awaresystems.be/imaging/tiff/faq.html -(3) MetaMorph Stack (STK) Image File Format. 
+2) TIFF File Format FAQ. http://www.awaresystems.be/imaging/tiff/faq.html +3) MetaMorph Stack (STK) Image File Format. http://support.meta.moleculardevices.com/docs/t10243.pdf -(4) Image File Format Description LSM 5/7 Release 6.0 (ZEN 2010). +4) Image File Format Description LSM 5/7 Release 6.0 (ZEN 2010). Carl Zeiss MicroImaging GmbH. BioSciences. May 10, 2011 -(5) File Format Description - LSM 5xx Release 2.0. - http://ibb.gsf.de/homepage/karsten.rodenacker/IDL/Lsmfile.doc -(6) The OME-TIFF format. +5) The OME-TIFF format. http://www.openmicroscopy.org/site/support/file-formats/ome-tiff -(7) UltraQuant(r) Version 6.0 for Windows Start-Up Guide. +6) UltraQuant(r) Version 6.0 for Windows Start-Up Guide. http://www.ultralum.com/images%20ultralum/pdf/UQStart%20Up%20Guide.pdf -(8) Micro-Manager File Formats. +7) Micro-Manager File Formats. http://www.micro-manager.org/wiki/Micro-Manager_File_Formats -(9) Tags for TIFF and Related Specifications. Digital Preservation. +8) Tags for TIFF and Related Specifications. Digital Preservation. http://www.digitalpreservation.gov/formats/content/tiff_tags.shtml +9) ScanImage BigTiff Specification - ScanImage 2016. + http://scanimage.vidriotechnologies.com/display/SI2016/ + ScanImage+BigTiff+Specification +10) CIPA DC-008-2016: Exchangeable image file format for digital still cameras: + Exif Version 2.31. + http://www.cipa.jp/std/documents/e/DC-008-Translation-2016-E.pdf Examples -------- +>>> # write and read numpy array >>> data = numpy.random.rand(5, 301, 219) >>> imsave('temp.tif', data) - >>> image = imread('temp.tif') >>> numpy.testing.assert_array_equal(image, data) +>>> # iterate over pages and tags >>> with TiffFile('temp.tif') as tif: ... images = tif.asarray() -... for page in tif: +... for page in tif.pages: ... for tag in page.tags.values(): -... t = tag.name, tag.value +... _ = tag.name, tag.value ... image = page.asarray() """ @@ -264,19 +326,24 @@ from __future__ import division, print_function import sys import os +import io import re import glob import math import zlib import time import json +import enum import struct import warnings import tempfile import datetime +import threading import collections -from fractions import Fraction -from xml.etree import cElementTree as etree +import multiprocessing +import concurrent.futures +# from fractions import Fraction # delay import +# from xml.etree import cElementTree as etree # delay import import numpy @@ -288,16 +355,67 @@ except ImportError: except ImportError: lzma = None -__version__ = '2017.01.12' +__version__ = '2017.09.29' __docformat__ = 'restructuredtext en' __all__ = ( - 'imsave', 'imread', 'imshow', 'TiffFile', 'TiffWriter', 'TiffSequence', - # utility functions used in oiffile and czifile - 'FileHandle', 'lazyattr', 'natural_sorted', 'decode_lzw', 'stripnull') + 'imsave', 'imread', 'imshow', 'memmap', + 'TiffFile', 'TiffWriter', 'TiffSequence', + # utility functions used by oiffile or czifile + 'FileHandle', 'lazyattr', 'natural_sorted', 'decode_lzw', 'stripnull', + 'create_output', 'repeat_nd', 'format_size', 'product') + + +def imread(files, **kwargs): + """Return image data from TIFF file(s) as numpy array. + + Refer to the TiffFile class and member functions for documentation. + + Parameters + ---------- + files : str, binary stream, or sequence + File name, seekable binary stream, glob pattern, or sequence of + file names. + kwargs : dict + Parameters 'multifile' and 'is_ome' are passed to the TiffFile class. + The 'pattern' parameter is passed to the TiffSequence class. 
+ Other parameters are passed to the asarray functions. + The first image series is returned if no arguments are provided. + + Examples + -------- + >>> # get image from first page + >>> imsave('temp.tif', numpy.random.rand(3, 4, 301, 219)) + >>> im = imread('temp.tif', key=0) + >>> im.shape + (4, 301, 219) + + >>> # get images from sequence of files + >>> ims = imread(['temp.tif', 'temp.tif']) + >>> ims.shape + (2, 3, 4, 301, 219) + + """ + kwargs_file = parse_kwargs(kwargs, 'multifile', 'is_ome') + kwargs_seq = parse_kwargs(kwargs, 'pattern') + + if isinstance(files, basestring) and any(i in files for i in '?*'): + files = glob.glob(files) + if not files: + raise ValueError('no files found') + if not hasattr(files, 'seek') and len(files) == 1: + files = files[0] + if isinstance(files, basestring) or hasattr(files, 'seek'): + with TiffFile(files, **kwargs_file) as tif: + return tif.asarray(**kwargs) + else: + with TiffSequence(files, **kwargs_seq) as imseq: + return imseq.asarray(**kwargs) -def imsave(file, data, **kwargs): - """Write image data to TIFF file. + +def imsave(file, data=None, shape=None, dtype=None, bigsize=2**32-2**25, + **kwargs): + """Write numpy array to TIFF file. Refer to the TiffWriter class and member functions for documentation. @@ -308,86 +426,204 @@ def imsave(file, data, **kwargs): data : array_like Input image. The last dimensions are assumed to be image depth, height, width, and samples. + If data is None, an empty array of the specified shape and dtype is + saved to file. + shape : tuple + If data is None, shape of an empty array to save to the file. + dtype : numpy.dtype + If data is None, data-type of an empty array to save to the file. + bigsize : int + Create a BigTIFF file if the size of data in bytes is larger than + this threshold and 'imagej' or 'truncate' are not enabled. + By default, the threshold is 4 GB minus 32 MB reserved for metadata. + Use the 'bigtiff' parameter to explicitly specify the type of + file created. kwargs : dict Parameters 'append', 'byteorder', 'bigtiff', 'software', and 'imagej', - are passed to the TiffWriter class. - Parameters 'photometric', 'planarconfig', 'resolution', 'compress', - 'colormap', 'tile', 'description', 'datetime', 'metadata', 'contiguous' - and 'extratags' are passed to the TiffWriter.save function. + are passed to TiffWriter(). + Other parameters are passed to TiffWriter.save(). + + Returns + ------- + If the image data are written contiguously, return offset and bytecount + of image data in the file. 
Examples -------- + >>> # save a RGB image + >>> data = numpy.random.randint(0, 255, (256, 256, 3), 'uint8') + >>> imsave('temp.tif', data, photometric='rgb') + + >>> # save a random array and metadata, using compression >>> data = numpy.random.rand(2, 5, 3, 301, 219) >>> imsave('temp.tif', data, compress=6, metadata={'axes': 'TZCYX'}) """ tifargs = parse_kwargs(kwargs, 'append', 'bigtiff', 'byteorder', 'software', 'imagej') - - if 'bigtiff' not in tifargs and 'imagej' not in tifargs and ( - data.size*data.dtype.itemsize > 2000*2**20): + if data is None: + size = product(shape) * numpy.dtype(dtype).itemsize + else: + try: + size = data.nbytes + except Exception: + size = 0 + if size > bigsize and 'bigtiff' not in tifargs and not ( + tifargs.get('imagej', False) or tifargs.get('truncate', False)): tifargs['bigtiff'] = True with TiffWriter(file, **tifargs) as tif: - tif.save(data, **kwargs) + return tif.save(data, shape, dtype, **kwargs) + + +def memmap(filename, shape=None, dtype=None, page=None, series=0, mode='r+', + **kwargs): + """Return memory-mapped numpy array stored in TIFF file. + + Memory-mapping requires data stored in native byte order, without tiling, + compression, predictors, etc. + If shape and dtype are provided, existing files will be overwritten or + appended to depending on the 'append' parameter. + Otherwise the image data of a specified page or series in an existing + file will be memory-mapped. By default, the image data of the first page + series is memory-mapped. + Call flush() to write any changes in the array to the file. + Raise ValueError if the image data in the file is not memory-mappable + + Parameters + ---------- + filename : str + Name of the TIFF file which stores the array. + shape : tuple + Shape of the empty array. + dtype : numpy.dtype + Data-type of the empty array. + page : int + Index of the page which image data to memory-map. + series : int + Index of the page series which image data to memory-map. + mode : {'r+', 'r', 'c'}, optional + The file open mode. Default is to open existing file for reading and + writing ('r+'). + kwargs : dict + Additional parameters passed to imsave() or TiffFile(). 
+ + Examples + -------- + >>> # create an empty TIFF file and write to memory-mapped image + >>> im = memmap('temp.tif', shape=(256, 256), dtype='float32') + >>> im[255, 255] = 1.0 + >>> im.flush() + >>> im.shape, im.dtype + ((256, 256), dtype('float32')) + >>> del im + + >>> # memory-map image data in a TIFF file + >>> im = memmap('temp.tif', page=0) + >>> im[255, 255] + 1.0 + + """ + if shape is not None and dtype is not None: + # create a new, empty array + kwargs.update(data=None, shape=shape, dtype=dtype, returnoffset=True, + align=TIFF.ALLOCATIONGRANULARITY) + result = imsave(filename, **kwargs) + if result is None: + # TODO: fail before creating file or writing data + raise ValueError("image data is not memory-mappable") + offset = result[0] + else: + # use existing file + with TiffFile(filename, **kwargs) as tif: + if page is not None: + page = tif.pages[page] + if not page.is_memmappable: + raise ValueError("image data is not memory-mappable") + offset, _ = page.is_contiguous + shape = page.shape + dtype = page.dtype + else: + series = tif.series[series] + if series.offset is None: + raise ValueError("image data is not memory-mappable") + shape = series.shape + dtype = series.dtype + offset = series.offset + return numpy.memmap(filename, dtype, mode, offset, shape, 'C') + + +class lazyattr(object): + """Attribute whose value is computed on first access.""" + # TODO: help() doesn't work + __slots__ = ('func',) + + def __init__(self, func): + self.func = func + # self.__name__ = func.__name__ + # self.__doc__ = func.__doc__ + # self.lock = threading.RLock() + + def __get__(self, instance, owner): + # with self.lock: + if instance is None: + return self + try: + value = self.func(instance) + except AttributeError as e: + raise RuntimeError(e) + if value is NotImplemented: + return getattr(super(owner, instance), self.func.__name__) + setattr(instance, self.func.__name__, value) + return value class TiffWriter(object): - """Write image data to TIFF file. + """Write numpy arrays to TIFF file. TiffWriter instances must be closed using the 'close' method, which is automatically called when using the 'with' context manager. + TiffWriter's main purpose is saving nD numpy array's as TIFF, + not to create any possible TIFF format. Specifically, JPEG compression, + SubIFDs, ExifIFD, or GPSIFD tags are not supported. + Examples -------- + >>> # successively append images to BigTIFF file >>> data = numpy.random.rand(2, 5, 3, 301, 219) >>> with TiffWriter('temp.tif', bigtiff=True) as tif: ... for i in range(data.shape[0]): ... 
tif.save(data[i], compress=6) """ - TYPES = {'B': 1, 's': 2, 'H': 3, 'I': 4, '2I': 5, 'b': 6, - 'h': 8, 'i': 9, '2i': 10, 'f': 11, 'd': 12, 'Q': 16, 'q': 17} - TAGS = { - 'new_subfile_type': 254, 'subfile_type': 255, - 'image_width': 256, 'image_length': 257, 'bits_per_sample': 258, - 'compression': 259, 'photometric': 262, 'document_name': 269, - 'image_description': 270, 'strip_offsets': 273, 'orientation': 274, - 'samples_per_pixel': 277, 'rows_per_strip': 278, - 'strip_byte_counts': 279, 'x_resolution': 282, 'y_resolution': 283, - 'planar_configuration': 284, 'page_name': 285, 'resolution_unit': 296, - 'software': 305, 'datetime': 306, 'predictor': 317, 'color_map': 320, - 'tile_width': 322, 'tile_length': 323, 'tile_offsets': 324, - 'tile_byte_counts': 325, 'extra_samples': 338, 'sample_format': 339, - 'smin_sample_value': 340, 'smax_sample_value': 341, - 'image_depth': 32997, 'tile_depth': 32998} - - def __init__(self, file, append=False, bigtiff=False, byteorder=None, - software='tifffile.py', imagej=False): + def __init__(self, file, bigtiff=False, byteorder=None, + software='tifffile.py', append=False, imagej=False): """Open a TIFF file for writing. - Existing files are overwritten by default. - Use bigtiff=True when creating files larger than 2 GB. + An empty TIFF file is created if the file does not exist, else the + file is overwritten with an empty empty TIFF file unless 'append' + is true. Use bigtiff=True when creating files larger than 4 GB. Parameters ---------- file : str, binary stream, or FileHandle File name or writable binary stream, such as a open file or BytesIO. - The file is created if it does not exist. - append : bool - If True and 'file' is an existing standard TIFF file, image data - and tags are appended to the file. - Appending data may corrupt specifically formatted TIFF files - such as LSM, STK, ImageJ, NIH, or FluoView. bigtiff : bool If True, the BigTIFF format is used. byteorder : {'<', '>'} The endianness of the data in the file. - By default this is the system's native byte order. + By default, this is the system's native byte order. software : str Name of the software used to create the file. Saved with the first page in the file only. + Must be 7-bit ASCII. + append : bool + If True and 'file' is an existing standard TIFF file, image data + and tags are appended to the file. + Appending data may corrupt specifically formatted TIFF files + such as LSM, STK, ImageJ, NIH, or FluoView. imagej : bool If True, write an ImageJ hyperstack compatible file. 
This format can handle data types uint8, uint16, or float32 and @@ -405,16 +641,15 @@ class TiffWriter(object): with FileHandle(file, mode='rb', size=0) as fh: pos = fh.tell() try: - with TiffFile(fh, pages=[0]) as tif: + with TiffFile(fh) as tif: if (append != 'force' and - any(getattr(tif, 'is_'+a) for a in - ('lsm', 'stk', 'imagej', 'nih', 'fluoview', - 'micromanager'))): - raise ValueError("contains metadata") + any(getattr(tif, 'is_'+a) for a in ( + 'lsm', 'stk', 'imagej', 'nih', + 'fluoview', 'micromanager'))): + raise ValueError("file contains metadata") byteorder = tif.byteorder bigtiff = tif.is_bigtiff - imagej = tif.is_imagej - self._ifd_offset = tif._ifd_offset + self._ifdoffset = tif.pages.next_page_offset if tif.pages: software = None except Exception as e: @@ -424,45 +659,45 @@ class TiffWriter(object): except (IOError, FileNotFoundError): append = False - if byteorder not in (None, '<', '>'): - raise ValueError("invalid byteorder %s" % byteorder) - if byteorder is None: + if byteorder in (None, '='): byteorder = '<' if sys.byteorder == 'little' else '>' + elif byteorder not in ('<', '>'): + raise ValueError("invalid byteorder %s" % byteorder) if imagej and bigtiff: - warnings.warn("writing incompatible bigtiff ImageJ") + warnings.warn("writing incompatible BigTIFF ImageJ") self._byteorder = byteorder self._software = software self._imagej = bool(imagej) + self._truncate = False self._metadata = None self._colormap = None - self._description_offset = 0 - self._description_len_offset = 0 - self._description_len = 0 - + self._descriptionoffset = 0 + self._descriptionlen = 0 + self._descriptionlenoffset = 0 self._tags = None self._shape = None # normalized shape of data in consecutive pages - self._data_shape = None # shape of data in consecutive pages - self._data_dtype = None # data type - self._data_offset = None # offset to data - self._data_byte_counts = None # byte counts per plane - self._tag_offsets = None # strip or tile offset tag code + self._datashape = None # shape of data in consecutive pages + self._datadtype = None # data type + self._dataoffset = None # offset to data + self._databytecounts = None # byte counts per plane + self._tagoffsets = None # strip or tile offset tag code if bigtiff: self._bigtiff = True - self._offset_size = 8 - self._tag_size = 20 - self._numtag_format = 'Q' - self._offset_format = 'Q' - self._value_format = '8s' + self._offsetsize = 8 + self._tagsize = 20 + self._tagnoformat = 'Q' + self._offsetformat = 'Q' + self._valueformat = '8s' else: self._bigtiff = False - self._offset_size = 4 - self._tag_size = 12 - self._numtag_format = 'H' - self._offset_format = 'I' - self._value_format = '4s' + self._offsetsize = 4 + self._tagsize = 12 + self._tagnoformat = 'H' + self._offsetformat = 'I' + self._valueformat = '4s' if append: self._fh = FileHandle(file, mode='r+b', size=0) @@ -475,77 +710,100 @@ class TiffWriter(object): else: self._fh.write(struct.pack(byteorder+'H', 42)) # first IFD - self._ifd_offset = self._fh.tell() - self._fh.write(struct.pack(byteorder+self._offset_format, 0)) + self._ifdoffset = self._fh.tell() + self._fh.write(struct.pack(byteorder+self._offsetformat, 0)) - def save(self, data, photometric=None, planarconfig=None, tile=None, - contiguous=True, compress=0, colormap=None, - description=None, datetime=None, resolution=None, + def save(self, data=None, shape=None, dtype=None, returnoffset=False, + photometric=None, planarconfig=None, tile=None, + contiguous=True, align=16, truncate=False, compress=0, + colormap=None, 
description=None, datetime=None, resolution=None, metadata={}, extratags=()): - """Write image data and tags to TIFF file. - + """Write numpy array and tags to TIFF file. + + The data shape's last dimensions are assumed to be image depth, + height (length), width, and samples. + If a colormap is provided, the data's dtype must be uint8 or uint16 + and the data values are indices into the last dimension of the + colormap. + If shape and dtype are specified, an empty array is saved. + This option can not be used with compression or multiple tiles. Image data are written in one stripe per plane by default. Dimensions larger than 2 to 4 (depending on photometric mode, planar configuration, and SGI mode) are flattened and saved as separate pages. - The 'sample_format' and 'bits_per_sample' tags are derived from + The 'SampleFormat' and 'BitsPerSample' tags are derived from the data type. Parameters ---------- - data : numpy.ndarray - Input image. The last dimensions are assumed to be image depth, - height (length), width, and samples. - If a colormap is provided, the dtype must be uint8 or uint16 and - the data values are indices into the last dimension of the - colormap. - photometric : {'minisblack', 'miniswhite', 'rgb', 'palette', 'cfa'} + data : numpy.ndarray or None + Input image array. + shape : tuple or None + Shape of the empty array to save. Used only if data is None. + dtype : numpy.dtype or None + Data-type of the empty array to save. Used only if data is None. + returnoffset : bool + If True and the image data in the file is memory-mappable, return + the offset and number of bytes of the image data in the file. + photometric : {'MINISBLACK', 'MINISWHITE', 'RGB', 'PALETTE', 'CFA'} The color space of the image data. - By default this setting is inferred from the data shape and the + By default, this setting is inferred from the data shape and the value of colormap. For CFA images, DNG tags must be specified in extratags. - planarconfig : {'contig', 'planar'} + planarconfig : {'CONTIG', 'SEPARATE'} Specifies if samples are stored contiguous or in separate planes. - By default this setting is inferred from the data shape. + By default, this setting is inferred from the data shape. If this parameter is set, extra samples are used to store grayscale images. - 'contig': last dimension contains samples. - 'planar': third last dimension contains samples. + 'CONTIG': last dimension contains samples. + 'SEPARATE': third last dimension contains samples. tile : tuple of int The shape (depth, length, width) of image tiles to write. If None (default), image data are written in one stripe per plane. The tile length and width must be a multiple of 16. - If the tile depth is provided, the SGI image_depth and tile_depth - tags are used to save volume data. Few software can read the - SGI format, e.g. MeVisLab. + If the tile depth is provided, the SGI ImageDepth and TileDepth + tags are used to save volume data. + Unless a single tile is used, tiles cannot be used to write + contiguous files. + Few software can read the SGI format, e.g. MeVisLab. contiguous : bool If True (default) and the data and parameters are compatible with - previous ones, if any, the data are stored contiguously after - the previous one. Parameters 'photometric' and 'planarconfig' are - ignored. - compress : int or 'lzma' + previous ones, if any, the image data are stored contiguously after + the previous one. Parameters 'photometric' and 'planarconfig' + are ignored. 
Parameters 'description', 'datetime', and 'extratags'
+            are written to the first page of a contiguous series only.
+        align : int
+            Byte boundary on which to align the image data in the file.
+            Default 16. Use mmap.ALLOCATIONGRANULARITY for memory-mapped data.
+            Following contiguous writes are not aligned.
+        truncate : bool
+            If True, only write the first page including shape metadata if
+            possible (uncompressed, contiguous, not tiled).
+            Other TIFF readers will only be able to read part of the data.
+        compress : int or 'LZMA'
             Values from 0 to 9 controlling the level of zlib compression.
             If 0, data are written uncompressed (default).
             Compression cannot be used to write contiguous files.
-            If 'lzma', LZMA compression is used, which is not available on
+            If 'LZMA', LZMA compression is used, which is not available on
             all platforms.
         colormap : numpy.ndarray
             RGB color values for the corresponding data value.
             Must be of shape (3, 2**(data.itemsize*8)) and dtype uint16.
         description : str
-            The subject of the image. Saved with the first page only.
-            Cannot be used with the ImageJ format.
+            The subject of the image. Must be 7-bit ASCII. Cannot be used with
+            the ImageJ format. Saved with the first page only.
         datetime : datetime
-            Date and time of image creation. Saved with the first page only.
-            If None (default), the current date and time is used.
+            Date and time of image creation. If None (default), the current
+            date and time is used. Saved with the first page only.
         resolution : (float, float[, str]) or ((int, int), (int, int)[, str])
             X and Y resolutions in pixels per resolution unit as float or
-            rational numbers.
-            A third, optional parameter specifies the resolution unit,
-            which must be None (default for ImageJ), 'inch' (default), or 'cm'.
+            rational numbers. A third, optional parameter specifies the
+            resolution unit, which must be None (default for ImageJ),
+            'INCH' (default), or 'CENTIMETER'.
         metadata : dict
             Additional meta data to be saved along with shape information
-            in JSON or ImageJ formats in an image_description tag.
-            If None, do not write a second image_description tag.
+            in JSON or ImageJ formats in an ImageDescription tag.
+            If None, do not write a second ImageDescription tag.
+            Strings must be 7-bit ASCII. Saved with the first page only.
         extratags : sequence of tuples
             Additional tags as [(code, dtype, count, value, writeonce)].

@@ -555,9 +813,12 @@ class TiffWriter(object):
                 Data type of items in 'value' in Python struct format.
                 One of B, s, H, I, 2I, b, h, i, 2i, f, d, Q, or q.
             count : int
-                Number of data values. Not used for string values.
+                Number of data values. Not used for string or byte string
+                values.
             value : sequence
                 'Count' values compatible with 'dtype'.
+                Byte strings must contain count values of dtype packed as
+                binary data.
             writeonce : bool
                 If True, the tag is written to the first page only.

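+
+        Examples
+        --------
+        A minimal usage sketch of the compression, resolution, and extra
+        tag options documented above; tag code 65000 is an arbitrary
+        private tag chosen here for illustration only.
+
+        >>> data = numpy.random.rand(4, 301, 219).astype('float32')
+        >>> with TiffWriter('temp.tif') as tif:
+        ...     tif.save(data, compress=6, resolution=(300.0, 300.0),
+        ...              extratags=[(65000, 's', 0, 'example', True)])
+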
@@ -565,55 +826,85 @@ class TiffWriter(object): # TODO: refactor this function fh = self._fh byteorder = self._byteorder - numtag_format = self._numtag_format - value_format = self._value_format - offset_format = self._offset_format - offset_size = self._offset_size - tag_size = self._tag_size - data = numpy.asarray(data, dtype=byteorder+data.dtype.char, order='C') - if data.size == 0: - raise ValueError("can not save empty array") + if data is None: + if compress: + raise ValueError("can not save compressed empty file") + datashape = shape + datadtype = numpy.dtype(dtype).newbyteorder(byteorder) + datadtypechar = datadtype.char + data = None + else: + data = numpy.asarray(data, byteorder+data.dtype.char, 'C') + if data.size == 0: + raise ValueError("can not save empty array") + datashape = data.shape + datadtype = data.dtype + datadtypechar = data.dtype.char + + returnoffset = returnoffset and datadtype.isnative + datasize = product(datashape) * datadtype.itemsize # just append contiguous data if possible - if self._data_shape: - if (not contiguous or - self._data_shape[1:] != data.shape or - self._data_dtype != data.dtype or - (compress and self._tags) or - tile or - not numpy.array_equal(colormap, self._colormap)): + self._truncate = bool(truncate) + if self._datashape: + if (not contiguous + or self._datashape[1:] != datashape + or self._datadtype != datadtype + or (compress and self._tags) + or tile + or not numpy.array_equal(colormap, self._colormap)): # incompatible shape, dtype, compression mode, or colormap self._write_remaining_pages() self._write_image_description() - self._description_offset = 0 - self._description_len_offset = 0 - self._data_shape = None + self._truncate = False + self._descriptionoffset = 0 + self._descriptionlenoffset = 0 + self._datashape = None self._colormap = None if self._imagej: raise ValueError( "ImageJ does not support non-contiguous data") else: # consecutive mode - self._data_shape = (self._data_shape[0] + 1,) + data.shape + self._datashape = (self._datashape[0] + 1,) + datashape if not compress: # write contiguous data, write ifds/tags later - fh.write_array(data) + offset = fh.tell() + if data is None: + fh.write_empty(datasize) + else: + fh.write_array(data) + if returnoffset: + return offset, datasize return - if photometric not in (None, 'minisblack', 'miniswhite', - 'rgb', 'palette', 'cfa'): - raise ValueError("invalid photometric %s" % photometric) - if planarconfig not in (None, 'contig', 'planar'): - raise ValueError("invalid planarconfig %s" % planarconfig) + input_shape = datashape + tagnoformat = self._tagnoformat + valueformat = self._valueformat + offsetformat = self._offsetformat + offsetsize = self._offsetsize + tagsize = self._tagsize + + MINISBLACK = TIFF.PHOTOMETRIC.MINISBLACK + RGB = TIFF.PHOTOMETRIC.RGB + CFA = TIFF.PHOTOMETRIC.CFA + PALETTE = TIFF.PHOTOMETRIC.PALETTE + CONTIG = TIFF.PLANARCONFIG.CONTIG + SEPARATE = TIFF.PLANARCONFIG.SEPARATE + + if photometric is not None: + photometric = enumarg(TIFF.PHOTOMETRIC, photometric) + if planarconfig: + planarconfig = enumarg(TIFF.PLANARCONFIG, planarconfig) # prepare compression if not compress: compress = False - compress_tag = 1 - elif compress == 'lzma': + compresstag = 1 + elif compress == 'LZMA': compress = lzma.compress - compress_tag = 34925 + compresstag = 34925 if self._imagej: raise ValueError("ImageJ can not handle LZMA compression") elif not 0 <= compress <= 9: @@ -621,7 +912,7 @@ class TiffWriter(object): elif compress: def compress(data, level=compress): return 
zlib.compress(data, level) - compress_tag = 32946 + compresstag = 8 # prepare ImageJ format if self._imagej: @@ -629,32 +920,32 @@ class TiffWriter(object): warnings.warn("not writing description to ImageJ file") description = None volume = False - if data.dtype.char not in 'BHhf': - raise ValueError("ImageJ does not support data type '%s'" - % data.dtype.char) - ijrgb = photometric == 'rgb' if photometric else None - if data.dtype.char not in 'B': + if datadtypechar not in 'BHhf': + raise ValueError( + "ImageJ does not support data type '%s'" % datadtypechar) + ijrgb = photometric == RGB if photometric else None + if datadtypechar not in 'B': ijrgb = False - ijshape = imagej_shape(data.shape, ijrgb) + ijshape = imagej_shape(datashape, ijrgb) if ijshape[-1] in (3, 4): - photometric = 'rgb' - if data.dtype.char not in 'B': + photometric = RGB + if datadtypechar not in 'B': raise ValueError("ImageJ does not support data type '%s' " - "for RGB" % data.dtype.char) + "for RGB" % datadtypechar) elif photometric is None: - photometric = 'minisblack' + photometric = MINISBLACK planarconfig = None - if planarconfig == 'planar': + if planarconfig == SEPARATE: raise ValueError("ImageJ does not support planar images") else: - planarconfig = 'contig' if ijrgb else None + planarconfig = CONTIG if ijrgb else None # verify colormap and indices if colormap is not None: - if data.dtype.char not in 'BH': + if datadtypechar not in 'BH': raise ValueError("invalid data dtype for palette mode") colormap = numpy.asarray(colormap, dtype=byteorder+'H') - if colormap.shape != (3, 2**(data.itemsize * 8)): + if colormap.shape != (3, 2**(datadtype.itemsize * 8)): raise ValueError("invalid color map shape") self._colormap = colormap @@ -671,82 +962,75 @@ class TiffWriter(object): # normalize data shape to 5D or 6D, depending on volume: # (pages, planar_samples, [depth,] height, width, contig_samples) - data_shape = data.shape - - if photometric == 'rgb': - data = reshape_nd(data, 3) - else: - data = reshape_nd(data, 2) - - shape = data.shape + datashape = reshape_nd(datashape, 3 if photometric == RGB else 2) + shape = datashape + ndim = len(datashape) samplesperpixel = 1 extrasamples = 0 - if volume and data.ndim < 3: + if volume and ndim < 3: volume = False if colormap is not None: - photometric = 'palette' + photometric = PALETTE planarconfig = None if photometric is None: - photometric = 'minisblack' - if planarconfig == 'contig': - if data.ndim > 2 and shape[-1] in (3, 4): - photometric = 'rgb' - elif planarconfig == 'planar': - if volume and data.ndim > 3 and shape[-4] in (3, 4): - photometric = 'rgb' - elif data.ndim > 2 and shape[-3] in (3, 4): - photometric = 'rgb' - elif data.ndim > 2 and shape[-1] in (3, 4): - photometric = 'rgb' + photometric = MINISBLACK + if planarconfig == CONTIG: + if ndim > 2 and shape[-1] in (3, 4): + photometric = RGB + elif planarconfig == SEPARATE: + if volume and ndim > 3 and shape[-4] in (3, 4): + photometric = RGB + elif ndim > 2 and shape[-3] in (3, 4): + photometric = RGB + elif ndim > 2 and shape[-1] in (3, 4): + photometric = RGB elif self._imagej: - photometric = 'minisblack' - elif volume and data.ndim > 3 and shape[-4] in (3, 4): - photometric = 'rgb' - elif data.ndim > 2 and shape[-3] in (3, 4): - photometric = 'rgb' + photometric = MINISBLACK + elif volume and ndim > 3 and shape[-4] in (3, 4): + photometric = RGB + elif ndim > 2 and shape[-3] in (3, 4): + photometric = RGB if planarconfig and len(shape) <= (3 if volume else 2): planarconfig = None - photometric = 
'minisblack' - if photometric == 'rgb': + photometric = MINISBLACK + if photometric == RGB: if len(shape) < 3: raise ValueError("not a RGB(A) image") if len(shape) < 4: volume = False if planarconfig is None: if shape[-1] in (3, 4): - planarconfig = 'contig' + planarconfig = CONTIG elif shape[-4 if volume else -3] in (3, 4): - planarconfig = 'planar' + planarconfig = SEPARATE elif shape[-1] > shape[-4 if volume else -3]: - planarconfig = 'planar' + planarconfig = SEPARATE else: - planarconfig = 'contig' - if planarconfig == 'contig': - data = data.reshape((-1, 1) + shape[(-4 if volume else -3):]) - samplesperpixel = data.shape[-1] + planarconfig = CONTIG + if planarconfig == CONTIG: + datashape = (-1, 1) + shape[(-4 if volume else -3):] + samplesperpixel = datashape[-1] else: - data = data.reshape( - (-1,) + shape[(-4 if volume else -3):] + (1,)) - samplesperpixel = data.shape[1] + datashape = (-1,) + shape[(-4 if volume else -3):] + (1,) + samplesperpixel = datashape[1] if samplesperpixel > 3: extrasamples = samplesperpixel - 3 - elif photometric == 'cfa': + elif photometric == CFA: if len(shape) != 2: raise ValueError("invalid CFA image") volume = False planarconfig = None - data = data.reshape((-1, 1) + shape[-2:] + (1,)) + datashape = (-1, 1) + shape[-2:] + (1,) if 50706 not in (et[0] for et in extratags): raise ValueError("must specify DNG tags for CFA image") elif planarconfig and len(shape) > (3 if volume else 2): - if planarconfig == 'contig': - data = data.reshape((-1, 1) + shape[(-4 if volume else -3):]) - samplesperpixel = data.shape[-1] + if planarconfig == CONTIG: + datashape = (-1, 1) + shape[(-4 if volume else -3):] + samplesperpixel = datashape[-1] else: - data = data.reshape( - (-1,) + shape[(-4 if volume else -3):] + (1,)) - samplesperpixel = data.shape[1] + datashape = (-1,) + shape[(-4 if volume else -3):] + (1,) + samplesperpixel = datashape[1] extrasamples = samplesperpixel - 1 else: planarconfig = None @@ -755,34 +1039,38 @@ class TiffWriter(object): shape = shape[:-1] if len(shape) < 3: volume = False - data = data.reshape( - (-1, 1) + shape[(-3 if volume else -2):] + (1,)) + datashape = (-1, 1) + shape[(-3 if volume else -2):] + (1,) # normalize shape to 6D - assert len(data.shape) in (5, 6) - if len(data.shape) == 5: - data = data.reshape(data.shape[:2] + (1,) + data.shape[2:]) - shape = data.shape + assert len(datashape) in (5, 6) + if len(datashape) == 5: + datashape = datashape[:2] + (1,) + datashape[2:] + if datashape[0] == -1: + s0 = product(input_shape) // product(datashape[1:]) + datashape = (s0,) + datashape[1:] + shape = datashape + if data is not None: + data = data.reshape(shape) if tile and not volume: tile = (1, tile[-2], tile[-1]) - if photometric == 'palette': + if photometric == PALETTE: if (samplesperpixel != 1 or extrasamples or shape[1] != 1 or shape[-1] != 1): raise ValueError("invalid data shape for palette mode") - if photometric == 'rgb' and samplesperpixel == 2: + if photometric == RGB and samplesperpixel == 2: raise ValueError("not a RGB image (samplesperpixel=2)") bytestr = bytes if sys.version[0] == '2' else ( - lambda x: bytes(x, 'utf-8') if isinstance(x, str) else x) + lambda x: bytes(x, 'ascii') if isinstance(x, str) else x) tags = [] # list of (code, ifdentry, ifdvalue, writeonce) - strip_or_tile = 'tile' if tile else 'strip' - tag_byte_counts = TiffWriter.TAGS[strip_or_tile + '_byte_counts'] - tag_offsets = TiffWriter.TAGS[strip_or_tile + '_offsets'] - self._tag_offsets = tag_offsets + strip_or_tile = 'Tile' if tile else 'Strip' + 
tagbytecounts = TIFF.TAG_NAMES[strip_or_tile + 'ByteCounts'] + tag_offsets = TIFF.TAG_NAMES[strip_or_tile + 'Offsets'] + self._tagoffsets = tag_offsets def pack(fmt, *val): return struct.pack(byteorder+fmt, *val) @@ -790,13 +1078,15 @@ class TiffWriter(object): def addtag(code, dtype, count, value, writeonce=False): # Compute ifdentry & ifdvalue bytes from code, dtype, count, value # Append (code, ifdentry, ifdvalue, writeonce) to tags list - code = int(TiffWriter.TAGS.get(code, code)) + code = int(TIFF.TAG_NAMES.get(code, code)) try: - tifftype = TiffWriter.TYPES[dtype] + tifftype = TIFF.DATA_DTYPES[dtype] except KeyError: raise ValueError("unknown dtype %s" % dtype) rawcount = count + if dtype == 's': + # strings value = bytestr(value) + b'\0' count = rawcount = len(value) rawcount = value.find(b'\0\0') @@ -805,25 +1095,35 @@ class TiffWriter(object): else: rawcount += 1 # length of string without buffer value = (value,) + elif isinstance(value, bytes): + # packed binary data + dtsize = struct.calcsize(dtype) + if len(value) % dtsize: + raise ValueError('invalid packed binary data') + count = len(value) // dtsize if len(dtype) > 1: count *= int(dtype[:-1]) dtype = dtype[-1] ifdentry = [pack('HH', code, tifftype), - pack(offset_format, rawcount)] + pack(offsetformat, rawcount)] ifdvalue = None - if struct.calcsize(dtype) * count <= offset_size: + if struct.calcsize(dtype) * count <= offsetsize: # value(s) can be written directly - if count == 1: + if isinstance(value, bytes): + ifdentry.append(pack(valueformat, value)) + elif count == 1: if isinstance(value, (tuple, list, numpy.ndarray)): value = value[0] - ifdentry.append(pack(value_format, pack(dtype, value))) + ifdentry.append(pack(valueformat, pack(dtype, value))) else: - ifdentry.append(pack(value_format, + ifdentry.append(pack(valueformat, pack(str(count)+dtype, *value))) else: # use offset to value(s) - ifdentry.append(pack(offset_format, 0)) - if isinstance(value, numpy.ndarray): + ifdentry.append(pack(offsetformat, 0)) + if isinstance(value, bytes): + ifdvalue = value + elif isinstance(value, numpy.ndarray): assert value.size == count assert value.dtype.char == dtype ifdvalue = value.tostring() @@ -835,6 +1135,7 @@ class TiffWriter(object): def rational(arg, max_denominator=1000000): # return nominator and denominator from float or two integers + from fractions import Fraction # delayed import try: f = Fraction.from_float(arg) except TypeError: @@ -844,94 +1145,103 @@ class TiffWriter(object): if description: # user provided description - addtag('image_description', 's', 0, description, writeonce=True) + addtag('ImageDescription', 's', 0, description, writeonce=True) - # write shape and metadata to image_description - self._metadata = {} if not metadata else metadata + # write shape and metadata to ImageDescription + self._metadata = {} if not metadata else metadata.copy() if self._imagej: description = imagej_description( - data_shape, shape[-1] in (3, 4), self._colormap is not None, + input_shape, shape[-1] in (3, 4), self._colormap is not None, **self._metadata) elif metadata or metadata == {}: - description = image_description( - data_shape, self._colormap is not None, **self._metadata) + if self._truncate: + self._metadata.update(truncated=True) + description = json_description(input_shape, **self._metadata) else: description = None if description: - # add 32 bytes buffer + # add 64 bytes buffer # the image description might be updated later with the final shape - description += b'\0'*32 - self._description_len = 
len(description) - addtag('image_description', 's', 0, description, writeonce=True) + description = str2bytes(description, 'ascii') + description += b'\0'*64 + self._descriptionlen = len(description) + addtag('ImageDescription', 's', 0, description, writeonce=True) if self._software: - addtag('software', 's', 0, self._software, writeonce=True) + addtag('Software', 's', 0, self._software, writeonce=True) self._software = None # only save to first page in file if datetime is None: datetime = self._now() - addtag('datetime', 's', 0, datetime.strftime("%Y:%m:%d %H:%M:%S"), + addtag('DateTime', 's', 0, datetime.strftime("%Y:%m:%d %H:%M:%S"), writeonce=True) - addtag('compression', 'H', 1, compress_tag) - addtag('image_width', 'I', 1, shape[-2]) - addtag('image_length', 'I', 1, shape[-3]) + addtag('Compression', 'H', 1, compresstag) + addtag('ImageWidth', 'I', 1, shape[-2]) + addtag('ImageLength', 'I', 1, shape[-3]) if tile: - addtag('tile_width', 'I', 1, tile[-1]) - addtag('tile_length', 'I', 1, tile[-2]) + addtag('TileWidth', 'I', 1, tile[-1]) + addtag('TileLength', 'I', 1, tile[-2]) if tile[0] > 1: - addtag('image_depth', 'I', 1, shape[-4]) - addtag('tile_depth', 'I', 1, tile[0]) - addtag('new_subfile_type', 'I', 1, 0) - addtag('sample_format', 'H', 1, - {'u': 1, 'i': 2, 'f': 3, 'c': 6}[data.dtype.kind]) - addtag('photometric', 'H', 1, {'miniswhite': 0, 'minisblack': 1, - 'rgb': 2, 'palette': 3, - 'cfa': 32803}[photometric]) + addtag('ImageDepth', 'I', 1, shape[-4]) + addtag('TileDepth', 'I', 1, tile[0]) + addtag('NewSubfileType', 'I', 1, 0) + sampleformat = {'u': 1, 'i': 2, 'f': 3, 'c': 6}[datadtype.kind] + addtag('SampleFormat', 'H', samplesperpixel, + (sampleformat,) * samplesperpixel) + addtag('PhotometricInterpretation', 'H', 1, photometric.value) if colormap is not None: - addtag('color_map', 'H', colormap.size, colormap) - addtag('samples_per_pixel', 'H', 1, samplesperpixel) + addtag('ColorMap', 'H', colormap.size, colormap) + addtag('SamplesPerPixel', 'H', 1, samplesperpixel) if planarconfig and samplesperpixel > 1: - addtag('planar_configuration', 'H', 1, 1 - if planarconfig == 'contig' else 2) - addtag('bits_per_sample', 'H', samplesperpixel, - (data.dtype.itemsize * 8,) * samplesperpixel) + addtag('PlanarConfiguration', 'H', 1, planarconfig.value) + addtag('BitsPerSample', 'H', samplesperpixel, + (datadtype.itemsize * 8,) * samplesperpixel) else: - addtag('bits_per_sample', 'H', 1, data.dtype.itemsize * 8) + addtag('BitsPerSample', 'H', 1, datadtype.itemsize * 8) if extrasamples: - if photometric == 'rgb' and extrasamples == 1: - addtag('extra_samples', 'H', 1, 1) # associated alpha channel + if photometric == RGB and extrasamples == 1: + addtag('ExtraSamples', 'H', 1, 1) # associated alpha channel else: - addtag('extra_samples', 'H', extrasamples, (0,) * extrasamples) + addtag('ExtraSamples', 'H', extrasamples, (0,) * extrasamples) if resolution: - addtag('x_resolution', '2I', 1, rational(resolution[0])) - addtag('y_resolution', '2I', 1, rational(resolution[1])) + addtag('XResolution', '2I', 1, rational(resolution[0])) + addtag('YResolution', '2I', 1, rational(resolution[1])) if len(resolution) > 2: - resolution_unit = {None: 1, 'inch': 2, 'cm': 3}[resolution[2]] + unit = resolution[2] + if unit is not None: + unit = unit.upper() + unit = {None: 1, 'INCH': 2, 'CM': 3, 'CENTIMETER': 3}[unit] elif self._imagej: - resolution_unit = 1 + unit = 1 else: - resolution_unit = 2 - addtag('resolution_unit', 'H', 1, resolution_unit) + unit = 2 + addtag('ResolutionUnit', 'H', 1, unit) if not 
tile: - addtag('rows_per_strip', 'I', 1, shape[-3]) # * shape[-4] + addtag('RowsPerStrip', 'I', 1, shape[-3]) # * shape[-4] + contiguous = not compress if tile: # use one chunk per tile per plane tiles = ((shape[2] + tile[0] - 1) // tile[0], (shape[3] + tile[1] - 1) // tile[1], (shape[4] + tile[2] - 1) // tile[2]) numtiles = product(tiles) * shape[1] - strip_byte_counts = [ - product(tile) * shape[-1] * data.dtype.itemsize] * numtiles - addtag(tag_byte_counts, offset_format, numtiles, strip_byte_counts) - addtag(tag_offsets, offset_format, numtiles, [0] * numtiles) - # allocate tile buffer - chunk = numpy.empty(tile + (shape[-1],), dtype=data.dtype) + stripbytecounts = [ + product(tile) * shape[-1] * datadtype.itemsize] * numtiles + addtag(tagbytecounts, offsetformat, numtiles, stripbytecounts) + addtag(tag_offsets, offsetformat, numtiles, [0] * numtiles) + contiguous = contiguous and product(tiles) == 1 + if not contiguous: + # allocate tile buffer + chunk = numpy.empty(tile + (shape[-1],), dtype=datadtype) else: # use one strip per plane - strip_byte_counts = [ - data[0, 0].size * data.dtype.itemsize] * shape[1] - addtag(tag_byte_counts, offset_format, shape[1], strip_byte_counts) - addtag(tag_offsets, offset_format, shape[1], [0] * shape[1]) + stripbytecounts = [ + product(datashape[2:]) * datadtype.itemsize] * shape[1] + addtag(tagbytecounts, offsetformat, shape[1], stripbytecounts) + addtag(tag_offsets, offsetformat, shape[1], [0] * shape[1]) + + if data is None and not contiguous: + raise ValueError("can not write non-contiguous empty file") # add extra tags from user for t in extratags: @@ -943,48 +1253,64 @@ class TiffWriter(object): tags = sorted(tags, key=lambda x: x[0]) if not (self._bigtiff or self._imagej) and ( - fh.tell() + data.size*data.dtype.itemsize > 2**31-1): + fh.tell() + datasize > 2**31-1): raise ValueError("data too large for standard TIFF file") - # if not compressed or tiled, write the first ifd and then all data - # contiguously; else, write all ifds and data interleaved - for pageindex in range(shape[0] if (compress or tile) else 1): + # if not compressed or multi-tiled, write the first ifd and then + # all data contiguously; else, write all ifds and data interleaved + for pageindex in range(1 if contiguous else shape[0]): # update pointer at ifd_offset pos = fh.tell() - fh.seek(self._ifd_offset) - fh.write(pack(offset_format, pos)) + if pos % 2: + # location of IFD must begin on a word boundary + fh.write(b'\0') + pos += 1 + fh.seek(self._ifdoffset) + fh.write(pack(offsetformat, pos)) fh.seek(pos) # write ifdentries - fh.write(pack(numtag_format, len(tags))) + fh.write(pack(tagnoformat, len(tags))) tag_offset = fh.tell() fh.write(b''.join(t[1] for t in tags)) - self._ifd_offset = fh.tell() - fh.write(pack(offset_format, 0)) # offset to next IFD + self._ifdoffset = fh.tell() + fh.write(pack(offsetformat, 0)) # offset to next IFD # write tag values and patch offsets in ifdentries, if necessary for tagindex, tag in enumerate(tags): if tag[2]: pos = fh.tell() - fh.seek(tag_offset + tagindex*tag_size + offset_size + 4) - fh.write(pack(offset_format, pos)) + if pos % 2: + # tag value is expected to begin on word boundary + fh.write(b'\0') + pos += 1 + fh.seek(tag_offset + tagindex*tagsize + offsetsize + 4) + fh.write(pack(offsetformat, pos)) fh.seek(pos) if tag[0] == tag_offsets: - strip_offsets_offset = pos - elif tag[0] == tag_byte_counts: - strip_byte_counts_offset = pos + stripoffsetsoffset = pos + elif tag[0] == tagbytecounts: + strip_bytecounts_offset = pos 
elif tag[0] == 270 and tag[2].endswith(b'\0\0\0\0'): # image description buffer - self._description_offset = pos - self._description_len_offset = ( - tag_offset + tagindex * tag_size + 4) + self._descriptionoffset = pos + self._descriptionlenoffset = ( + tag_offset + tagindex * tagsize + 4) fh.write(tag[2]) # write image data data_offset = fh.tell() + skip = align - data_offset % align + fh.seek(skip, 1) + data_offset += skip if compress: - strip_byte_counts = [] - if tile: + stripbytecounts = [] + if contiguous: + if data is None: + fh.write_empty(datasize) + else: + fh.write_array(data) + elif tile: for plane in data[pageindex]: for tz in range(tiles[0]): for ty in range(tiles[1]): @@ -999,7 +1325,7 @@ class TiffWriter(object): tx*tile[2]:tx*tile[2]+c2] if compress: t = compress(chunk) - strip_byte_counts.append(len(t)) + stripbytecounts.append(len(t)) fh.write(t) else: fh.write_array(chunk) @@ -1007,35 +1333,32 @@ class TiffWriter(object): elif compress: for plane in data[pageindex]: plane = compress(plane) - strip_byte_counts.append(len(plane)) + stripbytecounts.append(len(plane)) fh.write(plane) - else: - fh.write_array(data) - # update strip/tile offsets and byte_counts if necessary + # update strip/tile offsets and bytecounts if necessary pos = fh.tell() for tagindex, tag in enumerate(tags): if tag[0] == tag_offsets: # strip/tile offsets if tag[2]: - fh.seek(strip_offsets_offset) + fh.seek(stripoffsetsoffset) strip_offset = data_offset - for size in strip_byte_counts: - fh.write(pack(offset_format, strip_offset)) + for size in stripbytecounts: + fh.write(pack(offsetformat, strip_offset)) strip_offset += size else: - fh.seek(tag_offset + tagindex*tag_size + - offset_size + 4) - fh.write(pack(offset_format, data_offset)) - elif tag[0] == tag_byte_counts: # strip/tile byte_counts + fh.seek(tag_offset + tagindex*tagsize + offsetsize + 4) + fh.write(pack(offsetformat, data_offset)) + elif tag[0] == tagbytecounts: # strip/tile bytecounts if compress: if tag[2]: - fh.seek(strip_byte_counts_offset) - for size in strip_byte_counts: - fh.write(pack(offset_format, size)) + fh.seek(strip_bytecounts_offset) + for size in stripbytecounts: + fh.write(pack(offsetformat, size)) else: - fh.seek(tag_offset + tagindex*tag_size + - offset_size + 4) - fh.write(pack(offset_format, strip_byte_counts[0])) + fh.seek(tag_offset + tagindex*tagsize + + offsetsize + 4) + fh.write(pack(offsetformat, stripbytecounts[0])) break fh.seek(pos) fh.flush() @@ -1044,129 +1367,142 @@ class TiffWriter(object): if pageindex == 0: tags = [tag for tag in tags if not tag[-1]] - # if uncompressed, write remaining ifds/tags later - if not (compress or tile): - self._tags = tags - self._shape = shape - self._data_shape = (1,) + data_shape - self._data_dtype = data.dtype - self._data_offset = data_offset - self._data_byte_counts = strip_byte_counts + self._datashape = (1,) + input_shape + self._datadtype = datadtype + self._dataoffset = data_offset + self._databytecounts = stripbytecounts + + if contiguous: + # write remaining ifds/tags later + self._tags = tags + # return offset and size of image data + if returnoffset: + return data_offset, sum(stripbytecounts) def _write_remaining_pages(self): """Write outstanding IFDs and tags to file.""" - if not self._tags: + if not self._tags or self._truncate: return fh = self._fh byteorder = self._byteorder - numtag_format = self._numtag_format - offset_format = self._offset_format - offset_size = self._offset_size - tag_size = self._tag_size - data_offset = self._data_offset - 
page_data_size = sum(self._data_byte_counts) - tag_bytes = b''.join(t[1] for t in self._tags) - numpages = self._shape[0] * self._data_shape[0] - 1 + offsetformat = self._offsetformat + offsetsize = self._offsetsize + tagnoformat = self._tagnoformat + tagsize = self._tagsize + dataoffset = self._dataoffset + pagedatasize = sum(self._databytecounts) + pageno = self._shape[0] * self._datashape[0] - 1 + + def pack(fmt, *val): + return struct.pack(byteorder+fmt, *val) + # construct template IFD in memory + # need to patch offsets to next IFD and data before writing to disk + ifd = io.BytesIO() + ifd.write(pack(tagnoformat, len(self._tags))) + tagoffset = ifd.tell() + ifd.write(b''.join(t[1] for t in self._tags)) + ifdoffset = ifd.tell() + ifd.write(pack(offsetformat, 0)) # offset to next IFD + # tag values + for tagindex, tag in enumerate(self._tags): + offset2value = tagoffset + tagindex*tagsize + offsetsize + 4 + if tag[2]: + pos = ifd.tell() + if pos % 2: # tag value is expected to begin on word boundary + ifd.write(b'\0') + pos += 1 + ifd.seek(offset2value) + ifd.write(pack(offsetformat, pos + fh.tell())) + ifd.seek(pos) + ifd.write(tag[2]) + if tag[0] == self._tagoffsets: + # save strip/tile offsets for later updates + stripoffset2offset = offset2value + stripoffset2value = pos + elif tag[0] == self._tagoffsets: + # save strip/tile offsets for later updates + stripoffset2offset = None + stripoffset2value = offset2value + # size to word boundary + if ifd.tell() % 2: + ifd.write(b'\0') + + # check if all IFDs fit in file pos = fh.tell() - if not self._bigtiff and pos + len(tag_bytes) * numpages > 2**32 - 256: + if not self._bigtiff and pos + ifd.tell() * pageno > 2**32 - 256: if self._imagej: warnings.warn("truncating ImageJ file") return - raise ValueError("data too large for non-bigtiff file") - - def pack(fmt, *val): - return struct.pack(byteorder+fmt, *val) - - for _ in range(numpages): - # update pointer at ifd_offset - pos = fh.tell() - fh.seek(self._ifd_offset) - fh.write(pack(offset_format, pos)) - fh.seek(pos) - - # write ifd entries - fh.write(pack(numtag_format, len(self._tags))) - tag_offset = fh.tell() - fh.write(tag_bytes) - self._ifd_offset = fh.tell() - fh.write(pack(offset_format, 0)) # offset to next IFD - - # offset to image data - data_offset += page_data_size - - # write tag values and patch offsets in ifdentries, if necessary - for tagindex, tag in enumerate(self._tags): - if tag[2]: - pos = fh.tell() - fh.seek(tag_offset + tagindex*tag_size + offset_size + 4) - fh.write(pack(offset_format, pos)) - fh.seek(pos) - if tag[0] == self._tag_offsets: - strip_offsets_offset = pos - fh.write(tag[2]) + raise ValueError("data too large for non-BigTIFF file") - # update strip/tile offsets if necessary + for _ in range(pageno): + # update pointer at IFD offset pos = fh.tell() - for tagindex, tag in enumerate(self._tags): - if tag[0] == self._tag_offsets: # strip/tile offsets - if tag[2]: - fh.seek(strip_offsets_offset) - strip_offset = data_offset - for size in self._data_byte_counts: - fh.write(pack(offset_format, strip_offset)) - strip_offset += size - else: - fh.seek(tag_offset + tagindex*tag_size + - offset_size + 4) - fh.write(pack(offset_format, data_offset)) - break + fh.seek(self._ifdoffset) + fh.write(pack(offsetformat, pos)) fh.seek(pos) + self._ifdoffset = pos + ifdoffset + # update strip/tile offsets in IFD + dataoffset += pagedatasize # offset to image data + if stripoffset2offset is None: + ifd.seek(stripoffset2value) + ifd.write(pack(offsetformat, dataoffset)) + 
else: + ifd.seek(stripoffset2offset) + ifd.write(pack(offsetformat, pos + stripoffset2value)) + ifd.seek(stripoffset2value) + stripoffset = dataoffset + for size in self._databytecounts: + ifd.write(pack(offsetformat, stripoffset)) + stripoffset += size + # write ifd entry + fh.write(ifd.getvalue()) self._tags = None - self._data_dtype = None - self._data_offset = None - self._data_byte_counts = None + self._datadtype = None + self._dataoffset = None + self._databytecounts = None # do not reset _shape or _data_shape def _write_image_description(self): - """Write meta data to image_description tag.""" - if (not self._data_shape or self._data_shape[0] == 1 or - self._description_offset <= 0): + """Write meta data to ImageDescription tag.""" + if (not self._datashape or self._datashape[0] == 1 or + self._descriptionoffset <= 0): return colormapped = self._colormap is not None if self._imagej: isrgb = self._shape[-1] in (3, 4) description = imagej_description( - self._data_shape, isrgb, colormapped, **self._metadata) + self._datashape, isrgb, colormapped, **self._metadata) else: - description = image_description( - self._data_shape, colormapped, **self._metadata) + description = json_description(self._datashape, **self._metadata) # rewrite description and its length to file - description = description[:self._description_len-1] + description = description.encode('utf-8') + description = description[:self._descriptionlen-1] pos = self._fh.tell() - self._fh.seek(self._description_offset) + self._fh.seek(self._descriptionoffset) self._fh.write(description) - self._fh.seek(self._description_len_offset) - self._fh.write(struct.pack(self._byteorder+self._offset_format, + self._fh.seek(self._descriptionlenoffset) + self._fh.write(struct.pack(self._byteorder+self._offsetformat, len(description)+1)) self._fh.seek(pos) - self._description_offset = 0 - self._description_len_offset = 0 - self._description_len = 0 + self._descriptionoffset = 0 + self._descriptionlenoffset = 0 + self._descriptionlen = 0 def _now(self): """Return current date and time.""" return datetime.datetime.now() - def close(self, truncate=False): - """Write remaining pages (if not truncate) and close file handle.""" - if not truncate: + def close(self): + """Write remaining pages and close file handle.""" + if not self._truncate: self._write_remaining_pages() self._write_image_description() self._fh.close() @@ -1178,98 +1514,44 @@ class TiffWriter(object): self.close() -def imread(files, **kwargs): - """Return image data from TIFF file(s) as numpy array. - - Refer to the TiffFile class and member functions for documentation. - - Parameters - ---------- - files : str, binary stream, or sequence - File name, seekable binary stream, glob pattern, or sequence of - file names. - kwargs : dict - Parameters 'multifile', 'multifile_close', 'pages', 'fastij', and - 'is_ome' are passed to the TiffFile class. - The 'pattern' parameter is passed to the TiffSequence class. - Other parameters are passed to the asarray functions. - The first image series is returned if no arguments are provided. 
-
-    Examples
-    --------
-    >>> imsave('temp.tif', numpy.random.rand(3, 4, 301, 219))
-    >>> im = imread('temp.tif', key=0)
-    >>> im.shape
-    (4, 301, 219)
-    >>> ims = imread(['temp.tif', 'temp.tif'])
-    >>> ims.shape
-    (2, 3, 4, 301, 219)
-
-    """
-    kwargs_file = parse_kwargs(kwargs, 'multifile', 'multifile_close',
-                               'pages', 'fastij', 'is_ome')
-    kwargs_seq = parse_kwargs(kwargs, 'pattern')
-
-    if isinstance(files, basestring) and any(i in files for i in '?*'):
-        files = glob.glob(files)
-        if not files:
-            raise ValueError('no files found')
-        if not hasattr(files, 'seek') and len(files) == 1:
-            files = files[0]
-
-    if isinstance(files, basestring) or hasattr(files, 'seek'):
-        with TiffFile(files, **kwargs_file) as tif:
-            return tif.asarray(**kwargs)
-    else:
-        with TiffSequence(files, **kwargs_seq) as imseq:
-            return imseq.asarray(**kwargs)
-
-
-class lazyattr(object):
-    """Lazy object attribute whose value is computed on first access."""
-    __slots__ = ('func',)
-
-    def __init__(self, func):
-        self.func = func
-
-    def __get__(self, instance, owner):
-        if instance is None:
-            return self
-        value = self.func(instance)
-        if value is NotImplemented:
-            return getattr(super(owner, instance), self.func.__name__)
-        setattr(instance, self.func.__name__, value)
-        return value
-
-
 class TiffFile(object):
-    """Read image and metadata from TIFF, STK, LSM, and FluoView files.
+    """Read image and metadata from TIFF file.

     TiffFile instances must be closed using the 'close' method, which is
     automatically called when using the 'with' context manager.

     Attributes
     ----------
-    pages : list of TiffPage
-        All TIFF pages in file.
+    pages : TiffPages
+        Sequence of TIFF pages in file.
     series : list of TiffPageSeries
-        TIFF pages with compatible shapes and types.
-    micromanager_metadata: dict
-        Extra MicroManager non-TIFF metadata in the file, if exists.
+        Sequences of closely related TIFF pages. These are computed
+        from OME, LSM, ImageJ, etc. metadata or based on similarity
+        of page properties such as shape, dtype, compression, etc.
+    byteorder : '>', '<'
+        The endianness of data in the file.
+        '>': big-endian (Motorola).
+        '<': little-endian (Intel).
+    is_flag : bool
+        If True, file is of a certain format.
+        Flags are: bigtiff, movie, shaped, ome, imagej, stk, lsm, fluoview,
+        nih, vista, micromanager, metaseries, mdgel, mediacy, tvips, fei,
+        sem, scn, svs, scanimage, andor, epics, pilatus.

     All attributes are read-only.

     Examples
     --------
+    >>> # read image array from TIFF file
+    >>> imsave('temp.tif', numpy.random.rand(5, 301, 219))
     >>> with TiffFile('temp.tif') as tif:
     ...     data = tif.asarray()
-    ...     data.shape
+    >>> data.shape
     (5, 301, 219)

     """
     def __init__(self, arg, name=None, offset=None, size=None,
-                 multifile=True, multifile_close=True, pages=None,
-                 fastij=True, is_ome=None):
+                 multifile=True, movie=None, **kwargs):
         """Initialize instance from file.

         Parameters
@@ -1280,45 +1562,90 @@ class TiffFile(object):
         arg : str or open file
             Name of file or open file object.
             The file objects are closed in TiffFile.close().
         name : str
             Optional name of file in case 'arg' is a file handle.
         offset : int
-            Optional start position of embedded file. By default this is
+            Optional start position of embedded file. By default, this is
             the current file position.
         size : int
-            Optional size of embedded file. By default this is the number
+            Optional size of embedded file. By default, this is the number
             of bytes from the 'offset' to the end of the file.
         multifile : bool
             If True (default), series may include pages from multiple files.
             Currently applies to OME-TIFF only. 
- multifile_close : bool - If True (default), keep the handles of other files in multifile - series closed. This is inefficient when few files refer to - many pages. If False, the C runtime may run out of resources. - pages : sequence of int - Indices of the pages to read. If None (default) all pages are read. - Can be used to read only the first page with pages=[0]. - Specifying pages might invalidate series based on metadata. - fastij : bool - If True (default), try to use only the metadata from the first page - of ImageJ files. Significantly speeds up loading movies with - thousands of pages. - is_ome : bool - If False, disable processing of OME-XML metadata. + movie : bool + If True, assume that later pages differ from first page only by + data offsets and bytecounts. Significantly increases speed and + reduces memory usage when reading movies with thousands of pages. + Enabling this for non-movie files will result in data corruption + or crashes. Python 3 only. + kwargs : bool + 'is_ome': If False, disable processing of OME-XML metadata. """ - if is_ome is False: - self.is_ome = False + if 'fastij' in kwargs: + del kwargs['fastij'] + raise DeprecationWarning("The fastij option will be removed.") + for key, value in kwargs.items(): + if key[:3] == 'is_' and key[3:] in TIFF.FILE_FLAGS: + if value is not None and not value: + setattr(self, key, bool(value)) + else: + raise TypeError( + "got an unexpected keyword argument '%s'" % key) - self._fh = FileHandle(arg, mode='rb', - name=name, offset=offset, size=size) - self.offset_size = None - self.pages = [] + fh = FileHandle(arg, mode='rb', name=name, offset=offset, size=size) + self._fh = fh self._multifile = bool(multifile) - self._multifile_close = bool(multifile_close) - self._files = {self._fh.name: self} # cache of TiffFiles - self._ifd_offset = 0 # offset to offset of next IFD + self._files = {fh.name: self} # cache of TiffFiles try: - self._fromfile(pages, fastij) + fh.seek(0) + try: + byteorder = {b'II': '<', b'MM': '>'}[fh.read(2)] + except KeyError: + raise ValueError("invalid TIFF file") + sys_byteorder = {'big': '>', 'little': '<'}[sys.byteorder] + self.is_native = byteorder == sys_byteorder + + version = struct.unpack(byteorder+'H', fh.read(2))[0] + if version == 43: + # BigTiff + self.is_bigtiff = True + offsetsize, zero = struct.unpack(byteorder+'HH', fh.read(4)) + if zero or offsetsize != 8: + raise ValueError("invalid BigTIFF file") + self.byteorder = byteorder + self.offsetsize = 8 + self.offsetformat = byteorder+'Q' + self.tagnosize = 8 + self.tagnoformat = byteorder+'Q' + self.tagsize = 20 + self.tagformat1 = byteorder+'HH' + self.tagformat2 = byteorder+'Q8s' + elif version == 42: + self.is_bigtiff = False + self.byteorder = byteorder + self.offsetsize = 4 + self.offsetformat = byteorder+'I' + self.tagnosize = 2 + self.tagnoformat = byteorder+'H' + self.tagsize = 12 + self.tagformat1 = byteorder+'HH' + self.tagformat2 = byteorder+'I4s' + else: + raise ValueError("not a TIFF file") + + # file handle is at offset to offset to first page + self.pages = TiffPages(self) + + if self.is_lsm and (self.filehandle.size >= 2**32 or + self.pages[0].compression != 1 or + self.pages[1].compression != 1): + self._lsm_load_pages() + self._lsm_fix_strip_offsets() + self._lsm_fix_strip_bytecounts() + elif movie: + self.pages.useframes = True + except Exception: - self._fh.close() + fh.close() raise @property @@ -1331,112 +1658,24 @@ class TiffFile(object): """Return name of file handle.""" return self._fh.name + @lazyattr + def 
fstat(self): + """Return status of file handle as stat_result object.""" + try: + return os.fstat(self._fh.fileno()) + except Exception: # io.UnsupportedOperation + return None + def close(self): """Close open file handle(s).""" for tif in self._files.values(): - tif._fh.close() + tif.filehandle.close() self._files = {} - def _fromfile(self, pages=None, fastij=True): - """Read TIFF header and all page records from file.""" - self._fh.seek(0) - try: - self.byteorder = {b'II': '<', b'MM': '>'}[self._fh.read(2)] - except KeyError: - raise ValueError("invalid TIFF file") - self._is_native = self.byteorder == {'big': '>', - 'little': '<'}[sys.byteorder] - version = struct.unpack(self.byteorder+'H', self._fh.read(2))[0] - if version == 43: - # BigTiff - self.offset_size, zero = struct.unpack(self.byteorder+'HH', - self._fh.read(4)) - if zero or self.offset_size != 8: - raise ValueError("invalid BigTIFF file") - elif version == 42: - self.offset_size = 4 - else: - raise ValueError("not a TIFF file") - - self._ifd_offset = self._fh.tell() - - self.pages = [] - pageindex = -1 - while True: - pageindex += 1 - skip = pages and pageindex not in pages - try: - page = TiffPage(self, skip) - except StopIteration: - break - if skip: - continue - self.pages.append(page) - if fastij: - if page._patch_imagej(): - break # only read the first page of ImageJ files - fastij = False - - # TiffPage() leaves the file cursor at offset to offset of next IFD - self._ifd_offset = self._fh.tell() - - if self.is_micromanager: - # MicroManager files contain metadata not stored in TIFF tags. - self.micromanager_metadata = read_micromanager_metadata(self._fh) - - if self.is_lsm: - self._fix_lsm_strip_offsets() - self._fix_lsm_strip_byte_counts() - - def _fix_lsm_strip_offsets(self): - """Unwrap strip offsets for LSM files greater than 4 GB.""" - # each series and position require separate unwrapping (undocumented) - for series in self.series: - positions = 1 - for i in 0, 1: - if series.axes[i] in 'PM': - positions *= series.shape[i] - positions = len(series.pages) // positions - for i, page in enumerate(series.pages): - if not i % positions: - wrap = 0 - previous_offset = 0 - strip_offsets = [] - for current_offset in page.strip_offsets: - if current_offset < previous_offset: - wrap += 2**32 - strip_offsets.append(current_offset + wrap) - previous_offset = current_offset - page.strip_offsets = tuple(strip_offsets) - - def _fix_lsm_strip_byte_counts(self): - """Set strip_byte_counts to size of compressed data. - - The strip_byte_counts tag in LSM files contains the number of bytes - for the uncompressed data. - - """ - if not self.pages: - return - strips = {} - for page in self.pages: - assert len(page.strip_offsets) == len(page.strip_byte_counts) - for offset, bytecount in zip(page.strip_offsets, - page.strip_byte_counts): - strips[offset] = bytecount - offsets = sorted(strips.keys()) - offsets.append(min(offsets[-1] + strips[offsets[-1]], self._fh.size)) - for i, offset in enumerate(offsets[:-1]): - strips[offset] = min(strips[offset], offsets[i+1] - offset) - for page in self.pages: - if page.compression: - page.strip_byte_counts = tuple( - strips[offset] for offset in page.strip_offsets) - - def asarray(self, key=None, series=None, memmap=False, tempdir=None): + def asarray(self, key=None, series=None, out=None, maxworkers=1): """Return image data from multiple TIFF pages as numpy array. - By default the first image series is returned. + By default, the data from the first series is returned. 
Parameters ---------- @@ -1444,12 +1683,19 @@ class TiffFile(object): Defines which pages to return as array. series : int or TiffPageSeries Defines which series of pages to return as array. - memmap : bool - If True, return an read-only array stored in a binary file on disk - if possible. The TIFF file is used if possible, else a temporary - file is created. - tempdir : str - The directory where the memory-mapped file will be created. + out : numpy.ndarray, str, or file-like object; optional + Buffer where image data will be saved. + If numpy.ndarray, a writable array of compatible dtype and shape. + If str or open file, the file name or file object used to + create a memory-map to an array stored in a binary file on disk. + maxworkers : int + Maximum number of threads to concurrently get data from pages. + Default is 1. If None, up to half the CPU cores are used. + Reading data from file is limited to a single thread. + Using multiple threads can significantly speed up this function + if the bottleneck is decoding compressed data. + If the bottleneck is I/O or pure Python code, using multiple + threads might be detrimental. """ if not self.pages: @@ -1467,7 +1713,7 @@ class TiffFile(object): if key is None: pass - elif isinstance(key, int): + elif isinstance(key, inttypes): pages = [pages[key]] elif isinstance(key, slice): pages = pages[key] @@ -1476,76 +1722,31 @@ class TiffFile(object): else: raise TypeError("key must be an int, slice, or sequence") - if not len(pages): + if not pages: raise ValueError("no pages selected") if self.is_nih: - if pages[0].is_indexed: - result = stack_pages(pages, colormapped=False, squeeze=False) - result = apply_colormap(result, pages[0].color_map) - else: - result = stack_pages(pages, memmap=memmap, tempdir=tempdir, - colormapped=False, squeeze=False) - elif len(pages) == 1: - result = pages[0].asarray(memmap=memmap) - elif self.is_ome: - assert not self.is_indexed, "color mapping disabled for ome-tiff" - if any(p is None for p in pages): - # zero out missing pages - firstpage = next(p for p in pages if p) - nopage = numpy.zeros_like( - firstpage.asarray(memmap=False)) - if memmap: - with tempfile.NamedTemporaryFile() as fh: - result = numpy.memmap(fh, series.dtype, shape=series.shape) - result = result.reshape(-1) - else: - result = numpy.empty(series.shape, series.dtype).reshape(-1) - index = 0 - - class KeepOpen: - # keep Tiff files open between consecutive pages - def __init__(self, parent, close): - self.master = parent - self.parent = parent - self._close = close - - def open(self, page): - if self._close and page and page.parent != self.parent: - if self.parent != self.master: - self.parent.filehandle.close() - self.parent = page.parent - self.parent.filehandle.open() - - def close(self): - if self._close and self.parent != self.master: - self.parent.filehandle.close() - - keep = KeepOpen(self, self._multifile_close) - for page in pages: - keep.open(page) - if page: - a = page.asarray(memmap=False, colormapped=False, - reopen=False) - else: - a = nopage - try: - result[index:index + a.size] = a.reshape(-1) - except ValueError as e: - warnings.warn("ome-tiff: %s" % e) - break - index += a.size - keep.close() + result = stack_pages(pages, out=out, maxworkers=maxworkers, + squeeze=False) elif key is None and series and series.offset: - if memmap: + if out == 'memmap' and pages[0].is_memmappable: result = self.filehandle.memmap_array( series.dtype, series.shape, series.offset) else: + if out is not None: + out = create_output(out, series.shape, 
series.dtype) self.filehandle.seek(series.offset) - result = self.filehandle.read_array( - series.dtype, product(series.shape)) + i = product(series.shape) + result = self.filehandle.read_array(series.dtype, i, out=out) + if not self.is_native: + result.byteswap(True) + elif len(pages) == 1: + result = pages[0].asarray(out=out) else: - result = stack_pages(pages, memmap=memmap, tempdir=tempdir) + result = stack_pages(pages, out=out, maxworkers=maxworkers) + + if result is None: + return if key is None: try: @@ -1567,150 +1768,262 @@ class TiffFile(object): @lazyattr def series(self): - """Return pages with compatible properties as TiffPageSeries.""" + """Return related pages as TiffPageSeries. + + Side effect: after calling this function, TiffFile.pages might contain + TiffPage and TiffFrame instances. + + """ if not self.pages: return [] + useframes = self.pages.useframes + keyframe = self.pages.keyframe series = [] - if self.is_ome: - series = self._ome_series() - elif self.is_fluoview: - series = self._fluoview_series() - elif self.is_lsm: - series = self._lsm_series() - elif self.is_imagej: - series = self._imagej_series() - elif self.is_nih: - series = self._nih_series() - + for name in 'ome imagej lsm fluoview nih mdgel shaped'.split(): + if getattr(self, 'is_' + name, False): + series = getattr(self, '_%s_series' % name)() + break if not series: - # generic detection of series - shapes = [] - pages = {} - index = 0 - for page in self.pages: - if not page.shape: - continue - if page.is_shaped: - index += 1 # shape starts a new series - shape = page.shape + (index, page.axes, - page.compression in TIFF_DECOMPESSORS) - if shape in pages: - pages[shape].append(page) - else: - shapes.append(shape) - pages[shape] = [page] - series = [] - for s in shapes: - shape = ((len(pages[s]),) + s[:-3] if len(pages[s]) > 1 - else s[:-3]) - axes = (('I' + s[-2]) if len(pages[s]) > 1 else s[-2]) - page0 = pages[s][0] - if page0.is_shaped: - metadata = image_description_dict(page0.is_shaped) - reshape = metadata['shape'] - if 'axes' in metadata: - reaxes = metadata['axes'] - if len(reaxes) == len(reshape): - axes = reaxes - shape = reshape - else: - warnings.warn("axes do not match shape") - try: - axes = reshape_axes(axes, shape, reshape) - shape = reshape - except ValueError as e: - warnings.warn(str(e)) - series.append( - TiffPageSeries(pages[s], shape, page0.dtype, axes)) - - for i, s in enumerate(series): - s.index = i + self.pages.useframes = useframes + self.pages.keyframe = keyframe + series = self._generic_series() # remove empty series, e.g. 
in MD Gel files series = [s for s in series if sum(s.shape) > 0] + + for i, s in enumerate(series): + s.index = i return series - def _fluoview_series(self): - """Return image series in FluoView file.""" - page0 = self.pages[0] - dims = { - b'X': 'X', b'Y': 'Y', b'Z': 'Z', b'T': 'T', - b'WAVELENGTH': 'C', b'TIME': 'T', b'XY': 'R', - b'EVENT': 'V', b'EXPOSURE': 'L'} - mmhd = list(reversed(page0.mm_header.dimensions)) - axes = ''.join(dims.get(i[0].strip().upper(), 'Q') - for i in mmhd if i[1] > 1) - shape = tuple(int(i[1]) for i in mmhd if i[1] > 1) - return [TiffPageSeries(self.pages, shape, page0.dtype, axes)] + def _generic_series(self): + """Return image series in file.""" + if self.pages.useframes: + # movie mode + page = self.pages[0] + shape = page.shape + axes = page.axes + if len(self.pages) > 1: + shape = (len(self.pages),) + shape + axes = 'I' + axes + return [TiffPageSeries(self.pages[:], shape, page.dtype, axes, + stype='movie')] + + self.pages.clear(False) + self.pages.load() + result = [] + keys = [] + series = {} + compressions = TIFF.DECOMPESSORS + for page in self.pages: + if not page.shape: + continue + key = page.shape + (page.axes, page.compression in compressions) + if key in series: + series[key].append(page) + else: + keys.append(key) + series[key] = [page] + for key in keys: + pages = series[key] + page = pages[0] + shape = page.shape + axes = page.axes + if len(pages) > 1: + shape = (len(pages),) + shape + axes = 'I' + axes + result.append(TiffPageSeries(pages, shape, page.dtype, axes, + stype='Generic')) + + return result + + def _shaped_series(self): + """Return image series in "shaped" file.""" + pages = self.pages + pages.useframes = True + lenpages = len(pages) + + def append_series(series, pages, axes, shape, reshape, name): + page = pages[0] + if not axes: + shape = page.shape + axes = page.axes + if len(pages) > 1: + shape = (len(pages),) + shape + axes = 'Q' + axes + size = product(shape) + resize = product(reshape) + if page.is_contiguous and resize > size and resize % size == 0: + # truncated file + axes = 'Q' + axes + shape = (resize // size,) + shape + try: + axes = reshape_axes(axes, shape, reshape) + shape = reshape + except ValueError as e: + warnings.warn(str(e)) + series.append(TiffPageSeries(pages, shape, page.dtype, axes, + name=name, stype='Shaped')) + + keyframe = axes = shape = reshape = name = None + series = [] + index = 0 + while True: + if index >= lenpages: + break + # new keyframe; start of new series + pages.keyframe = index + keyframe = pages[index] + if not keyframe.is_shaped: + warnings.warn("invalid shape metadata or corrupted file") + return + # read metadata + axes = None + shape = None + metadata = json_description_metadata(keyframe.is_shaped) + name = metadata.get('name', '') + reshape = metadata['shape'] + truncated = metadata.get('truncated', False) + if 'axes' in metadata: + axes = metadata['axes'] + if len(axes) == len(reshape): + shape = reshape + else: + axes = '' + warnings.warn("axes do not match shape") + # skip pages if possible + spages = [keyframe] + size = product(reshape) + npages, mod = divmod(size, product(keyframe.shape)) + if mod: + warnings.warn("series shape not matching page shape") + return + if 1 < npages <= lenpages - index: + size *= keyframe._dtype.itemsize + if truncated: + npages = 1 + elif not (keyframe.is_final and + keyframe.offset + size < pages[index+1].offset): + # need to read all pages for series + for j in range(index+1, index+npages): + page = pages[j] + page.keyframe = keyframe + 
spages.append(page)
+            append_series(series, spages, axes, shape, reshape, name)
+            index += npages
 
-    def _lsm_series(self):
-        """Return image series in LSM file."""
-        page0 = self.pages[0]
-        lsmi = page0.cz_lsm_info
-        axes = CZ_SCAN_TYPES[lsmi.scan_type]
-        if page0.is_rgb:
-            axes = axes.replace('C', '').replace('XY', 'XYC')
-        if hasattr(lsmi, 'dimension_p') and lsmi.dimension_p > 1:
-            axes += 'P'
-        if hasattr(lsmi, 'dimension_m') and lsmi.dimension_m > 1:
-            axes += 'M'
-        axes = axes[::-1]
-        shape = tuple(getattr(lsmi, CZ_DIMENSIONS[i]) for i in axes)
-        pages = [p for p in self.pages if not p.is_reduced]
-        dtype = pages[0].dtype
-        series = [TiffPageSeries(pages, shape, dtype, axes)]
-        if len(pages) != len(self.pages):  # reduced RGB pages
-            pages = [p for p in self.pages if p.is_reduced]
-            cp = 1
-            i = 0
-            while cp < len(pages) and i < len(shape)-2:
-                cp *= shape[i]
-                i += 1
-            shape = shape[:i] + pages[0].shape
-            axes = axes[:i] + 'CYX'
-            dtype = pages[0].dtype
-            series.append(TiffPageSeries(pages, shape, dtype, axes))
         return series
 
     def _imagej_series(self):
         """Return image series in ImageJ file."""
         # ImageJ's dimension order is always TZCYXS
         # TODO: fix loading of color, composite or palette images
+        self.pages.useframes = True
+        self.pages.keyframe = 0
+
+        ij = self.imagej_metadata
+        pages = self.pages
+        page = pages[0]
+
+        def is_hyperstack():
+            # ImageJ hyperstacks store all image metadata in the first page and
+            # image data is stored contiguously before the second page, if any.
+            if not page.is_final:
+                return False
+            images = ij.get('images', 0)
+            if images <= 1:
+                return False
+            offset, count = page.is_contiguous
+            if (count != product(page.shape) * page.bitspersample // 8
+                    or offset + count*images > self.filehandle.size):
+                raise ValueError()
+            # check that next page is stored after data
+            if len(pages) > 1 and offset + count*images > pages[1].offset:
+                return False
+            return True
+
+        try:
+            hyperstack = is_hyperstack()
+        except ValueError:
+            warnings.warn("invalid ImageJ metadata or corrupted file")
+            return
+        if hyperstack:
+            # no need to read other pages
+            pages = [page]
+        else:
+            self.pages.load()
+
         shape = []
         axes = []
-        page0 = self.pages[0]
-        ij = page0.imagej_tags
         if 'frames' in ij:
             shape.append(ij['frames'])
             axes.append('T')
         if 'slices' in ij:
             shape.append(ij['slices'])
             axes.append('Z')
-        if 'channels' in ij and not (self.is_rgb and not
+        if 'channels' in ij and not (page.photometric == 2 and not
                                      ij.get('hyperstack', False)):
             shape.append(ij['channels'])
             axes.append('C')
-        remain = ij.get('images', len(self.pages)) // (product(shape)
-                                                       if shape else 1)
+        remain = ij.get('images', len(pages))//(product(shape) if shape else 1)
         if remain > 1:
             shape.append(remain)
             axes.append('I')
-        if page0.axes[0] == 'I':
+        if page.axes[0] == 'I':
             # contiguous multiple images
-            shape.extend(page0.shape[1:])
-            axes.extend(page0.axes[1:])
-        elif page0.axes[:2] == 'SI':
+            shape.extend(page.shape[1:])
+            axes.extend(page.axes[1:])
+        elif page.axes[:2] == 'SI':
             # color-mapped contiguous multiple images
-            shape = page0.shape[0:1] + tuple(shape) + page0.shape[2:]
-            axes = list(page0.axes[0]) + axes + list(page0.axes[2:])
+            shape = page.shape[0:1] + tuple(shape) + page.shape[2:]
+            axes = list(page.axes[0]) + axes + list(page.axes[2:])
+        else:
+            shape.extend(page.shape)
+            axes.extend(page.axes)
+        return [TiffPageSeries(pages, shape, page.dtype, axes, stype='ImageJ')]
+
+    def _fluoview_series(self):
+        """Return image series in FluoView file."""
+        self.pages.useframes = True
+        self.pages.keyframe = 0
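+        # NOTE: useframes -> keyframe -> load() is the access pattern shared
+        # by the format-specific series readers in this patch.  A minimal
+        # sketch of the same pattern from caller code (the variable and the
+        # file name are illustrative only):
+        #     tif = TiffFile('cells.tif')
+        #     tif.pages.useframes = True  # subsequent IFDs become TiffFrames
+        #     tif.pages.keyframe = 0      # frames take shape/dtype from page 0
+        #     tif.pages.load()            # realize all remaining IFDs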
+        self.pages.load()
+        mm = self.fluoview_metadata
+        mmhd = list(reversed(mm['Dimensions']))
+        axes = ''.join(TIFF.MM_DIMENSIONS.get(i[0].upper(), 'Q')
+                       for i in mmhd if i[1] > 1)
+        shape = tuple(int(i[1]) for i in mmhd if i[1] > 1)
+        return [TiffPageSeries(self.pages, shape, self.pages[0].dtype, axes,
+                               name=mm['ImageName'], stype='FluoView')]
+
+    def _mdgel_series(self):
+        """Return image series in MD Gel file."""
+        # only a single page, scaled according to metadata in second page
+        self.pages.useframes = False
+        self.pages.keyframe = 0
+        self.pages.load()
+        md = self.mdgel_metadata
+        if md['FileTag'] in (2, 128):
+            dtype = numpy.dtype('float32')
+            scale = md['ScalePixel']
+            scale = scale[0] / scale[1]  # rational
+            if md['FileTag'] == 2:
+                # square root data format
+                def transform(a):
+                    return a.astype('float32')**2 * scale
+            else:
+                def transform(a):
+                    return a.astype('float32') * scale
         else:
-            shape.extend(page0.shape)
-            axes.extend(page0.axes)
-        return [TiffPageSeries(self.pages, shape, page0.dtype, axes)]
+            transform = None
+        page = self.pages[0]
+        return [TiffPageSeries([page], page.shape, dtype, page.axes,
+                               transform=transform, stype='MDGel')]
 
     def _nih_series(self):
         """Return image series in NIH file."""
+        self.pages.useframes = True
+        self.pages.keyframe = 0
+        self.pages.load()
         page0 = self.pages[0]
         if len(self.pages) == 1:
             shape = page0.shape
@@ -1718,18 +2031,29 @@ class TiffFile(object):
         else:
             shape = (len(self.pages),) + page0.shape
             axes = 'I' + page0.axes
-        return [TiffPageSeries(self.pages, shape, page0.dtype, axes)]
+        return [
+            TiffPageSeries(self.pages, shape, page0.dtype, axes, stype='NIH')]
 
     def _ome_series(self):
         """Return image series in OME-TIFF file(s)."""
-        omexml = self.pages[0].tags['image_description'].value
+        from xml.etree import cElementTree as etree  # delayed import
+        omexml = self.pages[0].description
         try:
             root = etree.fromstring(omexml)
         except etree.ParseError as e:
-            # TODO: test this
+            # TODO: test badly encoded ome-xml
            warnings.warn("ome-xml: %s" % e)
-            omexml = omexml.decode('utf-8', 'ignore').encode('utf-8')
-            root = etree.fromstring(omexml)
+            try:
+                # might work on Python 2
+                omexml = omexml.decode('utf-8', 'ignore').encode('utf-8')
+                root = etree.fromstring(omexml)
+            except Exception:
+                return
+
+        self.pages.useframes = True
+        self.pages.keyframe = 0
+        self.pages.load()
+
         uuid = root.attrib.get('UUID', None)
         self._files = {uuid: self}
         dirname = self._fh.dirname
@@ -1751,7 +2075,7 @@ class TiffFile(object):
                     continue
                 axis = along.tag[-1]
                 newaxis = along.attrib.get('Type', 'other')
-                newaxis = AXES_LABELS[newaxis]
+                newaxis = TIFF.AXES_LABELS[newaxis]
                 if 'Start' in along.attrib:
                     step = float(along.attrib.get('Step', 1))
                     start = float(along.attrib['Start'])
@@ -1761,25 +2085,42 @@ class TiffFile(object):
                     labels = [label.text for label in along
                               if label.tag.endswith('Label')]
                 modulo[axis] = (newaxis, labels)
+
             if not element.tag.endswith('Image'):
                 continue
+
+            attr = element.attrib
+            name = attr.get('Name', None)
+
             for pixels in element:
                 if not pixels.tag.endswith('Pixels'):
                     continue
-                atr = pixels.attrib
-                dtype = atr.get('Type', None)
-                axes = ''.join(reversed(atr['DimensionOrder']))
-                shape = list(int(atr['Size'+ax]) for ax in axes)
+                attr = pixels.attrib
+                dtype = attr.get('PixelType', None)
+                axes = ''.join(reversed(attr['DimensionOrder']))
+                shape = list(int(attr['Size'+ax]) for ax in axes)
                 size = product(shape[:-2])
-                ifds = [None] * (size // self.pages[0].samples_per_pixel)
+                ifds = None
+                spp = 1  # samples per pixel
                 for data in pixels:
+                    if
data.tag.endswith('Channel'): + attr = data.attrib + if ifds is None: + spp = int(attr.get('SamplesPerPixel', spp)) + ifds = [None] * (size // spp) + elif int(attr.get('SamplesPerPixel', 1)) != spp: + raise ValueError( + "Can't handle differing SamplesPerPixel") + continue + if ifds is None: + ifds = [None] * (size // spp) if not data.tag.endswith('TiffData'): continue - atr = data.attrib - ifd = int(atr.get('IFD', 0)) - num = int(atr.get('NumPlanes', 1 if 'IFD' in atr else 0)) - num = int(atr.get('PlaneCount', num)) - idx = [int(atr.get('First'+ax, 0)) for ax in axes[:-2]] + attr = data.attrib + ifd = int(attr.get('IFD', 0)) + num = int(attr.get('NumPlanes', 1 if 'IFD' in attr else 0)) + num = int(attr.get('PlaneCount', num)) + idx = [int(attr.get('First'+ax, 0)) for ax in axes[:-2]] try: idx = numpy.ravel_multi_index(idx, shape[:-2]) except ValueError: @@ -1797,14 +2138,15 @@ class TiffFile(object): fname = uuid.attrib['FileName'] try: tif = TiffFile(os.path.join(dirname, fname)) + tif.pages.useframes = True + tif.pages.keyframe = 0 + tif.pages.load() except (IOError, FileNotFoundError, ValueError): - # TODO: close open file handle warnings.warn( "ome-xml: failed to read '%s'" % fname) break self._files[uuid.text] = tif - if self._multifile_close: - tif.close() + tif.close() pages = self._files[uuid.text].pages try: for i in range(num if num else len(pages)): @@ -1820,11 +2162,34 @@ class TiffFile(object): ifds[idx + i] = pages[ifd + i] except IndexError: warnings.warn("ome-xml: index out of range") + if all(i is None for i in ifds): # skip images without data continue - dtype = next(i for i in ifds if i).dtype - series.append(TiffPageSeries(ifds, shape, dtype, axes, self)) + + # set a keyframe on all ifds + keyframe = None + for i in ifds: + # try find a TiffPage + if i and i == i.keyframe: + keyframe = i + break + if not keyframe: + # reload a TiffPage from file + for i, keyframe in enumerate(ifds): + if keyframe: + keyframe.parent.pages.keyframe = keyframe.index + keyframe = keyframe.parent.pages[keyframe.index] + ifds[i] = keyframe + break + for i in ifds: + if i is not None: + i.keyframe = keyframe + + dtype = keyframe.dtype + series.append( + TiffPageSeries(ifds, shape, dtype, axes, parent=self, + name=name, stype='OME')) for serie in series: shape = list(serie.shape) for axis, (newaxis, labels) in modulo.items(): @@ -1842,17 +2207,145 @@ class TiffFile(object): serie.shape, serie.axes = squeeze_axes(serie.shape, serie.axes) return series - def __len__(self): - """Return number of image pages in file.""" - return len(self.pages) + def _lsm_series(self): + """Return main image series in LSM file. 
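Skip thumbnails.
+
+        A minimal usage sketch; the file name is hypothetical and
+        TiffPageSeries.asarray is assumed to behave as elsewhere in this
+        module:
+
+        >>> with TiffFile('scan.lsm') as tif:  # doctest: +SKIP
+        ...     main = tif.series[0]  # stype 'LSM'; thumbnails are skipped
+        ...     data = main.asarray()
+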
+        """
+        lsmi = self.lsm_metadata
+        axes = TIFF.CZ_LSMINFO_SCANTYPE[lsmi['ScanType']]
+        if self.pages[0].photometric == 2:  # RGB; more than one channel
+            axes = axes.replace('C', '').replace('XY', 'XYC')
+        if lsmi.get('DimensionP', 0) > 1:
+            axes += 'P'
+        if lsmi.get('DimensionM', 0) > 1:
+            axes += 'M'
+        axes = axes[::-1]
+        shape = tuple(int(lsmi[TIFF.CZ_LSMINFO_DIMENSIONS[i]]) for i in axes)
+        name = lsmi.get('Name', '')
+        self.pages.keyframe = 0
+        pages = self.pages[::2]
+        dtype = pages[0].dtype
+        series = [TiffPageSeries(pages, shape, dtype, axes, name=name,
+                                 stype='LSM')]
 
-    def __getitem__(self, key):
-        """Return specified page."""
-        return self.pages[key]
+        if self.pages[1].is_reduced:
+            self.pages.keyframe = 1
+            pages = self.pages[1::2]
+            dtype = pages[0].dtype
+            cp, i = 1, 0
+            while cp < len(pages) and i < len(shape)-2:
+                cp *= shape[i]
+                i += 1
+            shape = shape[:i] + pages[0].shape
+            axes = axes[:i] + 'CYX'
+            series.append(TiffPageSeries(pages, shape, dtype, axes, name=name,
+                                         stype='LSMreduced'))
 
-    def __iter__(self):
-        """Return iterator over pages."""
-        return iter(self.pages)
+        return series
+
+    def _lsm_load_pages(self):
+        """Load all pages from LSM file."""
+        self.pages.cache = True
+        self.pages.useframes = True
+        # second series: thumbnails
+        self.pages.keyframe = 1
+        keyframe = self.pages[1]
+        for page in self.pages[1::2]:
+            page.keyframe = keyframe
+        # first series: data
+        self.pages.keyframe = 0
+        keyframe = self.pages[0]
+        for page in self.pages[::2]:
+            page.keyframe = keyframe
+
+    def _lsm_fix_strip_offsets(self):
+        """Unwrap strip offsets for LSM files greater than 4 GB.
+
+        Each series and position requires separate unwrapping (undocumented).
+
+        """
+        if self.filehandle.size < 2**32:
+            return
+
+        pages = self.pages
+        npages = len(pages)
+        series = self.series[0]
+        axes = series.axes
+
+        # find positions
+        positions = 1
+        for i in 0, 1:
+            if series.axes[i] in 'PM':
+                positions *= series.shape[i]
+
+        # make time axis first
+        if positions > 1:
+            ntimes = 0
+            for i in 1, 2:
+                if axes[i] == 'T':
+                    ntimes = series.shape[i]
+                    break
+            if ntimes:
+                div, mod = divmod(npages, 2*positions*ntimes)
+                assert mod == 0
+                shape = (positions, ntimes, div, 2)
+                indices = numpy.arange(product(shape)).reshape(shape)
+                indices = numpy.moveaxis(indices, 1, 0)
+        else:
+            indices = numpy.arange(npages).reshape(-1, 2)
+
+        # images of reduced page might be stored first
+        if pages[0].dataoffsets[0] > pages[1].dataoffsets[0]:
+            indices = indices[..., ::-1]
+
+        # unwrap offsets
+        wrap = 0
+        previousoffset = 0
+        for i in indices.flat:
+            page = pages[i]
+            dataoffsets = []
+            for currentoffset in page.dataoffsets:
+                if currentoffset < previousoffset:
+                    wrap += 2**32
+                dataoffsets.append(currentoffset + wrap)
+                previousoffset = currentoffset
+            page.dataoffsets = tuple(dataoffsets)
+
+    def _lsm_fix_strip_bytecounts(self):
+        """Set databytecounts to size of compressed data.
+
+        The StripByteCounts tag in LSM files contains the number of bytes
+        for the uncompressed data.
+ + """ + pages = self.pages + if pages[0].compression == 1: + return + # sort pages by first strip offset + pages = sorted(pages, key=lambda p: p.dataoffsets[0]) + npages = len(pages) - 1 + for i, page in enumerate(pages): + if page.index % 2: + continue + offsets = page.dataoffsets + bytecounts = page.databytecounts + if i < npages: + lastoffset = pages[i+1].dataoffsets[0] + else: + # LZW compressed strips might be longer than uncompressed + lastoffset = min(offsets[-1] + 2*bytecounts[-1], self._fh.size) + offsets = offsets + (lastoffset,) + page.databytecounts = tuple(offsets[j+1] - offsets[j] + for j in range(len(bytecounts))) + + def __getattr__(self, name): + """Return 'is_flag' attributes from first page.""" + if name[3:] in TIFF.FILE_FLAGS: + if not self.pages: + return False + value = bool(getattr(self.pages[0], name)) + setattr(self, name, value) + return value + raise AttributeError("'%s' object has no attribute '%s'" % + (self.__class__.__name__, name)) def __enter__(self): return self @@ -1860,171 +2353,520 @@ class TiffFile(object): def __exit__(self, exc_type, exc_value, traceback): self.close() - def __str__(self): - """Return string containing information about file.""" - result = [ - "TIFF file: %s" % self._fh.name, + def __str__(self, detail=0): + """Return string containing information about file. + + The detail parameter specifies the level of detail returned: + + 0: file only. + 1: all series, first page of series and its tags. + 2: large tag values and file metadata. + 3: all pages. + + """ + info = [ + "TiffFile '%s'" % snipstr(self._fh.name, 32), format_size(self._fh.size), - {'<': 'little endian', '>': 'big endian'}[self.byteorder]] + {'<': 'LittleEndian', '>': 'BigEndian'}[self.byteorder]] if self.is_bigtiff: - result.append("bigtiff") - attrs = ['mdgel', 'mediacy', 'stk', 'lsm', 'vista', 'imagej', - 'fluoview', 'micromanager', 'nih', 'ome', 'scn', 'tvips', - 'fei', 'sem'] - result.extend((attr for attr in attrs if getattr(self, 'is_' + attr))) + info.append('BigTiff') + info.append('|'.join(f.upper() for f in self.flags)) if len(self.pages) > 1: - result.append("%i pages" % len(self.pages)) + info.append('%i Pages' % len(self.pages)) if len(self.series) > 1: - result.append("%i series" % len(self.series)) + info.append('%i Series' % len(self.series)) if len(self._files) > 1: - result.append("%i files" % (len(self._files))) - return ", ".join(result) - - def info(self, series=None, pages=None): - """Return string with detailed information about file.""" - if series is None: - series = self.series + info.append('%i Files' % (len(self._files))) + info = ' '.join(info) + if detail <= 0: + return info + info = [info] + info.append('\n'.join(str(s) for s in self.series)) + if detail >= 3: + info.extend((TiffPage.__str__(p, detail=detail) + for p in self.pages + if p is not None)) else: - series = [self.series[i] for i in sequence(series)] + info.extend((TiffPage.__str__(s.pages[0], detail=detail) + for s in self.series + if s.pages[0] is not None)) + if detail >= 2: + for name in sorted(self.flags): + if hasattr(self, name + '_metadata'): + m = getattr(self, name + '_metadata') + if m: + info.append( + "%s_METADATA\n%s" % (name.upper(), pformat(m))) + return '\n\n'.join(info).replace('\n\n\n', '\n\n') - result = [str(self)] - for s in series: - result.append(str(s)) - if pages is None: - result.append(s.pages[0].info()) - - if pages is not None: - if pages == 'all': - pages = self.pages - else: - pages = [self.pages[i] for i in sequence(pages)] - for p in pages: - 
result.append(p.info()) - - return '\n\n'.join(result) + @lazyattr + def flags(self): + """Return set of file flags.""" + return set(name.lower() for name in sorted(TIFF.FILE_FLAGS) + if getattr(self, 'is_' + name)) @lazyattr - def fstat(self): + def is_mdgel(self): + """File has MD Gel format.""" try: - return os.fstat(self._fh.fileno()) - except Exception: # io.UnsupportedOperation - return None + return self.pages[0].is_mdgel or self.pages[1].is_mdgel + except IndexError: + return False + + @property + def is_movie(self): + """Return if file is a movie.""" + return self.pages.useframes @lazyattr - def is_bigtiff(self): - """File has BigTIFF format.""" - return self.offset_size != 4 + def shaped_metadata(self): + """Return Tifffile metadata from JSON descriptions as dicts.""" + if not self.is_shaped: + return + return tuple(json_description_metadata(s.pages[0].is_shaped) + for s in self.series if s.stype.lower() == 'shaped') @lazyattr - def is_rgb(self): - """File contains only RGB images.""" - return self.pages and all(p.is_rgb for p in self.pages) + def ome_metadata(self): + """Return OME XML as dict.""" + if not self.is_ome: + return + return xml2dict(self.pages[0].description) @lazyattr - def is_indexed(self): - """File contains only indexed images.""" - return self.pages and all(p.is_indexed for p in self.pages) + def lsm_metadata(self): + """Return LSM metadata from CZ_LSMINFO tag as dict.""" + if not self.is_lsm: + return + return self.pages[0].tags['CZ_LSMINFO'].value @lazyattr - def is_mdgel(self): - """File has MD Gel format.""" - return any(p.is_mdgel for p in self.pages) + def stk_metadata(self): + """Return STK metadata from UIC tags as dict.""" + if not self.is_stk: + return + page = self.pages[0] + tags = page.tags + result = {} + result['NumberPlanes'] = tags['UIC2tag'].count + if page.description: + result['PlaneDescriptions'] = page.description.split('\0') + # result['plane_descriptions'] = stk_description_metadata( + # page.image_description) + if 'UIC1tag' in tags: + result.update(tags['UIC1tag'].value) + if 'UIC3tag' in tags: + result.update(tags['UIC3tag'].value) # wavelengths + if 'UIC4tag' in tags: + result.update(tags['UIC4tag'].value) # override uic1 tags + uic2tag = tags['UIC2tag'].value + result['ZDistance'] = uic2tag['ZDistance'] + result['TimeCreated'] = uic2tag['TimeCreated'] + result['TimeModified'] = uic2tag['TimeModified'] + try: + result['DatetimeCreated'] = numpy.array( + [julian_datetime(*dt) for dt in + zip(uic2tag['DateCreated'], uic2tag['TimeCreated'])], + dtype='datetime64[ns]') + result['DatetimeModified'] = numpy.array( + [julian_datetime(*dt) for dt in + zip(uic2tag['DateModified'], uic2tag['TimeModified'])], + dtype='datetime64[ns]') + except ValueError as e: + warnings.warn("stk_metadata: %s" % e) + return result @lazyattr - def is_mediacy(self): - """File was created by Media Cybernetics software.""" - return any(p.is_mediacy for p in self.pages) + def imagej_metadata(self): + """Return consolidated ImageJ metadata as dict.""" + if not self.is_imagej: + return + page = self.pages[0] + result = imagej_description_metadata(page.is_imagej) + if 'IJMetadata' in page.tags: + try: + result.update(page.tags['IJMetadata'].value) + except Exception: + pass + return result @lazyattr - def is_stk(self): - """File has MetaMorph STK format.""" - return self.pages and all(p.is_stk for p in self.pages) + def fluoview_metadata(self): + """Return consolidated FluoView metadata as dict.""" + if not self.is_fluoview: + return + result = {} + page = 
self.pages[0] + result.update(page.tags['MM_Header'].value) + # TODO: read stamps from all pages + result['Stamp'] = page.tags['MM_Stamp'].value + # skip parsing image description; not reliable + # try: + # t = fluoview_description_metadata(page.image_description) + # if t is not None: + # result['ImageDescription'] = t + # except Exception as e: + # warnings.warn( + # "failed to read FluoView image description: %s" % e) + return result @lazyattr - def is_lsm(self): - """File was created by Carl Zeiss software.""" - return len(self.pages) and self.pages[0].is_lsm + def nih_metadata(self): + """Return NIH Image metadata from NIHImageHeader tag as dict.""" + if not self.is_nih: + return + return self.pages[0].tags['NIHImageHeader'].value @lazyattr - def is_vista(self): - """File was created by ISS Vista.""" - return len(self.pages) and self.pages[0].is_vista + def fei_metadata(self): + """Return FEI metadata from SFEG or HELIOS tags as dict.""" + if not self.is_fei: + return + tags = self.pages[0].tags + if 'FEI_SFEG' in tags: + return tags['FEI_SFEG'].value + if 'FEI_HELIOS' in tags: + return tags['FEI_HELIOS'].value @lazyattr - def is_imagej(self): - """File has ImageJ format.""" - return len(self.pages) and self.pages[0].is_imagej + def sem_metadata(self): + """Return SEM metadata from CZ_SEM tag as dict.""" + if not self.is_sem: + return + return self.pages[0].tags['CZ_SEM'].value @lazyattr - def is_micromanager(self): - """File was created by MicroManager.""" - return len(self.pages) and self.pages[0].is_micromanager + def mdgel_metadata(self): + """Return consolidated metadata from MD GEL tags as dict.""" + for page in self.pages[:2]: + if 'MDFileTag' in page.tags: + tags = page.tags + break + else: + return + result = {} + for code in range(33445, 33453): + name = TIFF.TAGS[code] + if name not in tags: + continue + result[name[2:]] = tags[name].value + return result @lazyattr - def is_nih(self): - """File has NIH Image format.""" - return len(self.pages) and self.pages[0].is_nih + def andor_metadata(self): + """Return Andor tags as dict.""" + return self.pages[0].andor_tags @lazyattr - def is_fluoview(self): - """File was created by Olympus FluoView.""" - return len(self.pages) and self.pages[0].is_fluoview + def epics_metadata(self): + """Return EPICS areaDetector tags as dict.""" + return self.pages[0].epics_tags @lazyattr - def is_ome(self): - """File has OME-TIFF format.""" - return len(self.pages) and self.pages[0].is_ome + def tvips_metadata(self): + """Return TVIPS tag as dict.""" + if not self.is_tvips: + return + return self.pages[0].tags['TVIPS'].value @lazyattr - def is_scn(self): - """File has Leica SCN format.""" - return len(self.pages) and self.pages[0].is_scn + def metaseries_metadata(self): + """Return MetaSeries metadata from image description as dict.""" + if not self.is_metaseries: + return + return metaseries_description_metadata(self.pages[0].description) @lazyattr - def is_tvips(self): - """File was created using EM-MENU software.""" - return len(self.pages) and self.pages[0].is_tvips + def pilatus_metadata(self): + """Return Pilatus metadata from image description as dict.""" + if not self.is_pilatus: + return + return pilatus_description_metadata(self.pages[0].description) @lazyattr - def is_fei(self): - """File was created using FEI software.""" - return len(self.pages) and self.pages[0].is_fei + def micromanager_metadata(self): + """Return consolidated MicroManager metadata as dict.""" + if not self.is_micromanager: + return + # from file header + result = 
read_micromanager_metadata(self._fh)
+        # from tag
+        result.update(self.pages[0].tags['MicroManagerMetadata'].value)
+        return result
 
     @lazyattr
-    def is_sem(self):
-        """File contains Zeiss SEM metadata."""
-        return len(self.pages) and self.pages[0].is_sem
+    def scanimage_metadata(self):
+        """Return ScanImage non-varying frame and ROI metadata as dict."""
+        if not self.is_scanimage:
+            return
+        result = {}
+        try:
+            framedata, roidata = read_scanimage_metadata(self._fh)
+            result['FrameData'] = framedata
+            result.update(roidata)
+        except ValueError:
+            pass
+        # TODO: scanimage_artist_metadata
+        try:
+            result['Description'] = scanimage_description_metadata(
+                self.pages[0].description)
+        except Exception as e:
+            warnings.warn("scanimage_description_metadata failed: %s" % e)
+        return result
+
+
+class TiffPages(object):
+    """Sequence of TIFF image file directories."""
+    def __init__(self, parent):
+        """Initialize instance from file. Read first TiffPage from file.
+
+        The file position must be at an offset to an offset to a TiffPage.
+
+        """
+        self.parent = parent
+        self.pages = []  # cache of TiffPages, TiffFrames, or their offsets
+        self.complete = False  # True if offsets to all pages were read
+        self._tiffpage = TiffPage  # class for reading tiff pages
+        self._keyframe = None
+        self._cache = True
+
+        # read offset to first page
+        fh = parent.filehandle
+        self._nextpageoffset = fh.tell()
+        offset = struct.unpack(parent.offsetformat,
+                               fh.read(parent.offsetsize))[0]
+
+        if offset == 0:
+            # warnings.warn("file contains no pages")
+            self.complete = True
+            return
+        if offset >= fh.size:
+            warnings.warn("invalid page offset (%i)" % offset)
+            self.complete = True
+            return
+
+        # always read and cache first page
+        fh.seek(offset)
+        page = TiffPage(parent, index=0)
+        self.pages.append(page)
+        self._keyframe = page
+
+    @property
+    def cache(self):
+        """Return if pages/frames are currently being cached."""
+        return self._cache
+
+    @cache.setter
+    def cache(self, value):
+        """Enable or disable caching of pages/frames. Clear cache if False."""
+        value = bool(value)
+        if self._cache and not value:
+            self.clear()
+        self._cache = value
+
+    @property
+    def useframes(self):
+        """Return if currently using TiffFrame (True) or TiffPage (False)."""
+        return self._tiffpage == TiffFrame and TiffFrame is not TiffPage
+
+    @useframes.setter
+    def useframes(self, value):
+        """Set to use TiffFrame (True) or TiffPage (False)."""
+        self._tiffpage = TiffFrame if value else TiffPage
+
+    @property
+    def keyframe(self):
+        """Return index of current keyframe."""
+        return self._keyframe.index
+
+    @keyframe.setter
+    def keyframe(self, index):
+        """Set current keyframe.
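+
+        A speculative usage sketch (the file name is hypothetical; the
+        attributes are the ones defined on this class):
+
+        >>> tif = TiffFile('stack.tif')  # doctest: +SKIP
+        >>> tif.pages.useframes = True   # read lightweight TiffFrames
+        >>> tif.pages.keyframe = 0       # frames borrow shape/dtype of page 0
+        >>> frame = tif.pages[1]         # a TiffFrame, not a full TiffPage
+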
+        Load TiffPage from file if necessary.
+        """
+        if self.complete or 0 <= index < len(self.pages):
+            page = self.pages[index]
+            if isinstance(page, TiffPage):
+                self._keyframe = page
+                return
+            elif isinstance(page, TiffFrame):
+                # remove existing frame
+                self.pages[index] = page.offset
+        # load TiffPage from file
+        useframes = self.useframes
+        self._tiffpage = TiffPage
+        self._keyframe = self[index]
+        self.useframes = useframes
+
+    @property
+    def next_page_offset(self):
+        """Return offset where offset to a new page can be stored."""
+        if not self.complete:
+            self._seek(-1)
+        return self._nextpageoffset
+
+    def load(self):
+        """Read all remaining pages from file."""
+        fh = self.parent.filehandle
+        keyframe = self._keyframe
+        pages = self.pages
+        if not self.complete:
+            self._seek(-1)
+        for i, page in enumerate(pages):
+            if isinstance(page, inttypes):
+                fh.seek(page)
+                page = self._tiffpage(self.parent, index=i, keyframe=keyframe)
+                pages[i] = page
+
+    def clear(self, fully=True):
+        """Delete all but first page from cache. Set keyframe to first page."""
+        pages = self.pages
+        if not self._cache or len(pages) < 1:
+            return
+        self._keyframe = pages[0]
+        if fully:
+            # delete all but first TiffPage/TiffFrame
+            for i, page in enumerate(pages[1:]):
+                if not isinstance(page, inttypes):
+                    pages[i+1] = page.offset
+        elif TiffFrame is not TiffPage:
+            # delete only TiffFrames
+            for i, page in enumerate(pages):
+                if isinstance(page, TiffFrame):
+                    pages[i] = page.offset
+
+    def _seek(self, index):
+        """Seek file to offset of specified page."""
+        pages = self.pages
+        if not pages:
+            return
+
+        fh = self.parent.filehandle
+        if fh.closed:
+            raise RuntimeError("FileHandle is closed")
+
+        if self.complete or 0 <= index < len(pages):
+            page = pages[index]
+            offset = page if isinstance(page, inttypes) else page.offset
+            fh.seek(offset)
+            return
+
+        offsetformat = self.parent.offsetformat
+        offsetsize = self.parent.offsetsize
+        tagnoformat = self.parent.tagnoformat
+        tagnosize = self.parent.tagnosize
+        tagsize = self.parent.tagsize
+        unpack = struct.unpack
+
+        page = pages[-1]
+        offset = page if isinstance(page, inttypes) else page.offset
+
+        while True:
+            # read offsets to pages from file until index is reached
+            fh.seek(offset)
+            # skip tags
+            try:
+                tagno = unpack(tagnoformat, fh.read(tagnosize))[0]
+                if tagno > 4096:
+                    raise ValueError("suspicious number of tags")
+            except Exception:
+                warnings.warn("corrupted tag list at offset %i" % offset)
+                del pages[-1]
+                self.complete = True
+                break
+            self._nextpageoffset = offset + tagnosize + tagno * tagsize
+            fh.seek(self._nextpageoffset)
+
+            # read offset to next page
+            offset = unpack(offsetformat, fh.read(offsetsize))[0]
+            if offset == 0:
+                self.complete = True
+                break
+            if offset >= fh.size:
+                warnings.warn("invalid page offset (%i)" % offset)
+                self.complete = True
+                break
+
+            pages.append(offset)
+            if 0 <= index < len(pages):
+                break
+
+        if index >= len(pages):
+            raise IndexError('list index out of range')
+
+        page = pages[index]
+        fh.seek(page if isinstance(page, inttypes) else page.offset)
+
+    def __bool__(self):
+        """Return True if file contains any pages."""
+        return len(self.pages) > 0
+
+    def __len__(self):
+        """Return number of pages in file."""
+        if not self.complete:
+            self._seek(-1)
+        return len(self.pages)
+
+    def __getitem__(self, key):
+        """Return specified page(s) from cache or file."""
+        pages = self.pages
+        if not pages:
+            raise IndexError('list index out of range')
+        if key == 0:
+            return pages[key]
+
+        if isinstance(key, slice):
+            start, stop, _
= key.indices(2**31) + if not self.complete and max(stop, start) > len(pages): + self._seek(-1) + return [self[i] for i in range(*key.indices(len(pages)))] + + if self.complete and key >= len(pages): + raise IndexError('list index out of range') + + try: + page = pages[key] + except IndexError: + page = 0 + if not isinstance(page, inttypes): + return page + + self._seek(key) + page = self._tiffpage(self.parent, index=key, keyframe=self._keyframe) + if self._cache: + pages[key] = page + return page + + def __iter__(self): + """Return iterator over all pages.""" + i = 0 + while True: + try: + yield self[i] + i += 1 + except IndexError: + break class TiffPage(object): - """A TIFF image file directory (IFD). + """TIFF image file directory (IFD). Attributes ---------- index : int Index of page in file. - dtype : str {TIFF_SAMPLE_DTYPES} - Data type of image, color-mapped if applicable. + dtype : numpy.dtype or None + Data type of the image in IFD. shape : tuple - Dimensions of the image array in TIFF page, - color-mapped and with extra samples if applicable. + Dimensions of the image in IFD. axes : str Axes label codes: 'X' width, 'Y' height, 'S' sample, 'I' image series|page|plane, 'Z' depth, 'C' color|em-wavelength|channel, 'E' ex-wavelength|lambda, 'T' time, 'R' region|tile, 'A' angle, 'P' phase, 'H' lifetime, 'L' exposure, 'V' event, 'Q' unknown, '_' missing - tags : TiffTags - Dictionary of tags in page. - Tag values are also directly accessible as attributes. - color_map : numpy.ndarray + tags : dict + Dictionary of tags in IFD. {tag.name: TiffTag} + colormap : numpy.ndarray Color look up table, if exists. - cz_lsm_scan_info: Record(dict) - LSM scan info attributes, if exists. - imagej_tags: Record(dict) - Consolidated ImageJ description and metadata tags, if exists. - uic_tags: Record(dict) - Consolidated MetaMorph STK/UIC tags, if exists. All attributes are read-only. @@ -2032,568 +2874,441 @@ class TiffPage(object): ----- The internal, normalized '_shape' attribute is 6 dimensional: - 0. number planes/images (stk, ij). - 1. planar samples_per_pixel. - 2. image_depth Z (sgi). - 3. image_length Y. - 4. image_width X. - 5. contig samples_per_pixel. + 0 : number planes/images (stk, ij). + 1 : planar samplesperpixel. + 2 : imagedepth Z (sgi). + 3 : imagelength Y. + 4 : imagewidth X. + 5 : contig samplesperpixel. """ - def __init__(self, parent, skip=False): + # default properties; will be updated from tags + imagewidth = 0 + imagelength = 0 + imagedepth = 1 + tilewidth = 0 + tilelength = 0 + tiledepth = 1 + bitspersample = 1 + samplesperpixel = 1 + sampleformat = 1 + rowsperstrip = 2**32-1 + compression = 1 + planarconfig = 1 + fillorder = 1 + photometric = 0 + predictor = 1 + extrasamples = 1 + colormap = None + software = '' + description = '' + description1 = '' + + def __init__(self, parent, index, keyframe=None): """Initialize instance from file. - If skip, seek to next IFD offset without reading tags. + The file handle position must be at offset to a valid IFD. """ self.parent = parent - - self.index = len(parent.pages) - self.shape = self._shape = () - self.dtype = self._dtype = None + self.index = index + self.shape = () + self._shape = () + self.dtype = None + self._dtype = None self.axes = "" - self.tags = TiffTags() - self._offset = 0 # offset to this IDF - - self._fromfile(skip) - if skip: - return - self._process_tags() + self.tags = {} - def _fromfile(self, skip=False): - """Read TIFF IFD structure and its tags from file. 
+ self.dataoffsets = () + self.databytecounts = () - The file cursor is left at the storage position of the offset to the - next IFD (if any). - - Raises StopIteration if offset (first bytes read) is 0 - or a corrupted page list is encountered. - - """ - fh = self.parent.filehandle - byteorder = self.parent.byteorder - offset_size = self.parent.offset_size - pos = fh.tell() - - # read offset to this IFD - fmt = {4: 'I', 8: 'Q'}[offset_size] - offset = struct.unpack(byteorder + fmt, fh.read(offset_size))[0] - if not offset: - fh.seek(pos) - raise StopIteration() - if offset >= fh.size: - warnings.warn("invalid page offset > file size") - fh.seek(pos) - raise StopIteration() - self._offset = offset - - # read standard tags - fh.seek(offset) - fmt, size, tagsize = {4: ('H', 2, 12), 8: ('Q', 8, 20)}[offset_size] + # read TIFF IFD structure and its tags from file + fh = parent.filehandle + self.offset = fh.tell() # offset to this IDF try: - numtags = struct.unpack(byteorder + fmt, fh.read(size))[0] - if numtags > 4096: + tagno = struct.unpack(parent.tagnoformat, + fh.read(parent.tagnosize))[0] + if tagno > 4096: raise ValueError("suspicious number of tags") except Exception: - warnings.warn("corrupted page list at offset %i" % offset) - fh.seek(pos) - raise StopIteration() - - if skip: - fh.seek(offset + size + numtags * tagsize) - return + raise ValueError("corrupted tag list at offset %i" % self.offset) + tagsize = parent.tagsize + data = fh.read(tagsize * tagno) tags = self.tags - tagcode = 0 - for _ in range(numtags): + index = -tagsize + for _ in range(tagno): + index += tagsize try: - tag = TiffTag(self.parent) + tag = TiffTag(self.parent, data[index:index+tagsize]) except TiffTag.Error as e: warnings.warn(str(e)) continue - if tagcode > tag.code: - # expected for early LSM and tifffile versions - warnings.warn("tags are not ordered by code") - tagcode = tag.code - if tag.name not in tags: - tags[tag.name] = tag + tagname = tag.name + if tagname not in tags: + name = tagname + tags[name] = tag else: # some files contain multiple tags with same code - # e.g. MicroManager files contain two image_description tags + # e.g. MicroManager files contain two ImageDescription tags i = 1 while True: - name = "%s_%i" % (tag.name, i) + name = "%s%i" % (tagname, i) if name not in tags: tags[name] = tag break + name = TIFF.TAG_ATTRIBUTES.get(name, '') + if name: + setattr(self, name, tag.value) + + if not tags: + return # found in FIBICS - pos = fh.tell() # where offset to next IFD is stored + # consolidate private tags; remove them from self.tags + if self.is_andor: + self.andor_tags + elif self.is_epics: + self.epics_tags if self.is_lsm or (self.index and self.parent.is_lsm): # correct non standard LSM bitspersample tags - self.tags['bits_per_sample']._fix_lsm_bitspersample(self) - - if self.is_lsm: - # read LSM info subrecords - for name, reader in CZ_LSM_INFO_READERS.items(): - try: - offset = self.cz_lsm_info['offset_'+name] - except KeyError: - continue - if offset < 8: - # older LSM revision - continue - fh.seek(offset) - try: - setattr(self, 'cz_lsm_'+name, reader(fh)) - except ValueError: - pass - elif self.is_stk and 'uic1tag' in tags and not tags['uic1tag'].value: - # read uic1tag now that plane count is known - uic1tag = tags['uic1tag'] - fh.seek(uic1tag.value_offset) - tags['uic1tag'].value = Record( - read_uic1tag(fh, byteorder, uic1tag.dtype, uic1tag.count, - tags['uic2tag'].count)) - - fh.seek(pos) - - def _process_tags(self): - """Validate standard tags and initialize attributes. 
- - Raise ValueError if tag values are not supported. - - """ - tags = self.tags - for code, (name, default, dtype, count, validate) in TIFF_TAGS.items(): - if name in tags: - #tags[name] = TiffTag(code, dtype=dtype, count=count, - # value=default, name=name) - if validate: - try: - if tags[name].count == 1: - setattr(self, name, validate[tags[name].value]) - else: - setattr(self, name, tuple( - validate[value] for value in tags[name].value)) - except KeyError: - raise ValueError("%s.value (%s) not supported" % - (name, tags[name].value)) - elif default is not None: - setattr(self, name, validate[default] if validate else default) + self.tags['BitsPerSample']._fix_lsm_bitspersample(self) + + if self.is_vista or (self.index and self.parent.is_vista): + # ISS Vista writes wrong ImageDepth tag + self.imagedepth = 1 + + if self.is_stk and 'UIC1tag' in tags and not tags['UIC1tag'].value: + # read UIC1tag now that plane count is known + uic1tag = tags['UIC1tag'] + fh.seek(uic1tag.valueoffset) + tags['UIC1tag'].value = read_uic1tag( + fh, self.parent.byteorder, uic1tag.dtype, + uic1tag.count, None, tags['UIC2tag'].count) + + if 'IJMetadata' in tags: + # decode IJMetadata tag + try: + tags['IJMetadata'].value = imagej_metadata( + tags['IJMetadata'].value, + tags['IJMetadataByteCounts'].value, + self.parent.byteorder) + except Exception as e: + warnings.warn(str(e)) - if 'bits_per_sample' in tags: - tag = tags['bits_per_sample'] + if 'BitsPerSample' in tags: + tag = tags['BitsPerSample'] if tag.count == 1: - self.bits_per_sample = tag.value + self.bitspersample = tag.value else: # LSM might list more items than samples_per_pixel - value = tag.value[:self.samples_per_pixel] + value = tag.value[:self.samplesperpixel] if any((v-value[0] for v in value)): - self.bits_per_sample = value + self.bitspersample = value else: - self.bits_per_sample = value[0] + self.bitspersample = value[0] - if 'sample_format' in tags: - tag = tags['sample_format'] + if 'SampleFormat' in tags: + tag = tags['SampleFormat'] if tag.count == 1: - self.sample_format = TIFF_SAMPLE_FORMATS[tag.value] + self.sampleformat = tag.value else: - value = tag.value[:self.samples_per_pixel] + value = tag.value[:self.samplesperpixel] if any((v-value[0] for v in value)): - self.sample_format = [TIFF_SAMPLE_FORMATS[v] - for v in value] + self.sampleformat = value else: - self.sample_format = TIFF_SAMPLE_FORMATS[value[0]] - - if 'photometric' not in tags: - self.photometric = None - - if 'image_length' in tags: - if 'rows_per_strip' not in tags: - self.rows_per_strip = self.image_length - self.strips_per_image = int(math.floor( - float(self.image_length + self.rows_per_strip - 1) / - self.rows_per_strip)) - else: - self.strips_per_image = 0 - - key = (self.sample_format, self.bits_per_sample) - self.dtype = self._dtype = TIFF_SAMPLE_DTYPES.get(key, None) - - if 'image_length' not in self.tags or 'image_width' not in self.tags: - # some GEL file pages are missing image data - self.image_length = 0 - self.image_width = 0 - self.image_depth = 0 - self.strip_offsets = 0 - self._shape = () - self.shape = () - self.axes = '' - - if self.is_vista or self.parent.is_vista: - # ISS Vista writes wrong image_depth tag - self.image_depth = 1 - - if self.is_indexed: - self.dtype = self.tags['color_map'].dtype[1] - self.color_map = numpy.array(self.color_map, self.dtype) - dmax = self.color_map.max() - if dmax < 256: - self.dtype = numpy.uint8 - self.color_map = self.color_map.astype(self.dtype) - #else: - # self.dtype = numpy.uint8 - # self.color_map >>= 8 
- # self.color_map = self.color_map.astype(self.dtype) - # TODO: support other photometric modes than RGB - self.color_map.shape = (3, -1) + self.sampleformat = value[0] + + if 'ImageLength' in tags: + if 'RowsPerStrip' not in tags or tags['RowsPerStrip'].count > 1: + self.rowsperstrip = self.imagelength + # self.stripsperimage = int(math.floor( + # float(self.imagelength + self.rowsperstrip - 1) / + # self.rowsperstrip)) + + # determine dtype + dtype = self.sampleformat, self.bitspersample + dtype = TIFF.SAMPLE_DTYPES.get(dtype, None) + if dtype is not None: + dtype = numpy.dtype(dtype) + self.dtype = self._dtype = dtype # determine shape of data - image_length = self.image_length - image_width = self.image_width - image_depth = self.image_depth - samples_per_pixel = self.samples_per_pixel + imagelength = self.imagelength + imagewidth = self.imagewidth + imagedepth = self.imagedepth + samplesperpixel = self.samplesperpixel if self.is_stk: - assert self.image_depth == 1 - planes = self.tags['uic2tag'].count - if self.is_contig: - self._shape = (planes, 1, 1, image_length, image_width, - samples_per_pixel) - if samples_per_pixel == 1: - self.shape = (planes, image_length, image_width) + assert self.imagedepth == 1 + uictag = tags['UIC2tag'].value + planes = tags['UIC2tag'].count + if self.planarconfig == 1: + self._shape = ( + planes, 1, 1, imagelength, imagewidth, samplesperpixel) + if samplesperpixel == 1: + self.shape = (planes, imagelength, imagewidth) self.axes = 'YX' else: - self.shape = (planes, image_length, image_width, - samples_per_pixel) + self.shape = ( + planes, imagelength, imagewidth, samplesperpixel) self.axes = 'YXS' else: - self._shape = (planes, samples_per_pixel, 1, image_length, - image_width, 1) - if samples_per_pixel == 1: - self.shape = (planes, image_length, image_width) + self._shape = ( + planes, samplesperpixel, 1, imagelength, imagewidth, 1) + if samplesperpixel == 1: + self.shape = (planes, imagelength, imagewidth) self.axes = 'YX' else: - self.shape = (planes, samples_per_pixel, image_length, - image_width) + self.shape = ( + planes, samplesperpixel, imagelength, imagewidth) self.axes = 'SYX' # detect type of series if planes == 1: self.shape = self.shape[1:] - elif numpy.all(self.uic2tag.z_distance != 0): + elif numpy.all(uictag['ZDistance'] != 0): self.axes = 'Z' + self.axes - elif numpy.all(numpy.diff(self.uic2tag.time_created) != 0): + elif numpy.all(numpy.diff(uictag['TimeCreated']) != 0): self.axes = 'T' + self.axes else: self.axes = 'I' + self.axes - # DISABLED - if self.is_indexed: - assert False, "color mapping disabled for stk" - if self.color_map.shape[1] >= 2**self.bits_per_sample: - if image_depth == 1: - self.shape = (planes, image_length, image_width, - self.color_map.shape[0]) - else: - self.shape = (planes, image_depth, image_length, - image_width, self.color_map.shape[0]) - self.axes = self.axes + 'S' - else: - warnings.warn("palette cannot be applied") - self.is_indexed = False - elif self.is_indexed: - samples = 1 - if 'extra_samples' in self.tags: - samples += self.tags['extra_samples'].count - if self.is_contig: - self._shape = (1, 1, image_depth, image_length, image_width, - samples) - else: - self._shape = (1, samples, image_depth, image_length, - image_width, 1) - if self.color_map.shape[1] >= 2**self.bits_per_sample: - if image_depth == 1: - self.shape = (image_length, image_width, - self.color_map.shape[0]) - self.axes = 'YXS' - else: - self.shape = (image_depth, image_length, image_width, - self.color_map.shape[0]) - self.axes = 
'ZYXS' - else: - warnings.warn("palette cannot be applied") - self.is_indexed = False - if image_depth == 1: - self.shape = (image_length, image_width) - self.axes = 'YX' - else: - self.shape = (image_depth, image_length, image_width) - self.axes = 'ZYX' - elif self.is_rgb or samples_per_pixel > 1: - if self.is_contig: - self._shape = (1, 1, image_depth, image_length, image_width, - samples_per_pixel) - if image_depth == 1: - self.shape = (image_length, image_width, samples_per_pixel) + elif self.photometric == 2 or samplesperpixel > 1: # PHOTOMETRIC.RGB + if self.planarconfig == 1: + self._shape = ( + 1, 1, imagedepth, imagelength, imagewidth, samplesperpixel) + if imagedepth == 1: + self.shape = (imagelength, imagewidth, samplesperpixel) self.axes = 'YXS' else: - self.shape = (image_depth, image_length, image_width, - samples_per_pixel) + self.shape = ( + imagedepth, imagelength, imagewidth, samplesperpixel) self.axes = 'ZYXS' else: - self._shape = (1, samples_per_pixel, image_depth, - image_length, image_width, 1) - if image_depth == 1: - self.shape = (samples_per_pixel, image_length, image_width) + self._shape = (1, samplesperpixel, imagedepth, + imagelength, imagewidth, 1) + if imagedepth == 1: + self.shape = (samplesperpixel, imagelength, imagewidth) self.axes = 'SYX' else: - self.shape = (samples_per_pixel, image_depth, - image_length, image_width) + self.shape = ( + samplesperpixel, imagedepth, imagelength, imagewidth) self.axes = 'SZYX' - if False and self.is_rgb and 'extra_samples' in self.tags: - # DISABLED: only use RGB and first alpha channel if exists - extra_samples = self.extra_samples - if self.tags['extra_samples'].count == 1: - extra_samples = (extra_samples,) - for exs in extra_samples: - if exs in ('unassalpha', 'assocalpha', 'unspecified'): - if self.is_contig: - self.shape = self.shape[:-1] + (4,) - else: - self.shape = (4,) + self.shape[1:] - break else: - self._shape = (1, 1, image_depth, image_length, image_width, 1) - if image_depth == 1: - self.shape = (image_length, image_width) + self._shape = (1, 1, imagedepth, imagelength, imagewidth, 1) + if imagedepth == 1: + self.shape = (imagelength, imagewidth) self.axes = 'YX' else: - self.shape = (image_depth, image_length, image_width) + self.shape = (imagedepth, imagelength, imagewidth) self.axes = 'ZYX' - if not self.compression and 'strip_byte_counts' not in tags: - self.strip_byte_counts = ( - product(self.shape) * (self.bits_per_sample // 8),) + # dataoffsets and databytecounts + if 'TileOffsets' in tags: + self.dataoffsets = tags['TileOffsets'].value + elif 'StripOffsets' in tags: + self.dataoffsets = tags['StripOffsets'].value + else: + self.dataoffsets = (0,) + + if 'TileByteCounts' in tags: + self.databytecounts = tags['TileByteCounts'].value + elif 'StripByteCounts' in tags: + self.databytecounts = tags['StripByteCounts'].value + elif self.compression == 1: + self.databytecounts = ( + product(self.shape) * (self.bitspersample // 8),) + else: + raise ValueError("ByteCounts not found") assert len(self.shape) == len(self.axes) - def _patch_imagej(self): - """Return if ImageJ data are contiguous and adjust page attributes. - - Patch 'strip_offsets' and 'strip_byte_counts' tags to span the - complete contiguous data. - - ImageJ stores all image metadata in the first page and image data is - stored contiguously before the second page, if any. No need to - read other pages. 
-
-        """
-        if not self.is_imagej or not self.is_contiguous or self.parent.is_ome:
-            return
-        images = self.imagej_tags.get('images', 0)
-        if images <= 1:
-            return
-        offset, count = self.is_contiguous
-        shape = self.shape
-        if self.is_indexed:
-            shape = shape[:-1]
-
-        fh = self.parent.filehandle
-        if (count != product(shape) * self.bits_per_sample // 8 or
-                offset + count*images > fh.size):
-            self.is_imagej = False
-            warnings.warn("invalid ImageJ metadata or corrupted file")
-            return
-
-        # check that next page is stored after data
-        byteorder = self.parent.byteorder
-        offset_size = self.parent.offset_size
-        pos = fh.tell()
-        fmt = {4: 'I', 8: 'Q'}[offset_size]
-        nextpage = struct.unpack(byteorder + fmt, fh.read(offset_size))[0]
-        fh.seek(pos)
-        if nextpage and offset + count*images > nextpage:
-            return
-
-        # patch metadata
-        pre = 'tile' if self.is_tiled else 'strip'
-        self.tags[pre+'_offsets'].value = (offset,)
-        self.tags[pre+'_byte_counts'].value = (count * images,)
-        self.shape = (images,) + self.shape
-        self._shape = (images,) + self._shape[1:]
-        self.axes = 'I' + self.axes
-        return True
-
-    def asarray(self, squeeze=True, colormapped=True, rgbonly=False,
-                scale_mdgel=False, memmap=False, reopen=True,
-                maxsize=64*2**30):
+    def asarray(self, out=None, squeeze=True, lock=None, reopen=True,
+                maxsize=64*2**30, validate=True):
         """Read image data from file and return as numpy array.
 
         Raise ValueError if format is unsupported.
-        If any of 'squeeze', 'colormapped', or 'rgbonly' are not the default,
-        the shape of the returned array might be different from the page shape.
 
         Parameters
         ----------
+        out : numpy.ndarray, str, or file-like object; optional
+            Buffer where image data will be saved.
+            If numpy.ndarray, a writable array of compatible dtype and shape.
+            If str or open file, the file name or file object used to
+            create a memory-map to an array stored in a binary file on disk.
         squeeze : bool
             If True, all length-1 dimensions (except X and Y) are
-            squeezed out from result.
-        colormapped : bool
-            If True, color mapping is applied for palette-indexed images.
-        rgbonly : bool
-            If True, return RGB(A) image without additional extra samples.
-        memmap : bool
-            If True, use numpy.memmap to read arrays from file if possible.
-            For use on 64-bit systems and files with few huge contiguous data.
+            squeezed out from the array.
+            If False, the shape of the returned array might be different from
+            the page.shape.
+        lock : {RLock, NullContext}
+            A reentrant lock used to synchronize reads from file.
+            If None (default), the lock of the parent's filehandle is used.
         reopen : bool
-            If True and the parent file handle is closed, the file is
-            temporarily re-opened (and closed if no exception occurs).
-        scale_mdgel : bool
-            If True, MD Gel data will be scaled according to the private
-            metadata in the second TIFF page. The dtype will be float32.
+            If True (default) and the parent file handle is closed, the file
+            is temporarily re-opened and closed if no exception occurs.
         maxsize: int or None
             Maximum size of data before a ValueError is raised.
             Can be used to catch DOS. Default: 64 GB.
+        validate : bool
+            If True (default), validate various parameters.
+            If None, only validate parameters and return None.
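+
+        A sketch of the 'out' variants described above (the file name and
+        the pre-allocated buffer are hypothetical):
+
+        >>> page = TiffFile('image.tif').pages[0]      # doctest: +SKIP
+        >>> buf = numpy.empty(page.shape, page.dtype)  # caller-owned buffer
+        >>> page.asarray(out=buf)                      # decode into buf
+        >>> page.asarray(out='memmap')                 # memory-map if possible
+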
""" - if not self._shape: + self_ = self + self = self.keyframe # self or keyframe + + if not self._shape or product(self._shape) == 0: return - if maxsize and product(self._shape) > maxsize: - raise ValueError("data is too large %s" % str(self._shape)) - - if self.dtype is None: - raise ValueError("data type not supported: %s%i" % ( - self.sample_format, self.bits_per_sample)) - if self.compression not in TIFF_DECOMPESSORS: - raise ValueError("cannot decompress %s" % self.compression) - if 'sample_format' in self.tags: - tag = self.tags['sample_format'] - if tag.count != 1 and any((i-tag.value[0] for i in tag.value)): - raise ValueError("sample formats do not match %s" % tag.value) - - if self.is_chroma_subsampled: - # TODO: implement chroma subsampling - raise NotImplementedError("chroma subsampling not supported") - fh = self.parent.filehandle - closed = fh.closed - if closed: - if reopen: - fh.open() - else: - raise IOError("file handle is closed") + tags = self.tags + + if validate or validate is None: + if maxsize and product(self._shape) > maxsize: + raise ValueError("data is too large %s" % str(self._shape)) + if self.dtype is None: + raise ValueError("data type not supported: %s%i" % ( + self.sampleformat, self.bitspersample)) + if self.compression not in TIFF.DECOMPESSORS: + raise ValueError( + "can not decompress %s" % self.compression.name) + if 'SampleFormat' in tags: + tag = tags['SampleFormat'] + if tag.count != 1 and any((i-tag.value[0] for i in tag.value)): + raise ValueError( + "sample formats do not match %s" % tag.value) + if self.is_chroma_subsampled: + # TODO: implement chroma subsampling + raise NotImplementedError("chroma subsampling not supported") + if validate is None: + return + + fh = self_.parent.filehandle + lock = fh.lock if lock is None else lock + with lock: + closed = fh.closed + if closed: + if reopen: + fh.open() + else: + raise IOError("file handle is closed") dtype = self._dtype shape = self._shape - image_width = self.image_width - image_length = self.image_length - image_depth = self.image_depth - typecode = self.parent.byteorder + dtype - bits_per_sample = self.bits_per_sample - lsb2msb = self.fill_order == 'lsb2msb' - - byte_counts, offsets = self._byte_counts_offsets - - if self.is_tiled: - tile_width = self.tile_width - tile_length = self.tile_length - tile_depth = self.tile_depth if 'tile_depth' in self.tags else 1 - tw = (image_width + tile_width - 1) // tile_width - tl = (image_length + tile_length - 1) // tile_length - td = (image_depth + tile_depth - 1) // tile_depth + imagewidth = self.imagewidth + imagelength = self.imagelength + imagedepth = self.imagedepth + bitspersample = self.bitspersample + typecode = self.parent.byteorder + dtype.char + lsb2msb = self.fillorder == 2 + offsets, bytecounts = self_.offsets_bytecounts + istiled = self.is_tiled + + if istiled: + tilewidth = self.tilewidth + tilelength = self.tilelength + tiledepth = self.tiledepth + tw = (imagewidth + tilewidth - 1) // tilewidth + tl = (imagelength + tilelength - 1) // tilelength + td = (imagedepth + tiledepth - 1) // tiledepth shape = (shape[0], shape[1], - td*tile_depth, tl*tile_length, tw*tile_width, shape[-1]) - tile_shape = (tile_depth, tile_length, tile_width, shape[-1]) - runlen = tile_width + td*tiledepth, tl*tilelength, tw*tilewidth, shape[-1]) + tileshape = (tiledepth, tilelength, tilewidth, shape[-1]) + runlen = tilewidth else: - runlen = image_width + runlen = imagewidth - if memmap and self._is_memmappable(rgbonly, colormapped): - result = 
fh.memmap_array(typecode, shape, offset=offsets[0]) + if out == 'memmap' and self.is_memmappable: + with lock: + result = fh.memmap_array(typecode, shape, offset=offsets[0]) elif self.is_contiguous: - fh.seek(offsets[0]) - result = fh.read_array(typecode, product(shape)) - result = result.astype('=' + dtype) + isnative = self.parent.is_native + if out is not None: + isnative = True + out = create_output(out, shape, dtype) + with lock: + fh.seek(offsets[0]) + result = fh.read_array(typecode, product(shape), out=out) + if not isnative: + result = result.astype('=' + dtype.char) if lsb2msb: reverse_bitorder(result) else: - if self.is_contig: - runlen *= self.samples_per_pixel - if bits_per_sample in (8, 16, 32, 64, 128): - if (bits_per_sample * runlen) % 8: + result = create_output(out, shape, dtype) + if self.planarconfig == 1: + runlen *= self.samplesperpixel + if bitspersample in (8, 16, 32, 64, 128): + if (bitspersample * runlen) % 8: raise ValueError("data and sample size mismatch") def unpack(x, typecode=typecode): - if self.predictor == 'float': + if self.predictor == 3: # PREDICTOR.FLOATINGPOINT # the floating point horizontal differencing decoder # needs the raw byte order - typecode = dtype + typecode = dtype.char try: return numpy.fromstring(x, typecode) except ValueError as e: # strips may be missing EOI - warnings.warn("unpack: %s" % e) - xlen = ((len(x) // (bits_per_sample // 8)) * - (bits_per_sample // 8)) + # warnings.warn("unpack: %s" % e) + xlen = ((len(x) // (bitspersample // 8)) * + (bitspersample // 8)) return numpy.fromstring(x[:xlen], typecode) - elif isinstance(bits_per_sample, tuple): + elif isinstance(bitspersample, tuple): def unpack(x): - return unpack_rgb(x, typecode, bits_per_sample) + return unpack_rgb(x, typecode, bitspersample) else: def unpack(x): - return unpack_ints(x, typecode, bits_per_sample, runlen) + return unpack_ints(x, typecode, bitspersample, runlen) - decompress = TIFF_DECOMPESSORS[self.compression] - if self.compression == 'jpeg': - table = self.jpeg_tables if 'jpeg_tables' in self.tags else b'' + decompress = TIFF.DECOMPESSORS[self.compression] + if self.compression == 7: # COMPRESSION.JPEG + if 'JPEGTables' in tags: + table = tags['JPEGTables'].value + else: + table = b'' def decompress(x): return decode_jpeg(x, table, self.photometric) - if self.is_tiled: - result = numpy.empty(shape, dtype) + if istiled: tw, tl, td, pl = 0, 0, 0, 0 - for offset, bytecount in zip(offsets, byte_counts): - fh.seek(offset) - tile = fh.read(bytecount) + for tile in buffered_read(fh, lock, offsets, bytecounts): if lsb2msb: tile = reverse_bitorder(tile) tile = decompress(tile) tile = unpack(tile) try: - tile.shape = tile_shape + tile.shape = tileshape except ValueError: # incomplete tiles; see gdal issue #1179 warnings.warn("invalid tile data") - t = numpy.zeros(tile_shape, dtype).reshape(-1) + t = numpy.zeros(tileshape, dtype).reshape(-1) s = min(tile.size, t.size) t[:s] = tile[:s] - tile = t.reshape(tile_shape) - if self.predictor == 'horizontal': + tile = t.reshape(tileshape) + if self.predictor == 2: # PREDICTOR.HORIZONTAL numpy.cumsum(tile, axis=-2, dtype=dtype, out=tile) - elif self.predictor == 'float': + elif self.predictor == 3: # PREDICTOR.FLOATINGPOINT raise NotImplementedError() - result[0, pl, td:td+tile_depth, - tl:tl+tile_length, tw:tw+tile_width, :] = tile + result[0, pl, td:td+tiledepth, + tl:tl+tilelength, tw:tw+tilewidth, :] = tile del tile - tw += tile_width + tw += tilewidth if tw >= shape[4]: - tw, tl = 0, tl + tile_length + tw, tl = 0, tl + 
tilelength if tl >= shape[3]: - tl, td = 0, td + tile_depth + tl, td = 0, td + tiledepth if td >= shape[2]: td, pl = 0, pl + 1 result = result[..., - :image_depth, :image_length, :image_width, :] + :imagedepth, :imagelength, :imagewidth, :] else: - strip_size = self.rows_per_strip * self.image_width - if self.planar_configuration == 'contig': - strip_size *= self.samples_per_pixel - result = numpy.empty(shape, dtype).reshape(-1) + strip_size = self.rowsperstrip * self.imagewidth + if self.planarconfig == 1: + strip_size *= self.samplesperpixel + result = result.reshape(-1) index = 0 - for offset, bytecount in zip(offsets, byte_counts): - fh.seek(offset) - strip = fh.read(bytecount) + for strip in buffered_read(fh, lock, offsets, bytecounts): if lsb2msb: strip = reverse_bitorder(strip) strip = decompress(strip) @@ -2606,35 +3321,13 @@ class TiffPage(object): result.shape = self._shape - if self.predictor and not (self.is_tiled and not self.is_contiguous): - if self.parent.is_lsm and not self.compression: + if self.predictor != 1 and not (istiled and not self.is_contiguous): + if self.parent.is_lsm and self.compression == 1: pass # work around bug in LSM510 software - elif self.predictor == 'horizontal': + elif self.predictor == 2: # PREDICTOR.HORIZONTAL numpy.cumsum(result, axis=-2, dtype=dtype, out=result) - elif self.predictor == 'float': + elif self.predictor == 3: # PREDICTOR.FLOATINGPOINT result = decode_floats(result) - if colormapped and self.is_indexed: - if self.color_map.shape[1] >= 2**bits_per_sample: - # FluoView and LSM might fail here - result = apply_colormap(result[:, 0:1, :, :, :, 0:1], - self.color_map) - elif rgbonly and self.is_rgb and 'extra_samples' in self.tags: - # return only RGB and first alpha channel if exists - extra_samples = self.extra_samples - if self.tags['extra_samples'].count == 1: - extra_samples = (extra_samples,) - for i, exs in enumerate(extra_samples): - if exs in ('unassalpha', 'assocalpha', 'unspecified'): - if self.is_contig: - result = result[..., [0, 1, 2, 3+i]] - else: - result = result[:, [0, 1, 2, 3+i]] - break - else: - if self.is_contig: - result = result[..., :3] - else: - result = result[:, :3] if squeeze: try: @@ -2643,445 +3336,599 @@ class TiffPage(object): warnings.warn("failed to reshape from %s to %s" % ( str(result.shape), str(self.shape))) - if scale_mdgel and self.parent.is_mdgel: - # MD Gel stores private metadata in the second page - tags = self.parent.pages[1] - if tags.md_file_tag in (2, 128): - scale = tags.md_scale_pixel - scale = scale[0] / scale[1] # rational - result = result.astype('float32') - if tags.md_file_tag == 2: - result **= 2 # squary root data format - result *= scale - if closed: # TODO: file should remain open if an exception occurred above fh.close() return result - @lazyattr - def _byte_counts_offsets(self): - """Return simplified byte_counts and offsets.""" - if 'tile_offsets' in self.tags: - byte_counts = self.tile_byte_counts - offsets = self.tile_offsets - else: - byte_counts = self.strip_byte_counts - offsets = self.strip_offsets - - j = 0 - for i, (b, o) in enumerate(zip(byte_counts, offsets)): - if b > 0 and o > 0: - if i > j: - byte_counts[j] = b - offsets[j] = o - j += 1 - elif b > 0 >= o: - raise ValueError("invalid offset") + def asrgb(self, uint8=False, alpha=None, colormap=None, + dmin=None, dmax=None, *args, **kwargs): + """Return image data as RGB(A). + + Work in progress. 
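+ + A usage sketch (assumes 'page' is a palette-indexed TiffPage): + >>> # rgb = page.asrgb(uint8=True) # color-mapped image as uint8 array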
+ + """ + data = self.asarray(*args, **kwargs) + self = self.keyframe # self or keyframe + photometric = self.photometric + PHOTOMETRIC = TIFF.PHOTOMETRIC + + if photometric == PHOTOMETRIC.PALETTE: + colormap = self.colormap + if (colormap.shape[1] < 2**self.bitspersample or + self.dtype.char not in 'BH'): + raise ValueError("can not apply colormap") + if uint8: + if colormap.max() > 255: + colormap >>= 8 + colormap = colormap.astype('uint8') + if 'S' in self.axes: + data = data[..., 0] if self.planarconfig == 1 else data[0] + data = apply_colormap(data, colormap) + + elif photometric == PHOTOMETRIC.RGB: + if 'ExtraSamples' in self.tags: + if alpha is None: + alpha = TIFF.EXTRASAMPLE + extrasamples = self.extrasamples + if self.tags['ExtraSamples'].count == 1: + extrasamples = (extrasamples,) + for i, exs in enumerate(extrasamples): + if exs in alpha: + if self.planarconfig == 1: + data = data[..., [0, 1, 2, 3+i]] + else: + data = data[:, [0, 1, 2, 3+i]] + break else: - warnings.warn("empty byte count") - if j == 0: - j = 1 + if self.planarconfig == 1: + data = data[..., :3] + else: + data = data[:, :3] + # TODO: convert to uint8 + + elif photometric == PHOTOMETRIC.MINISBLACK: + raise NotImplementedError() + elif photometric == PHOTOMETRIC.MINISWHITE: + raise NotImplementedError() + elif photometric == PHOTOMETRIC.SEPARATED: + raise NotImplementedError() + else: + raise NotImplementedError() + return data - return byte_counts[:j], offsets[:j] + def aspage(self): + return self - def _is_memmappable(self, rgbonly, colormapped): - """Return if page's image data in file can be memory-mapped.""" - return (self.parent.filehandle.is_file and - self.is_contiguous and - (self.bits_per_sample == 8 or self.parent._is_native) and - self.fill_order == 'msb2lsb' and - not self.predictor and - not self.is_chroma_subsampled and - not (rgbonly and 'extra_samples' in self.tags) and - not (colormapped and self.is_indexed)) + @property + def keyframe(self): + return self + + @keyframe.setter + def keyframe(self, index): + return + + @lazyattr + def offsets_bytecounts(self): + """Return simplified offsets and bytecounts.""" + if self.is_contiguous: + offset, byte_count = self.is_contiguous + return [offset], [byte_count] + return clean_offsets_counts(self.dataoffsets, self.databytecounts) @lazyattr def is_contiguous(self): """Return offset and size of contiguous data, else None. - Excludes prediction, fill_order, and colormapping. + Excludes prediction and fill_order. 
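+ + For example, an uncompressed image stored in a single strip yields + (StripOffsets[0], StripByteCounts[0]).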
""" - if self.compression or self.bits_per_sample not in (8, 16, 32, 64): + if (self.compression != 1 + or self.bitspersample not in (8, 16, 32, 64)): return - if self.is_tiled: - if (self.image_width != self.tile_width or - self.image_length % self.tile_length or - self.tile_width % 16 or self.tile_length % 16): + if 'TileWidth' in self.tags: + if (self.imagewidth != self.tilewidth or + self.imagelength % self.tilelength or + self.tilewidth % 16 or self.tilelength % 16): return - if ('image_depth' in self.tags and 'tile_depth' in self.tags and - (self.image_length != self.tile_length or - self.image_depth % self.tile_depth)): + if ('ImageDepth' in self.tags and 'TileDepth' in self.tags and + (self.imagelength != self.tilelength or + self.imagedepth % self.tiledepth)): return - offsets = self.tile_offsets - byte_counts = self.tile_byte_counts - else: - offsets = self.strip_offsets - byte_counts = self.strip_byte_counts + + offsets = self.dataoffsets + bytecounts = self.databytecounts if len(offsets) == 1: - return offsets[0], byte_counts[0] - if self.is_stk or all(offsets[i] + byte_counts[i] == offsets[i+1] or - byte_counts[i+1] == 0 # no data/ignore offset + return offsets[0], bytecounts[0] + if self.is_stk or all((offsets[i] + bytecounts[i] == offsets[i+1] or + bytecounts[i+1] == 0) # no data/ignore offset for i in range(len(offsets)-1)): - return offsets[0], sum(byte_counts) + return offsets[0], sum(bytecounts) - def __getattr__(self, name): - """Return tag value.""" - if name in self.tags: - value = self.tags[name].value - setattr(self, name, value) - return value - raise AttributeError(name) + @lazyattr + def is_final(self): + """Return if page's image data is stored in final form. - def __str__(self): - """Return string containing information about page.""" - s = ', '.join(s for s in ( - 'x'.join(str(i) for i in self.shape), - str(numpy.dtype(self.dtype)), - '%s bit' % str(self.bits_per_sample), - self.photometric if 'photometric' in self.tags else '', - self.compression if self.compression else 'raw', - '|'.join(t[3:] for t in ( - 'is_stk', 'is_lsm', 'is_nih', 'is_ome', 'is_imagej', - 'is_micromanager', 'is_fluoview', 'is_mdgel', 'is_mediacy', - 'is_scn', 'is_sgi', 'is_reduced', 'is_tiled', - 'is_contiguous') if getattr(self, t))) if s) - return "Page %i: %s" % (self.index, s) - - def info(self): - """Return string with detailed information about page.""" - result = ['\n'.join((str(self), str(self.tags)))] - if self.is_indexed: - result.append('Color Map: %s, %s' % (self.color_map.shape, - self.color_map.dtype)) - for attr in ('cz_lsm_info', 'cz_lsm_scan_info', 'uic_tags', - 'mm_header', 'imagej_tags', 'micromanager_metadata', - 'nih_image_header', 'tvips_metadata', 'sfeg_metadata', - 'helios_metadata', 'sem_metadata'): - if hasattr(self, attr): - result.append('\n'.join(( - attr.upper(), str(Record(getattr(self, attr)))))) - if self.is_micromanager: - result.append('MICROMANAGER_FILE_METADATA\n%s' % - Record(self.micromanager_metadata)) - return '\n\n'.join(result) + Excludes byte-swapping. 
+ + """ + return (self.is_contiguous and self.fillorder == 1 and + self.predictor == 1 and not self.is_chroma_subsampled) @lazyattr - def uic_tags(self): - """Consolidate UIC tags.""" - if not self.is_stk: - raise AttributeError("uic_tags") + def is_memmappable(self): + """Return if page's image data in file can be memory-mapped.""" + return (self.parent.filehandle.is_file and self.is_final and + (self.bitspersample == 8 or self.parent.is_native) and + self.is_contiguous[0] % self.dtype.itemsize == 0) + + def __str__(self, detail=0): + """Return string containing information about page.""" + if self.keyframe != self: + return TiffFrame.__str__(self, detail) + attr = '' + for name in ('memmappable', 'final', 'contiguous'): + attr = getattr(self, 'is_'+name) + if attr: + attr = name.upper() + break + info = ' '.join(s for s in ( + 'x'.join(str(i) for i in self.shape), + '%s%s' % (TIFF.SAMPLEFORMAT(self.sampleformat).name, + self.bitspersample), + '|'.join(i for i in ( + TIFF.PHOTOMETRIC(self.photometric).name, + 'TILED' if self.is_tiled else '', + self.compression.name if self.compression != 1 else '', + self.planarconfig.name if self.planarconfig != 1 else '', + self.predictor.name if self.predictor != 1 else '', + self.fillorder.name if self.fillorder != 1 else '') + if i), # noqa + attr, + '|'.join((f.upper() for f in self.flags)) + ) if s) # noqa + info = "TiffPage %i @%i %s" % (self.index, self.offset, info) + if detail <= 0: + return info + info = [info] tags = self.tags - result = Record() - result.number_planes = tags['uic2tag'].count - if 'image_description' in tags: - result.plane_descriptions = self.image_description.split(b'\x00') - if 'uic1tag' in tags: - result.update(tags['uic1tag'].value) - if 'uic3tag' in tags: - result.update(tags['uic3tag'].value) # wavelengths - if 'uic4tag' in tags: - result.update(tags['uic4tag'].value) # override uic1 tags - uic2tag = tags['uic2tag'].value - result.z_distance = uic2tag.z_distance - result.time_created = uic2tag.time_created - result.time_modified = uic2tag.time_modified - try: - result.datetime_created = [ - julian_datetime(*dt) for dt in - zip(uic2tag.date_created, uic2tag.time_created)] - result.datetime_modified = [ - julian_datetime(*dt) for dt in - zip(uic2tag.date_modified, uic2tag.time_modified)] - except ValueError as e: - warnings.warn("uic_tags: %s" % e) - return result + tlines = [] + vlines = [] + for tag in sorted(tags.values(), key=lambda x: x.code): + value = tag.__str__() + tlines.append(value[:TIFF.PRINT_LINE_WIDTH].lstrip()) + if detail > 1 and len(value) > TIFF.PRINT_LINE_WIDTH: + vlines.append("%s\n%s" % (tag.name.upper(), + pformat(tag.value))) + info.append('\n'.join(tlines)) + if detail > 1: + info.append('\n\n'.join(vlines)) + return '\n\n'.join(info) @lazyattr - def imagej_tags(self): - """Consolidate ImageJ metadata.""" - if not self.is_imagej: - raise AttributeError("imagej_tags") - result = imagej_description_dict(self.is_imagej) - if 'imagej_metadata' in self.tags: - try: - result.update(imagej_metadata( - self.tags['imagej_metadata'].value, - self.tags['imagej_byte_counts'].value, - self.parent.byteorder)) - except Exception as e: - warnings.warn(str(e)) - return Record(result) + def flags(self): + """Return set of flags.""" + return set((name.lower() for name in sorted(TIFF.FILE_FLAGS) + if getattr(self, 'is_' + name))) - @lazyattr - def is_rgb(self): - """Page contains a RGB image.""" - return ('photometric' in self.tags and - self.tags['photometric'].value == 2) + @property + def ndim(self): + """Return 
number of array dimensions.""" + return len(self.shape) - @lazyattr - def is_contig(self): - """Page contains contiguous image.""" - if 'planar_configuration' in self.tags: - return self.tags['planar_configuration'].value == 1 - return True + @property + def size(self): + """Return number of elements in array.""" + return product(self.shape) @lazyattr - def is_indexed(self): - """Page contains indexed, palette-colored image. + def andor_tags(self): + """Return consolidated metadata from Andor tags as dict. - Disable color-mapping for OME, LSM, STK, and ImageJ hyperstacks. + Remove Andor tags from self.tags. """ - if (self.is_stk or self.is_lsm or self.parent.is_lsm or - self.is_ome or self.parent.is_ome): - return False - if self.is_imagej: - if b'mode' in self.is_imagej: - return False - elif self.parent.is_imagej: - return self.parent.is_indexed - return ('photometric' in self.tags and - self.tags['photometric'].value == 3) + if not self.is_andor: + return + tags = self.tags + result = {'Id': tags['AndorId'].value} + for tag in list(self.tags.values()): + code = tag.code + if not 4864 < code < 5031: + continue + value = tag.value + name = tag.name[5:] if len(tag.name) > 5 else tag.name + result[name] = value + del tags[tag.name] + return result @lazyattr + def epics_tags(self): + """Return consolidated metadata from EPICS areaDetector tags as dict. + + Remove areaDetector tags from self.tags. + + """ + # TODO: obtain test file + if not self.is_epics: + return + result = {} + tags = self.tags + for tag in list(self.tags.values()): + code = tag.code + if not 65000 < code < 65500: + continue + value = tag.value + if code == 65000: + result['timeStamp'] = float(value) + elif code == 65001: + result['uniqueID'] = int(value) + elif code == 65002: + result['epicsTS'] = int(value) + elif code == 65003: + result['epicsTS'] = int(value) + else: + key, value = value.split(':') + result[key] = astype(value) + del tags[tag.name] + return result + + @property def is_tiled(self): """Page contains tiled image.""" - return 'tile_width' in self.tags + return 'TileWidth' in self.tags - @lazyattr + @property def is_reduced(self): """Page is reduced image of another image.""" - return ('new_subfile_type' in self.tags and - self.tags['new_subfile_type'].value & 1) + return ('NewSubfileType' in self.tags and + self.tags['NewSubfileType'].value & 1) - @lazyattr + @property def is_chroma_subsampled(self): """Page contains chroma subsampled image.""" - return ('ycbcr_subsampling' in self.tags and - self.tags['ycbcr_subsampling'].value != (1, 1)) + return ('YCbCrSubSampling' in self.tags and + self.tags['YCbCrSubSampling'].value != (1, 1)) @lazyattr - def is_mdgel(self): - """Page contains md_file_tag tag.""" - return 'md_file_tag' in self.tags + def is_imagej(self): + """Return ImageJ description if exists, else None.""" + for description in (self.description, self.description1): + if not description: + return + if description[:7] == 'ImageJ=': + return description @lazyattr + def is_shaped(self): + """Return description containing array shape if exists, else None.""" + for description in (self.description, self.description1): + if not description: + return + if description[:1] == '{' and '"shape":' in description: + return description + if description[:6] == 'shape=': + return description + + @property + def is_mdgel(self): + """Page contains MDFileTag tag.""" + return 'MDFileTag' in self.tags + + @property def is_mediacy(self): """Page contains Media Cybernetics Id tag.""" - return ('mc_id' in self.tags and - 
self.tags['mc_id'].value.startswith(b'MC TIFF')) + return ('MC_Id' in self.tags and + self.tags['MC_Id'].value[:7] == b'MC TIFF') - @lazyattr + @property def is_stk(self): """Page contains UIC2Tag tag.""" - return 'uic2tag' in self.tags + return 'UIC2tag' in self.tags - @lazyattr + @property def is_lsm(self): - """Page contains LSM CZ_LSM_INFO tag.""" - return 'cz_lsm_info' in self.tags + """Page contains CZ_LSMINFO tag.""" + return 'CZ_LSMINFO' in self.tags - @lazyattr + @property def is_fluoview(self): """Page contains FluoView MM_STAMP tag.""" - return 'mm_stamp' in self.tags + return 'MM_Stamp' in self.tags - @lazyattr + @property def is_nih(self): """Page contains NIH image header.""" - return 'nih_image_header' in self.tags + return 'NIHImageHeader' in self.tags - @lazyattr + @property def is_sgi(self): """Page contains SGI image and tile depth tags.""" - return 'image_depth' in self.tags and 'tile_depth' in self.tags + return 'ImageDepth' in self.tags and 'TileDepth' in self.tags - @lazyattr + @property def is_vista(self): """Software tag is 'ISS Vista'.""" - return ('software' in self.tags and - self.tags['software'].value == b'ISS Vista') + return self.software == 'ISS Vista' - @lazyattr + @property + def is_metaseries(self): + """Page contains MDS MetaSeries metadata in ImageDescription tag.""" + if self.index > 1 or self.software != 'MetaSeries': + return False + d = self.description + return d.startswith('<MetaData>') and d.endswith('</MetaData>') + + @property def is_ome(self): - """Page contains OME-XML in image_description tag.""" - if 'image_description' not in self.tags: + """Page contains OME-XML in ImageDescription tag.""" + if self.index > 1 or not self.description: return False - d = self.tags['image_description'].value.strip() - return d.startswith(b'<?xml version=') and d.endswith(b'</OME>') + d = self.description + return d[:14] == '<?xml version=' and d[-6:] == '</OME>' - @lazyattr + @property def is_scn(self): - """Page contains Leica SCN XML in image_description tag.""" - if 'image_description' not in self.tags: + """Page contains Leica SCN XML in ImageDescription tag.""" + if self.index > 1 or not self.description: return False - d = self.tags['image_description'].value.strip() - return d.startswith(b'<?xml version=') and d.endswith(b'</scn>') - - @lazyattr - def is_shaped(self): - """Return description containing shape if exists, else None.""" - if 'image_description' in self.tags: - description = self.tags['image_description'].value - if b'"shape":' in description or b'shape=(' in description: - return description - if 'image_description_1' in self.tags: - description = self.tags['image_description_1'].value - if b'"shape":' in description or b'shape=(' in description: - return description - - @lazyattr - def is_imagej(self): - """Return ImageJ description if exists, else None.""" - if 'image_description' in self.tags: - description = self.tags['image_description'].value - if description.startswith(b'ImageJ='): - return description - if 'image_description_1' in self.tags: - # Micromanager - description = self.tags['image_description_1'].value - if description.startswith(b'ImageJ='): - return description + d = self.description + return d[:14] == '<?xml version=' and d[-6:] == '</scn>' - @lazyattr + @property def is_micromanager(self): """Page contains Micro-Manager metadata.""" - return 'micromanager_metadata' in self.tags + return 'MicroManagerMetadata' in self.tags - @lazyattr + @property + def is_andor(self): + """Page contains Andor Technology tags.""" + 
return 'AndorId' in self.tags + + @property + def is_pilatus(self): + """Page contains Pilatus tags.""" + return (self.software[:8] == 'TVX TIFF' and + self.description[:2] == '# ') + + @property + def is_epics(self): + """Page contains EPICS areaDetector tags.""" + return self.description == 'EPICS areaDetector' + + @property def is_tvips(self): """Page contains TVIPS metadata.""" - return 'tvips_metadata' in self.tags + return 'TVIPS' in self.tags - @lazyattr + @property def is_fei(self): """Page contains SFEG or HELIOS metadata.""" - return 'sfeg_metadata' in self.tags or 'helios_metadata' in self.tags + return 'FEI_SFEG' in self.tags or 'FEI_HELIOS' in self.tags - @lazyattr + @property def is_sem(self): """Page contains Zeiss SEM metadata.""" - return 'sem_metadata' in self.tags + return 'CZ_SEM' in self.tags + + @property + def is_svs(self): + """Page contains Aperio metadata.""" + return self.description[:20] == 'Aperio Image Library' + + @property + def is_scanimage(self): + """Page contains ScanImage metadata.""" + return (self.description[:12] == 'state.config' or + self.software[:22] == 'SI.LINE_FORMAT_VERSION') + + +class TiffFrame(object): + """Lightweight TIFF image file directory (IFD). + + Only a limited number of tag values are read from file, e.g. StripOffsets, + and StripByteCounts. Other tag values are assumed to be identical with a + specified TiffPage instance, the keyframe. + + This is intended to reduce resource usage and speed up reading data from + file, not for introspection of metadata. + + Not compatible with Python 2. + + """ + __slots__ = ('keyframe', 'parent', 'index', 'offset', + 'dataoffsets', 'databytecounts') + + is_mdgel = False + tags = {} + + def __init__(self, parent, index, keyframe): + """Read specified tags from file. + + The file handle position must be at the offset to a valid IFD. 
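+ + Instances are normally created internally while reading pages when a + keyframe TiffPage is available; they are not meant to be instantiated + directly.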
+ + """ + self.keyframe = keyframe + self.parent = parent + self.index = index + + unpack = struct.unpack + fh = parent.filehandle + self.offset = fh.tell() + try: + tagno = unpack(parent.tagnoformat, fh.read(parent.tagnosize))[0] + if tagno > 4096: + raise ValueError("suspicious number of tags") + except Exception: + raise ValueError("corrupted page list at offset %i" % self.offset) + + # tags = {} + tagcodes = {273, 279, 324, 325} # TIFF.FRAME_TAGS + tagsize = parent.tagsize + codeformat = parent.tagformat1[:2] + + data = fh.read(tagsize * tagno) + index = -tagsize + for _ in range(tagno): + index += tagsize + code = unpack(codeformat, data[index:index+2])[0] + if code not in tagcodes: + continue + try: + tag = TiffTag(parent, data[index:index+tagsize]) + except TiffTag.Error as e: + warnings.warn(str(e)) + continue + if code == 273 or code == 324: + setattr(self, 'dataoffsets', tag.value) + elif code == 279 or code == 325: + setattr(self, 'databytecounts', tag.value) + # elif code == 270: + # tagname = tag.name + # if tagname not in tags: + # tags[tagname] = bytes2str(tag.value) + # elif 'ImageDescription1' not in tags: + # tags['ImageDescription1'] = bytes2str(tag.value) + # else: + # tags[tag.name] = tag.value + + def aspage(self): + """Return TiffPage from file.""" + self.parent.filehandle.seek(self.offset) + return TiffPage(self.parent, index=self.index, keyframe=None) + + def asarray(self, *args, **kwargs): + """Read image data from file and return as numpy array.""" + # TODO: fix TypeError on Python 2 + # "TypeError: unbound method asarray() must be called with TiffPage + # instance as first argument (got TiffFrame instance instead)" + kwargs['validate'] = False + return TiffPage.asarray(self, *args, **kwargs) + + def asrgb(self, *args, **kwargs): + """Read image data from file and return RGB image as numpy array.""" + kwargs['validate'] = False + return TiffPage.asrgb(self, *args, **kwargs) + + @property + def offsets_bytecounts(self): + """Return simplified offsets and bytecounts.""" + if self.keyframe.is_contiguous: + return self.dataoffsets[:1], self.keyframe.is_contiguous[1:] + return clean_offsets_counts(self.dataoffsets, self.databytecounts) + + @property + def is_contiguous(self): + """Return offset and size of contiguous data, else None.""" + if self.keyframe.is_contiguous: + return self.dataoffsets[0], self.keyframe.is_contiguous[1] + + @property + def is_memmappable(self): + """Return if page's image data in file can be memory-mapped.""" + return self.keyframe.is_memmappable + + def __getattr__(self, name): + """Return attribute from keyframe.""" + if name in TIFF.FRAME_ATTRS: + return getattr(self.keyframe, name) + raise AttributeError("'%s' object has no attribute '%s'" % + (self.__class__.__name__, name)) + + def __str__(self, detail=0): + """Return string containing information about frame.""" + info = ' '.join(s for s in ( + 'x'.join(str(i) for i in self.shape), + str(self.dtype))) + return "TiffFrame %i @%i %s" % (self.index, self.offset, info) class TiffTag(object): - """A TIFF tag structure. + """TIFF tag structure. Attributes ---------- name : string - Attribute name of tag. + Name of tag. code : int Decimal code of tag. dtype : str - Datatype of tag data. One of TIFF_DATA_TYPES. + Datatype of tag data. One of TIFF DATA_FORMATS. count : int Number of values. value : various types Tag data as Python object. - value_offset : int - Location of value in file, if any. + valueoffset : int + Location of value in file. All attributes are read-only. 
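+ + A lookup sketch (assumes 'page' is a TiffPage): + >>> # tag = page.tags['ImageWidth'] # TiffTag with code 256 + >>> # width = tag.value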
""" - __slots__ = ('code', 'name', 'count', 'dtype', 'value', 'value_offset', - '_offset', '_value', '_type') + __slots__ = ('code', 'count', 'dtype', 'value', 'valueoffset') class Error(Exception): pass - def __init__(self, arg, **kwargs): - """Initialize instance from file or arguments.""" - self._offset = None - if hasattr(arg, '_fh'): - self._fromfile(arg, **kwargs) - else: - self._fromdata(arg, **kwargs) - - def _fromdata(self, code, dtype, count, value, name=None): - """Initialize instance from arguments.""" - self.code = int(code) - self.name = name if name else str(code) - self.dtype = TIFF_DATA_TYPES[dtype] - self.count = int(count) - self.value = value - self._value = value - self._type = dtype - - def _fromfile(self, parent): - """Read tag structure from open file. Advance file cursor.""" + def __init__(self, parent, tagheader, **kwargs): + """Initialize instance from tag header.""" fh = parent.filehandle byteorder = parent.byteorder - self._offset = fh.tell() - self.value_offset = self._offset + parent.offset_size + 4 - - fmt, size = {4: ('HHI4s', 12), 8: ('HHQ8s', 20)}[parent.offset_size] - data = fh.read(size) - code, dtype = struct.unpack(byteorder + fmt[:2], data[:4]) - count, value = struct.unpack(byteorder + fmt[2:], data[4:]) - self._value = value - self._type = dtype - - if code in TIFF_TAGS: - name, _, _, cout_, _ = TIFF_TAGS[code] - if cout_ and cout_ != count: - count = cout_ - warnings.warn("incorrect count for tag '%s'" % name) - elif code in CUSTOM_TAGS: - name = CUSTOM_TAGS[code][0] - else: - name = str(code) + unpack = struct.unpack + offsetsize = parent.offsetsize + + self.valueoffset = fh.tell() + offsetsize + 4 + code, dtype = unpack(parent.tagformat1, tagheader[:4]) + count, value = unpack(parent.tagformat2, tagheader[4:]) try: - dtype = TIFF_DATA_TYPES[self._type] + dtype = TIFF.DATA_FORMATS[dtype] except KeyError: - raise TiffTag.Error("unknown tag data type %i" % self._type) + raise TiffTag.Error("unknown tag data type %i" % dtype) - fmt = '%s%i%s' % (byteorder, count*int(dtype[0]), dtype[1]) + fmt = '%s%i%s' % (byteorder, count * int(dtype[0]), dtype[1]) size = struct.calcsize(fmt) - if size > parent.offset_size or code in CUSTOM_TAGS: - pos = fh.tell() - tof = {4: 'I', 8: 'Q'}[parent.offset_size] - self.value_offset = offset = struct.unpack(byteorder+tof, value)[0] - if offset < 0 or offset > parent.filehandle.size: - raise TiffTag.Error("corrupt file - invalid tag value offset") - elif offset < 4: - raise TiffTag.Error("corrupt value offset for tag %i" % code) + if size > offsetsize or code in TIFF.TAG_READERS: + self.valueoffset = offset = unpack(parent.offsetformat, value)[0] + if offset < 8 or offset > fh.size - size: + raise TiffTag.Error("invalid tag value offset") + # if offset % 2: + # warnings.warn("tag value does not begin on word boundary") fh.seek(offset) - if code in CUSTOM_TAGS: - readfunc = CUSTOM_TAGS[code][1] - value = readfunc(fh, byteorder, dtype, count) - if isinstance(value, dict): # numpy.core.records.record - value = Record(value) - elif code in TIFF_TAGS or dtype[-1] == 's': - value = struct.unpack(fmt, fh.read(size)) + if code in TIFF.TAG_READERS: + readfunc = TIFF.TAG_READERS[code] + value = readfunc(fh, byteorder, dtype, count, offsetsize) + elif code in TIFF.TAGS or dtype[-1] == 's': + value = unpack(fmt, fh.read(size)) else: - value = read_numpy(fh, byteorder, dtype, count) - fh.seek(pos) + value = read_numpy(fh, byteorder, dtype, count, offsetsize) else: - value = struct.unpack(fmt, value[:size]) - - if code not in 
CUSTOM_TAGS and code not in ( - 273, 279, 324, 325, 530, 531): - # scalar value if not strip/tile offsets/byte_counts or subsampling - if len(value) == 1: - value = value[0] + value = unpack(fmt, value[:size]) - if (dtype.endswith('s') and isinstance(value, bytes) and - self._type != 7): + process = code not in TIFF.TAG_READERS and code not in TIFF.TAG_TUPLE + if process and dtype[-1] == 's' and isinstance(value[0], bytes): # TIFF ASCII fields can contain multiple strings, # each terminated with a NUL - value = stripascii(value) + value = bytes2str(stripascii(value[0]).strip()) + else: + if code in TIFF.TAG_ENUM: + t = TIFF.TAG_ENUM[code] + try: + value = tuple(t(v) for v in value) + except ValueError as e: + warnings.warn(str(e)) + if process: + if len(value) == 1: + value = value[0] self.code = code - self.name = name self.dtype = dtype self.count = count self.value = value + @property + def name(self): + return TIFF.TAGS.get(self.code, str(self.code)) + def _fix_lsm_bitspersample(self, parent): """Correct LSM bitspersample tag. @@ -3092,20 +3939,30 @@ class TiffTag(object): if self.code == 258 and self.count == 2: # TODO: test this case; need example file warnings.warn("correcting LSM bitspersample tag") - fh = parent.filehandle - tof = {4: '<I', 8: '<Q'}[parent.offset_size] - self.value_offset = struct.unpack(tof, self._value)[0] - fh.seek(self.value_offset) - self.value = struct.unpack("<HH", fh.read(4)) - - def as_str(self): - """Return value as human readable string.""" - return ((str(self.value).split('\n', 1)[0]) if (self._type != 7) - else '<undefined>') + tof = parent.offsetformat[parent.offsetsize] + self.valueoffset = struct.unpack(tof, self._value)[0] + parent.filehandle.seek(self.valueoffset) + self.value = struct.unpack("<HH", parent.filehandle.read(4)) def __str__(self): """Return string containing information about tag.""" - return ' '.join(str(getattr(self, s)) for s in self.__slots__) + if self.code in TIFF.TAG_ENUM: + if self.count == 1: + value = TIFF.TAG_ENUM[self.code](self.value).name + else: + value = tuple(v.name for v in self.value) + elif isinstance(self.value, unicode): + value = pformat(self.value) + value = value.replace(u'\n', u'\\n').replace(u'\r', u'') + value = u'"%s"' % value + else: + value = pformat(self.value, linewidth=False, maxlines=2) + value = str(value).split('\n', 1)[0] + + tcode = "%i%s" % (self.count * int(self.dtype[0]), self.dtype[1]) + line = "TiffTag %i %s %s @%i %s" % ( + self.code, self.name, tcode, self.valueoffset, value) + return line class TiffPageSeries(object): @@ -3125,48 +3982,43 @@ class TiffPageSeries(object): Position of image data in file if memory-mappable, else None. """ - #__slots__ = 'pages', 'shape', 'dtype', 'axes', 'parent' - - def __init__(self, pages, shape, dtype, axes, parent=None): - # TODO? sort pages by page number? + def __init__(self, pages, shape, dtype, axes, + parent=None, name=None, transform=None, stype=None): + """Initialize instance.""" self.index = 0 self.pages = pages self.shape = tuple(shape) self.axes = ''.join(axes) self.dtype = numpy.dtype(dtype) + self.stype = stype if stype else '' + self.name = name if name else '' + self.transform = transform if parent: self.parent = parent - elif len(pages): + elif pages: self.parent = pages[0].parent else: self.parent = None - def asarray(self, memmap=False): - """Return image data from series of TIFF pages as numpy array. - - Parameters - ---------- - memmap : bool - If True, return an array stored in a binary file on disk - if possible. 
- - """ + def asarray(self, out=None): + """Return image data from series of TIFF pages as numpy array.""" if self.parent: - return self.parent.asarray(series=self, memmap=memmap) + result = self.parent.asarray(series=self, out=out) + if self.transform is not None: + result = self.transform(result) + return result @lazyattr def offset(self): - """Return offset to memory-mappable data in page series.""" - if len(self.pages) == 0: + """Return offset to series data in file, if any.""" + if not self.pages: return - rgbonly = False - colormapped = self.pages[0].is_indexed pos = 0 for page in self.pages: if page is None: return - if not page._is_memmappable(rgbonly, colormapped): + if not page.is_final: return if not pos: pos = page.is_contiguous[0] + page.is_contiguous[1] @@ -3175,11 +4027,23 @@ class TiffPageSeries(object): return pos += page.is_contiguous[1] - offset = self.pages[0].is_contiguous[0] - if (pos != offset + product(self.shape) * self.dtype.itemsize and - not self.pages[0].is_imagej): - return - return offset + page = self.pages[0] + offset = page.is_contiguous[0] + if (page.is_imagej or page.is_shaped) and len(self.pages) == 1: + # truncated files + return offset + if pos == offset + product(self.shape) * self.dtype.itemsize: + return offset + + @property + def ndim(self): + """Return number of array dimensions.""" + return len(self.shape) + + @property + def size(self): + """Return number of elements in array.""" + return int(product(self.shape)) def __len__(self): """Return number of TiffPages in series.""" @@ -3195,38 +4059,44 @@ class TiffPageSeries(object): def __str__(self): """Return string with information about series.""" - s = ', '.join(s for s in ( + s = ' '.join(s for s in ( + snipstr("'%s'" % self.name, 20) if self.name else '', 'x'.join(str(i) for i in self.shape), - str(numpy.dtype(self.dtype)), + str(self.dtype), self.axes, - '%i pages' % len(self.pages), - ('memmap-offset=%i' % self.offset) if self.offset else - 'not mem-mappable')) - return 'Series %i: %s' % (self.index, s) + self.stype, + '%i Pages' % len(self.pages), + ('Offset=%i' % self.offset) if self.offset else '') if s) + return 'TiffPageSeries %i %s' % (self.index, s) class TiffSequence(object): - """Sequence of image files. + """Sequence of TIFF files. - The data shape and dtype of all files must match. + The image data in all files must match shape, dtype, etc. Attributes ---------- files : list List of file names. shape : tuple - Shape of image sequence. + Shape of image sequence. Excludes shape of image array. axes : str Labels of axes in shape. Examples -------- - >>> tifs = TiffSequence("test.oif.files/*.tif") - >>> tifs.shape, tifs.axes - ((2, 100), 'CT') + >>> # read image stack from sequence of TIFF files + >>> imsave('temp_C001T001.tif', numpy.random.rand(64, 64)) + >>> imsave('temp_C001T002.tif', numpy.random.rand(64, 64)) + >>> tifs = TiffSequence("temp_C001*.tif") + >>> tifs.shape + (1, 2) + >>> tifs.axes + 'CT' >>> data = tifs.asarray() >>> data.shape - (2, 100, 256, 256) + (1, 2, 64, 64) """ _patterns = { @@ -3259,7 +4129,7 @@ class TiffSequence(object): pattern : str Regular expression pattern that matches axes names and sequence indices in file names. - By default this matches Olympus OIF and Leica TIFF series. + By default, this matches Olympus OIF and Leica TIFF series. 
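+ Axis names and indices must match in pairs of regex groups; a + hypothetical pattern such as r'_(C)(\d+)_(T)(\d+)' would map + '_C001_T0002' to axes 'CT' with indices (1, 2).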
""" if isinstance(files, basestring): @@ -3269,8 +4139,6 @@ class TiffSequence(object): raise ValueError("no files found") if not isinstance(files[0], basestring): raise ValueError("not a file name") - #if not os.path.isfile(files[0]): - # raise ValueError("file not found") self.files = files if hasattr(imread, 'asarray'): @@ -3291,16 +4159,16 @@ class TiffSequence(object): except self.ParseError: self.axes = 'I' self.shape = (len(files),) - self._start_index = (0,) + self._startindex = (0,) self._indices = tuple((i,) for i in range(len(files))) def __str__(self): """Return string with information about image sequence.""" return "\n".join([ self.files[0], - '* files: %i' % len(self.files), - '* axes: %s' % self.axes, - '* shape: %s' % str(self.shape)]) + ' size: %i' % len(self.files), + ' axes: %s' % self.axes, + ' shape: %s' % str(self.shape)]) def __len__(self): return len(self.files) @@ -3314,10 +4182,9 @@ class TiffSequence(object): def close(self): pass - def asarray(self, memmap=False, tempdir=None, *args, **kwargs): - """Read image data from all files and return as single numpy array. + def asarray(self, out=None, *args, **kwargs): + """Read image data from all files and return as numpy array. - If memmap is True, return an array stored in a binary file on disk. The args and kwargs parameters are passed to the imread function. Raise IndexError or ValueError if image shapes do not match. @@ -3325,14 +4192,10 @@ class TiffSequence(object): """ im = self.imread(self.files[0], *args, **kwargs) shape = self.shape + im.shape - if memmap: - with tempfile.NamedTemporaryFile(dir=tempdir) as fh: - result = numpy.memmap(fh, dtype=im.dtype, shape=shape) - else: - result = numpy.zeros(shape, dtype=im.dtype) + result = create_output(out, shape, dtype=im.dtype) result = result.reshape(-1, *im.shape) for index, fname in zip(self._indices, self.files): - index = [i-j for i, j in zip(index, self._start_index)] + index = [i-j for i, j in zip(index, self._startindex)] index = numpy.ravel_multi_index(index, self.shape) im = self.imread(fname, *args, **kwargs) result[index] = im @@ -3361,88 +4224,15 @@ class TiffSequence(object): raise ValueError("axes do not match within the image sequence") indices.append([int(m) for m in matches[1::2] if m]) shape = tuple(numpy.max(indices, axis=0)) - start_index = tuple(numpy.min(indices, axis=0)) - shape = tuple(i-j+1 for i, j in zip(shape, start_index)) + startindex = tuple(numpy.min(indices, axis=0)) + shape = tuple(i-j+1 for i, j in zip(shape, startindex)) if product(shape) != len(self.files): warnings.warn("files are missing. Missing data are zeroed") self.axes = axes.upper() self.shape = shape self._indices = indices - self._start_index = start_index - - -class Record(dict): - """Dictionary with attribute access. - - Can also be initialized with numpy.core.records.record. 
- - """ - __slots__ = () - - def __init__(self, arg=None, **kwargs): - if kwargs: - arg = kwargs - elif arg is None: - arg = {} - try: - dict.__init__(self, arg) - except (TypeError, ValueError): - # numpy records - for i, name in enumerate(arg.dtype.names): - v = arg[i] - self[name] = v if v.dtype.char != 'S' else stripnull(v) - - def __getattr__(self, name): - return self[name] - - def __setattr__(self, name, value): - self.__setitem__(name, value) - - def __str__(self): - """Pretty print Record.""" - s = [] - lists = [] - for k in sorted(self): - try: - if k.startswith('_'): # does not work with byte - continue - except AttributeError: - pass - v = self[k] - if isinstance(v, (list, tuple)) and len(v): - if isinstance(v[0], Record): - lists.append((k, v)) - continue - elif isinstance(v[0], TiffPage): - v = [i.index for i in v if i] - elif isinstance(v, Record): - s.append(("* %s:\n%s" % (k, str(v).replace('*', ' *')))) - continue - s.append( - ("* %s: %s" % (k, str(v))).split("\n", 1)[0] - [:PRINT_LINE_LEN].rstrip()) - for k, v in lists: - l = [] - for i, w in enumerate(v): - l.append("* %s[%i]\n %s" % (k, i, - str(w).replace("\n", "\n "))) - s.append('\n'.join(l)) - return '\n'.join(s) - - -class TiffTags(Record): - """Dictionary of TiffTag with attribute access.""" - - def __str__(self): - """Return string with information about all tags.""" - s = [] - for tag in sorted(self.values(), key=lambda x: x.code): - typecode = "%i%s" % (tag.count * int(tag.dtype[0]), tag.dtype[1]) - line = "* %i %s (%s) %s" % ( - tag.code, tag.name, typecode, tag.as_str()) - s.append(line[:PRINT_LINE_LEN].lstrip()) - return '\n'.join(s) + self._startindex = startindex class FileHandle(object): @@ -3474,7 +4264,7 @@ class FileHandle(object): All attributes are read-only. """ - __slots__ = ('_fh', '_file', '_mode', '_name', '_dir', + __slots__ = ('_fh', '_file', '_mode', '_name', '_dir', '_lock', '_offset', '_size', '_close', 'is_file') def __init__(self, file, mode='rb', name=None, offset=None, size=None): @@ -3490,10 +4280,10 @@ class FileHandle(object): name : str Optional name of file in case 'file' is a binary stream. offset : int - Optional start position of embedded file. By default this is + Optional start position of embedded file. By default, this is the current file position. size : int - Optional size of embedded file. By default this is the number + Optional size of embedded file. By default, this is the number of bytes from the 'offset' to the end of the file. """ @@ -3506,6 +4296,7 @@ class FileHandle(object): self._size = size self._close = True self.is_file = False + self._lock = NullContext() self.open() def open(self): @@ -3598,37 +4389,64 @@ class FileHandle(object): offset=self._offset + offset, shape=shape, order=order) - def read_array(self, dtype, count=-1, sep=""): + def read_array(self, dtype, count=-1, sep="", chunksize=2**25, out=None): """Return numpy array from file. Work around numpy issue #2230, "numpy.fromfile does not accept StringIO object" https://github.com/numpy/numpy/issues/2230. 
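+ + If 'out' is given, data is read in chunks of up to 'chunksize' bytes + and copied into the provided array, which may be a memory-mapped file.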
""" - try: - return numpy.fromfile(self._fh, dtype, count, sep) - except IOError: - if count < 0: - size = self._size - else: - size = count * numpy.dtype(dtype).itemsize - data = self._fh.read(size) - return numpy.fromstring(data, dtype, count, sep) + fh = self._fh + dtype = numpy.dtype(dtype) + size = self._size if count < 0 else count * dtype.itemsize + + if out is None: + try: + return numpy.fromfile(fh, dtype, count, sep) + except IOError: + # ByteIO + data = fh.read(size) + return numpy.fromstring(data, dtype, count, sep) + + # Read data from file in chunks and copy to output array + shape = out.shape + size = min(out.nbytes, size) + out = out.reshape(-1) + index = 0 + while size > 0: + data = fh.read(min(chunksize, size)) + datasize = len(data) + if datasize == 0: + break + size -= datasize + data = numpy.fromstring(data, dtype) + out[index:index+data.size] = data + index += data.size + + if hasattr(out, 'flush'): + out.flush() + return out.reshape(shape) def read_record(self, dtype, shape=1, byteorder=None): """Return numpy record from file.""" + rec = numpy.rec try: - rec = numpy.rec.fromfile(self._fh, dtype, shape, - byteorder=byteorder) + record = rec.fromfile(self._fh, dtype, shape, byteorder=byteorder) except Exception: dtype = numpy.dtype(dtype) if shape is None: shape = self._size // dtype.itemsize size = product(sequence(shape)) * dtype.itemsize data = self._fh.read(size) - return numpy.rec.fromstring(data, dtype, shape, - byteorder=byteorder) - return rec[0] if shape == 1 else rec + record = rec.fromstring(data, dtype, shape, byteorder=byteorder) + return record[0] if shape == 1 else record + + def write_empty(self, size): + """Append size bytes to file. Position must be at end of file.""" + if size < 1: + return + self._fh.seek(size-1, 1) + self._fh.write(b'\x00') def write_array(self, data): """Write numpy array to binary file.""" @@ -3692,20 +4510,1832 @@ class FileHandle(object): def closed(self): return self._fh is None + @property + def lock(self): + return self._lock + + @lock.setter + def lock(self, value): + self._lock = threading.RLock() if value else NullContext() + + +class NullContext(object): + """Null context manager. + + >>> with NullContext(): + ... 
pass + + """ + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + pass + + +class OpenFileCache(object): + """Keep files open.""" + + __slots__ = ('files', 'past', 'lock', 'size') + + def __init__(self, size, lock=None): + """Initialize open file cache.""" + self.past = [] # FIFO of opened files + self.files = {} # refcounts of opened files + self.lock = NullContext() if lock is None else lock + self.size = int(size) + + def open(self, filehandle): + """Re-open file if necessary.""" + with self.lock: + if filehandle in self.files: + self.files[filehandle] += 1 + elif filehandle.closed: + filehandle.open() + self.files[filehandle] = 1 + self.past.append(filehandle) + + def close(self, filehandle): + """Close opened file if no longer used.""" + with self.lock: + if filehandle in self.files: + self.files[filehandle] -= 1 + # trim the file cache + index = 0 + size = len(self.past) + while size > self.size and index < size: + filehandle = self.past[index] + if self.files[filehandle] == 0: + filehandle.close() + del self.files[filehandle] + del self.past[index] + size -= 1 + else: + index += 1 + + def clear(self): + """Close all opened files if not in use.""" + with self.lock: + for filehandle, refcount in list(self.files.items()): + if refcount == 0: + filehandle.close() + del self.files[filehandle] + del self.past[self.past.index(filehandle)] + + +class LazyConst(object): + """Class whose attributes are computed on first access from its methods.""" + def __init__(self, cls): + self._cls = cls + self.__doc__ = getattr(cls, '__doc__') + + def __getattr__(self, name): + func = getattr(self._cls, name) + if not callable(func): + return func + try: + value = func() + except TypeError: + # Python 2 unbound method + value = func.__func__() + setattr(self, name, value) + return value + + +@LazyConst +class TIFF(object): + """Namespace for module constants.""" + + def TAGS(): + # TIFF tag codes and names + return { + 254: 'NewSubfileType', + 255: 'SubfileType', + 256: 'ImageWidth', + 257: 'ImageLength', + 258: 'BitsPerSample', + 259: 'Compression', + 262: 'PhotometricInterpretation', + 263: 'Threshholding', + 264: 'CellWidth', + 265: 'CellLength', + 266: 'FillOrder', + 269: 'DocumentName', + 270: 'ImageDescription', + 271: 'Make', + 272: 'Model', + 273: 'StripOffsets', + 274: 'Orientation', + 277: 'SamplesPerPixel', + 278: 'RowsPerStrip', + 279: 'StripByteCounts', + 280: 'MinSampleValue', + 281: 'MaxSampleValue', + 282: 'XResolution', + 283: 'YResolution', + 284: 'PlanarConfiguration', + 285: 'PageName', + 286: 'XPosition', + 287: 'YPosition', + 288: 'FreeOffsets', + 289: 'FreeByteCounts', + 290: 'GrayResponseUnit', + 291: 'GrayResponseCurve', + 292: 'T4Options', + 293: 'T6Options', + 296: 'ResolutionUnit', + 297: 'PageNumber', + 300: 'ColorResponseUnit', + 301: 'TransferFunction', + 305: 'Software', + 306: 'DateTime', + 315: 'Artist', + 316: 'HostComputer', + 317: 'Predictor', + 318: 'WhitePoint', + 319: 'PrimaryChromaticities', + 320: 'ColorMap', + 321: 'HalftoneHints', + 322: 'TileWidth', + 323: 'TileLength', + 324: 'TileOffsets', + 325: 'TileByteCounts', + 326: 'BadFaxLines', + 327: 'CleanFaxData', + 328: 'ConsecutiveBadFaxLines', + 330: 'SubIFDs', + 332: 'InkSet', + 333: 'InkNames', + 334: 'NumberOfInks', + 336: 'DotRange', + 337: 'TargetPrinter', + 338: 'ExtraSamples', + 339: 'SampleFormat', + 340: 'SMinSampleValue', + 341: 'SMaxSampleValue', + 342: 'TransferRange', + 343: 'ClipPath', + 344: 'XClipPathUnits', + 345: 'YClipPathUnits', + 346: 'Indexed', +
347: 'JPEGTables', + 351: 'OPIProxy', + 400: 'GlobalParametersIFD', + 401: 'ProfileType', + 402: 'FaxProfile', + 403: 'CodingMethods', + 404: 'VersionYear', + 405: 'ModeNumber', + 433: 'Decode', + 434: 'DefaultImageColor', + 435: 'T82Options', + 512: 'JPEGProc', + 513: 'JPEGInterchangeFormat', + 514: 'JPEGInterchangeFormatLength', + 515: 'JPEGRestartInterval', + 517: 'JPEGLosslessPredictors', + 518: 'JPEGPointTransforms', + 519: 'JPEGQTables', + 520: 'JPEGDCTables', + 521: 'JPEGACTables', + 529: 'YCbCrCoefficients', + 530: 'YCbCrSubSampling', + 531: 'YCbCrPositioning', + 532: 'ReferenceBlackWhite', + 559: 'StripRowCounts', + 700: 'XMP', + 4864: 'AndorId', # TODO: Andor Technology 4864 - 5030 + 4869: 'AndorTemperature', + 4876: 'AndorExposureTime', + 4878: 'AndorKineticCycleTime', + 4879: 'AndorAccumulations', + 4881: 'AndorAcquisitionCycleTime', + 4882: 'AndorReadoutTime', + 4884: 'AndorPhotonCounting', + 4885: 'AndorEmDacLevel', + 4890: 'AndorFrames', + 4896: 'AndorHorizontalFlip', + 4897: 'AndorVerticalFlip', + 4898: 'AndorClockwise', + 4899: 'AndorCounterClockwise', + 4904: 'AndorVerticalClockVoltage', + 4905: 'AndorVerticalShiftSpeed', + 4907: 'AndorPreAmpSetting', + 4908: 'AndorCameraSerial', + 4911: 'AndorActualTemperature', + 4912: 'AndorBaselineClamp', + 4913: 'AndorPrescans', + 4914: 'AndorModel', + 4915: 'AndorChipSizeX', + 4916: 'AndorChipSizeY', + 4944: 'AndorBaselineOffset', + 4966: 'AndorSoftwareVersion', + # Private tags + 32781: 'ImageID', + 32932: 'WangAnnotation', + 32995: 'Matteing', + 32996: 'DataType', + 32997: 'ImageDepth', + 32998: 'TileDepth', + 33300: 'ImageFullWidth', + 33301: 'ImageFullLength', + 33302: 'TextureFormat', + 33303: 'TextureWrapModes', + 33304: 'FieldOfViewCotangent', + 33305: 'MatrixWorldToScreen', + 33306: 'MatrixWorldToCamera', + 33421: 'CFARepeatPatternDim', + 33422: 'CFAPattern', + 33432: 'Copyright', + 33445: 'MDFileTag', + 33446: 'MDScalePixel', + 33447: 'MDColorTable', + 33448: 'MDLabName', + 33449: 'MDSampleInfo', + 33450: 'MDPrepDate', + 33451: 'MDPrepTime', + 33452: 'MDFileUnits', + 33550: 'ModelPixelScaleTag', + 33628: 'UIC1tag', # Metamorph Universal Imaging Corp STK + 33629: 'UIC2tag', + 33630: 'UIC3tag', + 33631: 'UIC4tag', + 33723: 'IPTC', + 33918: 'INGRPacketDataTag', + 33919: 'INGRFlagRegisters', + 33920: 'IrasBTransformationMatrix', + 33922: 'ModelTiepointTag', + 34118: 'CZ_SEM', # Zeiss SEM + 34122: 'IPLAB', # number of images + 34264: 'ModelTransformationTag', + 34361: 'MM_Header', + 34362: 'MM_Stamp', + 34363: 'MM_Unknown', + 34377: 'Photoshop', + 34386: 'MM_UserBlock', + 34412: 'CZ_LSMINFO', + 34665: 'ExifIFD', + 34675: 'ICCProfile', + 34680: 'FEI_SFEG', # + 34682: 'FEI_HELIOS', # + 34683: 'FEI_TITAN', # + 34732: 'ImageLayer', + 34735: 'GeoKeyDirectoryTag', + 34736: 'GeoDoubleParamsTag', + 34737: 'GeoAsciiParamsTag', + 34853: 'GPSIFD', + 34908: 'HylaFAXFaxRecvParams', + 34909: 'HylaFAXFaxSubAddress', + 34910: 'HylaFAXFaxRecvTime', + 34911: 'FaxDcs', + 37439: 'StoNits', + 37679: 'MODI_TXT', # Microsoft Office Document Imaging + 37681: 'MODI_POS', + 37680: 'MODI_OLE', + 37706: 'TVIPS', # offset to TemData structure + 37707: 'TVIPS1', + 37708: 'TVIPS2', # same TemData structure as undefined + 37724: 'ImageSourceData', + 40001: 'MC_IpWinScal', # Media Cybernetics + 40100: 'MC_IdOld', + 40965: 'InteroperabilityIFD', + 42112: 'GDAL_METADATA', + 42113: 'GDAL_NODATA', + 43314: 'NIHImageHeader', + 50215: 'OceScanjobDescription', + 50216: 'OceApplicationSelector', + 50217: 'OceIdentificationNumber', + 50218: 
'OceImageLogicCharacteristics',
+            50288: 'MC_Id',  # Media Cybernetics
+            50289: 'MC_XYPosition',
+            50290: 'MC_ZPosition',
+            50291: 'MC_XYCalibration',
+            50292: 'MC_LensCharacteristics',
+            50293: 'MC_ChannelName',
+            50294: 'MC_ExcitationWavelength',
+            50295: 'MC_TimeStamp',
+            50296: 'MC_FrameProperties',
+            50706: 'DNGVersion',
+            50707: 'DNGBackwardVersion',
+            50708: 'UniqueCameraModel',
+            50709: 'LocalizedCameraModel',
+            50710: 'CFAPlaneColor',
+            50711: 'CFALayout',
+            50712: 'LinearizationTable',
+            50713: 'BlackLevelRepeatDim',
+            50714: 'BlackLevel',
+            50715: 'BlackLevelDeltaH',
+            50716: 'BlackLevelDeltaV',
+            50717: 'WhiteLevel',
+            50718: 'DefaultScale',
+            50719: 'DefaultCropOrigin',
+            50720: 'DefaultCropSize',
+            50721: 'ColorMatrix1',
+            50722: 'ColorMatrix2',
+            50723: 'CameraCalibration1',
+            50724: 'CameraCalibration2',
+            50725: 'ReductionMatrix1',
+            50726: 'ReductionMatrix2',
+            50727: 'AnalogBalance',
+            50728: 'AsShotNeutral',
+            50729: 'AsShotWhiteXY',
+            50730: 'BaselineExposure',
+            50731: 'BaselineNoise',
+            50732: 'BaselineSharpness',
+            50733: 'BayerGreenSplit',
+            50734: 'LinearResponseLimit',
+            50735: 'CameraSerialNumber',
+            50736: 'LensInfo',
+            50737: 'ChromaBlurRadius',
+            50738: 'AntiAliasStrength',
+            50739: 'ShadowScale',
+            50740: 'DNGPrivateData',
+            50741: 'MakerNoteSafety',
+            50778: 'CalibrationIlluminant1',
+            50779: 'CalibrationIlluminant2',
+            50780: 'BestQualityScale',
+            50781: 'RawDataUniqueID',
+            50784: 'AliasLayerMetadata',
+            50827: 'OriginalRawFileName',
+            50828: 'OriginalRawFileData',
+            50829: 'ActiveArea',
+            50830: 'MaskedAreas',
+            50831: 'AsShotICCProfile',
+            50832: 'AsShotPreProfileMatrix',
+            50833: 'CurrentICCProfile',
+            50834: 'CurrentPreProfileMatrix',
+            50838: 'IJMetadataByteCounts',
+            50839: 'IJMetadata',
+            51023: 'FibicsXML',  #
+            51123: 'MicroManagerMetadata',
+            65200: 'FlexXML',  #
+            65563: 'PerSample',
+        }
+
+    def TAG_NAMES():
+        return {v: c for c, v in TIFF.TAGS.items()}
+
+    def TAG_READERS():
+        # Map TIFF tag codes to import functions
+        return {
+            320: read_colormap,
+            700: read_bytes,  # read_utf8,
+            34377: read_numpy,
+            33723: read_bytes,
+            34675: read_bytes,
+            33628: read_uic1tag,  # Universal Imaging Corp STK
+            33629: read_uic2tag,
+            33630: read_uic3tag,
+            33631: read_uic4tag,
+            34118: read_cz_sem,  # Carl Zeiss SEM
+            34361: read_mm_header,  # Olympus FluoView
+            34362: read_mm_stamp,
+            34363: read_numpy,  # MM_Unknown
+            34386: read_numpy,  # MM_UserBlock
+            34412: read_cz_lsminfo,  # Carl Zeiss LSM
+            34680: read_fei_metadata,  # S-FEG
+            34682: read_fei_metadata,  # Helios NanoLab
+            37706: read_tvips_header,  # TVIPS EMMENU
+            43314: read_nih_image_header,
+            # 40001: read_bytes,
+            40100: read_bytes,
+            50288: read_bytes,
+            50296: read_bytes,
+            50839: read_bytes,
+            51123: read_json,
+            34665: read_exif_ifd,
+            34853: read_gps_ifd,
+            40965: read_interoperability_ifd
+        }
+
+    def TAG_TUPLE():
+        # Tags whose values must be stored as tuples
+        return frozenset((273, 279, 324, 325, 530, 531))
+
+    def TAG_ATTRIBUTES():
+        # Map tag codes to TiffPage attribute names
+        return {
+            'ImageWidth': 'imagewidth',
+            'ImageLength': 'imagelength',
+            'BitsPerSample': 'bitspersample',
+            'Compression': 'compression',
+            'PlanarConfiguration': 'planarconfig',
+            'FillOrder': 'fillorder',
+            'PhotometricInterpretation': 'photometric',
+            'ColorMap': 'colormap',
+            'ImageDescription': 'description',
+            'ImageDescription1': 'description1',
+            'SamplesPerPixel': 'samplesperpixel',
+            'RowsPerStrip': 'rowsperstrip',
+            'Software': 'software',
+            'Predictor': 'predictor',
+            'TileWidth': 'tilewidth',
+            'TileLength': 'tilelength',
+            'ExtraSamples': 'extrasamples',
+            'SampleFormat': 'sampleformat',
+            'ImageDepth': 'imagedepth',
+            'TileDepth': 'tiledepth',
+        }
+
+    def TAG_ENUM():
+        return {
+            # 254: TIFF.FILETYPE,
+            255: TIFF.OFILETYPE,
+            259: TIFF.COMPRESSION,
+            262: TIFF.PHOTOMETRIC,
+            263: TIFF.THRESHHOLD,
+            266: TIFF.FILLORDER,
+            274: TIFF.ORIENTATION,
+            284: TIFF.PLANARCONFIG,
+            290: TIFF.GRAYRESPONSEUNIT,
+            # 292: TIFF.GROUP3OPT,
+            # 293: TIFF.GROUP4OPT,
+            296: TIFF.RESUNIT,
+            300: TIFF.COLORRESPONSEUNIT,
+            317: TIFF.PREDICTOR,
+            338: TIFF.EXTRASAMPLE,
+            339: TIFF.SAMPLEFORMAT,
+            # 512: TIFF.JPEGPROC,
+            # 531: TIFF.YCBCRPOSITION,
+        }
+
+    def FILETYPE():
+        class FILETYPE(enum.IntFlag):
+            # Python 3.6 only
+            UNDEFINED = 0
+            REDUCEDIMAGE = 1
+            PAGE = 2
+            MASK = 4
+        return FILETYPE
+
+    def OFILETYPE():
+        class OFILETYPE(enum.IntEnum):
+            UNDEFINED = 0
+            IMAGE = 1
+            REDUCEDIMAGE = 2
+            PAGE = 3
+        return OFILETYPE
+
+    def COMPRESSION():
+        class COMPRESSION(enum.IntEnum):
+            NONE = 1  # Uncompressed
+            CCITTRLE = 2  # CCITT 1D
+            CCITT_T4 = 3  # 'T4/Group 3 Fax',
+            CCITT_T6 = 4  # 'T6/Group 4 Fax',
+            LZW = 5
+            OJPEG = 6  # old-style JPEG
+            JPEG = 7
+            ADOBE_DEFLATE = 8
+            JBIG_BW = 9
+            JBIG_COLOR = 10
+            JPEG_99 = 99
+            KODAK_262 = 262
+            NEXT = 32766
+            SONY_ARW = 32767
+            PACKED_RAW = 32769
+            SAMSUNG_SRW = 32770
+            CCIRLEW = 32771
+            SAMSUNG_SRW2 = 32772
+            PACKBITS = 32773
+            THUNDERSCAN = 32809
+            IT8CTPAD = 32895
+            IT8LW = 32896
+            IT8MP = 32897
+            IT8BL = 32898
+            PIXARFILM = 32908
+            PIXARLOG = 32909
+            DEFLATE = 32946
+            DCS = 32947
+            APERIO_JP2000_YCBC = 33003  # Leica Aperio
+            APERIO_JP2000_RGB = 33005  # Leica Aperio
+            JBIG = 34661
+            SGILOG = 34676
+            SGILOG24 = 34677
+            JPEG2000 = 34712
+            NIKON_NEF = 34713
+            JBIG2 = 34715
+            MDI_BINARY = 34718  # 'Microsoft Document Imaging
+            MDI_PROGRESSIVE = 34719  # 'Microsoft Document Imaging
+            MDI_VECTOR = 34720  # 'Microsoft Document Imaging
+            JPEG_LOSSY = 34892
+            LZMA = 34925
+            OPS_PNG = 34933  # Objective Pathology Services
+            OPS_JPEGXR = 34934  # Objective Pathology Services
+            KODAK_DCR = 65000
+            PENTAX_PEF = 65535
+            # def __bool__(self): return self != 1  # Python 3.6 only
+        return COMPRESSION
+
+    def PHOTOMETRIC():
+        class PHOTOMETRIC(enum.IntEnum):
+            MINISWHITE = 0
+            MINISBLACK = 1
+            RGB = 2
+            PALETTE = 3
+            MASK = 4
+            SEPARATED = 5  # CMYK
+            YCBCR = 6
+            CIELAB = 8
+            ICCLAB = 9
+            ITULAB = 10
+            CFA = 32803  # Color Filter Array
+            LOGL = 32844
+            LOGLUV = 32845
+            LINEAR_RAW = 34892
+        return PHOTOMETRIC
+
+    def THRESHHOLD():
+        class THRESHHOLD(enum.IntEnum):
+            BILEVEL = 1
+            HALFTONE = 2
+            ERRORDIFFUSE = 3
+        return THRESHHOLD
+
+    def FILLORDER():
+        class FILLORDER(enum.IntEnum):
+            MSB2LSB = 1
+            LSB2MSB = 2
+        return FILLORDER
+
+    def ORIENTATION():
+        class ORIENTATION(enum.IntEnum):
+            TOPLEFT = 1
+            TOPRIGHT = 2
+            BOTRIGHT = 3
+            BOTLEFT = 4
+            LEFTTOP = 5
+            RIGHTTOP = 6
+            RIGHTBOT = 7
+            LEFTBOT = 8
+        return ORIENTATION
+
+    def PLANARCONFIG():
+        class PLANARCONFIG(enum.IntEnum):
+            CONTIG = 1
+            SEPARATE = 2
+        return PLANARCONFIG
+
+    def GRAYRESPONSEUNIT():
+        class GRAYRESPONSEUNIT(enum.IntEnum):
+            _10S = 1
+            _100S = 2
+            _1000S = 3
+            _10000S = 4
+            _100000S = 5
+        return GRAYRESPONSEUNIT
+
+    def GROUP4OPT():
+        class GROUP4OPT(enum.IntEnum):
+            UNCOMPRESSED = 2
+        return GROUP4OPT
+
+    def RESUNIT():
+        class RESUNIT(enum.IntEnum):
+            NONE = 1
+            INCH = 2
+            CENTIMETER = 3
+            # def __bool__(self): return self != 1  # Python 3.6 only
+        return RESUNIT
+
+    def COLORRESPONSEUNIT():
+        class COLORRESPONSEUNIT(enum.IntEnum):
+            _10S = 1
+            _100S = 2
+            _1000S = 3
+            _10000S = 4
+            _100000S = 5
+        return COLORRESPONSEUNIT
+
+    def PREDICTOR():
+        class PREDICTOR(enum.IntEnum):
+            NONE = 1
+            HORIZONTAL = 2
+            FLOATINGPOINT = 3
+            # def __bool__(self): return self != 1  # Python 3.6 only
+        return PREDICTOR
+
+    def EXTRASAMPLE():
+        class EXTRASAMPLE(enum.IntEnum):
+            UNSPECIFIED = 0
+            ASSOCALPHA = 1
+            UNASSALPHA = 2
+        return EXTRASAMPLE
+
+    def SAMPLEFORMAT():
+        class SAMPLEFORMAT(enum.IntEnum):
+            UINT = 1
+            INT = 2
+            IEEEFP = 3
+            VOID = 4
+            COMPLEXINT = 5
+            COMPLEXIEEEFP = 6
+        return SAMPLEFORMAT
+
+    def DATATYPES():
+        class DATATYPES(enum.IntEnum):
+            NOTYPE = 0
+            BYTE = 1
+            ASCII = 2
+            SHORT = 3
+            LONG = 4
+            RATIONAL = 5
+            SBYTE = 6
+            UNDEFINED = 7
+            SSHORT = 8
+            SLONG = 9
+            SRATIONAL = 10
+            FLOAT = 11
+            DOUBLE = 12
+            IFD = 13
+            UNICODE = 14
+            COMPLEX = 15
+            LONG8 = 16
+            SLONG8 = 17
+            IFD8 = 18
+        return DATATYPES
+
+    def DATA_FORMATS():
+        # Map TIFF DATATYPES to Python struct formats
+        return {
+            1: '1B',   # BYTE 8-bit unsigned integer.
+            2: '1s',   # ASCII 8-bit byte that contains a 7-bit ASCII code;
+                       # the last byte must be NULL (binary zero).
+            3: '1H',   # SHORT 16-bit (2-byte) unsigned integer
+            4: '1I',   # LONG 32-bit (4-byte) unsigned integer.
+            5: '2I',   # RATIONAL Two LONGs: the first represents the numerator
+                       # of a fraction; the second, the denominator.
+            6: '1b',   # SBYTE An 8-bit signed (twos-complement) integer.
+            7: '1p',   # UNDEFINED An 8-bit byte that may contain anything,
+                       # depending on the definition of the field.
+            8: '1h',   # SSHORT A 16-bit (2-byte) signed (twos-complement)
+                       # integer.
+            9: '1i',   # SLONG A 32-bit (4-byte) signed (twos-complement)
+                       # integer.
+            10: '2i',  # SRATIONAL Two SLONGs: the first represents the
+                       # numerator of a fraction, the second the denominator.
+            11: '1f',  # FLOAT Single precision (4-byte) IEEE format.
+            12: '1d',  # DOUBLE Double precision (8-byte) IEEE format.
+            13: '1I',  # IFD unsigned 4 byte IFD offset.
+            # 14: '',  # UNICODE
+            # 15: '',  # COMPLEX
+            16: '1Q',  # LONG8 unsigned 8 byte integer (BigTiff)
+            17: '1q',  # SLONG8 signed 8 byte integer (BigTiff)
+            18: '1Q',  # IFD8 unsigned 8 byte IFD offset (BigTiff)
+        }
+
+    def DATA_DTYPES():
+        # Map numpy dtypes to TIFF DATATYPES
+        return {'B': 1, 's': 2, 'H': 3, 'I': 4, '2I': 5, 'b': 6,
+                'h': 8, 'i': 9, '2i': 10, 'f': 11, 'd': 12, 'Q': 16, 'q': 17}
+
+    def SAMPLE_DTYPES():
+        # Map TIFF SampleFormats and BitsPerSample to numpy dtype
+        return {
+            (1, 1): '?',  # bitmap
+            (1, 2): 'B',
+            (1, 3): 'B',
+            (1, 4): 'B',
+            (1, 5): 'B',
+            (1, 6): 'B',
+            (1, 7): 'B',
+            (1, 8): 'B',
+            (1, 9): 'H',
+            (1, 10): 'H',
+            (1, 11): 'H',
+            (1, 12): 'H',
+            (1, 13): 'H',
+            (1, 14): 'H',
+            (1, 15): 'H',
+            (1, 16): 'H',
+            (1, 17): 'I',
+            (1, 18): 'I',
+            (1, 19): 'I',
+            (1, 20): 'I',
+            (1, 21): 'I',
+            (1, 22): 'I',
+            (1, 23): 'I',
+            (1, 24): 'I',
+            (1, 25): 'I',
+            (1, 26): 'I',
+            (1, 27): 'I',
+            (1, 28): 'I',
+            (1, 29): 'I',
+            (1, 30): 'I',
+            (1, 31): 'I',
+            (1, 32): 'I',
+            (1, 64): 'Q',
+            (2, 8): 'b',
+            (2, 16): 'h',
+            (2, 32): 'i',
+            (2, 64): 'q',
+            (3, 16): 'e',
+            (3, 32): 'f',
+            (3, 64): 'd',
+            (6, 64): 'F',
+            (6, 128): 'D',
+            (1, (5, 6, 5)): 'B',
+        }
+
+    def DECOMPESSORS():
+        decompressors = {
+            None: identityfunc,
+            1: identityfunc,
+            5: decode_lzw,
+            # 7: decode_jpeg,
+            8: zlib.decompress,
+            32946: zlib.decompress,
+            32773: decode_packbits,
+        }
+        if lzma:
+            decompressors[34925] = lzma.decompress
+        return decompressors
+
+    def FRAME_ATTRS():
+        # Attributes that a TiffFrame shares with its keyframe
+        return set('shape ndim size dtype axes is_final'.split())
+
+    def FILE_FLAGS():
+        # TiffFile and TiffPage 'is_\*' attributes
+        exclude = set('reduced final memmappable contiguous '
+                      'chroma_subsampled'.split())
+        return set(a[3:] for a in dir(TiffPage)
+                   if a[:3] == 'is_' and a[3:] not in exclude)
+
+    def FILE_EXTENSIONS():
+        # TIFF file extensions
+        return tuple('tif tiff ome.tif lsm stk '
+                     'gel seq svs bif tf8 tf2 btf'.split())
+
+    def FILEOPEN_FILTER():
+        # String for use in Windows File Open box
+        return [("%s files" % ext.upper(), "*.%s" % ext)
+                for ext in TIFF.FILE_EXTENSIONS] + [("allfiles", "*")]
+
+    def AXES_LABELS():
+        # TODO: is there a standard for character axes labels?
+        axes = {
+            'X': 'width',
+            'Y': 'height',
+            'Z': 'depth',
+            'S': 'sample',  # rgb(a)
+            'I': 'series',  # general sequence, plane, page, IFD
+            'T': 'time',
+            'C': 'channel',  # color, emission wavelength
+            'A': 'angle',
+            'P': 'phase',  # formerly F  # P is Position in LSM!
+            'R': 'tile',  # region, point, mosaic
+            'H': 'lifetime',  # histogram
+            'E': 'lambda',  # excitation wavelength
+            'L': 'exposure',  # lux
+            'V': 'event',
+            'Q': 'other',
+            'M': 'mosaic',  # LSM 6
+        }
+        axes.update(dict((v, k) for k, v in axes.items()))
+        return axes
+
+    def ANDOR_TAGS():
+        # Andor Technology tags #4864 - 5030
+        return set(range(4864, 5030))
+
+    def EXIF_TAGS():
+        return {
+            33434: 'ExposureTime',
+            33437: 'FNumber',
+            34850: 'ExposureProgram',
+            34852: 'SpectralSensitivity',
+            34855: 'ISOSpeedRatings',
+            34856: 'OECF',
+            34858: 'TimeZoneOffset',
+            34859: 'SelfTimerMode',
+            34864: 'SensitivityType',
+            34865: 'StandardOutputSensitivity',
+            34866: 'RecommendedExposureIndex',
+            34867: 'ISOSpeed',
+            34868: 'ISOSpeedLatitudeyyy',
+            34869: 'ISOSpeedLatitudezzz',
+            36864: 'ExifVersion',
+            36867: 'DateTimeOriginal',
+            36868: 'DateTimeDigitized',
+            36873: 'GooglePlusUploadCode',
+            36880: 'OffsetTime',
+            36881: 'OffsetTimeOriginal',
+            36882: 'OffsetTimeDigitized',
+            37121: 'ComponentsConfiguration',
+            37122: 'CompressedBitsPerPixel',
+            37377: 'ShutterSpeedValue',
+            37378: 'ApertureValue',
+            37379: 'BrightnessValue',
+            37380: 'ExposureBiasValue',
+            37381: 'MaxApertureValue',
+            37382: 'SubjectDistance',
+            37383: 'MeteringMode',
+            37384: 'LightSource',
+            37385: 'Flash',
+            37386: 'FocalLength',
+            37393: 'ImageNumber',
+            37394: 'SecurityClassification',
+            37395: 'ImageHistory',
+            37396: 'SubjectArea',
+            37500: 'MakerNote',
+            37510: 'UserComment',
+            37520: 'SubsecTime',
+            37521: 'SubsecTimeOriginal',
+            37522: 'SubsecTimeDigitized',
+            37888: 'Temperature',
+            37889: 'Humidity',
+            37890: 'Pressure',
+            37891: 'WaterDepth',
+            37892: 'Acceleration',
+            37893: 'CameraElevationAngle',
+            40960: 'FlashpixVersion',
+            40961: 'ColorSpace',
+            40962: 'PixelXDimension',
+            40963: 'PixelYDimension',
+            40964: 'RelatedSoundFile',
+            41483: 'FlashEnergy',
+            41484: 'SpatialFrequencyResponse',
+            41486: 'FocalPlaneXResolution',
+            41487: 'FocalPlaneYResolution',
+            41488: 'FocalPlaneResolutionUnit',
+            41492: 'SubjectLocation',
+            41493: 'ExposureIndex',
+            41495: 'SensingMethod',
+            41728: 'FileSource',
+            41729: 'SceneType',
+            41730: 'CFAPattern',
+            41985: 'CustomRendered',
+            41986: 'ExposureMode',
+            41987: 'WhiteBalance',
+            41988: 'DigitalZoomRatio',
+            41989: 'FocalLengthIn35mmFilm',
+            41990: 'SceneCaptureType',
+            41991: 'GainControl',
+            41992: 'Contrast',
+            41993: 'Saturation',
+            41994: 'Sharpness',
+            41995: 'DeviceSettingDescription',
+            41996: 'SubjectDistanceRange',
+            42016: 'ImageUniqueID',
+            42032: 'CameraOwnerName',
+            42033: 'BodySerialNumber',
+            42034: 'LensSpecification',
+            42035: 'LensMake',
+            42036: 'LensModel',
+            42037: 'LensSerialNumber',
+            42240: 'Gamma',
+            59932: 'Padding',
+            59933: 'OffsetSchema',
+            65000: 'OwnerName',
+            65001: 'SerialNumber',
+            65002: 'Lens',
+            65100: 'RawFile',
+            65101: 'Converter',
+            65102: 'WhiteBalance',
+            65105: 'Exposure',
+            65106: 'Shadows',
+            65107: 'Brightness',
+            65108: 'Contrast',
+            65109: 'Saturation',
+            65110: 'Sharpness',
+            65111: 'Smoothness',
+            65112: 'MoireFilter',
+        }
+
+    def GPS_TAGS():
+        return {
+            0: 'GPSVersionID',
+            1: 'GPSLatitudeRef',
+            2: 'GPSLatitude',
+            3: 'GPSLongitudeRef',
+            4: 'GPSLongitude',
+            5: 'GPSAltitudeRef',
+            6: 'GPSAltitude',
+            7: 'GPSTimeStamp',
+            8: 'GPSSatellites',
+            9: 'GPSStatus',
+            10: 'GPSMeasureMode',
+            11: 'GPSDOP',
+            12: 'GPSSpeedRef',
+            13: 'GPSSpeed',
+            14: 'GPSTrackRef',
+            15: 'GPSTrack',
+            16: 'GPSImgDirectionRef',
+            17: 'GPSImgDirection',
+            18: 'GPSMapDatum',
+            19: 'GPSDestLatitudeRef',
+            20: 'GPSDestLatitude',
+            21: 'GPSDestLongitudeRef',
+            22: 'GPSDestLongitude',
+            23: 'GPSDestBearingRef',
+            24: 'GPSDestBearing',
+            25: 'GPSDestDistanceRef',
+            26: 'GPSDestDistance',
+            27: 'GPSProcessingMethod',
+            28: 'GPSAreaInformation',
+            29: 'GPSDateStamp',
+            30: 'GPSDifferential',
+            31: 'GPSHPositioningError',
+        }
+
+    def IOP_TAGS():
+        return {
+            1: 'InteroperabilityIndex',
+            2: 'InteroperabilityVersion',
+            4096: 'RelatedImageFileFormat',
+            4097: 'RelatedImageWidth',
+            4098: 'RelatedImageLength',
+        }
+
+    def CZ_LSMINFO():
+        return [
+            ('MagicNumber', 'u4'),
+            ('StructureSize', 'i4'),
+            ('DimensionX', 'i4'),
+            ('DimensionY', 'i4'),
+            ('DimensionZ', 'i4'),
+            ('DimensionChannels', 'i4'),
+            ('DimensionTime', 'i4'),
+            ('DataType', 'i4'),  # DATATYPES
+            ('ThumbnailX', 'i4'),
+            ('ThumbnailY', 'i4'),
+            ('VoxelSizeX', 'f8'),
+            ('VoxelSizeY', 'f8'),
+            ('VoxelSizeZ', 'f8'),
+            ('OriginX', 'f8'),
+            ('OriginY', 'f8'),
+            ('OriginZ', 'f8'),
+            ('ScanType', 'u2'),
+            ('SpectralScan', 'u2'),
+            ('TypeOfData', 'u4'),  # TYPEOFDATA
+            ('OffsetVectorOverlay', 'u4'),
+            ('OffsetInputLut', 'u4'),
+            ('OffsetOutputLut', 'u4'),
+            ('OffsetChannelColors', 'u4'),
+            ('TimeIntervall', 'f8'),
+            ('OffsetChannelDataTypes', 'u4'),
+            ('OffsetScanInformation', 'u4'),  # SCANINFO
+            ('OffsetKsData', 'u4'),
+            ('OffsetTimeStamps', 'u4'),
+            ('OffsetEventList', 'u4'),
+            ('OffsetRoi', 'u4'),
+            ('OffsetBleachRoi', 'u4'),
+            ('OffsetNextRecording', 'u4'),
+            # LSM 2.0 ends here
+            ('DisplayAspectX', 'f8'),
+            ('DisplayAspectY', 'f8'),
+            ('DisplayAspectZ', 'f8'),
+            ('DisplayAspectTime', 'f8'),
+            ('OffsetMeanOfRoisOverlay', 'u4'),
+            ('OffsetTopoIsolineOverlay', 'u4'),
+            ('OffsetTopoProfileOverlay', 'u4'),
+            ('OffsetLinescanOverlay', 'u4'),
+            ('ToolbarFlags', 'u4'),
+            ('OffsetChannelWavelength', 'u4'),
+            ('OffsetChannelFactors', 'u4'),
+            ('ObjectiveSphereCorrection', 'f8'),
+            ('OffsetUnmixParameters', 'u4'),
+            # LSM 3.2, 4.0 end here
+            ('OffsetAcquisitionParameters', 'u4'),
+            ('OffsetCharacteristics', 'u4'),
+            ('OffsetPalette', 'u4'),
+            ('TimeDifferenceX', 'f8'),
+            ('TimeDifferenceY', 'f8'),
+            ('TimeDifferenceZ', 'f8'),
+            ('InternalUse1', 'u4'),
+            ('DimensionP', 'i4'),
+            ('DimensionM', 'i4'),
+            ('DimensionsReserved', '16i4'),
+            ('OffsetTilePositions', 'u4'),
+            ('', '9u4'),  # Reserved
+            ('OffsetPositions', 'u4'),
+            # ('', '21u4'),  # must be 0
+        ]
+
+    def CZ_LSMINFO_READERS():
+        # Import functions for CZ_LSMINFO sub-records
+        # TODO: read more CZ_LSMINFO sub-records
+        return {
+            'ScanInformation': read_lsm_scaninfo,
+            'TimeStamps': read_lsm_timestamps,
+            'EventList': read_lsm_eventlist,
+            'ChannelColors': read_lsm_channelcolors,
+            'Positions': read_lsm_floatpairs,
+            'TilePositions': read_lsm_floatpairs,
+            'VectorOverlay': None,
+            'InputLut': None,
+            'OutputLut': None,
+            'TimeIntervall': None,
+            'ChannelDataTypes': None,
+            'KsData': None,
+            'Roi': None,
+            'BleachRoi': None,
+            'NextRecording': None,
+            'MeanOfRoisOverlay': None,
+            'TopoIsolineOverlay': None,
+            'TopoProfileOverlay': None,
+            'ChannelWavelength': None,
+            'SphereCorrection': None,
+            'ChannelFactors': None,
+            'UnmixParameters': None,
+            'AcquisitionParameters': None,
+            'Characteristics': None,
+        }
+
+    def CZ_LSMINFO_SCANTYPE():
+        # Map CZ_LSMINFO.ScanType to dimension order
+        return {
+            0: 'XYZCT',  # 'Stack' normal x-y-z-scan
+            1: 'XYZCT',  # 'Z-Scan' x-z-plane Y=1
+            2: 'XYZCT',  # 'Line'
+            3: 'XYTCZ',  # 'Time Series Plane' time series x-y XYCTZ ? Z=1
+            4: 'XYZTC',  # 'Time Series z-Scan' time series x-z
+            5: 'XYTCZ',  # 'Time Series Mean-of-ROIs'
+            6: 'XYZTC',  # 'Time Series Stack' time series x-y-z
+            7: 'XYCTZ',  # Spline Scan
+            8: 'XYCZT',  # Spline Plane x-z
+            9: 'XYTCZ',  # Time Series Spline Plane x-z
+            10: 'XYZCT',  # 'Time Series Point' point mode
+        }
+
+    def CZ_LSMINFO_DIMENSIONS():
+        # Map dimension codes to CZ_LSMINFO attribute
+        return {
+            'X': 'DimensionX',
+            'Y': 'DimensionY',
+            'Z': 'DimensionZ',
+            'C': 'DimensionChannels',
+            'T': 'DimensionTime',
+            'P': 'DimensionP',
+            'M': 'DimensionM',
+        }
+
+    def CZ_LSMINFO_DATATYPES():
+        # Description of CZ_LSMINFO.DataType
+        return {
+            0: 'varying data types',
+            1: '8 bit unsigned integer',
+            2: '12 bit unsigned integer',
+            5: '32 bit float',
+        }
+
+    def CZ_LSMINFO_TYPEOFDATA():
+        # Description of CZ_LSMINFO.TypeOfData
+        return {
+            0: 'Original scan data',
+            1: 'Calculated data',
+            2: '3D reconstruction',
+            3: 'Topography height map',
+        }
+
+    def CZ_LSMINFO_SCANINFO_ARRAYS():
+        return {
+            0x20000000: 'Tracks',
+            0x30000000: 'Lasers',
+            0x60000000: 'DetectionChannels',
+            0x80000000: 'IlluminationChannels',
+            0xa0000000: 'BeamSplitters',
+            0xc0000000: 'DataChannels',
+            0x11000000: 'Timers',
+            0x13000000: 'Markers',
+        }
+
+    def CZ_LSMINFO_SCANINFO_STRUCTS():
+        return {
+            # 0x10000000: "Recording",
+            0x40000000: 'Track',
+            0x50000000: 'Laser',
+            0x70000000: 'DetectionChannel',
+            0x90000000: 'IlluminationChannel',
+            0xb0000000: 'BeamSplitter',
+            0xd0000000: 'DataChannel',
+            0x12000000: 'Timer',
+            0x14000000: 'Marker',
+        }
+
+    def CZ_LSMINFO_SCANINFO_ATTRIBUTES():
+        return {
+            # Recording
+            0x10000001: 'Name',
+            0x10000002: 'Description',
+            0x10000003: 'Notes',
+            0x10000004: 'Objective',
+            0x10000005: 'ProcessingSummary',
+            0x10000006: 'SpecialScanMode',
+            0x10000007: 'ScanType',
+            0x10000008: 'ScanMode',
+            0x10000009: 'NumberOfStacks',
+            0x1000000a: 'LinesPerPlane',
+            0x1000000b: 'SamplesPerLine',
+            0x1000000c: 'PlanesPerVolume',
+            0x1000000d: 'ImagesWidth',
+            0x1000000e: 'ImagesHeight',
+            0x1000000f: 'ImagesNumberPlanes',
+            0x10000010: 'ImagesNumberStacks',
+            0x10000011: 'ImagesNumberChannels',
+            0x10000012: 'LinscanXySize',
+            0x10000013: 'ScanDirection',
+            0x10000014: 'TimeSeries',
+            0x10000015: 'OriginalScanData',
+            0x10000016: 'ZoomX',
+            0x10000017: 'ZoomY',
+            0x10000018: 'ZoomZ',
+            0x10000019: 'Sample0X',
+            0x1000001a: 'Sample0Y',
+            0x1000001b: 'Sample0Z',
+            0x1000001c: 'SampleSpacing',
+            0x1000001d: 'LineSpacing',
+            0x1000001e: 'PlaneSpacing',
+            0x1000001f: 'PlaneWidth',
+            0x10000020: 'PlaneHeight',
+            0x10000021: 'VolumeDepth',
+            0x10000023: 'Nutation',
+            0x10000034: 'Rotation',
+            0x10000035: 'Precession',
+            0x10000036: 'Sample0time',
+            0x10000037: 'StartScanTriggerIn',
+            0x10000038: 'StartScanTriggerOut',
+            0x10000039: 'StartScanEvent',
+            0x10000040: 'StartScanTime',
+            0x10000041: 'StopScanTriggerIn',
+            0x10000042: 'StopScanTriggerOut',
+            0x10000043: 'StopScanEvent',
+            0x10000044: 'StopScanTime',
+            0x10000045: 'UseRois',
+            0x10000046: 'UseReducedMemoryRois',
+            0x10000047: 'User',
+            0x10000048: 'UseBcCorrection',
+            0x10000049: 'PositionBcCorrection1',
+            0x10000050: 'PositionBcCorrection2',
+            0x10000051: 'InterpolationY',
+            0x10000052: 'CameraBinning',
+            0x10000053: 'CameraSupersampling',
+            0x10000054: 'CameraFrameWidth',
+            0x10000055: 'CameraFrameHeight',
+            0x10000056: 'CameraOffsetX',
+            0x10000057: 'CameraOffsetY',
+            0x10000059: 'RtBinning',
+            0x1000005a: 'RtFrameWidth',
+            0x1000005b: 'RtFrameHeight',
+            0x1000005c: 'RtRegionWidth',
+            0x1000005d: 'RtRegionHeight',
+            0x1000005e: 'RtOffsetX',
+            0x1000005f: 'RtOffsetY',
+            0x10000060: 'RtZoom',
+            0x10000061: 'RtLinePeriod',
+            0x10000062: 'Prescan',
+            0x10000063: 'ScanDirectionZ',
+            # Track
+            0x40000001: 'MultiplexType',  # 0 After Line; 1 After Frame
+            0x40000002: 'MultiplexOrder',
+            0x40000003: 'SamplingMode',  # 0 Sample; 1 Line Avg; 2 Frame Avg
+            0x40000004: 'SamplingMethod',  # 1 Mean; 2 Sum
+            0x40000005: 'SamplingNumber',
+            0x40000006: 'Acquire',
+            0x40000007: 'SampleObservationTime',
+            0x4000000b: 'TimeBetweenStacks',
+            0x4000000c: 'Name',
+            0x4000000d: 'Collimator1Name',
+            0x4000000e: 'Collimator1Position',
+            0x4000000f: 'Collimator2Name',
+            0x40000010: 'Collimator2Position',
+            0x40000011: 'IsBleachTrack',
+            0x40000012: 'IsBleachAfterScanNumber',
+            0x40000013: 'BleachScanNumber',
+            0x40000014: 'TriggerIn',
+            0x40000015: 'TriggerOut',
+            0x40000016: 'IsRatioTrack',
+            0x40000017: 'BleachCount',
+            0x40000018: 'SpiCenterWavelength',
+            0x40000019: 'PixelTime',
+            0x40000021: 'CondensorFrontlens',
+            0x40000023: 'FieldStopValue',
+            0x40000024: 'IdCondensorAperture',
+            0x40000025: 'CondensorAperture',
+            0x40000026: 'IdCondensorRevolver',
+            0x40000027: 'CondensorFilter',
+            0x40000028: 'IdTransmissionFilter1',
+            0x40000029: 'IdTransmission1',
+            0x40000030: 'IdTransmissionFilter2',
+            0x40000031: 'IdTransmission2',
+            0x40000032: 'RepeatBleach',
+            0x40000033: 'EnableSpotBleachPos',
+            0x40000034: 'SpotBleachPosx',
+            0x40000035: 'SpotBleachPosy',
+            0x40000036: 'SpotBleachPosz',
+            0x40000037: 'IdTubelens',
+            0x40000038: 'IdTubelensPosition',
+            0x40000039: 'TransmittedLight',
+            0x4000003a: 'ReflectedLight',
+            0x4000003b: 'SimultanGrabAndBleach',
+            0x4000003c: 'BleachPixelTime',
+            # Laser
+            0x50000001: 'Name',
+            0x50000002: 'Acquire',
+            0x50000003: 'Power',
+            # DetectionChannel
+            0x70000001: 'IntegrationMode',
+            0x70000002: 'SpecialMode',
+            0x70000003: 'DetectorGainFirst',
+            0x70000004: 'DetectorGainLast',
+            0x70000005: 'AmplifierGainFirst',
+            0x70000006: 'AmplifierGainLast',
+            0x70000007: 'AmplifierOffsFirst',
+            0x70000008: 'AmplifierOffsLast',
+            0x70000009: 'PinholeDiameter',
+            0x7000000a: 'CountingTrigger',
+            0x7000000b: 'Acquire',
+            0x7000000c: 'PointDetectorName',
+            0x7000000d: 'AmplifierName',
+            0x7000000e: 'PinholeName',
+            0x7000000f: 'FilterSetName',
+            0x70000010: 'FilterName',
+            0x70000013: 'IntegratorName',
+            0x70000014: 'ChannelName',
+            0x70000015: 'DetectorGainBc1',
+            0x70000016: 'DetectorGainBc2',
+            0x70000017: 'AmplifierGainBc1',
+            0x70000018: 'AmplifierGainBc2',
+            0x70000019: 'AmplifierOffsetBc1',
+            0x70000020: 'AmplifierOffsetBc2',
+            0x70000021: 'SpectralScanChannels',
+            0x70000022: 'SpiWavelengthStart',
+            0x70000023: 'SpiWavelengthStop',
+            0x70000026: 'DyeName',
+            0x70000027: 'DyeFolder',
+            # IlluminationChannel
+            0x90000001: 'Name',
+            0x90000002: 'Power',
+            0x90000003: 'Wavelength',
+            0x90000004: 'Aquire',
+            0x90000005: 'DetchannelName',
+            0x90000006: 'PowerBc1',
+            0x90000007: 'PowerBc2',
+            # BeamSplitter
+            0xb0000001: 'FilterSet',
+            0xb0000002: 'Filter',
+            0xb0000003: 'Name',
+            # DataChannel
+            0xd0000001: 'Name',
+            0xd0000003: 'Acquire',
+            0xd0000004: 'Color',
+            0xd0000005: 'SampleType',
+            0xd0000006: 'BitsPerSample',
+            0xd0000007: 'RatioType',
+            0xd0000008: 'RatioTrack1',
+            0xd0000009: 'RatioTrack2',
+            0xd000000a: 'RatioChannel1',
+            0xd000000b: 'RatioChannel2',
+            0xd000000c: 'RatioConst1',
+            0xd000000d: 'RatioConst2',
+            0xd000000e: 'RatioConst3',
+            0xd000000f: 'RatioConst4',
+            0xd0000010: 'RatioConst5',
+            0xd0000011: 'RatioConst6',
+            0xd0000012: 'RatioFirstImages1',
+            0xd0000013: 'RatioFirstImages2',
+            0xd0000014: 'DyeName',
+            0xd0000015: 'DyeFolder',
+            0xd0000016: 'Spectrum',
+            0xd0000017: 'Acquire',
+            # Timer
+            0x12000001: 'Name',
+            0x12000002: 'Description',
+            0x12000003: 'Interval',
+            0x12000004: 'TriggerIn',
+            0x12000005: 'TriggerOut',
+            0x12000006: 'ActivationTime',
+            0x12000007: 'ActivationNumber',
+            # Marker
+            0x14000001: 'Name',
+            0x14000002: 'Description',
+            0x14000003: 'TriggerIn',
+            0x14000004: 'TriggerOut',
+        }
+
+    def NIH_IMAGE_HEADER():
+        return [
+            ('FileID', 'a8'),
+            ('nLines', 'i2'),
+            ('PixelsPerLine', 'i2'),
+            ('Version', 'i2'),
+            ('OldLutMode', 'i2'),
+            ('OldnColors', 'i2'),
+            ('Colors', 'u1', (3, 32)),
+            ('OldColorStart', 'i2'),
+            ('ColorWidth', 'i2'),
+            ('ExtraColors', 'u2', (6, 3)),
+            ('nExtraColors', 'i2'),
+            ('ForegroundIndex', 'i2'),
+            ('BackgroundIndex', 'i2'),
+            ('XScale', 'f8'),
+            ('Unused2', 'i2'),
+            ('Unused3', 'i2'),
+            ('UnitsID', 'i2'),  # NIH_UNITS_TYPE
+            ('p1', [('x', 'i2'), ('y', 'i2')]),
+            ('p2', [('x', 'i2'), ('y', 'i2')]),
+            ('CurveFitType', 'i2'),  # NIH_CURVEFIT_TYPE
+            ('nCoefficients', 'i2'),
+            ('Coeff', 'f8', 6),
+            ('UMsize', 'u1'),
+            ('UM', 'a15'),
+            ('UnusedBoolean', 'u1'),
+            ('BinaryPic', 'b1'),
+            ('SliceStart', 'i2'),
+            ('SliceEnd', 'i2'),
+            ('ScaleMagnification', 'f4'),
+            ('nSlices', 'i2'),
+            ('SliceSpacing', 'f4'),
+            ('CurrentSlice', 'i2'),
+            ('FrameInterval', 'f4'),
+            ('PixelAspectRatio', 'f4'),
+            ('ColorStart', 'i2'),
+            ('ColorEnd', 'i2'),
+            ('nColors', 'i2'),
+            ('Fill1', '3u2'),
+            ('Fill2', '3u2'),
+            ('Table', 'u1'),  # NIH_COLORTABLE_TYPE
+            ('LutMode', 'u1'),  # NIH_LUTMODE_TYPE
+            ('InvertedTable', 'b1'),
+            ('ZeroClip', 'b1'),
+            ('XUnitSize', 'u1'),
+            ('XUnit', 'a11'),
+            ('StackType', 'i2'),  # NIH_STACKTYPE_TYPE
+            # ('UnusedBytes', 'u1', 200)
+        ]
+
+    def NIH_COLORTABLE_TYPE():
+        return ('CustomTable', 'AppleDefault', 'Pseudo20', 'Pseudo32',
+                'Rainbow', 'Fire1', 'Fire2', 'Ice', 'Grays', 'Spectrum')
+
+    def NIH_LUTMODE_TYPE():
+        return ('PseudoColor', 'OldAppleDefault', 'OldSpectrum', 'GrayScale',
+                'ColorLut', 'CustomGrayscale')
+
+    def NIH_CURVEFIT_TYPE():
+        return ('StraightLine', 'Poly2', 'Poly3', 'Poly4', 'Poly5', 'ExpoFit',
+                'PowerFit', 'LogFit', 'RodbardFit', 'SpareFit1',
+                'Uncalibrated', 'UncalibratedOD')
+
+    def NIH_UNITS_TYPE():
+        return ('Nanometers', 'Micrometers', 'Millimeters', 'Centimeters',
+                'Meters', 'Kilometers', 'Inches', 'Feet', 'Miles', 'Pixels',
+                'OtherUnits')
+
+    def NIH_STACKTYPE_TYPE():
+        return ('VolumeStack', 'RGBStack', 'MovieStack', 'HSVStack')
+
+    def TVIPS_HEADER_V1():
+        # TVIPS TemData structure from EMMENU Help file
+        return [
+            ('Version', 'i4'),
+            ('CommentV1', 'a80'),
+            ('HighTension', 'i4'),
+            ('SphericalAberration', 'i4'),
+            ('IlluminationAperture', 'i4'),
+            ('Magnification', 'i4'),
+            ('PostMagnification', 'i4'),
+            ('FocalLength', 'i4'),
+            ('Defocus', 'i4'),
+            ('Astigmatism', 'i4'),
+            ('AstigmatismDirection', 'i4'),
+            ('BiprismVoltage', 'i4'),
+            ('SpecimenTiltAngle', 'i4'),
+            ('SpecimenTiltDirection', 'i4'),
+            ('IlluminationTiltDirection', 'i4'),
+            ('IlluminationTiltAngle', 'i4'),
+            ('ImageMode', 'i4'),
+            ('EnergySpread', 'i4'),
+            ('ChromaticAberration', 'i4'),
+            ('ShutterType', 'i4'),
+            ('DefocusSpread', 'i4'),
+            ('CcdNumber', 'i4'),
+            ('CcdSize', 'i4'),
+            ('OffsetXV1', 'i4'),
+            ('OffsetYV1', 'i4'),
+            ('PhysicalPixelSize', 'i4'),
+            ('Binning', 'i4'),
+            ('ReadoutSpeed', 'i4'),
+            ('GainV1', 'i4'),
+            ('SensitivityV1', 'i4'),
+            ('ExposureTimeV1', 'i4'),
+            ('FlatCorrected', 'i4'),
+            ('DeadPxCorrected', 'i4'),
+            ('ImageMean', 'i4'),
+            ('ImageStd', 'i4'),
+            ('DisplacementX', 'i4'),
+            ('DisplacementY', 'i4'),
+            ('DateV1', 'i4'),
+            ('TimeV1', 'i4'),
+            ('ImageMin', 'i4'),
+            ('ImageMax', 'i4'),
+            ('ImageStatisticsQuality', 'i4'),
+        ]
+
+    def TVIPS_HEADER_V2():
+        return [
+            ('ImageName', 'V160'),  # utf16
+            ('ImageFolder', 'V160'),
+            ('ImageSizeX', 'i4'),
+            ('ImageSizeY', 'i4'),
+            ('ImageSizeZ', 'i4'),
+            ('ImageSizeE', 'i4'),
+            ('ImageDataType', 'i4'),
+            ('Date', 'i4'),
+            ('Time', 'i4'),
+            ('Comment', 'V1024'),
+            ('ImageHistory', 'V1024'),
+            ('Scaling', '16f4'),
+            ('ImageStatistics', '16c16'),
+            ('ImageType', 'i4'),
+            ('ImageDisplaType', 'i4'),
+            ('PixelSizeX', 'f4'),  # distance between two px in x, [nm]
+            ('PixelSizeY', 'f4'),  # distance between two px in y, [nm]
+            ('ImageDistanceZ', 'f4'),
+            ('ImageDistanceE', 'f4'),
+            ('ImageMisc', '32f4'),
+            ('TemType', 'V160'),
+            ('TemHighTension', 'f4'),
+            ('TemAberrations', '32f4'),
+            ('TemEnergy', '32f4'),
+            ('TemMode', 'i4'),
+            ('TemMagnification', 'f4'),
+            ('TemMagnificationCorrection', 'f4'),
+            ('PostMagnification', 'f4'),
+            ('TemStageType', 'i4'),
+            ('TemStagePosition', '5f4'),  # x, y, z, a, b
+            ('TemImageShift', '2f4'),
+            ('TemBeamShift', '2f4'),
+            ('TemBeamTilt', '2f4'),
+            ('TilingParameters', '7f4'),  # 0: tiling? 1:x 2:y 3: max x
+                                          # 4: max y 5: overlap x 6: overlap y
+            ('TemIllumination', '3f4'),  # 0: spotsize 1: intensity
+            ('TemShutter', 'i4'),
+            ('TemMisc', '32f4'),
+            ('CameraType', 'V160'),
+            ('PhysicalPixelSizeX', 'f4'),
+            ('PhysicalPixelSizeY', 'f4'),
+            ('OffsetX', 'i4'),
+            ('OffsetY', 'i4'),
+            ('BinningX', 'i4'),
+            ('BinningY', 'i4'),
+            ('ExposureTime', 'f4'),
+            ('Gain', 'f4'),
+            ('ReadoutRate', 'f4'),
+            ('FlatfieldDescription', 'V160'),
+            ('Sensitivity', 'f4'),
+            ('Dose', 'f4'),
+            ('CamMisc', '32f4'),
+            ('FeiMicroscopeInformation', 'V1024'),
+            ('FeiSpecimenInformation', 'V1024'),
+            ('Magic', 'u4'),
+        ]
+
+    def MM_HEADER():
+        # Olympus FluoView MM_Header
+        MM_DIMENSION = [
+            ('Name', 'a16'),
+            ('Size', 'i4'),
+            ('Origin', 'f8'),
+            ('Resolution', 'f8'),
+            ('Unit', 'a64')]
+        return [
+            ('HeaderFlag', 'i2'),
+            ('ImageType', 'u1'),
+            ('ImageName', 'a257'),
+            ('OffsetData', 'u4'),
+            ('PaletteSize', 'i4'),
+            ('OffsetPalette0', 'u4'),
+            ('OffsetPalette1', 'u4'),
+            ('CommentSize', 'i4'),
+            ('OffsetComment', 'u4'),
+            ('Dimensions', MM_DIMENSION, 10),
+            ('OffsetPosition', 'u4'),
+            ('MapType', 'i2'),
+            ('MapMin', 'f8'),
+            ('MapMax', 'f8'),
+            ('MinValue', 'f8'),
+            ('MaxValue', 'f8'),
+            ('OffsetMap', 'u4'),
+            ('Gamma', 'f8'),
+            ('Offset', 'f8'),
+            ('GrayChannel', MM_DIMENSION),
+            ('OffsetThumbnail', 'u4'),
+            ('VoiceField', 'i4'),
+            ('OffsetVoiceField', 'u4'),
+        ]
+
+    def MM_DIMENSIONS():
+        # Map FluoView MM_Header.Dimensions to axes characters
+        return {
+            'X': 'X',
+            'Y': 'Y',
+            'Z': 'Z',
+            'T': 'T',
+            'CH': 'C',
+            'WAVELENGTH': 'C',
+            'TIME': 'T',
+            'XY': 'R',
+            'EVENT': 'V',
+            'EXPOSURE': 'L',
+        }
+
+    def UIC_TAGS():
+        # Map Universal Imaging Corporation MetaMorph internal tag ids to
+        # name and type
+        from fractions import Fraction
+
+        return [
+            ('AutoScale', int),
+            ('MinScale', int),
+            ('MaxScale', int),
+            ('SpatialCalibration', int),
+            ('XCalibration', Fraction),
+            ('YCalibration', Fraction),
+            ('CalibrationUnits', str),
+            ('Name', str),
+            ('ThreshState', int),
+            ('ThreshStateRed', int),
+            ('tagid_10', None),  # undefined
+            ('ThreshStateGreen', int),
+            ('ThreshStateBlue', int),
+            ('ThreshStateLo', int),
+            ('ThreshStateHi', int),
+            ('Zoom', int),
+            ('CreateTime', julian_datetime),
+            ('LastSavedTime', julian_datetime),
+            ('currentBuffer', int),
+            ('grayFit', None),
+            ('grayPointCount', None),
+            ('grayX', Fraction),
+            ('grayY', Fraction),
+            ('grayMin', Fraction),
+            ('grayMax', Fraction),
+            ('grayUnitName', str),
+            ('StandardLUT', int),
+            ('wavelength', int),
+            ('StagePosition', '(%i,2,2)u4'),  # N xy positions as fract
+            ('CameraChipOffset', '(%i,2,2)u4'),  # N xy offsets as fract
+            ('OverlayMask', None),
+            ('OverlayCompress', None),
+            ('Overlay', None),
+            ('SpecialOverlayMask', None),
+            ('SpecialOverlayCompress', None),
+            ('SpecialOverlay', None),
+            ('ImageProperty', read_uic_image_property),
+            ('StageLabel', '%ip'),  # N str
+            ('AutoScaleLoInfo', Fraction),
+            ('AutoScaleHiInfo', Fraction),
+            ('AbsoluteZ', '(%i,2)u4'),  # N fractions
+            ('AbsoluteZValid', '(%i,)u4'),  # N long
+            ('Gamma', 'I'),  # 'I' uses offset
+            ('GammaRed', 'I'),
+            ('GammaGreen', 'I'),
+            ('GammaBlue', 'I'),
+            ('CameraBin', '2I'),
+            ('NewLUT', int),
+            ('ImagePropertyEx', None),
+            ('PlaneProperty', int),
+            ('UserLutTable', '(256,3)u1'),
+            ('RedAutoScaleInfo', int),
+            ('RedAutoScaleLoInfo', Fraction),
+            ('RedAutoScaleHiInfo', Fraction),
+            ('RedMinScaleInfo', int),
+            ('RedMaxScaleInfo', int),
+            ('GreenAutoScaleInfo', int),
+            ('GreenAutoScaleLoInfo', Fraction),
+            ('GreenAutoScaleHiInfo', Fraction),
+            ('GreenMinScaleInfo', int),
+            ('GreenMaxScaleInfo', int),
+            ('BlueAutoScaleInfo', int),
+            ('BlueAutoScaleLoInfo', Fraction),
+            ('BlueAutoScaleHiInfo', Fraction),
+            ('BlueMinScaleInfo', int),
+            ('BlueMaxScaleInfo', int),
+            # ('OverlayPlaneColor', read_uic_overlay_plane_color),
+        ]
+
+    def PILATUS_HEADER():
+        # PILATUS CBF Header Specification, Version 1.4
+        # Map key to [value_indices], type
+        return {
+            'Detector': ([slice(1, None)], str),
+            'Pixel_size': ([1, 4], float),
+            'Silicon': ([3], float),
+            'Exposure_time': ([1], float),
+            'Exposure_period': ([1], float),
+            'Tau': ([1], float),
+            'Count_cutoff': ([1], int),
+            'Threshold_setting': ([1], float),
+            'Gain_setting': ([1, 2], str),
+            'N_excluded_pixels': ([1], int),
+            'Excluded_pixels': ([1], str),
+            'Flat_field': ([1], str),
+            'Trim_file': ([1], str),
+            'Image_path': ([1], str),
+            # optional
+            'Wavelength': ([1], float),
+            'Energy_range': ([1, 2], float),
+            'Detector_distance': ([1], float),
+            'Detector_Voffset': ([1], float),
+            'Beam_xy': ([1, 2], float),
+            'Flux': ([1], str),
+            'Filter_transmission': ([1], float),
+            'Start_angle': ([1], float),
+            'Angle_increment': ([1], float),
+            'Detector_2theta': ([1], float),
+            'Polarization': ([1], float),
+            'Alpha': ([1], float),
+            'Kappa': ([1], float),
+            'Phi': ([1], float),
+            'Phi_increment': ([1], float),
+            'Chi': ([1], float),
+            'Chi_increment': ([1], float),
+            'Oscillation_axis': ([slice(1, None)], str),
+            'N_oscillations': ([1], int),
+            'Start_position': ([1], float),
+            'Position_increment': ([1], float),
+            'Shutter_time': ([1], float),
+            'Omega': ([1], float),
+            'Omega_increment': ([1], float)
+        }
+
+    def REVERSE_BITORDER_BYTES():
+        # Bytes with reversed bitorder
+        return (
+            b'\x00\x80@\xc0 \xa0`\xe0\x10\x90P\xd00\xb0p\xf0\x08\x88H\xc8('
+            b'\xa8h\xe8\x18\x98X\xd88\xb8x\xf8\x04\x84D\xc4$\xa4d\xe4\x14'
+            b'\x94T\xd44\xb4t\xf4\x0c\x8cL\xcc,\xacl\xec\x1c\x9c\\\xdc<\xbc|'
+            b'\xfc\x02\x82B\xc2"\xa2b\xe2\x12\x92R\xd22\xb2r\xf2\n\x8aJ\xca*'
+            b'\xaaj\xea\x1a\x9aZ\xda:\xbaz\xfa\x06\x86F\xc6&\xa6f\xe6\x16'
+            b'\x96V\xd66\xb6v\xf6\x0e\x8eN\xce.\xaen\xee\x1e\x9e^\xde>\xbe~'
+            b'\xfe\x01\x81A\xc1!\xa1a\xe1\x11\x91Q\xd11\xb1q\xf1\t\x89I\xc9)'
+            b'\xa9i\xe9\x19\x99Y\xd99\xb9y\xf9\x05\x85E\xc5%\xa5e\xe5\x15'
+            b'\x95U\xd55\xb5u\xf5\r\x8dM\xcd-\xadm\xed\x1d\x9d]\xdd=\xbd}'
+            b'\xfd\x03\x83C\xc3#\xa3c\xe3\x13\x93S\xd33\xb3s\xf3\x0b\x8bK'
+            b'\xcb+\xabk\xeb\x1b\x9b[\xdb;\xbb{\xfb\x07\x87G\xc7\'\xa7g\xe7'
+            b'\x17\x97W\xd77\xb7w\xf7\x0f\x8fO\xcf/\xafo\xef\x1f\x9f_'
+            b'\xdf?\xbf\x7f\xff')
+
+    def REVERSE_BITORDER_ARRAY():
+        # Numpy array of bytes with reversed bitorder
+        return numpy.fromstring(TIFF.REVERSE_BITORDER_BYTES, dtype='uint8')
+
+    def ALLOCATIONGRANULARITY():
+        # alignment for writing contiguous data to TIFF
+        import mmap  # delayed import
+        return mmap.ALLOCATIONGRANULARITY
+
+    # Max line length of printed output
+    PRINT_LINE_WIDTH = 100
+
+    # Max number of lines to print
+    PRINT_MAX_LINES = 512
+
+
+def read_tags(fh, byteorder, offsetsize, tagnames,
+              customtags=None, maxifds=None):
+    """Read tags from chain of IFDs and return as list of dicts.
+
+    The file handle position must be at a valid IFD header.
+
+    """
+    if offsetsize == 4:
+        offsetformat = byteorder+'I'
+        tagnosize = 2
+        tagnoformat = byteorder+'H'
+        tagsize = 12
+        tagformat1 = byteorder+'HH'
+        tagformat2 = byteorder+'I4s'
+    elif offsetsize == 8:
+        offsetformat = byteorder+'Q'
+        tagnosize = 8
+        tagnoformat = byteorder+'Q'
+        tagsize = 20
+        tagformat1 = byteorder+'HH'
+        tagformat2 = byteorder+'Q8s'
+    else:
+        raise ValueError("invalid offset size")
+
+    if customtags is None:
+        customtags = {}
+    if maxifds is None:
+        maxifds = 2**32
+
+    result = []
+    unpack = struct.unpack
+    offset = fh.tell()
+    while len(result) < maxifds:
+        # loop over IFDs
+        try:
+            tagno = unpack(tagnoformat, fh.read(tagnosize))[0]
+            if tagno > 4096:
+                raise ValueError("suspicious number of tags")
+        except Exception:
+            warnings.warn("corrupted tag list at offset %i" % offset)
+            break
+
+        tags = {}
+        data = fh.read(tagsize * tagno)
+        pos = fh.tell()
+        index = 0
+        for _ in range(tagno):
+            code, type_ = unpack(tagformat1, data[index:index+4])
+            count, value = unpack(tagformat2, data[index+4:index+tagsize])
+            index += tagsize
+            name = tagnames.get(code, str(code))
+            try:
+                dtype = TIFF.DATA_FORMATS[type_]
+            except KeyError:
+                raise TiffTag.Error("unknown tag data type %i" % type_)
+
+            fmt = '%s%i%s' % (byteorder, count * int(dtype[0]), dtype[1])
+            size = struct.calcsize(fmt)
+            if size > offsetsize or code in customtags:
+                offset = unpack(offsetformat, value)[0]
+                if offset < 8 or offset > fh.size - size:
+                    raise TiffTag.Error("invalid tag value offset")
+                fh.seek(offset)
+                if code in customtags:
+                    readfunc = customtags[code][1]
+                    value = readfunc(fh, byteorder, dtype, count, offsetsize)
+                elif code in tagnames or dtype[-1] == 's':
+                    value = unpack(fmt, fh.read(size))
+                else:
+                    value = read_numpy(fh, byteorder, dtype, count, offsetsize)
+            else:
+                value = unpack(fmt, value[:size])
+
+            if code not in customtags and code not in TIFF.TAG_TUPLE:
+                if len(value) == 1:
+                    value = value[0]
+
+            if type_ != 7 and dtype[-1] == 's' and isinstance(value, bytes):
+                # TIFF ASCII fields can contain multiple strings,
+                # each terminated with a NUL
+                value = bytes2str(stripascii(value))
+
+            tags[name] = value
+
+        result.append(tags)
+        # read offset to next page
+        fh.seek(pos)
+        offset = unpack(offsetformat, fh.read(offsetsize))[0]
+        if offset == 0:
+            break
+        if offset >= fh.size:
+            warnings.warn("invalid page offset (%i)" % offset)
+            break
+        fh.seek(offset)
+
+    if maxifds == 1:
+        result = result[0]
+    return result
+
+
+def read_exif_ifd(fh, byteorder, dtype, count, offsetsize):
+    """Read EXIF tags from file and return as dict."""
+    tags = read_tags(fh, byteorder, offsetsize, TIFF.EXIF_TAGS, maxifds=1)
+    if 'ExifVersion' in tags:
+        tags['ExifVersion'] = bytes2str(tags['ExifVersion'])
+    return tags
+
+
+def read_gps_ifd(fh, byteorder, dtype, count, offsetsize):
+    """Read GPS tags from file and return as dict."""
+    return read_tags(fh, byteorder, offsetsize, TIFF.GPS_TAGS, maxifds=1)
+
-def read_bytes(fh, byteorder, dtype, count):
+def read_interoperability_ifd(fh, byteorder, dtype, count, offsetsize):
+    """Read Interoperability tags from file and return as dict."""
+    tag_names = {1: 'InteroperabilityIndex'}
+    return read_tags(fh, byteorder, offsetsize, tag_names, maxifds=1)
+
+
+def read_bytes(fh, byteorder, dtype, count, offsetsize):
     """Read tag data from file and return as byte string."""
     dtype = 'b' if dtype[-1] == 's' else byteorder+dtype[-1]
     return fh.read_array(dtype, count).tostring()
 
 
-def read_numpy(fh, byteorder, dtype, count):
+def read_utf8(fh, byteorder, dtype, count, offsetsize):
+    """Read tag data from file and return as unicode string."""
+    return fh.read(count).decode('utf-8')
+
+
+def read_numpy(fh, byteorder, dtype, count, offsetsize):
     """Read tag data from file and return as numpy array."""
     dtype = 'b' if dtype[-1] == 's' else byteorder+dtype[-1]
     return fh.read_array(dtype, count)
 
 
-def read_json(fh, byteorder, dtype, count):
+def read_colormap(fh, byteorder, dtype, count, offsetsize):
+    """Read ColorMap data from file and return as numpy array."""
+    cmap = fh.read_array(byteorder+dtype[-1], count)
+    cmap.shape = (3, -1)
+    return cmap
+
+
+def read_json(fh, byteorder, dtype, count, offsetsize):
     """Read JSON tag data from file and return as object."""
     data = fh.read(count)
     try:
@@ -3714,20 +6344,28 @@ def read_json(fh, byteorder, dtype, count):
         warnings.warn("invalid JSON '%s'" % data)
 
 
-def read_mm_header(fh, byteorder, dtype, count):
-    """Read MM_HEADER tag from file and return as numpy.rec.array."""
-    return fh.read_record(MM_HEADER, byteorder=byteorder)
+def read_mm_header(fh, byteorder, dtype, count, offsetsize):
+    """Read FluoView mm_header tag from file and return as dict."""
+    mmh = fh.read_record(TIFF.MM_HEADER, byteorder=byteorder)
+    mmh = recarray2dict(mmh)
+    mmh['Dimensions'] = [
+        (bytes2str(d[0]).strip(), d[1], d[2], d[3], bytes2str(d[4]).strip())
+        for d in mmh['Dimensions']]
+    d = mmh['GrayChannel']
+    mmh['GrayChannel'] = (
+        bytes2str(d[0]).strip(), d[1], d[2], d[3], bytes2str(d[4]).strip())
+    return mmh
 
 
-def read_mm_stamp(fh, byteorder, dtype, count):
-    """Read MM_STAMP tag from file and return as numpy.ndarray."""
+def read_mm_stamp(fh, byteorder, dtype, count, offsetsize):
+    """Read FluoView mm_stamp tag from file and return as numpy.ndarray."""
     return fh.read_array(byteorder+'f8', 8)
 
 
-def read_uic1tag(fh, byteorder, dtype, count, plane_count=None):
-    """Read MetaMorph STK UIC1Tag from file and return as dictionary.
+def read_uic1tag(fh, byteorder, dtype, count, offsetsize, planecount=None):
+    """Read MetaMorph STK UIC1Tag from file and return as dict.
 
-    Return empty dictionary if plane_count is unknown.
+    Return empty dictionary if planecount is unknown.
""" assert dtype in ('2I', '1I') and byteorder == '<' @@ -3735,53 +6373,52 @@ def read_uic1tag(fh, byteorder, dtype, count, plane_count=None): if dtype == '2I': # pre MetaMorph 2.5 (not tested) values = fh.read_array('<u4', 2*count).reshape(count, 2) - result = {'z_distance': values[:, 0] / values[:, 1]} - elif plane_count: + result = {'ZDistance': values[:, 0] / values[:, 1]} + elif planecount: for _ in range(count): tagid = struct.unpack('<I', fh.read(4))[0] if tagid in (28, 29, 37, 40, 41): # silently skip unexpected tags fh.read(4) continue - name, value = read_uic_tag(fh, tagid, plane_count, offset=True) + name, value = read_uic_tag(fh, tagid, planecount, offset=True) result[name] = value return result -def read_uic2tag(fh, byteorder, dtype, plane_count): - """Read MetaMorph STK UIC2Tag from file and return as dictionary.""" +def read_uic2tag(fh, byteorder, dtype, planecount, offsetsize): + """Read MetaMorph STK UIC2Tag from file and return as dict.""" assert dtype == '2I' and byteorder == '<' - values = fh.read_array('<u4', 6*plane_count).reshape(plane_count, 6) + values = fh.read_array('<u4', 6*planecount).reshape(planecount, 6) return { - 'z_distance': values[:, 0] / values[:, 1], - 'date_created': values[:, 2], # julian days - 'time_created': values[:, 3], # milliseconds - 'date_modified': values[:, 4], # julian days - 'time_modified': values[:, 5], # milliseconds - } + 'ZDistance': values[:, 0] / values[:, 1], + 'DateCreated': values[:, 2], # julian days + 'TimeCreated': values[:, 3], # milliseconds + 'DateModified': values[:, 4], # julian days + 'TimeModified': values[:, 5]} # milliseconds -def read_uic3tag(fh, byteorder, dtype, plane_count): - """Read MetaMorph STK UIC3Tag from file and return as dictionary.""" +def read_uic3tag(fh, byteorder, dtype, planecount, offsetsize): + """Read MetaMorph STK UIC3Tag from file and return as dict.""" assert dtype == '2I' and byteorder == '<' - values = fh.read_array('<u4', 2*plane_count).reshape(plane_count, 2) - return {'wavelengths': values[:, 0] / values[:, 1]} + values = fh.read_array('<u4', 2*planecount).reshape(planecount, 2) + return {'Wavelengths': values[:, 0] / values[:, 1]} -def read_uic4tag(fh, byteorder, dtype, plane_count): - """Read MetaMorph STK UIC4Tag from file and return as dictionary.""" +def read_uic4tag(fh, byteorder, dtype, planecount, offsetsize): + """Read MetaMorph STK UIC4Tag from file and return as dict.""" assert dtype == '1I' and byteorder == '<' result = {} while True: tagid = struct.unpack('<H', fh.read(2))[0] if tagid == 0: break - name, value = read_uic_tag(fh, tagid, plane_count, offset=False) + name, value = read_uic_tag(fh, tagid, planecount, offset=False) result[name] = value return result -def read_uic_tag(fh, tagid, plane_count, offset): +def read_uic_tag(fh, tagid, planecount, offset): """Read a single UIC tag value from file and return tag name and value. UIC1Tags use an offset. 
@@ -3792,18 +6429,22 @@ def read_uic_tag(fh, tagid, plane_count, offset):
         return value[0] if count == 1 else value
 
     try:
-        name, dtype = UIC_TAGS[tagid]
-    except KeyError:
+        name, dtype = TIFF.UIC_TAGS[tagid]
+    except IndexError:
         # unknown tag
-        return '_tagid_%i' % tagid, read_int()
+        return '_TagId%i' % tagid, read_int()
+
+    Fraction = TIFF.UIC_TAGS[4][1]
 
     if offset:
         pos = fh.tell()
         if dtype not in (int, None):
             off = read_int()
             if off < 8:
-                warnings.warn("invalid offset for uic tag '%s': %i"
-                              % (name, off))
+                if dtype is str:
+                    return name, ''
+                warnings.warn("invalid offset for uic tag '%s': %i" %
+                              (name, off))
                 return name, off
             fh.seek(off)
 
@@ -3829,7 +6470,7 @@
         size = read_int()
         if 0 <= size < 2**10:
             value = struct.unpack('%is' % size, fh.read(size))[0][:-1]
-            value = stripnull(value)
+            value = bytes2str(stripnull(value))
         elif offset:
             value = ''
             warnings.warn("corrupt string in uic tag '%s'" % name)
@@ -3838,11 +6479,11 @@
     elif dtype == '%ip':
         # sequence of pascal strings
         value = []
-        for _ in range(plane_count):
+        for _ in range(planecount):
            size = read_int()
             if 0 <= size < 2**10:
                 string = struct.unpack('%is' % size, fh.read(size))[0][:-1]
-                string = stripnull(string)
+                string = bytes2str(stripnull(string))
                 value.append(string)
             elif offset:
                 warnings.warn("corrupt string in uic tag '%s'" % name)
@@ -3852,7 +6493,7 @@
         # struct or numpy type
         dtype = '<' + dtype
         if '%i' in dtype:
-            dtype = dtype % plane_count
+            dtype = dtype % planecount
         if '(' in dtype:
             # numpy type
             value = fh.read_array(dtype, 1)[0]
@@ -3886,76 +6527,119 @@ def read_uic_image_property(fh):
     return dict(name=name, flags=flags, value=value)
 
 
-def read_cz_lsm_info(fh, byteorder, dtype, count):
-    """Read CS_LSM_INFO tag from file and return as numpy.rec.array."""
+def read_cz_lsminfo(fh, byteorder, dtype, count, offsetsize):
+    """Read CZ_LSMINFO tag from file and return as dict."""
     assert byteorder == '<'
     magic_number, structure_size = struct.unpack('<II', fh.read(8))
     if magic_number not in (50350412, 67127628):
-        raise ValueError("invalid CS_LSM_INFO structure")
+        raise ValueError("invalid CZ_LSMINFO structure")
     fh.seek(-8, 1)
 
-    if structure_size < numpy.dtype(CZ_LSM_INFO).itemsize:
+    if structure_size < numpy.dtype(TIFF.CZ_LSMINFO).itemsize:
         # adjust structure according to structure_size
-        cz_lsm_info = []
+        lsminfo = []
         size = 0
-        for name, dtype in CZ_LSM_INFO:
+        for name, dtype in TIFF.CZ_LSMINFO:
            size += numpy.dtype(dtype).itemsize
            if size > structure_size:
                break
-            cz_lsm_info.append((name, dtype))
+            lsminfo.append((name, dtype))
     else:
-        cz_lsm_info = CZ_LSM_INFO
+        lsminfo = TIFF.CZ_LSMINFO
 
-    return fh.read_record(cz_lsm_info, byteorder=byteorder)
+    lsminfo = fh.read_record(lsminfo, byteorder=byteorder)
+    lsminfo = recarray2dict(lsminfo)
+
+    # read LSM info subrecords at offsets
+    for name, reader in TIFF.CZ_LSMINFO_READERS.items():
+        if reader is None:
+            continue
+        offset = lsminfo.get('Offset' + name, 0)
+        if offset < 8:
+            continue
+        fh.seek(offset)
+        try:
+            lsminfo[name] = reader(fh)
+        except ValueError:
+            pass
+    return lsminfo

-def read_cz_lsm_floatpairs(fh):
+
+def read_lsm_floatpairs(fh):
     """Read LSM sequence of float pairs from file and return as list."""
     size = struct.unpack('<i', fh.read(4))[0]
     return fh.read_array('<2f8', count=size)
 
 
-def read_cz_lsm_positions(fh):
+def read_lsm_positions(fh):
     """Read LSM positions from file and return as list."""
     size = struct.unpack('<I', fh.read(4))[0]
     return fh.read_array('<2f8', count=size)
 
 
-def read_cz_lsm_time_stamps(fh):
+def read_lsm_timestamps(fh):
    """Read LSM time stamps from file and return as list."""
    size, count = struct.unpack('<ii', fh.read(8))
    if size != (8 + 8 * count):
-        raise ValueError("lsm_time_stamps block is too short")
+        warnings.warn("invalid LSM TimeStamps block")
+        return []
    # return struct.unpack('<%dd' % count, fh.read(8*count))
    return fh.read_array('<f8', count=count)
 
 
-def read_cz_lsm_event_list(fh):
+def read_lsm_eventlist(fh):
     """Read LSM events from file and return as list of (time, type, text)."""
     count = struct.unpack('<II', fh.read(8))[1]
     events = []
     while count > 0:
         esize, etime, etype = struct.unpack('<IdI', fh.read(16))
-        etext = stripnull(fh.read(esize - 16))
+        etext = bytes2str(stripnull(fh.read(esize - 16)))
         events.append((etime, etype, etext))
         count -= 1
     return events
 
 
-def read_cz_lsm_scan_info(fh):
-    """Read LSM scan information from file and return as Record."""
-    block = Record()
+def read_lsm_channelcolors(fh):
+    """Read LSM ChannelColors structure from file and return as dict."""
+    result = {'Mono': False, 'Colors': [], 'ColorNames': []}
+    pos = fh.tell()
+    (size, ncolors, nnames,
+     coffset, noffset, mono) = struct.unpack('<IIIIII', fh.read(24))
+    if ncolors != nnames:
+        warnings.warn("invalid LSM ChannelColors structure")
+        return result
+    result['Mono'] = bool(mono)
+    # Colors
+    fh.seek(pos + coffset)
+    colors = fh.read_array('uint8', count=ncolors*4).reshape((ncolors, 4))
+    result['Colors'] = colors.tolist()
+    # ColorNames
+    fh.seek(pos + noffset)
+    buffer = fh.read(size - noffset)
+    names = []
+    while len(buffer) > 4:
+        size = struct.unpack('<I', buffer[:4])[0]
+        names.append(bytes2str(buffer[4:3+size]))
+        buffer = buffer[4+size:]
+    result['ColorNames'] = names
+    return result
+
+
+def read_lsm_scaninfo(fh):
+    """Read LSM ScanInfo structure from file and return as dict."""
+    block = {}
     blocks = [block]
     unpack = struct.unpack
-    if 0x10000000 != struct.unpack('<I', fh.read(4))[0]:
+    if struct.unpack('<I', fh.read(4))[0] != 0x10000000:
         # not a Recording sub block
-        raise ValueError("not a lsm_scan_info structure")
+        warnings.warn("invalid LSM ScanInfo structure")
+        return block
     fh.read(8)
     while True:
         entry, dtype, size = unpack('<III', fh.read(12))
         if dtype == 2:
             # ascii
-            value = stripnull(fh.read(size))
+            value = bytes2str(stripnull(fh.read(size)))
         elif dtype == 4:
             # long
             value = unpack('<i', fh.read(4))[0]
@@ -3964,76 +6648,84 @@
             value = unpack('<d', fh.read(8))[0]
         else:
             value = 0
-        if entry in CZ_LSM_SCAN_INFO_ARRAYS:
+        if entry in TIFF.CZ_LSMINFO_SCANINFO_ARRAYS:
             blocks.append(block)
-            name = CZ_LSM_SCAN_INFO_ARRAYS[entry]
+            name = TIFF.CZ_LSMINFO_SCANINFO_ARRAYS[entry]
             newobj = []
-            setattr(block, name, newobj)
+            block[name] = newobj
             block = newobj
-        elif entry in CZ_LSM_SCAN_INFO_STRUCTS:
+        elif entry in TIFF.CZ_LSMINFO_SCANINFO_STRUCTS:
             blocks.append(block)
-            newobj = Record()
+            newobj = {}
             block.append(newobj)
             block = newobj
-        elif entry in CZ_LSM_SCAN_INFO_ATTRIBUTES:
-            name = CZ_LSM_SCAN_INFO_ATTRIBUTES[entry]
-            setattr(block, name, value)
+        elif entry in TIFF.CZ_LSMINFO_SCANINFO_ATTRIBUTES:
+            name = TIFF.CZ_LSMINFO_SCANINFO_ATTRIBUTES[entry]
+            block[name] = value
         elif entry == 0xffffffff:
             # end sub block
             block = blocks.pop()
         else:
             # unknown entry
-            setattr(block, "entry_0x%x" % entry, value)
+            block["Entry0x%x" % entry] = value
        if not blocks:
            break
    return block
 
 
-def read_tvips_header(fh, byteorder, dtype, count):
-    """Read TVIPS EM-MENU headers and return as Record."""
-    header = Record(fh.read_record(TVIPS_HEADER_V1, byteorder=byteorder))
-    if header.version == 2:
-        header = Record(fh.read_record(TVIPS_HEADER_V2, byteorder=byteorder))
-        if header.magic != int(0xaaaaaaaa):
-            raise ValueError("invalid TVIPS v2 magic number")
+def read_tvips_header(fh, byteorder, dtype, count, offsetsize):
+    """Read TVIPS EM-MENU headers and return as dict."""
+    result = {}
+    header = fh.read_record(TIFF.TVIPS_HEADER_V1, byteorder=byteorder)
+    for name, typestr in TIFF.TVIPS_HEADER_V1:
+        result[name] = header[name].tolist()
+    if header['Version'] == 2:
+        header = fh.read_record(TIFF.TVIPS_HEADER_V2, byteorder=byteorder)
+        if header['Magic'] != int(0xaaaaaaaa):
+            warnings.warn("invalid TVIPS v2 magic number")
+            return {}
         # decode utf16 strings
-        for name, typestr in TVIPS_HEADER_V2:
+        for name, typestr in TIFF.TVIPS_HEADER_V2:
            if typestr.startswith('V'):
                s = header[name].tostring().decode('utf16', errors='ignore')
-                header[name] = stripnull(s, null='\0')
+                result[name] = stripnull(s, null='\0')
+            else:
+                result[name] = header[name].tolist()
        # convert nm to m
-        for axis in 'xy':
-            header['physical_pixel_size_' + axis] /= 1e9
-            header['pixel_size_' + axis] /= 1e9
+        for axis in 'XY':
+            header['PhysicalPixelSize' + axis] /= 1e9
+            header['PixelSize' + axis] /= 1e9
    elif header.version != 1:
-        raise ValueError("unknown TVIPS header version")
-    return header
+        warnings.warn("unknown TVIPS header version")
+        return {}
+    return result
 
 
-def read_fei_metadata(fh, byteorder, dtype, count):
-    """Read FEI SFEG/HELIOS headers and return as nested Record."""
-    result = Record()
-    section = Record()
-    for line in fh.read(count).splitlines():
+def read_fei_metadata(fh, byteorder, dtype, count, offsetsize):
+    """Read FEI SFEG/HELIOS headers and return as dict."""
+    result = {}
+    section = {}
+    data = bytes2str(fh.read(count))
+    for line in data.splitlines():
        line = line.strip()
-        if line.startswith(b'['):
-            section = Record()
-            result[bytes2str(line[1:-1])] = section
+        if line.startswith('['):
+            section = {}
+            result[line[1:-1]] = section
            continue
        try:
-            key, value = line.split(b'=')
+            key, value = line.split('=')
        except ValueError:
            continue
-        section[bytes2str(key)] = astype(value)
    return result
 
 
-def read_sem_metadata(fh, byteorder, dtype, count):
-    """Read Zeiss SEM tag and return as Record."""
-    result = Record({'': ()})
+def read_cz_sem(fh, byteorder, dtype, count, offsetsize):
+    """Read Zeiss SEM tag and return as dict."""
+    result = {'': ()}
    key = None
-    for line in fh.read(count).splitlines():
-        line = line.decode('cp1252')
+    data = bytes2str(fh.read(count))
+    for line in data.splitlines():
        if line.isupper():
            key = line.lower()
        elif key:
@@ -4066,13 +6758,42 @@
     return result
 
 
-def read_nih_image_header(fh, byteorder, dtype, count):
-    """Read NIH_IMAGE_HEADER tag from file and return as numpy.rec.array."""
-    a = fh.read_record(NIH_IMAGE_HEADER, byteorder=byteorder)
-    a = a.newbyteorder(byteorder)
-    a.xunit = a.xunit[:a._xunit_len]
-    a.um = a.um[:a._um_len]
-    return a
+def read_nih_image_header(fh, byteorder, dtype, count, offsetsize):
+    """Read NIH_IMAGE_HEADER tag from file and return as dict."""
+    a = fh.read_record(TIFF.NIH_IMAGE_HEADER, byteorder=byteorder)
+    a = a.newbyteorder(byteorder)
+    a = recarray2dict(a)
+    a['XUnit'] = a['XUnit'][:a['XUnitSize']]
+    a['UM'] = a['UM'][:a['UMsize']]
+    return a
+
+
+def read_scanimage_metadata(fh):
+    """Read ScanImage BigTIFF v3 static and ROI metadata from open file.
+
+    Return non-varying frame data as dict and ROI group data as JSON.
+
+    The settings can be used to read image data and metadata without parsing
+    the TIFF file.
+
+    Raise ValueError if file does not contain valid ScanImage v3 metadata.
+
+    """
+    fh.seek(0)
+    try:
+        byteorder, version = struct.unpack('<2sH', fh.read(4))
+        if byteorder != b'II' or version != 43:
+            raise Exception
+        fh.seek(16)
+        magic, version, size0, size1 = struct.unpack('<IIII', fh.read(16))
+        if magic != 117637889 or version != 3:
+            raise Exception
+    except Exception:
+        raise ValueError("not a ScanImage BigTIFF v3 file")
+
+    frame_data = matlabstr2py(bytes2str(fh.read(size0)[:-1]))
+    roi_data = read_json(fh, '<', None, size1, None)
+    return frame_data, roi_data
 
 
 def read_micromanager_metadata(fh):
@@ -4080,7 +6801,7 @@ def read_micromanager_metadata(fh):
 
     The settings can be used to read image data without parsing the TIFF file.
 
-    Raise ValueError if file does not contain valid MicroManager metadata.
+    Raise ValueError if the file does not contain valid MicroManager metadata.
 
     """
     fh.seek(0)
@@ -4096,60 +6817,75 @@
     ) = struct.unpack(byteorder + "IIIIIIII", fh.read(32))
 
     if summary_header != 2355492:
-        raise ValueError("invalid MicroManager summary_header")
-    result['summary'] = read_json(fh, byteorder, None, summary_length)
+        raise ValueError("invalid MicroManager summary header")
+    result['Summary'] = read_json(fh, byteorder, None, summary_length, None)
 
     if index_header != 54773648:
-        raise ValueError("invalid MicroManager index_header")
+        raise ValueError("invalid MicroManager index header")
     fh.seek(index_offset)
     header, count = struct.unpack(byteorder + "II", fh.read(8))
     if header != 3453623:
-        raise ValueError("invalid MicroManager index_header")
+        raise ValueError("invalid MicroManager index header")
     data = struct.unpack(byteorder + "IIIII"*count, fh.read(20*count))
-    result['index_map'] = {
-        'channel': data[::5], 'slice': data[1::5], 'frame': data[2::5],
-        'position': data[3::5], 'offset': data[4::5]}
+    result['IndexMap'] = {'Channel': data[::5],
+                          'Slice': data[1::5],
+                          'Frame': data[2::5],
+                          'Position': data[3::5],
+                          'Offset': data[4::5]}
 
     if display_header != 483765892:
-        raise ValueError("invalid MicroManager display_header")
     fh.seek(display_offset)
     header, count = struct.unpack(byteorder + "II", fh.read(8))
     if header != 347834724:
-        raise ValueError("invalid MicroManager display_header")
-    result['display_settings'] = read_json(fh, byteorder, None, count)
+        raise ValueError("invalid MicroManager display header")
+    result['DisplaySettings'] = read_json(fh, byteorder, None, count, None)
 
     if comments_header != 99384722:
-        raise ValueError("invalid MicroManager comments_header")
+        raise ValueError("invalid MicroManager comments header")
    fh.seek(comments_offset)
    header, count = struct.unpack(byteorder + "II", fh.read(8))
    if header != 84720485:
-        raise ValueError("invalid MicroManager comments_header")
-    result['comments'] = read_json(fh, byteorder, None, count)
+        raise ValueError("invalid MicroManager comments header")
+    result['Comments'] = read_json(fh, byteorder, None, count, None)
 
     return result
 
 
+def read_metaseries_catalog(fh):
+    """Read MetaSeries non-TIFF hint catalog from file.
+
+    Raise ValueError if the file does not contain a valid hint catalog.
+ + """ + # TODO: implement read_metaseries_catalog + raise NotImplementedError() + + def imagej_metadata(data, bytecounts, byteorder): - """Return dictionary from ImageJ metadata tag value.""" - _str = str if sys.version_info[0] < 3 else lambda x: str(x, 'cp1252') + """Return IJMetadata tag value as dict. - def read_string(data, byteorder): - return _str(stripnull(data[0 if byteorder == '<' else 1::2])) + The 'info' string can have multiple formats, e.g. OIF or ScanImage, + that might be parsed into dicts using the matlabstr2py or + oiffile.SettingsFile functions. + + """ + def readstring(data, byteorder): + return data.decode('utf-16' + {'>': 'be', '<': 'le'}[byteorder]) - def read_double(data, byteorder): + def readdouble(data, byteorder): return struct.unpack(byteorder+('d' * (len(data) // 8)), data) - def read_bytes(data, byteorder): - #return struct.unpack('b' * len(data), data) + def readbytes(data, byteorder): return numpy.fromstring(data, 'uint8') metadata_types = { # big endian - b'info': ('info', read_string), - b'labl': ('labels', read_string), - b'rang': ('ranges', read_double), - b'luts': ('luts', read_bytes), - b'roi ': ('roi', read_bytes), - b'over': ('overlays', read_bytes)} + b'info': ('Info', readstring), + b'labl': ('Labels', readstring), + b'rang': ('Ranges', readdouble), + b'luts': ('LUTs', readbytes), + b'roi ': ('ROI', readbytes), + b'over': ('Overlays', readbytes)} metadata_types.update( # little endian dict((k[::-1], v) for k, v in metadata_types.items())) @@ -4170,7 +6906,7 @@ def imagej_metadata(data, bytecounts, byteorder): result = {} for mtype, count in zip(header[::2], header[1::2]): values = [] - name, func = metadata_types.get(mtype, (_str(mtype), read_bytes)) + name, func = metadata_types.get(mtype, (bytes2str(mtype), read_bytes)) for _ in range(count): counter += 1 pos1 = pos + bytecounts[counter] @@ -4180,35 +6916,35 @@ def imagej_metadata(data, bytecounts, byteorder): return result -def imagej_description_dict(description): - """Return dictionary from ImageJ image description byte string. +def imagej_description_metadata(description): + """Return metatata from ImageJ image description as dict. Raise ValueError if not a valid ImageJ description. - >>> description = b'ImageJ=1.11a\\nimages=510\\nhyperstack=true\\n' - >>> imagej_description_dict(description) # doctest: +SKIP + >>> description = 'ImageJ=1.11a\\nimages=510\\nhyperstack=true\\n' + >>> imagej_description_metadata(description) # doctest: +SKIP {'ImageJ': '1.11a', 'images': 510, 'hyperstack': True} """ def _bool(val): - return {b'true': True, b'false': False}[val.lower()] + return {'true': True, 'false': False}[val.lower()] - _str = str if sys.version_info[0] < 3 else lambda x: str(x, 'cp1252') result = {} for line in description.splitlines(): try: - key, val = line.split(b'=') + key, val = line.split('=') except Exception: continue key = key.strip() val = val.strip() - for dtype in (int, float, _bool, _str): + for dtype in (int, float, _bool): try: val = dtype(val) break except Exception: pass - result[_str(key)] = val + result[key] = val + if 'ImageJ' not in result: raise ValueError("not a ImageJ image description") return result @@ -4216,7 +6952,7 @@ def imagej_description_dict(description): def imagej_description(shape, rgb=None, colormaped=False, version='1.11a', hyperstack=None, mode=None, loop=None, **kwargs): - """Return ImageJ image decription from data shape as byte string. + """Return ImageJ image description from data shape. ImageJ can handle up to 6 dimensions in order TZCYXS. 
@@ -4240,7 +6976,6 @@ def imagej_description(shape, rgb=None, colormaped=False, version='1.11a', append = [] result.append('images=%i' % product(shape[:-3])) if hyperstack is None: - #if product(shape[:-3]) > 1: hyperstack = True append.append('hyperstack=true') else: @@ -4262,7 +6997,7 @@ def imagej_description(shape, rgb=None, colormaped=False, version='1.11a', for key, value in kwargs.items(): append.append('%s=%s' % (key.lower(), value)) - return str2bytes('\n'.join(result + append + [''])) + return '\n'.join(result + append + ['']) def imagej_shape(shape, rgb=None): @@ -4286,67 +7021,291 @@ def imagej_shape(shape, rgb=None): raise ValueError("invalid ImageJ hyperstack: not a non-RGB image") if rgb or shape[-1] == 1: return (1, ) * (6 - ndim) + shape - else: - return (1, ) * (5 - ndim) + shape + (1,) + return (1, ) * (5 - ndim) + shape + (1,) + + +def json_description(shape, **metadata): + """Return JSON image description from data shape and other meta data. + + Return UTF-8 encoded JSON. + + >>> json_description((256, 256, 3), axes='YXS') # doctest: +SKIP + b'{"shape": [256, 256, 3], "axes": "YXS"}' + + """ + metadata.update(shape=shape) + return json.dumps(metadata) # .encode('utf-8') -def image_description_dict(description): - """Return dictionary from image description byte string. +def json_description_metadata(description): + """Return metatata from JSON formated image description as dict. Raise ValuError if description is of unknown format. - >>> image_description_dict(b'shape=(256, 256, 3)') - {'shape': (256, 256, 3)} - >>> description = b'{"shape": [256, 256, 3], "axes": "YXS"}' - >>> image_description_dict(description) # doctest: +SKIP + >>> description = '{"shape": [256, 256, 3], "axes": "YXS"}' + >>> json_description_metadata(description) # doctest: +SKIP {'shape': [256, 256, 3], 'axes': 'YXS'} + >>> json_description_metadata('shape=(256, 256, 3)') + {'shape': (256, 256, 3)} """ - if description.startswith(b'shape='): - # old style 'shaped' description - shape = tuple(int(i) for i in description[7:-1].split(b',')) + if description[:6] == 'shape=': + # old style 'shaped' description; not JSON + shape = tuple(int(i) for i in description[7:-1].split(',')) return dict(shape=shape) - if description.startswith(b'{') and description.endswith(b'}'): + if description[:1] == '{' and description[-1:] == '}': # JSON description - return json.loads(description.decode('utf-8')) - raise ValueError("unknown image description") + return json.loads(description) + raise ValueError("invalid JSON image description", description) -def image_description(shape, colormaped=False, **metadata): - """Return image description from data shape and meta data. +def fluoview_description_metadata(description, ignoresections=None): + """Return metatata from FluoView image description as dict. - Return UTF-8 encoded JSON. + The FluoView image description format is unspecified. Expect failures. - >>> image_description((256, 256, 3), axes='YXS') # doctest: +SKIP - b'{"shape": [256, 256, 3], "axes": "YXS"}' + >>> descr = ('[Intensity Mapping]\\nMap Ch0: Range=00000 to 02047\\n' + ... 
'[Intensity Mapping End]') + >>> fluoview_description_metadata(descr) + {'Intensity Mapping': {'Map Ch0: Range': '00000 to 02047'}} """ - if colormaped: - shape = shape + (3,) - metadata.update({'shape': shape}) - return json.dumps(metadata).encode('utf-8') + if not description.startswith('['): + raise ValueError("invalid FluoView image description") + if ignoresections is None: + ignoresections = {'Region Info (Fields)', 'Protocol Description'} + + result = {} + sections = [result] + comment = False + for line in description.splitlines(): + if not comment: + line = line.strip() + if not line: + continue + if line[0] == '[': + if line[-5:] == ' End]': + # close section + del sections[-1] + section = sections[-1] + name = line[1:-5] + if comment: + section[name] = '\n'.join(section[name]) + if name[:4] == 'LUT ': + a = numpy.array(section[name], dtype='uint8') + a.shape = -1, 3 + section[name] = a + continue + # new section + comment = False + name = line[1:-1] + if name[:4] == 'LUT ': + section = [] + elif name in ignoresections: + section = [] + comment = True + else: + section = {} + sections.append(section) + result[name] = section + continue + # add entry + if comment: + section.append(line) + continue + line = line.split('=', 1) + if len(line) == 1: + section[line[0].strip()] = None + continue + key, value = line + if key[:4] == 'RGB ': + section.extend(int(rgb) for rgb in value.split()) + else: + section[key.strip()] = astype(value.strip()) + return result + + +def pilatus_description_metadata(description): + """Return metatata from Pilatus image description as dict. + + Return metadata from Pilatus pixel array detectors by Dectris, created + by camserver or TVX software. + + >>> pilatus_description_metadata('# Pixel_size 172e-6 m x 172e-6 m') + {'Pixel_size': (0.000172, 0.000172)} + + """ + result = {} + if not description.startswith('# '): + return result + for c in '#:=,()': + description = description.replace(c, ' ') + for line in description.split('\n'): + if line[:2] != ' ': + continue + line = line.split() + name = line[0] + if line[0] not in TIFF.PILATUS_HEADER: + try: + result['DateTime'] = datetime.datetime.strptime( + ' '.join(line), '%Y-%m-%dT%H %M %S.%f') + except Exception: + result[name] = ' '.join(line[1:]) + continue + indices, dtype = TIFF.PILATUS_HEADER[line[0]] + if isinstance(indices[0], slice): + # assumes one slice + values = line[indices[0]] + else: + values = [line[i] for i in indices] + if dtype is float and values[0] == 'not': + values = ['NaN'] + values = tuple(dtype(v) for v in values) + if dtype == str: + values = ' '.join(values) + elif len(values) == 1: + values = values[0] + result[name] = values + return result + + +def svs_description_metadata(description): + """Return metatata from Aperio image description as dict. + + The Aperio image description format is unspecified. Expect failures. + + >>> svs_description_metadata('Aperio Image Library v1.0') + {'Aperio Image Library': 'v1.0'} + + """ + if not description.startswith('Aperio Image Library '): + raise ValueError("invalid Aperio image description") + result = {} + lines = description.split('\n') + key, value = lines[0].strip().rsplit(None, 1) # 'Aperio Image Library' + result[key.strip()] = value.strip() + if len(lines) == 1: + return result + items = lines[1].split('|') + result[''] = items[0].strip() # TODO: parse this? 
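A short sketch of the Pilatus header parser above; the Pixel_size result matches its doctest, while the Exposure_time key is an assumption about TIFF.PILATUS_HEADER contents:

    from tifffile import pilatus_description_metadata

    header = ('# Pixel_size 172e-6 m x 172e-6 m\n'
              '# Exposure_time 0.100000 s')   # Exposure_time assumed supported
    meta = pilatus_description_metadata(header)
    assert meta['Pixel_size'] == (0.000172, 0.000172)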
+ for item in items[1:]: + key, value = item.split(' = ') + result[key.strip()] = astype(value.strip()) + return result + + +def stk_description_metadata(description): + """Return metadata from MetaMorph image description as list of dict. + + The MetaMorph image description format is unspecified. Expect failures. + """ + description = description.strip() + if not description: + return [] + try: + description = bytes2str(description) + except UnicodeDecodeError: + warnings.warn("failed to parse MetaMorph image description") + return [] + result = [] + for plane in description.split('\x00'): + d = {} + for line in plane.split('\r\n'): + line = line.split(':', 1) + if len(line) > 1: + name, value = line + d[name.strip()] = astype(value.strip()) + else: + value = line[0].strip() + if value: + if '' in d: + d[''].append(value) + else: + d[''] = [value] + result.append(d) + return result + + +def metaseries_description_metadata(description): + """Return metatata from MetaSeries image description as dict.""" + if not description.startswith('<MetaData>'): + raise ValueError("invalid MetaSeries image description") + + from xml.etree import cElementTree as etree # delayed import + root = etree.fromstring(description) + types = {'float': float, 'int': int, + 'bool': lambda x: asbool(x, 'on', 'off')} + + def parse(root, result): + # recursive + for child in root: + attrib = child.attrib + if not attrib: + result[child.tag] = parse(child, {}) + continue + if 'id' in attrib: + i = attrib['id'] + t = attrib['type'] + v = attrib['value'] + if t in types: + result[i] = types[t](v) + else: + result[i] = v + return result + + adict = parse(root, {}) + if 'Description' in adict: + adict['Description'] = adict['Description'].replace('&#13;&#10;', '\n') + return adict + + +def scanimage_description_metadata(description): + """Return metatata from ScanImage image description as dict.""" + return matlabstr2py(description) -def _replace_by(module_function, package=__package__, warn=False): + +def scanimage_artist_metadata(artist): + """Return metatata from ScanImage artist tag as dict.""" + try: + return json.loads(artist) + except ValueError: + warnings.warn("invalid JSON '%s'" % artist) + + +def _replace_by(module_function, package=__package__, warn=None, prefix='_'): """Try replace decorated function by module.function.""" + def _warn(e, warn): + if warn is None: + warn = "\n Functionality might be degraded or be slow.\n" + elif warn is True: + warn = '' + elif not warn: + return + warnings.warn("%s%s" % (e, warn)) + try: from importlib import import_module - except ImportError: - warnings.warn('could not import module importlib') - return lambda func: func + except ImportError as e: + _warn(e, warn) + return identityfunc def decorate(func, module_function=module_function, warn=warn): + module, function = module_function.split('.') try: - module, function = module_function.split('.') if package: module = import_module('.' 
+ module, package=package) else: module = import_module(module) + except Exception as e: + _warn(e, warn) + return func + try: func, oldfunc = getattr(module, function), func - globals()['__old_' + func.__name__] = oldfunc - except Exception: - if warn: - warnings.warn("failed to import %s" % module_function) + except Exception as e: + _warn(e, warn) + return func + globals()[prefix + func.__name__] = oldfunc return func return decorate @@ -4393,11 +7352,11 @@ def decode_floats(data): def decode_jpeg(encoded, tables=b'', photometric=None, - ycbcr_subsampling=None, ycbcr_positioning=None): + ycbcrsubsampling=None, ycbcrpositioning=None): """Decode JPEG encoded byte string (using _czifile extension module).""" from czifile import _czifile image = _czifile.decode_jpeg(encoded, tables) - if photometric == 'rgb' and ycbcr_subsampling and ycbcr_positioning: + if photometric == 2 and ycbcrsubsampling and ycbcrpositioning: # TODO: convert YCbCr to RGB pass return image.tostring() @@ -4410,7 +7369,7 @@ def decode_packbits(encoded): PackBits is a simple byte-oriented run-length compression scheme. """ - func = ord if sys.version[0] == '2' else lambda x: x + func = ord if sys.version[0] == '2' else identityfunc result = [] result_extend = result.extend i = 0 @@ -4519,6 +7478,9 @@ def decode_lzw(encoded): def unpack_ints(data, dtype, itemsize, runlen=0): """Decompress byte string to array of integers of any bit size <= 32. + This Python implementation is slow and only handles itemsizes 1, 2, 4, 8, + 16, 32, and 64. + Parameters ---------- data : byte str @@ -4530,6 +7492,13 @@ def unpack_ints(data, dtype, itemsize, runlen=0): runlen : int Number of consecutive integers, after which to start at next byte. + Examples + -------- + >>> unpack_ints(b'a', 'B', 1) + array([0, 1, 1, 0, 0, 0, 0, 1], dtype=uint8) + >>> unpack_ints(b'ab', 'B', 2) + array([1, 2, 0, 1, 1, 2, 0, 2], dtype=uint8) + """ if itemsize == 1: # bitarray data = numpy.fromstring(data, '|B') @@ -4542,8 +7511,8 @@ def unpack_ints(data, dtype, itemsize, runlen=0): dtype = numpy.dtype(dtype) if itemsize in (8, 16, 32, 64): return numpy.fromstring(data, dtype) - if itemsize < 1 or itemsize > 32: - raise ValueError("itemsize out of range: %i" % itemsize) + if itemsize not in (1, 2, 4, 8, 16, 32): + raise ValueError("itemsize not supported: %i" % itemsize) if dtype.kind not in "biu": raise ValueError("invalid dtype") @@ -4551,8 +7520,8 @@ def unpack_ints(data, dtype, itemsize, runlen=0): if itembytes != dtype.itemsize: raise ValueError("dtype.itemsize too small") if runlen == 0: - runlen = len(data) // itembytes - skipbits = runlen*itemsize % 8 + runlen = (8 * len(data)) // itemsize + skipbits = runlen * itemsize % 8 if skipbits: skipbits = 8 - skipbits shrbits = itembytes*8 - itemsize @@ -4563,7 +7532,7 @@ def unpack_ints(data, dtype, itemsize, runlen=0): l = runlen * (len(data)*8 // (runlen*itemsize + skipbits)) result = numpy.empty((l,), dtype) bitcount = 0 - for i in range(len(result)): + for i in range(l): start = bitcount // 8 s = data[start:start+itembytes] try: @@ -4637,7 +7606,7 @@ def reverse_bitorder(data): """Reverse bits in each byte of byte string or numpy array. Decode data where pixels with lower column values are stored in the - lower-order bits of the bytes (fill_order == 'lsb2msb'). + lower-order bits of the bytes (FillOrder is LSB2MSB). 
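The pure-Python unpacker above can be exercised on 4-bit data as a sanity check; this sketch follows the MSB-first packing shown in the 1- and 2-bit doctests:

    import numpy
    from tifffile import unpack_ints

    a = unpack_ints(b'\x12\x34', 'B', 4)   # two bytes hold four 4-bit samples
    assert numpy.array_equal(a, [1, 2, 3, 4])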
Parameters ---------- @@ -4647,33 +7616,19 @@ def reverse_bitorder(data): Examples -------- - >>> reverse_bitorder(b'\x01\x64') - b'\x80&' - + >>> reverse_bitorder(b'\\x01\\x64') + b'\\x80&' >>> data = numpy.array([1, 666], dtype='uint16') >>> reverse_bitorder(data) >>> data array([ 128, 16473], dtype=uint16) """ - table = ( - b'\x00\x80@\xc0 \xa0`\xe0\x10\x90P\xd00\xb0p\xf0\x08\x88H\xc8(\xa8h' - b'\xe8\x18\x98X\xd88\xb8x\xf8\x04\x84D\xc4$\xa4d\xe4\x14\x94T\xd44' - b'\xb4t\xf4\x0c\x8cL\xcc,\xacl\xec\x1c\x9c\\\xdc<\xbc|\xfc\x02\x82B' - b'\xc2"\xa2b\xe2\x12\x92R\xd22\xb2r\xf2\n\x8aJ\xca*\xaaj\xea\x1a' - b'\x9aZ\xda:\xbaz\xfa\x06\x86F\xc6&\xa6f\xe6\x16\x96V\xd66\xb6v\xf6' - b'\x0e\x8eN\xce.\xaen\xee\x1e\x9e^\xde>\xbe~\xfe\x01\x81A\xc1!\xa1a' - b'\xe1\x11\x91Q\xd11\xb1q\xf1\t\x89I\xc9)\xa9i\xe9\x19\x99Y\xd99' - b'\xb9y\xf9\x05\x85E\xc5%\xa5e\xe5\x15\x95U\xd55\xb5u\xf5\r\x8dM' - b'\xcd-\xadm\xed\x1d\x9d]\xdd=\xbd}\xfd\x03\x83C\xc3#\xa3c\xe3\x13' - b'\x93S\xd33\xb3s\xf3\x0b\x8bK\xcb+\xabk\xeb\x1b\x9b[\xdb;\xbb{\xfb' - b'\x07\x87G\xc7\'\xa7g\xe7\x17\x97W\xd77\xb7w\xf7\x0f\x8fO\xcf/\xafo' - b'\xef\x1f\x9f_\xdf?\xbf\x7f\xff') try: view = data.view('uint8') - numpy.take(numpy.fromstring(table, dtype='uint8'), view, out=view) + numpy.take(TIFF.REVERSE_BITORDER_ARRAY, view, out=view) except AttributeError: - return data.translate(table) + return data.translate(TIFF.REVERSE_BITORDER_BYTES) except ValueError: raise NotImplementedError("slices of arrays not supported") @@ -4717,30 +7672,66 @@ def reorient(image, orientation): Non-squeezed output of asarray() functions. Axes -3 and -2 must be image length and width respectively. orientation : int or str - One of TIFF_ORIENTATIONS keys or values. + One of TIFF.ORIENTATION names or values. """ - o = TIFF_ORIENTATIONS.get(orientation, orientation) - if o == 'top_left': + ORIENTATION = TIFF.ORIENTATION + orientation = enumarg(ORIENTATION, orientation) + + if orientation == ORIENTATION.TOPLEFT: return image - elif o == 'top_right': + elif orientation == ORIENTATION.TOPRIGHT: return image[..., ::-1, :] - elif o == 'bottom_left': + elif orientation == ORIENTATION.BOTLEFT: return image[..., ::-1, :, :] - elif o == 'bottom_right': + elif orientation == ORIENTATION.BOTRIGHT: return image[..., ::-1, ::-1, :] - elif o == 'left_top': + elif orientation == ORIENTATION.LEFTTOP: return numpy.swapaxes(image, -3, -2) - elif o == 'right_top': + elif orientation == ORIENTATION.RIGHTTOP: return numpy.swapaxes(image, -3, -2)[..., ::-1, :] - elif o == 'left_bottom': + elif orientation == ORIENTATION.RIGHTBOT: return numpy.swapaxes(image, -3, -2)[..., ::-1, :, :] - elif o == 'right_bottom': + elif orientation == ORIENTATION.LEFTBOT: return numpy.swapaxes(image, -3, -2)[..., ::-1, ::-1, :] -def reshape_nd(image, ndim): - """Return image array with at least ndim dimensions. +def repeat_nd(a, repeats): + """Return read-only view into input array with elements repeated. + + Zoom nD image by integer factors using nearest neighbor interpolation + (box filter). + + Parameters + ---------- + a : array_like + Input array. + repeats : sequence of int + The number of repetitions to apply along each dimension of input array. 
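A sketch of the enum-based reorient above, assuming TIFF.ORIENTATION is importable from the module; the array is a dummy single plane with a trailing sample axis:

    import numpy
    from tifffile import reorient, TIFF

    img = numpy.arange(6).reshape(1, 2, 3, 1)           # one 2x3 plane, 1 sample
    flipped = reorient(img, TIFF.ORIENTATION.TOPRIGHT)  # name 'TOPRIGHT' also works
    assert numpy.array_equal(flipped, img[..., ::-1, :])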
+ + Example + ------- + >>> repeat_nd([[1, 2], [3, 4]], (2, 2)) + array([[1, 1, 2, 2], + [1, 1, 2, 2], + [3, 3, 4, 4], + [3, 3, 4, 4]]) + + """ + a = numpy.asarray(a) + reshape = [] + shape = [] + strides = [] + for i, j, k in zip(a.strides, a.shape, repeats): + shape.extend((j, k)) + strides.extend((i, 0)) + reshape.append(j * k) + return numpy.lib.stride_tricks.as_strided( + a, shape, strides, writeable=False).reshape(reshape) + + +def reshape_nd(data_or_shape, ndim): + """Return image array or shape with at least ndim dimensions. Prepend 1s to image shape as necessary. @@ -4752,12 +7743,16 @@ def reshape_nd(image, ndim): (1, 2, 3) >>> reshape_nd(numpy.empty((3, 4, 5)), 3).shape (3, 4, 5) + >>> reshape_nd((2, 3), 3) + (1, 2, 3) """ - if image.ndim >= ndim: - return image - image = image.reshape((1,) * (ndim - image.ndim) + image.shape) - return image + is_shape = isinstance(data_or_shape, tuple) + shape = data_or_shape if is_shape else data_or_shape.shape + if len(shape) >= ndim: + return data_or_shape + shape = (1,) * (ndim - len(shape)) + shape + return shape if is_shape else data_or_shape.reshape(shape) def squeeze_axes(shape, axes, skip='XY'): @@ -4800,7 +7795,7 @@ def transpose_axes(image, axes, asaxes='CTZYX'): return image -def reshape_axes(axes, shape, newshape): +def reshape_axes(axes, shape, newshape, unknown='Q'): """Return axes matching new shape. Unknown dimensions are labelled 'Q'. @@ -4815,7 +7810,10 @@ def reshape_axes(axes, shape, newshape): newshape = tuple(newshape) if len(axes) != len(shape): raise ValueError("axes do not match shape") - if product(shape) != product(newshape): + + size = product(shape) + newsize = product(newshape) + if size != newsize: raise ValueError("can not reshape %s to %s" % (shape, newshape)) if not axes or not newshape: return '' @@ -4837,42 +7835,274 @@ def reshape_axes(axes, shape, newshape): result.append(axes[i]) i -= 1 else: - result.append('Q') + result.append(unknown) return ''.join(reversed(result[lendiff:])) -def stack_pages(pages, memmap=False, tempdir=None, *args, **kwargs): +def stack_pages(pages, out=None, maxworkers=1, *args, **kwargs): """Read data from sequence of TiffPage and stack them vertically. - If memmap is True, return an array stored in a binary file on disk. - Additional parameters are passsed to the page asarray function. + Additional parameters are passsed to the TiffPage.asarray function. 
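A shape-handling sketch for the axes helpers above; moving 'TYXC' data to the canonical 'CTZYX' order inserts a singleton Z axis:

    import numpy
    from tifffile import transpose_axes

    data = numpy.zeros((2, 3, 4, 5))                   # axes TYXC
    moved = transpose_axes(data, 'TYXC', asaxes='CTZYX')
    assert moved.shape == (5, 2, 1, 3, 4)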
""" - if len(pages) == 0: + npages = len(pages) + if npages == 0: raise ValueError("no pages") - if len(pages) == 1: - return pages[0].asarray(memmap=memmap, *args, **kwargs) - - data0 = pages[0].asarray(*args, **kwargs) - shape = (len(pages),) + data0.shape - if memmap: - with tempfile.NamedTemporaryFile(dir=tempdir) as fh: - data = numpy.memmap(fh, dtype=data0.dtype, shape=shape) + if npages == 1: + return pages[0].asarray(out=out, *args, **kwargs) + + page0 = next(p for p in pages if p is not None) + page0.asarray(validate=None) # ThreadPoolExecutor swallows exceptions + shape = (npages,) + page0.keyframe.shape + dtype = page0.keyframe.dtype + out = create_output(out, shape, dtype) + + if maxworkers is None: + maxworkers = multiprocessing.cpu_count() // 2 + page0.parent.filehandle.lock = maxworkers > 1 + + filecache = OpenFileCache(size=max(4, maxworkers), + lock=page0.parent.filehandle.lock) + + def func(page, index, out=out, filecache=filecache, + args=args, kwargs=kwargs): + """Read, decode, and copy page data.""" + if page is not None: + filecache.open(page.parent.filehandle) + out[index] = page.asarray(lock=filecache.lock, reopen=False, + validate=False, *args, **kwargs) + filecache.close(page.parent.filehandle) + + if maxworkers < 2: + for i, page in enumerate(pages): + func(page, i) else: - data = numpy.empty(shape, dtype=data0.dtype) + with concurrent.futures.ThreadPoolExecutor(maxworkers) as executor: + executor.map(func, pages, range(npages)) - data[0] = data0 - if memmap: - data.flush() - del data0 - for i, page in enumerate(pages[1:]): - data[i+1] = page.asarray(*args, **kwargs) - if memmap: - data.flush() + filecache.clear() + page0.parent.filehandle.lock = None - return data + return out + + +def clean_offsets_counts(offsets, counts): + """Return cleaned offsets and byte counts. + + Remove zero offsets and counts. Use to sanitize _offsets and _bytecounts + tag values for strips or tiles. + + """ + offsets = list(offsets) + counts = list(counts) + assert len(offsets) == len(counts) + j = 0 + for i, (o, b) in enumerate(zip(offsets, counts)): + if o > 0 and b > 0: + if i > j: + offsets[j] = o + counts[j] = b + j += 1 + elif b > 0 and o <= 0: + raise ValueError("invalid offset") + else: + warnings.warn("empty byte count") + if j == 0: + j = 1 + return offsets[:j], counts[:j] + + +def buffered_read(fh, lock, offsets, bytecounts, buffersize=2**26): + """Return iterator over blocks read from file.""" + length = len(offsets) + i = 0 + while i < length: + data = [] + with lock: + size = 0 + while size < buffersize and i < length: + fh.seek(offsets[i]) + bytecount = bytecounts[i] + data.append(fh.read(bytecount)) + size += bytecount + i += 1 + for block in data: + yield block + + +def create_output(out, shape, dtype, mode='w+', suffix='.memmap'): + """Return numpy array where image data of shape and dtype can copied. + + The 'out' parameter may have the following values or types: + + None + An empty array of shape and dtype is created and returned. + numpy.ndarray + An existing writable array of compatible dtype and shape. A view of + the same array is returned after verification. + 'memmap' or 'memmap:tempdir' + A memory-map to an array stored in a temporary binary file on disk + is created and returned. + str or open file + The file name or file object used to create a memory-map to an array + stored in a binary file on disk. The created memory-mapped array is + returned. 
+ + """ + if out is None: + return numpy.zeros(shape, dtype) + if isinstance(out, str) and out[:6] == 'memmap': + tempdir = out[7:] if len(out) > 7 else None + with tempfile.NamedTemporaryFile(dir=tempdir, suffix=suffix) as fh: + return numpy.memmap(fh, shape=shape, dtype=dtype, mode=mode) + if isinstance(out, numpy.ndarray): + if product(shape) != product(out.shape): + raise ValueError("incompatible output shape") + if not numpy.can_cast(dtype, out.dtype): + raise ValueError("incompatible output dtype") + return out.reshape(shape) + return numpy.memmap(out, shape=shape, dtype=dtype, mode=mode) + + +def matlabstr2py(s): + """Return Python object from Matlab string representation. + + Return str, bool, int, float, list (Matlab arrays or cells), or + dict (Matlab structures) types. + + Use to access ScanImage metadata. + + >>> matlabstr2py('1') + 1 + >>> matlabstr2py("['x y z' true false; 1 2.0 -3e4; NaN Inf @class]") + [['x y z', True, False], [1, 2.0, -30000.0], [nan, inf, '@class']] + >>> d = matlabstr2py("SI.hChannels.channelType = {'stripe' 'stripe'}\\n" + ... "SI.hChannels.channelsActive = 2") + >>> d['SI.hChannels.channelType'] + ['stripe', 'stripe'] + + """ + # TODO: handle invalid input + # TODO: review unboxing of multidimensional arrays + + def lex(s): + # return sequence of tokens from matlab string representation + tokens = ['['] + while True: + t, i = next_token(s) + if t is None: + break + if t == ';': + tokens.extend((']', '[')) + elif t == '[': + tokens.extend(('[', '[')) + elif t == ']': + tokens.extend((']', ']')) + else: + tokens.append(t) + s = s[i:] + tokens.append(']') + return tokens + + def next_token(s): + # return next token in matlab string + length = len(s) + if length == 0: + return None, 0 + i = 0 + while i < length and s[i] == ' ': + i += 1 + if i == length: + return None, i + if s[i] in '{[;]}': + return s[i], i + 1 + if s[i] == "'": + j = i + 1 + while j < length and s[j] != "'": + j += 1 + return s[i: j+1], j + 1 + j = i + while j < length and not s[j] in ' {[;]}': + j += 1 + return s[i:j], j + + def value(s, fail=False): + # return Python value of token + s = s.strip() + if not s: + return s + if len(s) == 1: + try: + return int(s) + except Exception: + if fail: + raise ValueError() + return s + if s[0] == "'": + if fail and s[-1] != "'" or "'" in s[1:-1]: + raise ValueError() + return s[1:-1] + if fail and any(i in s for i in " ';[]{}"): + raise ValueError() + if s[0] == '@': + return s + if s == 'true': + return True + if s == 'false': + return False + if '.' 
in s or 'e' in s: + return float(s) + try: + return int(s) + except Exception: + pass + try: + return float(s) # nan, inf + except Exception: + if fail: + raise ValueError() + return s + + def parse(s): + # return Python value from string representation of Matlab value + s = s.strip() + try: + return value(s, fail=True) + except ValueError: + pass + result = add2 = [] + levels = [add2] + for t in lex(s): + if t in '[{': + add2 = [] + levels.append(add2) + elif t in ']}': + x = levels.pop() + if len(x) == 1 and isinstance(x[0], list): + x = x[0] + add2 = levels[-1] + add2.append(x) + else: + add2.append(value(t)) + if len(result) == 1 and isinstance(result[0], list): + result = result[0] + return result + + if '\r' in s or '\n' in s: + # structure + d = {} + for line in s.splitlines(): + if not line.strip(): + continue + k, v = line.split('=', 1) + k = k.strip() + if any(c in k for c in " ';[]{}"): + continue + d[k] = parse(v.strip()) + return d + return parse(s) def stripnull(string, null=b'\x00'): @@ -4891,7 +8121,7 @@ def stripnull(string, null=b'\x00'): def stripascii(string): - """Return string truncated at last byte that is 7bit ASCII. + """Return string truncated at last byte that is 7-bit ASCII. Clean NULL separated and terminated TIFF strings. @@ -4902,35 +8132,88 @@ def stripascii(string): """ # TODO: pythonize this - ord_ = ord if sys.version_info[0] < 3 else lambda x: x i = len(string) while i: i -= 1 - if 8 < ord_(string[i]) < 127: + if 8 < byte2int(string[i]) < 127: break else: i = -1 return string[:i+1] +def asbool(value, true=(b'true', u'true'), false=(b'false', u'false')): + """Return string as bool if possible, else raise TypeError. + + >>> asbool(b' False ') + False + + """ + value = value.strip().lower() + if value in true: # might raise UnicodeWarning/BytesWarning + return True + if value in false: + return False + raise TypeError() + + def astype(value, types=None): - """Return argument as one of types if possible.""" + """Return argument as one of types if possible. + + >>> astype('42') + 42 + >>> astype('3.14') + 3.14 + >>> astype('True') + True + >>> astype(b'Neee-Wom') + 'Neee-Wom' + + """ if types is None: - types = int, float, bytes2str + types = int, float, asbool, bytes2str for typ in types: try: return typ(value) - except (ValueError, TypeError, UnicodeEncodeError): + except (ValueError, AttributeError, TypeError, UnicodeEncodeError): pass return value -def format_size(size): - """Return file size as string from byte size.""" - for unit in ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'): - if size < 2048: - return "%.f %s" % (size, unit) +def format_size(size, threshold=1536): + """Return file size as string from byte size. + + >>> format_size(1234) + '1234 B' + >>> format_size(12345678901) + '11.50 GiB' + + """ + if size < threshold: + return "%i B" % size + for unit in ('KiB', 'MiB', 'GiB', 'TiB', 'PiB'): size /= 1024.0 + if size < threshold: + return "%.2f %s" % (size, unit) + + +def identityfunc(arg): + """Single argument identity function. + + >>> identityfunc('arg') + 'arg' + + """ + return arg + + +def nullfunc(*args, **kwargs): + """Null function. + + >>> nullfunc('arg', kwarg='kwarg') + + """ + return def sequence(value): @@ -4946,13 +8229,14 @@ def sequence(value): len(value) return value except TypeError: - return value, + return (value,) def product(iterable): """Return product of sequence of numbers. Equivalent of functools.reduce(operator.mul, iterable, 1). + Multiplying numpy integers might overflow. 
>>> product([2**8, 2**30]) 274877906944 @@ -4977,6 +8261,7 @@ def natural_sorted(iterable): """ def sortkey(x): return [(int(c) if c.isdigit() else c) for c in re.split(numbers, x)] + numbers = re.compile(r'(\d+)') return sorted(iterable, key=sortkey) @@ -5027,6 +8312,180 @@ def julian_datetime(julianday, milisecond=0): hour, minute, second, milisecond) +def byteorder_isnative(byteorder): + """Return if byteorder matches the system's byteorder. + + >>> byteorder_isnative('=') + True + + """ + if byteorder == '=' or byteorder == sys.byteorder: + return True + keys = {'big': '>', 'little': '<'} + return keys.get(byteorder, byteorder) == keys[sys.byteorder] + + +def recarray2dict(recarray): + """Return numpy.recarray as dict.""" + # TODO: subarrays + result = {} + for descr, value in zip(recarray.dtype.descr, recarray): + name, dtype = descr[:2] + if dtype[1] == 'S': + value = bytes2str(stripnull(value)) + elif value.ndim < 2: + value = value.tolist() + result[name] = value + return result + + +def xml2dict(xml, sanitize=True, prefix=None): + """Return XML as dict. + + >>> xml2dict('<?xml version="1.0" ?><root attr="name"><key>1</key></root>') + {'root': {'key': 1, 'attr': 'name'}} + + """ + from collections import defaultdict # delayed import + from xml.etree import cElementTree as etree # delayed import + + at = tx = '' + if prefix: + at, tx = prefix + + def astype(value): + # return value as int, float, bool, or str + for t in (int, float, asbool): + try: + return t(value) + except Exception: + pass + return value + + def etree2dict(t): + # adapted from https://stackoverflow.com/a/10077069/453463 + key = t.tag + if sanitize: + key = key.rsplit('}', 1)[-1] + d = {key: {} if t.attrib else None} + children = list(t) + if children: + dd = defaultdict(list) + for dc in map(etree2dict, children): + for k, v in dc.items(): + dd[k].append(astype(v)) + d = {key: {k: astype(v[0]) if len(v) == 1 else astype(v) + for k, v in dd.items()}} + if t.attrib: + d[key].update((at + k, astype(v)) for k, v in t.attrib.items()) + if t.text: + text = t.text.strip() + if children or t.attrib: + if text: + d[key][tx + 'value'] = astype(text) + else: + d[key] = astype(text) + return d + + return etree2dict(etree.fromstring(xml)) + + +def pformat_xml(arg): + """Return pretty formatted XML.""" + try: + import lxml.etree as etree # delayed import + if not isinstance(arg, bytes): + arg = arg.encode('utf-8') + xml = etree.fromstring(arg) + xml = etree.tostring(xml, pretty_print=True, encoding="unicode") + except Exception: + xml = bytes2str(arg).replace('><', '>\n<').replace('><', '>\n<') + return xml.replace(' ', ' ').replace('\t', ' ') + + +def pformat(arg, maxlines=None, linewidth=None, compact=True): + """Return pretty formatted representation of object as string.""" + if maxlines is None: + maxlines = TIFF.PRINT_MAX_LINES + elif not maxlines: + maxlines = 2**32 + if linewidth is None: + linewidth = TIFF.PRINT_LINE_WIDTH + elif not linewidth: + linewidth = 2**32 + + numpy.set_printoptions(threshold=100, linewidth=linewidth) + + if isinstance(arg, basestring): + if arg[:5].lower() in ('<?xml', b'<?xml'): + arg = pformat_xml(arg) + elif isinstance(arg, bytes): + try: + arg = bytes2str(arg) + arg = arg.replace('\r', '\n').replace('\n\n', '\n') + except Exception: + import binascii # delayed import + import pprint # delayed import + arg = binascii.hexlify(arg) + arg = pprint.pformat(arg, width=linewidth) + maxlines = min(maxlines, 16) + arg = arg.rstrip() + elif isinstance(arg, numpy.record): + arg = arg.pprint() + else: 
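A sketch of the new xml2dict helper on a small fragment, showing attribute promotion and numeric coercion of repeated child elements:

    from tifffile import xml2dict

    d = xml2dict('<level a="0"><b>1</b><b>2.5</b></level>')
    assert d == {'level': {'a': 0, 'b': [1, 2.5]}}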
+ from pprint import pformat # delayed import + compact = {} if sys.version_info[0] == 2 else dict(compact=compact) + arg = pformat(arg, width=linewidth, **compact) + + argl = list(arg.splitlines()) + if len(argl) > maxlines: + arg = '\n'.join(argl[:maxlines] + + ['...truncated to %i lines.' % maxlines]) + return arg + + +def snipstr(string, length=16, ellipse=None): + """Return string cut in middle to specified length. + + >>> snipstr('abcdefghijklmnop', 8) + 'abcd…nop' + + """ + size = len(string) + if size <= length: + return string + if ellipse is None: + if isinstance(string, bytes): + ellipse = b'...' + else: + ellipse = u'\u2026' + esize = len(ellipse) + if length < esize + 1: + return string[:length] + if length < esize + 4: + return string[:length-esize] + ellipse + half = (length - esize) // 2 + return string[:half + (length-esize) % 2] + ellipse + string[-half:] + + +def enumarg(enum, arg): + """Return enum member from its name or value. + + >>> enumarg(TIFF.PHOTOMETRIC, 2) + <PHOTOMETRIC.RGB: 2> + >>> enumarg(TIFF.PHOTOMETRIC, 'RGB') + <PHOTOMETRIC.RGB: 2> + + """ + try: + return enum(arg) + except Exception: + try: + return enum[arg.upper()] + except Exception: + raise ValueError("invalid argument %s" % arg) + + def parse_kwargs(kwargs, *keys, **keyvalues): """Return dict with keys from keys|keyvals and values from kwargs|keyvals. @@ -5068,958 +8527,85 @@ def update_kwargs(kwargs, **keyvalues): kwargs[key] = value -class TIFF_SUBFILE_TYPES(object): - def __getitem__(self, key): - result = [] - if key & 1: - result.append('reduced_image') - if key & 2: - result.append('page') - if key & 4: - result.append('mask') - return tuple(result) - - -TIFF_PHOTOMETRICS = { - 0: 'miniswhite', - 1: 'minisblack', - 2: 'rgb', - 3: 'palette', - 4: 'mask', - 5: 'separated', # CMYK - 6: 'ycbcr', - 8: 'cielab', - 9: 'icclab', - 10: 'itulab', - 32803: 'cfa', # Color Filter Array - 32844: 'logl', - 32845: 'logluv', - 34892: 'linear_raw' -} - -TIFF_COMPESSIONS = { - 1: None, - 2: 'ccittrle', - 3: 'ccittfax3', - 4: 'ccittfax4', - 5: 'lzw', - 6: 'ojpeg', - 7: 'jpeg', - 8: 'adobe_deflate', - 9: 't85', - 10: 't43', - 32766: 'next', - 32771: 'ccittrlew', - 32773: 'packbits', - 32809: 'thunderscan', - 32895: 'it8ctpad', - 32896: 'it8lw', - 32897: 'it8mp', - 32898: 'it8bl', - 32908: 'pixarfilm', - 32909: 'pixarlog', - 32946: 'deflate', - 32947: 'dcs', - 34661: 'jbig', - 34676: 'sgilog', - 34677: 'sgilog24', - 34712: 'jp2000', - 34713: 'nef', - 34925: 'lzma', -} - -TIFF_DECOMPESSORS = { - None: lambda x: x, - 'adobe_deflate': zlib.decompress, - 'deflate': zlib.decompress, - 'packbits': decode_packbits, - 'lzw': decode_lzw, - # 'jpeg': decode_jpeg -} - -if lzma: - TIFF_DECOMPESSORS['lzma'] = lzma.decompress - -TIFF_DATA_TYPES = { - 1: '1B', # BYTE 8-bit unsigned integer. - 2: '1s', # ASCII 8-bit byte that contains a 7-bit ASCII code; - # the last byte must be NULL (binary zero). - 3: '1H', # SHORT 16-bit (2-byte) unsigned integer - 4: '1I', # LONG 32-bit (4-byte) unsigned integer. - 5: '2I', # RATIONAL Two LONGs: the first represents the numerator of - # a fraction; the second, the denominator. - 6: '1b', # SBYTE An 8-bit signed (twos-complement) integer. - 7: '1s', # UNDEFINED An 8-bit byte that may contain anything, - # depending on the definition of the field. - 8: '1h', # SSHORT A 16-bit (2-byte) signed (twos-complement) integer. - 9: '1i', # SLONG A 32-bit (4-byte) signed (twos-complement) integer. 
- 10: '2i', # SRATIONAL Two SLONGs: the first represents the numerator - # of a fraction, the second the denominator. - 11: '1f', # FLOAT Single precision (4-byte) IEEE format. - 12: '1d', # DOUBLE Double precision (8-byte) IEEE format. - 13: '1I', # IFD unsigned 4 byte IFD offset. - #14: '', # UNICODE - #15: '', # COMPLEX - 16: '1Q', # LONG8 unsigned 8 byte integer (BigTiff) - 17: '1q', # SLONG8 signed 8 byte integer (BigTiff) - 18: '1Q', # IFD8 unsigned 8 byte IFD offset (BigTiff) -} - -TIFF_SAMPLE_FORMATS = { - 1: 'uint', - 2: 'int', - 3: 'float', - #4: 'void', - #5: 'complex_int', - 6: 'complex', -} - -TIFF_SAMPLE_DTYPES = { - ('uint', 1): '?', # bitmap - ('uint', 2): 'B', - ('uint', 3): 'B', - ('uint', 4): 'B', - ('uint', 5): 'B', - ('uint', 6): 'B', - ('uint', 7): 'B', - ('uint', 8): 'B', - ('uint', 9): 'H', - ('uint', 10): 'H', - ('uint', 11): 'H', - ('uint', 12): 'H', - ('uint', 13): 'H', - ('uint', 14): 'H', - ('uint', 15): 'H', - ('uint', 16): 'H', - ('uint', 17): 'I', - ('uint', 18): 'I', - ('uint', 19): 'I', - ('uint', 20): 'I', - ('uint', 21): 'I', - ('uint', 22): 'I', - ('uint', 23): 'I', - ('uint', 24): 'I', - ('uint', 25): 'I', - ('uint', 26): 'I', - ('uint', 27): 'I', - ('uint', 28): 'I', - ('uint', 29): 'I', - ('uint', 30): 'I', - ('uint', 31): 'I', - ('uint', 32): 'I', - ('uint', 64): 'Q', - ('int', 8): 'b', - ('int', 16): 'h', - ('int', 32): 'i', - ('int', 64): 'q', - ('float', 16): 'e', - ('float', 32): 'f', - ('float', 64): 'd', - ('complex', 64): 'F', - ('complex', 128): 'D', - ('uint', (5, 6, 5)): 'B', -} - -TIFF_ORIENTATIONS = { - 1: 'top_left', - 2: 'top_right', - 3: 'bottom_right', - 4: 'bottom_left', - 5: 'left_top', - 6: 'right_top', - 7: 'right_bottom', - 8: 'left_bottom', -} - -# TODO: is there a standard for character axes labels? -AXES_LABELS = { - 'X': 'width', - 'Y': 'height', - 'Z': 'depth', - 'S': 'sample', # rgb(a) - 'I': 'series', # general sequence, plane, page, IFD - 'T': 'time', - 'C': 'channel', # color, emission wavelength - 'A': 'angle', - 'P': 'phase', # formerly F # P is Position in LSM! 
- 'R': 'tile', # region, point, mosaic - 'H': 'lifetime', # histogram - 'E': 'lambda', # excitation wavelength - 'L': 'exposure', # lux - 'V': 'event', - 'Q': 'other', - 'M': 'mosaic', # LSM 6 -} - -AXES_LABELS.update(dict((v, k) for k, v in AXES_LABELS.items())) - -# Map OME pixel types to numpy dtype -OME_PIXEL_TYPES = { - 'int8': 'i1', - 'int16': 'i2', - 'int32': 'i4', - 'uint8': 'u1', - 'uint16': 'u2', - 'uint32': 'u4', - 'float': 'f4', - # 'bit': 'bit', - 'double': 'f8', - 'complex': 'c8', - 'double-complex': 'c16', -} - -# NIH Image PicHeader v1.63 -NIH_IMAGE_HEADER = [ - ('fileid', 'a8'), - ('nlines', 'i2'), - ('pixelsperline', 'i2'), - ('version', 'i2'), - ('oldlutmode', 'i2'), - ('oldncolors', 'i2'), - ('colors', 'u1', (3, 32)), - ('oldcolorstart', 'i2'), - ('colorwidth', 'i2'), - ('extracolors', 'u2', (6, 3)), - ('nextracolors', 'i2'), - ('foregroundindex', 'i2'), - ('backgroundindex', 'i2'), - ('xscale', 'f8'), - ('_x0', 'i2'), - ('_x1', 'i2'), - ('units_t', 'i2'), # NIH_UNITS_TYPE - ('p1', [('x', 'i2'), ('y', 'i2')]), - ('p2', [('x', 'i2'), ('y', 'i2')]), - ('curvefit_t', 'i2'), # NIH_CURVEFIT_TYPE - ('ncoefficients', 'i2'), - ('coeff', 'f8', 6), - ('_um_len', 'u1'), - ('um', 'a15'), - ('_x2', 'u1'), - ('binarypic', 'b1'), - ('slicestart', 'i2'), - ('sliceend', 'i2'), - ('scalemagnification', 'f4'), - ('nslices', 'i2'), - ('slicespacing', 'f4'), - ('currentslice', 'i2'), - ('frameinterval', 'f4'), - ('pixelaspectratio', 'f4'), - ('colorstart', 'i2'), - ('colorend', 'i2'), - ('ncolors', 'i2'), - ('fill1', '3u2'), - ('fill2', '3u2'), - ('colortable_t', 'u1'), # NIH_COLORTABLE_TYPE - ('lutmode_t', 'u1'), # NIH_LUTMODE_TYPE - ('invertedtable', 'b1'), - ('zeroclip', 'b1'), - ('_xunit_len', 'u1'), - ('xunit', 'a11'), - ('stacktype_t', 'i2'), # NIH_STACKTYPE_TYPE -] - -NIH_COLORTABLE_TYPE = ( - 'CustomTable', 'AppleDefault', 'Pseudo20', 'Pseudo32', 'Rainbow', - 'Fire1', 'Fire2', 'Ice', 'Grays', 'Spectrum') - -NIH_LUTMODE_TYPE = ( - 'PseudoColor', 'OldAppleDefault', 'OldSpectrum', 'GrayScale', - 'ColorLut', 'CustomGrayscale') - -NIH_CURVEFIT_TYPE = ( - 'StraightLine', 'Poly2', 'Poly3', 'Poly4', 'Poly5', 'ExpoFit', - 'PowerFit', 'LogFit', 'RodbardFit', 'SpareFit1', 'Uncalibrated', - 'UncalibratedOD') - -NIH_UNITS_TYPE = ( - 'Nanometers', 'Micrometers', 'Millimeters', 'Centimeters', 'Meters', - 'Kilometers', 'Inches', 'Feet', 'Miles', 'Pixels', 'OtherUnits') - -NIH_STACKTYPE_TYPE = ( - 'VolumeStack', 'RGBStack', 'MovieStack', 'HSVStack') - -# Map Universal Imaging Corporation MetaMorph internal tag ids to name and type -UIC_TAGS = { - 0: ('auto_scale', int), - 1: ('min_scale', int), - 2: ('max_scale', int), - 3: ('spatial_calibration', int), - 4: ('x_calibration', Fraction), - 5: ('y_calibration', Fraction), - 6: ('calibration_units', str), - 7: ('name', str), - 8: ('thresh_state', int), - 9: ('thresh_state_red', int), - 10: ('tagid_10', None), # undefined - 11: ('thresh_state_green', int), - 12: ('thresh_state_blue', int), - 13: ('thresh_state_lo', int), - 14: ('thresh_state_hi', int), - 15: ('zoom', int), - 16: ('create_time', julian_datetime), - 17: ('last_saved_time', julian_datetime), - 18: ('current_buffer', int), - 19: ('gray_fit', None), - 20: ('gray_point_count', None), - 21: ('gray_x', Fraction), - 22: ('gray_y', Fraction), - 23: ('gray_min', Fraction), - 24: ('gray_max', Fraction), - 25: ('gray_unit_name', str), - 26: ('standard_lut', int), - 27: ('wavelength', int), - 28: ('stage_position', '(%i,2,2)u4'), # N xy positions as fractions - 29: ('camera_chip_offset', 
'(%i,2,2)u4'), # N xy offsets as fractions - 30: ('overlay_mask', None), - 31: ('overlay_compress', None), - 32: ('overlay', None), - 33: ('special_overlay_mask', None), - 34: ('special_overlay_compress', None), - 35: ('special_overlay', None), - 36: ('image_property', read_uic_image_property), - 37: ('stage_label', '%ip'), # N str - 38: ('autoscale_lo_info', Fraction), - 39: ('autoscale_hi_info', Fraction), - 40: ('absolute_z', '(%i,2)u4'), # N fractions - 41: ('absolute_z_valid', '(%i,)u4'), # N long - 42: ('gamma', int), - 43: ('gamma_red', int), - 44: ('gamma_green', int), - 45: ('gamma_blue', int), - 46: ('camera_bin', int), - 47: ('new_lut', int), - 48: ('image_property_ex', None), - 49: ('plane_property', int), - 50: ('user_lut_table', '(256,3)u1'), - 51: ('red_autoscale_info', int), - 52: ('red_autoscale_lo_info', Fraction), - 53: ('red_autoscale_hi_info', Fraction), - 54: ('red_minscale_info', int), - 55: ('red_maxscale_info', int), - 56: ('green_autoscale_info', int), - 57: ('green_autoscale_lo_info', Fraction), - 58: ('green_autoscale_hi_info', Fraction), - 59: ('green_minscale_info', int), - 60: ('green_maxscale_info', int), - 61: ('blue_autoscale_info', int), - 62: ('blue_autoscale_lo_info', Fraction), - 63: ('blue_autoscale_hi_info', Fraction), - 64: ('blue_min_scale_info', int), - 65: ('blue_max_scale_info', int), - #66: ('overlay_plane_color', read_uic_overlay_plane_color), -} - -# Olympus FluoView -MM_DIMENSION = [ - ('name', 'a16'), - ('size', 'i4'), - ('origin', 'f8'), - ('resolution', 'f8'), - ('unit', 'a64'), -] - -MM_HEADER = [ - ('header_flag', 'i2'), - ('image_type', 'u1'), - ('image_name', 'a257'), - ('offset_data', 'u4'), - ('palette_size', 'i4'), - ('offset_palette0', 'u4'), - ('offset_palette1', 'u4'), - ('comment_size', 'i4'), - ('offset_comment', 'u4'), - ('dimensions', MM_DIMENSION, 10), - ('offset_position', 'u4'), - ('map_type', 'i2'), - ('map_min', 'f8'), - ('map_max', 'f8'), - ('min_value', 'f8'), - ('max_value', 'f8'), - ('offset_map', 'u4'), - ('gamma', 'f8'), - ('offset', 'f8'), - ('gray_channel', MM_DIMENSION), - ('offset_thumbnail', 'u4'), - ('voice_field', 'i4'), - ('offset_voice_field', 'u4'), -] - -# Carl Zeiss LSM -CZ_LSM_INFO = [ - ('magic_number', 'u4'), - ('structure_size', 'i4'), - ('dimension_x', 'i4'), - ('dimension_y', 'i4'), - ('dimension_z', 'i4'), - ('dimension_channels', 'i4'), - ('dimension_time', 'i4'), - ('data_type', 'i4'), # CZ_DATA_TYPES - ('thumbnail_x', 'i4'), - ('thumbnail_y', 'i4'), - ('voxel_size_x', 'f8'), - ('voxel_size_y', 'f8'), - ('voxel_size_z', 'f8'), - ('origin_x', 'f8'), - ('origin_y', 'f8'), - ('origin_z', 'f8'), - ('scan_type', 'u2'), - ('spectral_scan', 'u2'), - ('type_of_data', 'u4'), # CZ_TYPE_OF_DATA - ('offset_vector_overlay', 'u4'), - ('offset_input_lut', 'u4'), - ('offset_output_lut', 'u4'), - ('offset_channel_colors', 'u4'), - ('time_interval', 'f8'), - ('offset_channel_data_types', 'u4'), - ('offset_scan_info', 'u4'), # CZ_LSM_SCAN_INFO - ('offset_ks_data', 'u4'), - ('offset_time_stamps', 'u4'), - ('offset_event_list', 'u4'), - ('offset_roi', 'u4'), - ('offset_bleach_roi', 'u4'), - ('offset_next_recording', 'u4'), - # LSM 2.0 ends here - ('display_aspect_x', 'f8'), - ('display_aspect_y', 'f8'), - ('display_aspect_z', 'f8'), - ('display_aspect_time', 'f8'), - ('offset_mean_of_roi_overlay', 'u4'), - ('offset_topo_isoline_overlay', 'u4'), - ('offset_topo_profile_overlay', 'u4'), - ('offset_linescan_overlay', 'u4'), - ('offset_toolbar_flags', 'u4'), - ('offset_channel_wavelength', 'u4'), - 
('offset_channel_factors', 'u4'), - ('objective_sphere_correction', 'f8'), - ('offset_unmix_parameters', 'u4'), - # LSM 3.2, 4.0 end here - ('offset_acquisition_parameters', 'u4'), - ('offset_characteristics', 'u4'), - ('offset_palette', 'u4'), - ('time_difference_x', 'f8'), - ('time_difference_y', 'f8'), - ('time_difference_z', 'f8'), - ('internal_use_1', 'u4'), - ('dimension_p', 'i4'), - ('dimension_m', 'i4'), - ('dimensions_reserved', '16i4'), - ('offset_tile_positions', 'u4'), - ('reserved_1', '9u4'), - ('offset_positions', 'u4'), - ('reserved_2', '21u4'), # must be 0 -] - -# Import functions for LSM_INFO sub-records -CZ_LSM_INFO_READERS = { - 'scan_info': read_cz_lsm_scan_info, - 'time_stamps': read_cz_lsm_time_stamps, - 'event_list': read_cz_lsm_event_list, - 'channel_colors': read_cz_lsm_floatpairs, - 'positions': read_cz_lsm_floatpairs, - 'tile_positions': read_cz_lsm_floatpairs, -} - -# Map cz_lsm_info.scan_type to dimension order -CZ_SCAN_TYPES = { - 0: 'XYZCT', # x-y-z scan - 1: 'XYZCT', # z scan (x-z plane) - 2: 'XYZCT', # line scan - 3: 'XYTCZ', # time series x-y - 4: 'XYZTC', # time series x-z - 5: 'XYTCZ', # time series 'Mean of ROIs' - 6: 'XYZTC', # time series x-y-z - 7: 'XYCTZ', # spline scan - 8: 'XYCZT', # spline scan x-z - 9: 'XYTCZ', # time series spline plane x-z - 10: 'XYZCT', # point mode -} - -# Map dimension codes to cz_lsm_info attribute -CZ_DIMENSIONS = { - 'X': 'dimension_x', - 'Y': 'dimension_y', - 'Z': 'dimension_z', - 'C': 'dimension_channels', - 'T': 'dimension_time', - 'P': 'dimension_p', - 'M': 'dimension_m', -} - -# Description of cz_lsm_info.data_type -CZ_DATA_TYPES = { - 0: 'varying data types', - 1: '8 bit unsigned integer', - 2: '12 bit unsigned integer', - 5: '32 bit float', -} - -# Description of cz_lsm_info.type_of_data -CZ_TYPE_OF_DATA = { - 0: 'Original scan data', - 1: 'Calculated data', - 2: '3D reconstruction', - 3: 'Topography height map', -} - -CZ_LSM_SCAN_INFO_ARRAYS = { - 0x20000000: "tracks", - 0x30000000: "lasers", - 0x60000000: "detection_channels", - 0x80000000: "illumination_channels", - 0xa0000000: "beam_splitters", - 0xc0000000: "data_channels", - 0x11000000: "timers", - 0x13000000: "markers", -} - -CZ_LSM_SCAN_INFO_STRUCTS = { - # 0x10000000: "recording", - 0x40000000: "track", - 0x50000000: "laser", - 0x70000000: "detection_channel", - 0x90000000: "illumination_channel", - 0xb0000000: "beam_splitter", - 0xd0000000: "data_channel", - 0x12000000: "timer", - 0x14000000: "marker", -} - -CZ_LSM_SCAN_INFO_ATTRIBUTES = { - # recording - 0x10000001: "name", - 0x10000002: "description", - 0x10000003: "notes", - 0x10000004: "objective", - 0x10000005: "processing_summary", - 0x10000006: "special_scan_mode", - 0x10000007: "scan_type", - 0x10000008: "scan_mode", - 0x10000009: "number_of_stacks", - 0x1000000a: "lines_per_plane", - 0x1000000b: "samples_per_line", - 0x1000000c: "planes_per_volume", - 0x1000000d: "images_width", - 0x1000000e: "images_height", - 0x1000000f: "images_number_planes", - 0x10000010: "images_number_stacks", - 0x10000011: "images_number_channels", - 0x10000012: "linscan_xy_size", - 0x10000013: "scan_direction", - 0x10000014: "time_series", - 0x10000015: "original_scan_data", - 0x10000016: "zoom_x", - 0x10000017: "zoom_y", - 0x10000018: "zoom_z", - 0x10000019: "sample_0x", - 0x1000001a: "sample_0y", - 0x1000001b: "sample_0z", - 0x1000001c: "sample_spacing", - 0x1000001d: "line_spacing", - 0x1000001e: "plane_spacing", - 0x1000001f: "plane_width", - 0x10000020: "plane_height", - 0x10000021: "volume_depth", - 0x10000023: 
"nutation", - 0x10000034: "rotation", - 0x10000035: "precession", - 0x10000036: "sample_0time", - 0x10000037: "start_scan_trigger_in", - 0x10000038: "start_scan_trigger_out", - 0x10000039: "start_scan_event", - 0x10000040: "start_scan_time", - 0x10000041: "stop_scan_trigger_in", - 0x10000042: "stop_scan_trigger_out", - 0x10000043: "stop_scan_event", - 0x10000044: "stop_scan_time", - 0x10000045: "use_rois", - 0x10000046: "use_reduced_memory_rois", - 0x10000047: "user", - 0x10000048: "use_bc_correction", - 0x10000049: "position_bc_correction1", - 0x10000050: "position_bc_correction2", - 0x10000051: "interpolation_y", - 0x10000052: "camera_binning", - 0x10000053: "camera_supersampling", - 0x10000054: "camera_frame_width", - 0x10000055: "camera_frame_height", - 0x10000056: "camera_offset_x", - 0x10000057: "camera_offset_y", - 0x10000059: "rt_binning", - 0x1000005a: "rt_frame_width", - 0x1000005b: "rt_frame_height", - 0x1000005c: "rt_region_width", - 0x1000005d: "rt_region_height", - 0x1000005e: "rt_offset_x", - 0x1000005f: "rt_offset_y", - 0x10000060: "rt_zoom", - 0x10000061: "rt_line_period", - 0x10000062: "prescan", - 0x10000063: "scan_direction_z", - # track - 0x40000001: "multiplex_type", # 0 after line; 1 after frame - 0x40000002: "multiplex_order", - 0x40000003: "sampling_mode", # 0 sample; 1 line average; 2 frame average - 0x40000004: "sampling_method", # 1 mean; 2 sum - 0x40000005: "sampling_number", - 0x40000006: "acquire", - 0x40000007: "sample_observation_time", - 0x4000000b: "time_between_stacks", - 0x4000000c: "name", - 0x4000000d: "collimator1_name", - 0x4000000e: "collimator1_position", - 0x4000000f: "collimator2_name", - 0x40000010: "collimator2_position", - 0x40000011: "is_bleach_track", - 0x40000012: "is_bleach_after_scan_number", - 0x40000013: "bleach_scan_number", - 0x40000014: "trigger_in", - 0x40000015: "trigger_out", - 0x40000016: "is_ratio_track", - 0x40000017: "bleach_count", - 0x40000018: "spi_center_wavelength", - 0x40000019: "pixel_time", - 0x40000021: "condensor_frontlens", - 0x40000023: "field_stop_value", - 0x40000024: "id_condensor_aperture", - 0x40000025: "condensor_aperture", - 0x40000026: "id_condensor_revolver", - 0x40000027: "condensor_filter", - 0x40000028: "id_transmission_filter1", - 0x40000029: "id_transmission1", - 0x40000030: "id_transmission_filter2", - 0x40000031: "id_transmission2", - 0x40000032: "repeat_bleach", - 0x40000033: "enable_spot_bleach_pos", - 0x40000034: "spot_bleach_posx", - 0x40000035: "spot_bleach_posy", - 0x40000036: "spot_bleach_posz", - 0x40000037: "id_tubelens", - 0x40000038: "id_tubelens_position", - 0x40000039: "transmitted_light", - 0x4000003a: "reflected_light", - 0x4000003b: "simultan_grab_and_bleach", - 0x4000003c: "bleach_pixel_time", - # laser - 0x50000001: "name", - 0x50000002: "acquire", - 0x50000003: "power", - # detection_channel - 0x70000001: "integration_mode", - 0x70000002: "special_mode", - 0x70000003: "detector_gain_first", - 0x70000004: "detector_gain_last", - 0x70000005: "amplifier_gain_first", - 0x70000006: "amplifier_gain_last", - 0x70000007: "amplifier_offs_first", - 0x70000008: "amplifier_offs_last", - 0x70000009: "pinhole_diameter", - 0x7000000a: "counting_trigger", - 0x7000000b: "acquire", - 0x7000000c: "point_detector_name", - 0x7000000d: "amplifier_name", - 0x7000000e: "pinhole_name", - 0x7000000f: "filter_set_name", - 0x70000010: "filter_name", - 0x70000013: "integrator_name", - 0x70000014: "channel_name", - 0x70000015: "detector_gain_bc1", - 0x70000016: "detector_gain_bc2", - 0x70000017: 
"amplifier_gain_bc1", - 0x70000018: "amplifier_gain_bc2", - 0x70000019: "amplifier_offset_bc1", - 0x70000020: "amplifier_offset_bc2", - 0x70000021: "spectral_scan_channels", - 0x70000022: "spi_wavelength_start", - 0x70000023: "spi_wavelength_stop", - 0x70000026: "dye_name", - 0x70000027: "dye_folder", - # illumination_channel - 0x90000001: "name", - 0x90000002: "power", - 0x90000003: "wavelength", - 0x90000004: "aquire", - 0x90000005: "detchannel_name", - 0x90000006: "power_bc1", - 0x90000007: "power_bc2", - # beam_splitter - 0xb0000001: "filter_set", - 0xb0000002: "filter", - 0xb0000003: "name", - # data_channel - 0xd0000001: "name", - 0xd0000003: "acquire", - 0xd0000004: "color", - 0xd0000005: "sample_type", - 0xd0000006: "bits_per_sample", - 0xd0000007: "ratio_type", - 0xd0000008: "ratio_track1", - 0xd0000009: "ratio_track2", - 0xd000000a: "ratio_channel1", - 0xd000000b: "ratio_channel2", - 0xd000000c: "ratio_const1", - 0xd000000d: "ratio_const2", - 0xd000000e: "ratio_const3", - 0xd000000f: "ratio_const4", - 0xd0000010: "ratio_const5", - 0xd0000011: "ratio_const6", - 0xd0000012: "ratio_first_images1", - 0xd0000013: "ratio_first_images2", - 0xd0000014: "dye_name", - 0xd0000015: "dye_folder", - 0xd0000016: "spectrum", - 0xd0000017: "acquire", - # timer - 0x12000001: "name", - 0x12000002: "description", - 0x12000003: "interval", - 0x12000004: "trigger_in", - 0x12000005: "trigger_out", - 0x12000006: "activation_time", - 0x12000007: "activation_number", - # marker - 0x14000001: "name", - 0x14000002: "description", - 0x14000003: "trigger_in", - 0x14000004: "trigger_out", -} - -# TVIPS metadata from EMMENU Help file -TVIPS_HEADER_V1 = [ - ('version', 'i4'), - ('comment_v1', 'a80'), - ('high_tension', 'i4'), - ('spherical_aberration', 'i4'), - ('illumination_aperture', 'i4'), - ('magnification', 'i4'), - ('post-magnification', 'i4'), - ('focal_length', 'i4'), - ('defocus', 'i4'), - ('astigmatism', 'i4'), - ('astigmatism_direction', 'i4'), - ('biprism_voltage', 'i4'), - ('specimen_tilt_angle', 'i4'), - ('specimen_tilt_direction', 'i4'), - ('illumination_tilt_direction', 'i4'), - ('illumination_tilt_angle', 'i4'), - ('image_mode', 'i4'), - ('energy_spread', 'i4'), - ('chromatic_aberration', 'i4'), - ('shutter_type', 'i4'), - ('defocus_spread', 'i4'), - ('ccd_number', 'i4'), - ('ccd_size', 'i4'), - ('offset_x_v1', 'i4'), - ('offset_y_v1', 'i4'), - ('physical_pixel_size', 'i4'), - ('binning', 'i4'), - ('readout_speed', 'i4'), - ('gain_v1', 'i4'), - ('sensitivity_v1', 'i4'), - ('exposure_time_v1', 'i4'), - ('flat_corrected', 'i4'), - ('dead_px_corrected', 'i4'), - ('image_mean', 'i4'), - ('image_std', 'i4'), - ('displacement_x', 'i4'), - ('displacement_y', 'i4'), - ('date_v1', 'i4'), - ('time_v1', 'i4'), - ('image_min', 'i4'), - ('image_max', 'i4'), - ('image_statistics_quality', 'i4'), -] - -TVIPS_HEADER_V2 = [ - ('image_name', 'V160'), # utf16 - ('image_folder', 'V160'), - ('image_size_x', 'i4'), - ('image_size_y', 'i4'), - ('image_size_z', 'i4'), - ('image_size_e', 'i4'), - ('image_data_type', 'i4'), - ('date', 'i4'), - ('time', 'i4'), - ('comment', 'V1024'), - ('image_history', 'V1024'), - ('scaling', '16f4'), - ('image_statistics', '16c16'), - ('image_type', 'i4'), - ('image_display_type', 'i4'), - ('pixel_size_x', 'f4'), # distance between two px in x, [nm] - ('pixel_size_y', 'f4'), # distance between two px in y, [nm] - ('image_distance_z', 'f4'), - ('image_distance_e', 'f4'), - ('image_misc', '32f4'), - ('tem_type', 'V160'), - ('tem_high_tension', 'f4'), - ('tem_aberrations', '32f4'), - 
('tem_energy', '32f4'), - ('tem_mode', 'i4'), - ('tem_magnification', 'f4'), - ('tem_magnification_correction', 'f4'), - ('post_magnification', 'f4'), - ('tem_stage_type', 'i4'), - ('tem_stage_position', '5f4'), # x, y, z, a, b - ('tem_image_shift', '2f4'), - ('tem_beam_shift', '2f4'), - ('tem_beam_tilt', '2f4'), - ('tiling_parameters', '7f4'), # 0: tiling? 1:x 2:y 3: max x 4: max y - # 5: overlap x 6: overlap y - ('tem_illumination', '3f4'), # 0: spotsize 1: intensity - ('tem_shutter', 'i4'), - ('tem_misc', '32f4'), - ('camera_type', 'V160'), - ('physical_pixel_size_x', 'f4'), - ('physical_pixel_size_y', 'f4'), - ('offset_x', 'i4'), - ('offset_y', 'i4'), - ('binning_x', 'i4'), - ('binning_y', 'i4'), - ('exposure_time', 'f4'), - ('gain', 'f4'), - ('readout_rate', 'f4'), - ('flatfield_description', 'V160'), - ('sensitivity', 'f4'), - ('dose', 'f4'), - ('cam_misc', '32f4'), - ('fei_microscope_information', 'V1024'), - ('fei_specimen_information', 'V1024'), - ('magic', 'u4'), -] - -# Map TIFF tag code to attribute name, default value, type, count, validator -TIFF_TAGS = { - 254: ('new_subfile_type', 0, 4, 1, TIFF_SUBFILE_TYPES()), - 255: ('subfile_type', None, 3, 1, - {0: 'undefined', 1: 'image', 2: 'reduced_image', 3: 'page'}), - 256: ('image_width', None, 4, 1, None), - 257: ('image_length', None, 4, 1, None), - 258: ('bits_per_sample', 1, 3, None, None), - 259: ('compression', 1, 3, 1, TIFF_COMPESSIONS), - 262: ('photometric', None, 3, 1, TIFF_PHOTOMETRICS), - 266: ('fill_order', 1, 3, 1, {1: 'msb2lsb', 2: 'lsb2msb'}), - 269: ('document_name', None, 2, None, None), - 270: ('image_description', None, 2, None, None), - 271: ('make', None, 2, None, None), - 272: ('model', None, 2, None, None), - 273: ('strip_offsets', None, 4, None, None), - 274: ('orientation', 1, 3, 1, TIFF_ORIENTATIONS), - 277: ('samples_per_pixel', 1, 3, 1, None), - 278: ('rows_per_strip', 2**32-1, 4, 1, None), - 279: ('strip_byte_counts', None, 4, None, None), - 280: ('min_sample_value', None, 3, None, None), - 281: ('max_sample_value', None, 3, None, None), # 2**bits_per_sample - 282: ('x_resolution', None, 5, 1, None), - 283: ('y_resolution', None, 5, 1, None), - 284: ('planar_configuration', 1, 3, 1, {1: 'contig', 2: 'separate'}), - 285: ('page_name', None, 2, None, None), - 286: ('x_position', None, 5, 1, None), - 287: ('y_position', None, 5, 1, None), - 296: ('resolution_unit', 2, 4, 1, {1: None, 2: 'inch', 3: 'centimeter'}), - 297: ('page_number', None, 3, 2, None), - 305: ('software', None, 2, None, None), - 306: ('datetime', None, 2, None, None), - 315: ('artist', None, 2, None, None), - 316: ('host_computer', None, 2, None, None), - 317: ('predictor', 1, 3, 1, {1: None, 2: 'horizontal', 3: 'float'}), - 318: ('white_point', None, 5, 2, None), - 319: ('primary_chromaticities', None, 5, 6, None), - 320: ('color_map', None, 3, None, None), - 322: ('tile_width', None, 4, 1, None), - 323: ('tile_length', None, 4, 1, None), - 324: ('tile_offsets', None, 4, None, None), - 325: ('tile_byte_counts', None, 4, None, None), - 330: ('sub_ifds', None, 4, None, None), - 338: ('extra_samples', None, 3, None, - {0: 'unspecified', 1: 'assocalpha', 2: 'unassalpha'}), - 339: ('sample_format', 1, 3, None, TIFF_SAMPLE_FORMATS), - 340: ('smin_sample_value', None, None, None, None), - 341: ('smax_sample_value', None, None, None, None), - 346: ('indexed', 0, 3, 1, None), - 347: ('jpeg_tables', None, 7, None, None), - 530: ('ycbcr_subsampling', (1, 1), 3, 2, None), - 531: ('ycbcr_positioning', (1, 1), 3, 1, None), - 532: 
('reference_black_white', None, 5, 1, None), - 32995: ('sgi_matteing', None, None, 1, None), # use extra_samples - 32996: ('sgi_datatype', None, None, None, None), # use sample_format - 32997: ('image_depth', 1, 4, 1, None), - 32998: ('tile_depth', None, 4, 1, None), - 33432: ('copyright', None, 1, None, None), - 33445: ('md_file_tag', None, 4, 1, None), - 33446: ('md_scale_pixel', None, 5, 1, None), - 33447: ('md_color_table', None, 3, None, None), - 33448: ('md_lab_name', None, 2, None, None), - 33449: ('md_sample_info', None, 2, None, None), - 33450: ('md_prep_date', None, 2, None, None), - 33451: ('md_prep_time', None, 2, None, None), - 33452: ('md_file_units', None, 2, None, None), - 33550: ('model_pixel_scale', None, 12, 3, None), - 33922: ('model_tie_point', None, 12, None, None), - 34665: ('exif_ifd', None, None, 1, None), - 34735: ('geo_key_directory', None, 3, None, None), - 34736: ('geo_double_params', None, 12, None, None), - 34737: ('geo_ascii_params', None, 2, None, None), - 34853: ('gps_ifd', None, None, 1, None), - 37510: ('user_comment', None, None, None, None), - 42112: ('gdal_metadata', None, 2, None, None), - 42113: ('gdal_nodata', None, 2, None, None), - 50289: ('mc_xy_position', None, 12, 2, None), - 50290: ('mc_z_position', None, 12, 1, None), - 50291: ('mc_xy_calibration', None, 12, 3, None), - 50292: ('mc_lens_lem_na_n', None, 12, 3, None), - 50293: ('mc_channel_name', None, 1, None, None), - 50294: ('mc_ex_wavelength', None, 12, 1, None), - 50295: ('mc_time_stamp', None, 12, 1, None), - 50838: ('imagej_byte_counts', None, None, None, None), - 51023: ('fibics_xml', None, 2, None, None), - 65200: ('flex_xml', None, 2, None, None), - # code: (attribute name, default value, type, count, validator) -} - -# Map custom TIFF tag codes to attribute names and import functions -CUSTOM_TAGS = { - 700: ('xmp', read_bytes), - 34377: ('photoshop', read_numpy), - 33723: ('iptc', read_bytes), - 34675: ('icc_profile', read_bytes), - 33628: ('uic1tag', read_uic1tag), # Universal Imaging Corporation STK - 33629: ('uic2tag', read_uic2tag), - 33630: ('uic3tag', read_uic3tag), - 33631: ('uic4tag', read_uic4tag), - 34118: ('sem_metadata', read_sem_metadata), # Zeiss SEM - 34361: ('mm_header', read_mm_header), # Olympus FluoView - 34362: ('mm_stamp', read_mm_stamp), - 34386: ('mm_user_block', read_bytes), - 34412: ('cz_lsm_info', read_cz_lsm_info), # Carl Zeiss LSM - 34680: ('sfeg_metadata', read_fei_metadata), # S-FEG - 34682: ('helios_metadata', read_fei_metadata), # Helios NanoLab - 37706: ('tvips_metadata', read_tvips_header), # TVIPS EMMENU - 43314: ('nih_image_header', read_nih_image_header), - # 40001: ('mc_ipwinscal', read_bytes), - 40100: ('mc_id_old', read_bytes), - 50288: ('mc_id', read_bytes), - 50296: ('mc_frame_properties', read_bytes), - 50839: ('imagej_metadata', read_bytes), - 51123: ('micromanager_metadata', read_json), -} - -# Max line length of printed output -PRINT_LINE_LEN = 79 +def lsm2bin(lsmfile, binfile=None, tile=(256, 256), verbose=True): + """Convert [MP]TZCYX LSM file to series of BIN files. + + One BIN file containing 'ZCYX' data is created for each position, time, + and tile. The position, time, and tile indices are encoded at the end + of the filenames. + + """ + verbose = print_ if verbose else nullfunc + + if binfile is None: + binfile = lsmfile + elif binfile.lower() == 'none': + binfile = None + if binfile: + binfile += "_(z%ic%iy%ix%i)_m%%ip%%it%%03iy%%ix%%i.bin" + + verbose("\nOpening LSM file... 
", end='', flush=True) + start_time = time.time() + + with TiffFile(lsmfile) as lsm: + if not lsm.is_lsm: + verbose("\n", lsm, flush=True) + raise ValueError("not a LSM file") + series = lsm.series[0] # first series contains the image data + shape = series.shape + axes = series.axes + dtype = series.dtype + size = product(shape) * dtype.itemsize + + verbose("%.3f s" % (time.time() - start_time)) + # verbose(lsm, flush=True) + verbose("Image\n axes: %s\n shape: %s\n dtype: %s\n size: %s" + % (axes, shape, dtype, format_size(size)), flush=True) + if not series.axes.endswith('TZCYX'): + raise ValueError("not a *TZCYX LSM file") + + verbose("Copying image from LSM to BIN files", end='', flush=True) + start_time = time.time() + tiles = shape[-2] // tile[-2], shape[-1] // tile[-1] + if binfile: + binfile = binfile % (shape[-4], shape[-3], tile[0], tile[1]) + shape = (1,) * (7-len(shape)) + shape + # cache for ZCYX stacks and output files + data = numpy.empty(shape[3:], dtype=dtype) + out = numpy.empty((shape[-4], shape[-3], tile[0], tile[1]), + dtype=dtype) + # iterate over Tiff pages containing data + pages = iter(series.pages) + for m in range(shape[0]): # mosaic axis + for p in range(shape[1]): # position axis + for t in range(shape[2]): # time axis + for z in range(shape[3]): # z slices + data[z] = next(pages).asarray() + for y in range(tiles[0]): # tile y + for x in range(tiles[1]): # tile x + out[:] = data[..., + y*tile[0]:(y+1)*tile[0], + x*tile[1]:(x+1)*tile[1]] + if binfile: + out.tofile(binfile % (m, p, t, y, x)) + verbose('.', end='', flush=True) + verbose(" %.3f s" % (time.time() - start_time)) def imshow(data, title=None, vmin=0, vmax=None, cmap=None, - bitspersample=None, photometric='rgb', interpolation=None, - dpi=96, figure=None, subplot=111, maxdim=8192, **kwargs): + bitspersample=None, photometric='RGB', + interpolation=None, dpi=96, figure=None, subplot=111, maxdim=32768, + **kwargs): """Plot n-dimensional images using matplotlib.pyplot. Return figure, subplot and plot axis. - Requires pyplot already imported `from matplotlib import pyplot`. + Requires pyplot already imported C{from matplotlib import pyplot}. Parameters ---------- bitspersample : int or None Number of bits per channel in integer RGB images. - photometric : {'miniswhite', 'minisblack', 'rgb', or 'palette'} + photometric : {'MINISWHITE', 'MINISBLACK', 'RGB', or 'PALETTE'} The color space of the image data. title : str Window and subplot title. @@ -6033,13 +8619,14 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, Arguments for matplotlib.pyplot.imshow. 
""" - #if photometric not in ('miniswhite', 'minisblack', 'rgb', 'palette'): - # raise ValueError("Can not handle %s photometrics" % photometric) - # TODO: handle photometric == 'separated' (CMYK) - isrgb = photometric in ('rgb', 'palette') + isrgb = photometric in ('RGB',) # 'PALETTE' + if isrgb and not (data.shape[-1] in (3, 4) or ( + data.ndim > 2 and data.shape[-3] in (3, 4))): + isrgb = False + photometric = 'MINISWHITE' data = data.squeeze() - if photometric in ('miniswhite', 'minisblack'): + if photometric in ('MINISWHITE', 'MINISBLACK', None): data = reshape_nd(data, 2) else: data = reshape_nd(data, 3) @@ -6067,10 +8654,10 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, else: data = data[..., :maxdim, :maxdim] - if photometric == 'palette' and isrgb: + if photometric == 'PALETTE' and isrgb: datamax = data.max() if datamax > 255: - data >>= 8 # possible precision loss + data = data >> 8 # possible precision loss data = data.astype('B') elif data.dtype.kind in 'ui': if not (isrgb and data.dtype.itemsize <= 1) or bitspersample is None: @@ -6078,27 +8665,29 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, bitspersample = int(math.ceil(math.log(data.max(), 2))) except Exception: bitspersample = data.dtype.itemsize * 8 - elif not isinstance(bitspersample, int): + elif not isinstance(bitspersample, inttypes): # bitspersample can be tuple, e.g. (5, 6, 5) bitspersample = data.dtype.itemsize * 8 datamax = 2**bitspersample if isrgb: if bitspersample < 8: - data <<= 8 - bitspersample + data = data << (8 - bitspersample) elif bitspersample > 8: - data >>= bitspersample - 8 # precision loss + data = data >> (bitspersample - 8) # precision loss data = data.astype('B') elif data.dtype.kind == 'f': datamax = data.max() if isrgb and datamax > 1.0: if data.dtype.char == 'd': data = data.astype('f') - data /= datamax + data /= datamax + else: + data = data / datamax elif data.dtype.kind == 'b': datamax = 1 elif data.dtype.kind == 'c': - # TODO: handle complex types - raise NotImplementedError("complex type") + data = numpy.absolute(data) + datamax = data.max() if not isrgb: if vmax is None: @@ -6127,7 +8716,8 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, figure.canvas.manager.window.title(title) except Exception: pass - pyplot.subplots_adjust(bottom=0.03*(dims+2), top=0.9, + l = len(title.splitlines()) if title else 1 + pyplot.subplots_adjust(bottom=0.03*(dims+2), top=0.98-l*0.03, left=0.1, right=0.95, hspace=0.05, wspace=0.0) subplot = pyplot.subplot(subplot) @@ -6140,10 +8730,10 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, if cmap is None: if data.dtype.kind in 'ubf' or vmin == 0: - cmap = 'cubehelix' + cmap = 'viridis' else: cmap = 'coolwarm' - if photometric == 'miniswhite': + if photometric == 'MINISWHITE': cmap += '_r' image = pyplot.imshow(data[(0,) * dims].squeeze(), vmin=vmin, vmax=vmax, @@ -6158,18 +8748,22 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, y = int(y + 0.5) try: if dims: - return "%s @ %s [%4i, %4i]" % (cur_ax_dat[1][y, x], - current, x, y) - else: - return "%s @ [%4i, %4i]" % (data[y, x], x, y) + return "%s @ %s [%4i, %4i]" % ( + curaxdat[1][y, x], current, y, x) + return "%s @ [%4i, %4i]" % (data[y, x], y, x) except IndexError: - return "" + return '' + + def none(event): + return '' - pyplot.gca().format_coord = format_coord + subplot.format_coord = format_coord + image.get_cursor_data = none + image.format_cursor_data = none if dims: current = list((0,) * dims) - cur_ax_dat = [0, 
data[tuple(current)].squeeze()] + curaxdat = [0, data[tuple(current)].squeeze()] sliders = [pyplot.Slider( pyplot.axes([0.125, 0.03*(axis+1), 0.725, 0.025]), 'Dimension %i' % axis, 0, data.shape[axis]-1, 0, facecolor='0.5', @@ -6179,8 +8773,8 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, def set_image(current, sliders=sliders, data=data): # change image and redraw canvas - cur_ax_dat[1] = data[tuple(current)].squeeze() - image.set_data(cur_ax_dat[1]) + curaxdat[1] = data[tuple(current)].squeeze() + image.set_data(curaxdat[1]) for ctrl, index in zip(sliders, current): ctrl.eventson = False ctrl.set_val(index) @@ -6190,7 +8784,7 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, def on_changed(index, axis, data=data, current=current): # callback function for slider change event index = int(round(index)) - cur_ax_dat[0] = axis + curaxdat[0] = axis if index == current[axis]: return if index >= data.shape[axis]: @@ -6203,7 +8797,7 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, def on_keypressed(event, data=data, current=current): # callback function for key press event key = event.key - axis = cur_ax_dat[0] + axis = curaxdat[0] if str(key) in '0123456789': on_changed(key, axis) elif key == 'right': @@ -6211,9 +8805,9 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, elif key == 'left': on_changed(current[axis] - 1, axis) elif key == 'up': - cur_ax_dat[0] = 0 if axis == len(data.shape)-1 else axis + 1 + curaxdat[0] = 0 if axis == len(data.shape)-1 else axis + 1 elif key == 'down': - cur_ax_dat[0] = len(data.shape)-1 if axis == 0 else axis - 1 + curaxdat[0] = len(data.shape)-1 if axis == 0 else axis - 1 elif key == 'end': on_changed(data.shape[axis] - 1, axis) elif key == 'home': @@ -6249,10 +8843,14 @@ def askopenfilename(**kwargs): def main(argv=None): """Command line usage main function.""" + if float(sys.version[0:3]) < 2.7: + print("This script requires Python version 2.7 or better.") + print("This is Python version %s" % sys.version) + return 0 if argv is None: argv = sys.argv - import optparse + import optparse # TODO: use argparse parser = optparse.OptionParser( usage="usage: %prog [options] path", @@ -6265,35 +8863,33 @@ def main(argv=None): help="display series of pages of same shape") opt('--nomultifile', dest='nomultifile', action='store_true', default=False, help="do not read OME series from multiple files") - opt('--noplot', dest='noplot', action='store_true', default=False, - help="do not display images") + opt('--noplots', dest='noplots', type='int', default=8, + help="maximum number of plots") opt('--interpol', dest='interpol', metavar='INTERPOL', default='bilinear', help="image interpolation method") opt('--dpi', dest='dpi', type='int', default=96, - help="set plot resolution") + help="plot resolution") opt('--vmin', dest='vmin', type='int', default=None, - help="set minimum value for colormapping") + help="minimum value for colormapping") opt('--vmax', dest='vmax', type='int', default=None, - help="set maximum value for colormapping") + help="maximum value for colormapping") opt('--debug', dest='debug', action='store_true', default=False, help="raise exception on failures") opt('--doctest', dest='doctest', action='store_true', default=False, help="runs the docstring examples") - opt('-v', '--verbose', dest='verbose', action='store_true', default=True) - opt('-q', '--quiet', dest='verbose', action='store_false') + opt('-v', '--detail', dest='detail', type='int', default=2) + opt('-q', '--quiet', dest='quiet', 
action='store_true') settings, path = parser.parse_args() path = ' '.join(path) if settings.doctest: import doctest - doctest.testmod() + doctest.testmod(optionflags=doctest.ELLIPSIS) return 0 if not path: - path = askopenfilename( - title="Select a TIFF file", - filetypes=[("TIF files", "*.tif"), ("LSM files", "*.lsm"), - ("STK files", "*.stk"), ("allfiles", "*")]) + path = askopenfilename(title="Select a TIFF file", + filetypes=TIFF.FILEOPEN_FILTER) if not path: parser.error("No file specified") @@ -6303,10 +8899,10 @@ def main(argv=None): print('no files match the pattern') return 0 # TODO: handle image sequences - #if len(path) == 1: path = path[0] - print("Reading file structure...", end=' ') + if not settings.quiet: + print("\nReading file structure...", end=' ') start = time.time() try: tif = TiffFile(path, multifile=not settings.nomultifile) @@ -6316,14 +8912,16 @@ def main(argv=None): else: print("\n", e) sys.exit(0) - print("%.3f ms" % ((time.time()-start) * 1e3)) + if not settings.quiet: + print("%.3f ms" % ((time.time()-start) * 1e3)) if tif.is_ome: settings.norgb = True images = [] - if not settings.noplot: - print("Reading image data... ", end=' ') + if settings.noplots > 0: + if not settings.quiet: + print("Reading image data... ", end=' ') def notnone(x): return next(i for i in x if i is not None) @@ -6332,36 +8930,40 @@ def main(argv=None): try: if settings.page >= 0: images = [(tif.asarray(key=settings.page), - tif[settings.page])] + tif[settings.page], None)] elif settings.series >= 0: images = [(tif.asarray(series=settings.series), - notnone(tif.series[settings.series].pages))] + notnone(tif.series[settings.series].pages), + tif.series[settings.series])] else: images = [] - for i, s in enumerate(tif.series): + for i, s in enumerate(tif.series[:settings.noplots]): try: - images.append( - (tif.asarray(series=i), notnone(s.pages))) + images.append((tif.asarray(series=i), + notnone(s.pages), + tif.series[i])) except ValueError as e: - images.append((None, notnone(s.pages))) + images.append((None, notnone(s.pages), None)) if settings.debug: raise else: - print("\n* series %i failed: %s... " % (i, e), + print("\nSeries %i failed: %s... 
" % (i, e), end='') - print("%.3f ms" % ((time.time()-start) * 1e3)) + if not settings.quiet: + print("%.3f ms" % ((time.time()-start) * 1e3)) except Exception as e: if settings.debug: raise else: print(e) + if not settings.quiet: + print() + print(TiffFile.__str__(tif, detail=int(settings.detail))) + print() tif.close() - print() - print(tif.info()) - print() - if images and not settings.noplot: + if images and settings.noplots > 0: try: import matplotlib matplotlib.use('TkAgg') @@ -6369,54 +8971,88 @@ def main(argv=None): except ImportError as e: warnings.warn("failed to import matplotlib.\n%s" % e) else: - for img, page in images: + for img, page, series in images: if img is None: continue vmin, vmax = settings.vmin, settings.vmax - if 'gdal_nodata' in page.tags: + if 'GDAL_NODATA' in page.tags: try: - vmin = numpy.min(img[img > float(page.gdal_nodata)]) + vmin = numpy.min( + img[img > float(page.tags['GDAL_NODATA'].value)]) except ValueError: pass - if page.is_stk: + if tif.is_stk: try: - vmin = page.uic_tags['min_scale'] - vmax = page.uic_tags['max_scale'] + vmin = tif.stk_metadata['MinScale'] + vmax = tif.stk_metadata['MaxScale'] except KeyError: pass else: if vmax <= vmin: vmin, vmax = settings.vmin, settings.vmax - title = "%s\n %s" % (str(tif), str(page)) + if series: + title = "%s\n%s\n%s" % (str(tif), str(page), str(series)) + else: + title = "%s\n %s" % (str(tif), str(page)) + photometric = 'MINISBLACK' + if page.photometric not in (3,): + photometric = TIFF.PHOTOMETRIC(page.photometric).name imshow(img, title=title, vmin=vmin, vmax=vmax, - bitspersample=page.bits_per_sample, - photometric=page.photometric, + bitspersample=page.bitspersample, + photometric=photometric, interpolation=settings.interpol, dpi=settings.dpi) pyplot.show() -TIFFfile = TiffFile # backwards compatibility - -if sys.version_info[0] > 2: - basestring = str, bytes - unicode = str - - def bytes2str(b): - return str(b, 'cp1252') +if sys.version_info[0] == 2: + inttypes = int, long # noqa - def str2bytes(s, encoding="latin-1"): - return s.encode(encoding) + def print_(*args, **kwargs): + """Print function with flush support.""" + flush = kwargs.pop('flush', False) + print(*args, **kwargs) + if flush: + sys.stdout.flush() -else: - bytes2str = str + def bytes2str(b, encoding=None, errors=None): + """Return string from bytes.""" + return b - def str2bytes(s): + def str2bytes(s, encoding=None): + """Return bytes from string.""" return s + def byte2int(b): + """Return value of byte as int.""" + return ord(b) + class FileNotFoundError(IOError): pass + TiffFrame = TiffPage # noqa +else: + inttypes = int + basestring = str, bytes + unicode = str + print_ = print + + def bytes2str(b, encoding=None, errors='strict'): + """Return unicode string from encoded bytes.""" + if encoding is not None: + return b.decode(encoding, errors) + try: + return b.decode('utf-8', errors) + except UnicodeDecodeError: + return b.decode('cp1252', errors) + + def str2bytes(s, encoding='cp1252'): + """Return bytes from unicode string.""" + return s.encode(encoding) + + def byte2int(b): + """Return value of byte as int.""" + return b if __name__ == "__main__": sys.exit(main()) diff --git a/imageio/plugins/tifffile.py b/imageio/plugins/tifffile.py index a23215d..1857b3e 100644 --- a/imageio/plugins/tifffile.py +++ b/imageio/plugins/tifffile.py @@ -172,19 +172,27 @@ class TiffFormat(Format): def _open(self, **kwargs): if not _tifffile: load_lib() - self._tf = _tifffile.TiffFile(self.request.get_file(), **kwargs) + # Allow loading from http; 
tiffile uses seek, so download first + if self.request.filename.startswith(('http://', 'https://')): + self._f = f = open(self.request.get_local_filename(), 'rb') + else: + self._f = None + f = self.request.get_file() + self._tf = _tifffile.TiffFile(f, **kwargs) # metadata is the same for all images self._meta = {} def _close(self): self._tf.close() + if self._f is not None: + self._f.close() def _get_length(self): if self.request.mode[1] in 'vV': return 1 # or can there be pages in pages or something? else: - return len(self._tf) + return len(self._tf.pages) def _get_data(self, index): if self.request.mode[1] in 'vV': @@ -196,16 +204,16 @@ class TiffFormat(Format): meta = self._meta else: # Read as 2D image - if index < 0 or index >= len(self._tf): + if index < 0 or index >= self._get_length(): raise IndexError( 'Index out of range while reading from tiff file') - im = self._tf[index].asarray() + im = self._tf.pages[index].asarray() meta = self._meta or self._get_meta_data(index) # Return array and empty meta data return im, meta def _get_meta_data(self, index): - page = self._tf[index or 0] + page = self._tf.pages[index or 0] for key in READ_METADATA_KEYS: try: self._meta[key] = getattr(page, key)
update tifffile [Version 2017.09.29](https://www.lfd.uci.edu/%7Egohlke/code/tifffile.py.html) brought significant updates to `tifffile.py`, e.g. consolidated access to metadata. At the moment, imageio is pinned at version 2017.01.12; it would be great if this could be updated.
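For orientation, here is a minimal sketch of the API shift the patch above adapts to — per the diff, `len(tif)`/`tif[index]` become `len(tif.pages)`/`tif.pages[index]`, and format-specific metadata is exposed directly on the `TiffFile` object. The file name is a placeholder:

```python
import tifffile  # assuming tifffile >= 2017.09.29

with tifffile.TiffFile('example.tif') as tif:  # placeholder path
    n_pages = len(tif.pages)         # was: len(tif)
    first = tif.pages[0].asarray()   # was: tif[0].asarray()
    # Consolidated metadata access, e.g. for MetaMorph STK files
    # (used by the patch above when picking vmin/vmax):
    if tif.is_stk:
        scale = tif.stk_metadata['MinScale'], tif.stk_metadata['MaxScale']
```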
imageio/imageio
diff --git a/tests/test_core.py b/tests/test_core.py index 31d4c7b..707569f 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -19,18 +19,26 @@ from imageio import core from imageio.core import Request from imageio.core import get_remote_file, IS_PYPY +if sys.version_info < (3,): + FileNotFoundError = OSError + +try: + from pathlib import Path +except ImportError: + Path = None + test_dir = get_test_dir() def test_fetching(): """ Test fetching of files """ - + need_internet() - + # Clear image files if os.path.isdir(test_dir): - shutil.rmtree(test_dir) - + shutil.rmtree(test_dir) + # This should download the file (force download, because local cache) fname1 = get_remote_file('images/chelsea.png', test_dir, True) mtime1 = os.path.getmtime(fname1) @@ -46,7 +54,7 @@ def test_fetching(): # This should not fname5 = get_remote_file('images/chelsea.png', test_dir, '2014-01-01') mtime5 = os.path.getmtime(fname4) - # + # assert os.path.isfile(fname1) assert fname1 == fname2 assert fname1 == fname3 @@ -56,9 +64,9 @@ def test_fetching(): # weird, but these often fail on my osx VM assert mtime1 == mtime2 assert mtime1 < mtime3 - assert mtime3 < mtime4 + assert mtime3 < mtime4 assert mtime4 == mtime5 - + # Test failures _urlopen = core.fetching.urlopen _chunk_read = core.fetching._chunk_read @@ -87,37 +95,37 @@ def test_fetching(): def test_findlib1(): - + # Lib name would need to be "libc.so.5", or "libc.so.6", or ... # Meh, just skip skip('always skip, is tested implicitly anyway') - + if not sys.platform.startswith('linux'): skip('test on linux only') - + # Candidate libs for common lib (note, this runs only on linux) dirs, paths = core.findlib.generate_candidate_libs(['libc']) assert paths def test_findlib2(): - + if not sys.platform.startswith('linux'): skip('test on linux only') - + need_internet() # need our own version of FI to test this bit - + # Candidate libs for common freeimage fi_dir = os.path.join(core.appdata_dir('imageio'), 'freeimage') if not os.path.isdir(fi_dir): os.mkdir(fi_dir) - dirs, paths = core.findlib.generate_candidate_libs(['libfreeimage'], + dirs, paths = core.findlib.generate_candidate_libs(['libfreeimage'], [fi_dir]) #assert fi_dir in dirs -> Cannot test: lib may not exist assert paths - + open(os.path.join(fi_dir, 'notalib.test.so'), 'wb') - + # Loading libs gllib = ctypes.util.find_library('GL') core.load_lib([gllib], []) @@ -131,7 +139,7 @@ def test_findlib2(): def test_request(): """ Test request object """ - + # Check uri-type, this is not a public property, so we test the private R = Request('http://example.com', 'ri') assert R._uri_type == core.request.URI_HTTP @@ -161,7 +169,7 @@ def test_request(): # zip file R = Request('~/bar.zip/spam.png', 'wi') assert R._uri_type == core.request.URI_ZIPPED - + # Test failing inits raises(ValueError, Request, '/some/file', None) # mode must be str raises(ValueError, Request, '/some/file', 3) # mode must be str @@ -173,11 +181,17 @@ def test_request(): # raises(IOError, Request, ['invalid', 'uri'] * 10, 'ri') # invalid uri raises(IOError, Request, 4, 'ri') # invalid uri - raises(IOError, Request, '/does/not/exist', 'ri') # reading nonexistent - raises(IOError, Request, '/does/not/exist.zip/spam.png', 'ri') # dito + # nonexistent reads + raises(FileNotFoundError, Request, '/does/not/exist', 'ri') + raises(FileNotFoundError, Request, '/does/not/exist.zip/spam.png', 'ri') + if Path is not None: + raises(FileNotFoundError, Request, Path('/does/not/exist'), 'ri') raises(IOError, Request, 'http://example.com', 'wi') # no 
writing here - raises(IOError, Request, '/does/not/exist.png', 'wi') # write dir nonexist - + # write dir nonexist + raises(FileNotFoundError, Request, '/does/not/exist.png', 'wi') + if Path is not None: + raises(FileNotFoundError, Request, Path('/does/not/exist.png'), 'wi') + # Test auto-download R = Request('imageio:chelsea.png', 'ri') assert R.filename == get_remote_file('images/chelsea.png') @@ -188,7 +202,7 @@ def test_request(): def test_request_read_sources(): - + # Make an image available in many ways fname = 'images/chelsea.png' filename = get_remote_file(fname, test_dir) @@ -198,13 +212,13 @@ def test_request_read_sources(): z = ZipFile(os.path.join(test_dir, 'test.zip'), 'w') z.writestr(fname, bytes) z.close() - + has_inet = os.getenv('IMAGEIO_NO_INTERNET', '') not in ('1', 'yes', 'true') - + # Read that image from these different sources. Read data from file # and from local file (the two main plugin-facing functions) for X in range(2): - + # Define uris to test. Define inside loop, since we need fresh files uris = [filename, os.path.join(test_dir, 'test.zip', fname), @@ -212,7 +226,7 @@ def test_request_read_sources(): open(filename, 'rb')] if has_inet: uris.append(burl + fname) - + for uri in uris: R = Request(uri, 'ri') first_bytes = R.firstbytes @@ -229,7 +243,7 @@ def test_request_read_sources(): def test_request_save_sources(): - + # Prepare desinations fname = 'images/chelsea.png' filename = get_remote_file(fname, test_dir) @@ -239,7 +253,7 @@ def test_request_save_sources(): filename2 = os.path.join(test_dir, fname2) zipfilename2 = os.path.join(test_dir, 'test.zip') file2 = BytesIO() - + # Write an image into many different destinations # Do once via file and ones via local filename for i in range(2): @@ -248,7 +262,7 @@ def test_request_save_sources(): if os.path.isfile(xx): os.remove(xx) # Write to three destinations - for uri in (filename2, + for uri in (filename2, os.path.join(zipfilename2, fname2), file2, imageio.RETURN_BYTES # This one last to fill `res` @@ -267,21 +281,21 @@ def test_request_save_sources(): def test_request_file_no_seek(): - + class File: - + def read(self, n): return b'\x00' * n - + def seek(self, i): raise IOError('Not supported') - + def tell(self): raise Exception('Not supported') - + def close(self): pass - + R = Request(File(), 'ri') with raises(IOError): R.firstbytes @@ -289,7 +303,7 @@ def test_request_file_no_seek(): def test_util_imagelist(): meta = {'foo': 3, 'bar': {'spam': 1, 'eggs': 2}} - + # Image list L = core.util.ImageList(meta) assert isinstance(L, list) @@ -300,7 +314,7 @@ def test_util_imagelist(): def test_util_image(): meta = {'foo': 3, 'bar': {'spam': 1, 'eggs': 2}} - # Image + # Image a = np.zeros((10, 10)) im = core.util.Image(a, meta) isinstance(im, np.ndarray) @@ -377,7 +391,7 @@ def test_util_asarray(): def test_util_progres_bar(sleep=0): """ Test the progress bar """ # This test can also be run on itself to *see* the result - + # Progress bar for Progress in (core.StdoutProgressIndicator, core.BaseProgressIndicator): B = Progress('test') @@ -391,7 +405,7 @@ def test_util_progres_bar(sleep=0): assert B._progress == i B.increase_progress(1) assert B._progress == i + 1 - B.finish() + B.finish() assert B.status() == 2 # Without max B.start('Run without max int') @@ -481,32 +495,32 @@ def test_util_image_as_uint(): def test_util_has_has_module(): - + assert not core.has_module('this_module_does_not_exist') assert core.has_module('sys') def test_functions(): """ Test the user-facing API functions """ - + # Test help(), it 
prints stuff, so we just check whether that goes ok imageio.help() # should print overview imageio.help('PNG') # should print about PNG - + fname1 = get_remote_file('images/chelsea.png', test_dir) fname2 = fname1[:-3] + 'jpg' fname3 = fname1[:-3] + 'notavalidext' open(fname3, 'wb') - + # Test read() R1 = imageio.read(fname1) R2 = imageio.read(fname1, 'png') assert R1.format is R2.format # Fail raises(ValueError, imageio.read, fname3) # existing but not readable - raises(IOError, imageio.read, 'notexisting.barf') + raises(FileNotFoundError, imageio.read, 'notexisting.barf') raises(IndexError, imageio.read, fname1, 'notexistingformat') - + # Test save() W1 = imageio.save(fname2) W2 = imageio.save(fname2, 'JPG') @@ -514,14 +528,15 @@ def test_functions(): W2.close() assert W1.format is W2.format # Fail - raises(IOError, imageio.save, '~/dirdoesnotexist/wtf.notexistingfile') - + raises(FileNotFoundError, imageio.save, + '~/dirdoesnotexist/wtf.notexistingfile') + # Test imread() im1 = imageio.imread(fname1) im2 = imageio.imread(fname1, 'png') assert im1.shape[2] == 3 assert np.all(im1 == im2) - + # Test imsave() if os.path.isfile(fname2): os.remove(fname2) @@ -529,7 +544,7 @@ def test_functions(): imageio.imsave(fname2, im1[:, :, 0]) imageio.imsave(fname2, im1) assert os.path.isfile(fname2) - + # Test mimread() fname3 = get_remote_file('images/newtonscradle.gif', test_dir) ims = imageio.mimread(fname3) @@ -540,10 +555,10 @@ def test_functions(): # Test protection with raises(RuntimeError): imageio.mimread('imageio:chelsea.png', 'dummy', length=np.inf) - + if IS_PYPY: return # no support for npz format :( - + # Test mimsave() fname5 = fname3[:-4] + '2.npz' if os.path.isfile(fname5): @@ -552,7 +567,7 @@ def test_functions(): imageio.mimsave(fname5, [im[:, :, 0] for im in ims]) imageio.mimsave(fname5, ims) assert os.path.isfile(fname5) - + # Test volread() fname4 = get_remote_file('images/stent.npz', test_dir) vol = imageio.volread(fname4) @@ -560,7 +575,7 @@ def test_functions(): assert vol.shape[0] == 256 assert vol.shape[1] == 128 assert vol.shape[2] == 128 - + # Test volsave() volc = np.zeros((10, 10, 10, 3), np.uint8) # color volume fname6 = os.path.join(test_dir, 'images', 'stent2.npz') @@ -570,13 +585,13 @@ def test_functions(): imageio.volsave(fname6, volc) imageio.volsave(fname6, vol) assert os.path.isfile(fname6) - + # Test mvolread() vols = imageio.mvolread(fname4) assert isinstance(vols, list) assert len(vols) == 1 assert vols[0].shape == vol.shape - + # Test mvolsave() if os.path.isfile(fname6): os.remove(fname6) @@ -584,7 +599,7 @@ def test_functions(): imageio.mvolsave(fname6, [volc, volc]) imageio.mvolsave(fname6, vols) assert os.path.isfile(fname6) - + # Fail for save functions raises(ValueError, imageio.imsave, fname2, np.zeros((100, 100, 5))) raises(ValueError, imageio.imsave, fname2, 42) @@ -598,7 +613,7 @@ def test_functions(): def test_example_plugin(): """ Test the example plugin """ - + fname = os.path.join(test_dir, 'out.png') r = Request('imageio:chelsea.png', 'r?') R = imageio.formats['dummy'].get_reader(r) diff --git a/tests/test_tifffile.py b/tests/test_tifffile.py index ea7ad7d..a2900df 100644 --- a/tests/test_tifffile.py +++ b/tests/test_tifffile.py @@ -65,7 +65,7 @@ def test_tifffile_reading_writing(): # Mixed W = imageio.save(filename1) - W.set_meta_data({'planarconfig': 'planar'}) + W.set_meta_data({'planarconfig': 'SEPARATE'}) # was "planar" assert W.format.name == 'TIFF' W.append_data(im2) W.append_data(im2) @@ -75,8 +75,8 @@ def test_tifffile_reading_writing(): 
assert R.format.name == 'TIFF' ims = list(R) # == [im for im in R] assert (ims[0] == im2).all() - meta = R.get_meta_data() - assert meta['orientation'] == 'top_left' + # meta = R.get_meta_data() + # assert meta['orientation'] == 'top_left' # not there in later version # Fail raises(IndexError, R.get_data, -1) raises(IndexError, R.get_data, 3)
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_hyperlinks", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": -1, "issue_text_score": 1, "test_score": -1 }, "num_modified_files": 5 }
2.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "pytest-asyncio" ], "pre_install": [ "apt-get update", "apt-get install -y gcc libfreeimage3" ], "python": "3.9", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
coverage==7.8.0 exceptiongroup==1.2.2 execnet==2.1.1 -e git+https://github.com/imageio/imageio.git@b122f914d2b8f6971f958d925b79261fc8df51f8#egg=imageio iniconfig==2.1.0 numpy==2.0.2 packaging==24.2 pillow==11.1.0 pluggy==1.5.0 pytest==8.3.5 pytest-asyncio==0.26.0 pytest-cov==6.0.0 pytest-mock==3.14.0 pytest-xdist==3.6.1 tomli==2.2.1 typing_extensions==4.13.0
name: imageio channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.4.4=h6a678d5_1 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=3.0.16=h5eee18b_0 - pip=25.0=py39h06a4308_0 - python=3.9.21=he870216_1 - readline=8.2=h5eee18b_0 - setuptools=75.8.0=py39h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - tzdata=2025a=h04d1e81_0 - wheel=0.45.1=py39h06a4308_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - coverage==7.8.0 - exceptiongroup==1.2.2 - execnet==2.1.1 - iniconfig==2.1.0 - numpy==2.0.2 - packaging==24.2 - pillow==11.1.0 - pluggy==1.5.0 - pytest==8.3.5 - pytest-asyncio==0.26.0 - pytest-cov==6.0.0 - pytest-mock==3.14.0 - pytest-xdist==3.6.1 - tomli==2.2.1 - typing-extensions==4.13.0 prefix: /opt/conda/envs/imageio
[ "tests/test_core.py::test_request", "tests/test_tifffile.py::test_tifffile_reading_writing" ]
[ "tests/test_core.py::test_findlib2", "tests/test_core.py::test_util_image", "tests/test_core.py::test_functions" ]
[ "tests/test_core.py::test_fetching", "tests/test_core.py::test_request_read_sources", "tests/test_core.py::test_request_save_sources", "tests/test_core.py::test_request_file_no_seek", "tests/test_core.py::test_util_imagelist", "tests/test_core.py::test_util_dict", "tests/test_core.py::test_util_get_platform", "tests/test_core.py::test_util_asarray", "tests/test_core.py::test_util_progres_bar", "tests/test_core.py::test_util_image_as_uint", "tests/test_core.py::test_util_has_has_module", "tests/test_core.py::test_example_plugin", "tests/test_tifffile.py::test_tifffile_format" ]
[]
BSD 2-Clause "Simplified" License
2,178
[ "imageio/plugins/tifffile.py", "imageio/core/functions.py", ".travis.yml", "imageio/core/request.py", "imageio/plugins/_tifffile.py" ]
[ "imageio/plugins/tifffile.py", "imageio/core/functions.py", ".travis.yml", "imageio/core/request.py", "imageio/plugins/_tifffile.py" ]
briancurtin__deprecation-14
3ad94c2de7c313d0ca20a8ce4c9247a0faaa8018
2018-02-19 16:36:14
3ad94c2de7c313d0ca20a8ce4c9247a0faaa8018
codecov-io: # [Codecov](https://codecov.io/gh/briancurtin/deprecation/pull/14?src=pr&el=h1) Report > Merging [#14](https://codecov.io/gh/briancurtin/deprecation/pull/14?src=pr&el=desc) into [master](https://codecov.io/gh/briancurtin/deprecation/commit/3ad94c2de7c313d0ca20a8ce4c9247a0faaa8018?src=pr&el=desc) will **decrease** coverage by `7.69%`. > The diff coverage is `66.66%`. [![Impacted file tree graph](https://codecov.io/gh/briancurtin/deprecation/pull/14/graphs/tree.svg?token=6w8zqu2CEQ&height=150&src=pr&width=650)](https://codecov.io/gh/briancurtin/deprecation/pull/14?src=pr&el=tree) ```diff @@ Coverage Diff @@ ## master #14 +/- ## ======================================== - Coverage 100% 92.3% -7.7% ======================================== Files 1 1 Lines 62 78 +16 Branches 10 15 +5 ======================================== + Hits 62 72 +10 - Misses 0 3 +3 - Partials 0 3 +3 ``` | [Impacted Files](https://codecov.io/gh/briancurtin/deprecation/pull/14?src=pr&el=tree) | Coverage Δ | | |---|---|---| | [deprecation.py](https://codecov.io/gh/briancurtin/deprecation/pull/14/diff?src=pr&el=tree#diff-ZGVwcmVjYXRpb24ucHk=) | `92.3% <66.66%> (-7.7%)` | :arrow_down: | ------ [Continue to review full report at Codecov](https://codecov.io/gh/briancurtin/deprecation/pull/14?src=pr&el=continue). > **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta) > `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data` > Powered by [Codecov](https://codecov.io/gh/briancurtin/deprecation/pull/14?src=pr&el=footer). Last update [3ad94c2...7304a70](https://codecov.io/gh/briancurtin/deprecation/pull/14?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
diff --git a/deprecation.py b/deprecation.py index 00598db..a64b5b7 100644 --- a/deprecation.py +++ b/deprecation.py @@ -9,6 +9,7 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +import collections import functools import textwrap import warnings @@ -51,9 +52,22 @@ class DeprecatedWarning(DeprecationWarning): super(DeprecatedWarning, self).__init__() def __str__(self): - return ("%s is deprecated as of %s and will " - "be removed in %s. %s" % (self.function, self.deprecated_in, - self.removed_in, self.details)) + # Use a defaultdict to give us the empty string + # when a part isn't included. + parts = collections.defaultdict(str) + parts["function"] = self.function + + if self.deprecated_in: + parts["deprecated"] = " as of %s" % self.deprecated_in + if self.removed_in: + parts["removed"] = " and will be removed in %s" % self.removed_in + if any([self.deprecated_in, self.removed_in, self.details]): + parts["period"] = "." + if self.details: + parts["details"] = " %s" % self.details + + return ("%(function)s is deprecated%(deprecated)s%(removed)s" + "%(period)s%(details)s" % (parts)) class UnsupportedWarning(DeprecatedWarning): @@ -67,9 +81,15 @@ class UnsupportedWarning(DeprecatedWarning): """ def __str__(self): - return ("%s is unsupported as of %s. %s" % (self.function, - self.removed_in, - self.details)) + parts = collections.defaultdict(str) + parts["function"] = self.function + parts["removed"] = self.removed_in + + if self.details: + parts["details"] = " %s" % self.details + + return ("%(function)s is unsupported as of %(removed)s." + "%(details)s" % (parts)) def deprecated(deprecated_in=None, removed_in=None, current_version=None,
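The `collections.defaultdict(str)` in the new `__str__` is what lets the `%`-template degrade gracefully: `%`-formatting resolves `%(name)s` through `__getitem__`, so any part that was never set falls back to the empty string. A standalone illustration:

```python
import collections

parts = collections.defaultdict(str)
parts['function'] = 'fn'
# 'deprecated', 'removed', 'period' and 'details' were never set,
# so each renders as '' and the message collapses to "fn is deprecated".
print('%(function)s is deprecated%(deprecated)s%(removed)s'
      '%(period)s%(details)s' % parts)
```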
DeprecatedWarning.__str__ should handle None arguments better `DeprecatedWarning.__str__` doesn't play nicely with `None` values, such as what happens when you call `deprecation.deprecated()` with no arguments. This ends up showing up in the interpreter messages when run with `python -Wd`, though it doesn't affect docstrings: `/.../deprecation.py:183: DeprecatedWarning: fn is deprecated as of None and will be removed in None.` `warnings.warn(the_warning)`
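A minimal reproduction of the report (a sketch using the library's public decorator; the printed message is the pre-patch output):

```python
import warnings

import deprecation

@deprecation.deprecated()  # no deprecated_in/removed_in/details
def fn():
    pass

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    fn()

print(str(caught[0].message))
# pre-patch:  fn is deprecated as of None and will be removed in None.
# post-patch: fn is deprecated
```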
briancurtin/deprecation
diff --git a/tests/test_deprecation.py b/tests/test_deprecation.py index cf9d420..55e05bd 100644 --- a/tests/test_deprecation.py +++ b/tests/test_deprecation.py @@ -71,14 +71,33 @@ class Test_deprecated(unittest2.TestCase): ret_val = "lololol" for test in [{"args": {}, # No args just means deprecated - "warning": deprecation.DeprecatedWarning}, + "warning": deprecation.DeprecatedWarning, + "message": "method is deprecated"}, + {"args": {"details": "do something else."}, + "warning": deprecation.DeprecatedWarning, + "message": "method is deprecated. do something else."}, {"args": {"deprecated_in": "1.0", "current_version": "2.0"}, - "warning": deprecation.DeprecatedWarning}, + "warning": deprecation.DeprecatedWarning, + "message": "method is deprecated as of 1.0."}, + {"args": {"deprecated_in": "1.0", + "removed_in": "3.0", + "current_version": "2.0"}, + "warning": deprecation.DeprecatedWarning, + "message": ("method is deprecated as of 1.0 " + "and will be removed in 3.0.")}, {"args": {"deprecated_in": "1.0", "removed_in": "2.0", "current_version": "2.0"}, - "warning": deprecation.UnsupportedWarning}]: + "warning": deprecation.UnsupportedWarning, + "message": "method is unsupported as of 2.0."}, + {"args": {"deprecated_in": "1.0", + "removed_in": "2.0", + "current_version": "2.0", + "details": "do something else."}, + "warning": deprecation.UnsupportedWarning, + "message": ("method is unsupported as of 2.0. " + "do something else.")}]: with self.subTest(**test): class Test(object): @deprecation.deprecated(**test["args"]) @@ -93,6 +112,8 @@ class Test_deprecated(unittest2.TestCase): self.assertEqual(len(caught_warnings), 1) self.assertEqual(caught_warnings[0].category, test["warning"]) + self.assertEqual(str(caught_warnings[0].message), + test["message"]) def test_DeprecatedWarning_not_raised(self): ret_val = "lololol"
{ "commit_name": "merge_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 2, "test_score": 2 }, "num_modified_files": 1 }
1.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "pytest", "pip_packages": [ "unittest2", "pytest" ], "pre_install": null, "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work certifi==2021.5.30 -e git+https://github.com/briancurtin/deprecation.git@3ad94c2de7c313d0ca20a8ce4c9247a0faaa8018#egg=deprecation importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work linecache2==1.0.0 more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work py @ file:///opt/conda/conda-bld/py_1644396412707/work pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work pytest==6.2.4 six==1.17.0 toml @ file:///tmp/build/80754af9/toml_1616166611790/work traceback2==1.4.0 typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work unittest2==1.1.0 zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
name: deprecation channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - attrs=21.4.0=pyhd3eb1b0_0 - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - importlib-metadata=4.8.1=py36h06a4308_0 - importlib_metadata=4.8.1=hd3eb1b0_0 - iniconfig=1.1.1=pyhd3eb1b0_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - more-itertools=8.12.0=pyhd3eb1b0_0 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - packaging=21.3=pyhd3eb1b0_0 - pip=21.2.2=py36h06a4308_0 - pluggy=0.13.1=py36h06a4308_0 - py=1.11.0=pyhd3eb1b0_0 - pyparsing=3.0.4=pyhd3eb1b0_0 - pytest=6.2.4=py36h06a4308_2 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - toml=0.10.2=pyhd3eb1b0_0 - typing_extensions=4.1.1=pyh06a4308_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zipp=3.6.0=pyhd3eb1b0_0 - zlib=1.2.13=h5eee18b_1 - pip: - argparse==1.4.0 - linecache2==1.0.0 - six==1.17.0 - traceback2==1.4.0 - unittest2==1.1.0 prefix: /opt/conda/envs/deprecation
[ "tests/test_deprecation.py::Test_deprecated::test_warning_raised" ]
[]
[ "tests/test_deprecation.py::Test_deprecated::test_DeprecatedWarning_not_raised", "tests/test_deprecation.py::Test_deprecated::test_docstring", "tests/test_deprecation.py::Test_deprecated::test_multiline_docstring", "tests/test_deprecation.py::Test_deprecated::test_removing_without_deprecating", "tests/test_deprecation.py::Test_fail_if_not_removed::test_DeprecatedWarning_doesnt_fail", "tests/test_deprecation.py::Test_fail_if_not_removed::test_UnsupportedWarning_causes_failure", "tests/test_deprecation.py::Test_fail_if_not_removed::test_literal_DeprecatedWarning" ]
[]
Apache License 2.0
2,179
[ "deprecation.py" ]
[ "deprecation.py" ]
oasis-open__cti-python-stix2-127
b1a020bb38d74c407ac125b1e032850acfe7a880
2018-02-19 20:07:31
4a9c38e0b50415f4733072fc76eb8ebd0749c84b
diff --git a/stix2/base.py b/stix2/base.py index 76b07b8..fc13094 100644 --- a/stix2/base.py +++ b/stix2/base.py @@ -161,9 +161,12 @@ class _STIXBase(collections.Mapping): ", ".join(["{0!s}={1!r}".format(k, v) for k, v in props])) def __deepcopy__(self, memo): - # Assumption: we can ignore the memo argument, because no object will ever contain the same sub-object multiple times. + # Assume: we can ignore the memo argument, because no object will ever contain the same sub-object multiple times. new_inner = copy.deepcopy(self._inner, memo) cls = type(self) + if isinstance(self, _Observable): + # Assume: valid references in the original object are still valid in the new version + new_inner['_valid_refs'] = {'*': '*'} return cls(**new_inner) def properties_populated(self): @@ -221,6 +224,9 @@ class _Observable(_STIXBase): super(_Observable, self).__init__(**kwargs) def _check_ref(self, ref, prop, prop_name): + if '*' in self._STIXBase__valid_refs: + return # don't check if refs are valid + if ref not in self._STIXBase__valid_refs: raise InvalidObjRefError(self.__class__, prop_name, "'%s' is not a valid object in local scope" % ref)
Fail to create new version of ObservedData if it has related objects If you try to generate a new version of an ObservedData which contains related objects (like domain-name -> ipv4) you obtain a fatal error Example: `from stix2 import ObservedData` `from datetime import datetime` `data = ObservedData(first_observed=datetime.now(),last_observed=datetime.now(),number_observed=1,objects={'src_ip': {'type':'ipv4-addr','value':'127.0.0.1/32'}, 'domain':{'type':'domain-name','value':'example.com','resolves_to_refs':['src_ip']}})` `new_version = data.new_version(last_observed=datetime.now())` Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/usr/local/lib/python3.6/site-packages/stix2/base.py", line 175, in new_version return _new_version(self, **kwargs) File "/usr/local/lib/python3.6/site-packages/stix2/utils.py", line 212, in new_version new_obj_inner = copy.deepcopy(data._inner) File "/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/copy.py", line 150, in deepcopy y = copier(x, memo) File "/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/copy.py", line 240, in _deepcopy_dict y[deepcopy(key, memo)] = deepcopy(value, memo) File "/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/copy.py", line 150, in deepcopy y = copier(x, memo) File "/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/copy.py", line 240, in _deepcopy_dict y[deepcopy(key, memo)] = deepcopy(value, memo) File "/usr/local/Cellar/python3/3.6.4_2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/copy.py", line 161, in deepcopy y = copier(memo) File "/usr/local/lib/python3.6/site-packages/stix2/base.py", line 167, in __deepcopy__ return cls(**new_inner) File "/usr/local/lib/python3.6/site-packages/stix2/base.py", line 221, in __init__ super(_Observable, self).__init__(**kwargs) File "/usr/local/lib/python3.6/site-packages/stix2/base.py", line 128, in __init__ self._check_property(prop_name, prop_metadata, setting_kwargs) File "/usr/local/lib/python3.6/site-packages/stix2/base.py", line 251, in _check_property self._check_ref(ref, prop, prop_name) File "/usr/local/lib/python3.6/site-packages/stix2/base.py", line 225, in _check_ref raise InvalidObjRefError(self.__class__, prop_name, "'%s' is not a valid object in local scope" % ref)` The error appears also if you use an incremental value as index in objects dictionary
oasis-open/cti-python-stix2
diff --git a/stix2/test/test_observed_data.py b/stix2/test/test_observed_data.py index 3029b68..30c3cab 100644 --- a/stix2/test/test_observed_data.py +++ b/stix2/test/test_observed_data.py @@ -1162,3 +1162,25 @@ def test_x509_certificate_example(): assert x509.type == "x509-certificate" assert x509.issuer == "C=ZA, ST=Western Cape, L=Cape Town, O=Thawte Consulting cc, OU=Certification Services Division, CN=Thawte Server CA/[email protected]" # noqa assert x509.subject == "C=US, ST=Maryland, L=Pasadena, O=Brent Baccala, OU=FreeSoft, CN=www.freesoft.org/[email protected]" # noqa + + +def test_new_version_with_related_objects(): + data = stix2.ObservedData( + first_observed="2016-03-12T12:00:00Z", + last_observed="2016-03-12T12:00:00Z", + number_observed=1, + objects={ + 'src_ip': { + 'type': 'ipv4-addr', + 'value': '127.0.0.1/32' + }, + 'domain': { + 'type': 'domain-name', + 'value': 'example.com', + 'resolves_to_refs': ['src_ip'] + } + } + ) + new_version = data.new_version(last_observed="2017-12-12T12:00:00Z") + assert new_version.last_observed.year == 2017 + assert new_version.objects['domain'].resolves_to_refs[0] == 'src_ip'
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
0.4
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 antlr4-python3-runtime==4.9.3 async-generator==1.10 attrs==22.2.0 Babel==2.11.0 bleach==4.1.0 bump2version==1.0.1 bumpversion==0.6.0 certifi==2021.5.30 cfgv==3.3.1 charset-normalizer==2.0.12 coverage==6.2 decorator==5.1.1 defusedxml==0.7.1 distlib==0.3.9 docutils==0.18.1 entrypoints==0.4 filelock==3.4.1 identify==2.4.4 idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 importlib-resources==5.2.3 iniconfig==1.1.1 ipython-genutils==0.2.0 Jinja2==3.0.3 jsonschema==3.2.0 jupyter-client==7.1.2 jupyter-core==4.9.2 jupyterlab-pygments==0.1.2 MarkupSafe==2.0.1 mistune==0.8.4 nbclient==0.5.9 nbconvert==6.0.7 nbformat==5.1.3 nbsphinx==0.8.8 nest-asyncio==1.6.0 nodeenv==1.6.0 packaging==21.3 pandocfilters==1.5.1 platformdirs==2.4.0 pluggy==1.0.0 pre-commit==2.17.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pyrsistent==0.18.0 pytest==7.0.1 pytest-cov==4.0.0 python-dateutil==2.9.0.post0 pytz==2025.2 PyYAML==6.0.1 pyzmq==25.1.2 requests==2.27.1 simplejson==3.20.1 six==1.17.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-prompt==1.5.0 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 -e git+https://github.com/oasis-open/cti-python-stix2.git@b1a020bb38d74c407ac125b1e032850acfe7a880#egg=stix2 stix2-patterns==2.0.0 taxii2-client==2.3.0 testpath==0.6.0 toml==0.10.2 tomli==1.2.3 tornado==6.1 tox==3.28.0 traitlets==4.3.3 typing_extensions==4.1.1 urllib3==1.26.20 virtualenv==20.16.2 webencodings==0.5.1 zipp==3.6.0
name: cti-python-stix2 channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - antlr4-python3-runtime==4.9.3 - async-generator==1.10 - attrs==22.2.0 - babel==2.11.0 - bleach==4.1.0 - bump2version==1.0.1 - bumpversion==0.6.0 - cfgv==3.3.1 - charset-normalizer==2.0.12 - coverage==6.2 - decorator==5.1.1 - defusedxml==0.7.1 - distlib==0.3.9 - docutils==0.18.1 - entrypoints==0.4 - filelock==3.4.1 - identify==2.4.4 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - importlib-resources==5.2.3 - iniconfig==1.1.1 - ipython-genutils==0.2.0 - jinja2==3.0.3 - jsonschema==3.2.0 - jupyter-client==7.1.2 - jupyter-core==4.9.2 - jupyterlab-pygments==0.1.2 - markupsafe==2.0.1 - mistune==0.8.4 - nbclient==0.5.9 - nbconvert==6.0.7 - nbformat==5.1.3 - nbsphinx==0.8.8 - nest-asyncio==1.6.0 - nodeenv==1.6.0 - packaging==21.3 - pandocfilters==1.5.1 - platformdirs==2.4.0 - pluggy==1.0.0 - pre-commit==2.17.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pyrsistent==0.18.0 - pytest==7.0.1 - pytest-cov==4.0.0 - python-dateutil==2.9.0.post0 - pytz==2025.2 - pyyaml==6.0.1 - pyzmq==25.1.2 - requests==2.27.1 - simplejson==3.20.1 - six==1.17.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-prompt==1.5.0 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - stix2-patterns==2.0.0 - taxii2-client==2.3.0 - testpath==0.6.0 - toml==0.10.2 - tomli==1.2.3 - tornado==6.1 - tox==3.28.0 - traitlets==4.3.3 - typing-extensions==4.1.1 - urllib3==1.26.20 - virtualenv==20.16.2 - webencodings==0.5.1 - zipp==3.6.0 prefix: /opt/conda/envs/cti-python-stix2
[ "stix2/test/test_observed_data.py::test_new_version_with_related_objects" ]
[]
[ "stix2/test/test_observed_data.py::test_observed_data_example", "stix2/test/test_observed_data.py::test_observed_data_example_with_refs", "stix2/test/test_observed_data.py::test_observed_data_example_with_bad_refs", "stix2/test/test_observed_data.py::test_observed_data_example_with_non_dictionary", "stix2/test/test_observed_data.py::test_observed_data_example_with_empty_dictionary", "stix2/test/test_observed_data.py::test_parse_observed_data[{\\n", "stix2/test/test_observed_data.py::test_parse_observed_data[data1]", "stix2/test/test_observed_data.py::test_parse_artifact_valid[\"0\":", "stix2/test/test_observed_data.py::test_parse_artifact_invalid[\"0\":", "stix2/test/test_observed_data.py::test_artifact_example_dependency_error", "stix2/test/test_observed_data.py::test_parse_autonomous_system_valid[\"0\":", "stix2/test/test_observed_data.py::test_parse_email_address[{\\n", "stix2/test/test_observed_data.py::test_parse_email_message[\\n", "stix2/test/test_observed_data.py::test_parse_email_message_not_multipart[\\n", "stix2/test/test_observed_data.py::test_parse_file_archive[\"0\":", "stix2/test/test_observed_data.py::test_parse_email_message_with_at_least_one_error[\\n", "stix2/test/test_observed_data.py::test_parse_basic_tcp_traffic[\\n", "stix2/test/test_observed_data.py::test_parse_basic_tcp_traffic_with_error[\\n", "stix2/test/test_observed_data.py::test_observed_data_with_process_example", "stix2/test/test_observed_data.py::test_artifact_example", "stix2/test/test_observed_data.py::test_artifact_mutual_exclusion_error", "stix2/test/test_observed_data.py::test_directory_example", "stix2/test/test_observed_data.py::test_directory_example_ref_error", "stix2/test/test_observed_data.py::test_domain_name_example", "stix2/test/test_observed_data.py::test_domain_name_example_invalid_ref_type", "stix2/test/test_observed_data.py::test_file_example", "stix2/test/test_observed_data.py::test_file_example_with_NTFSExt", "stix2/test/test_observed_data.py::test_file_example_with_empty_NTFSExt", "stix2/test/test_observed_data.py::test_file_example_with_PDFExt", "stix2/test/test_observed_data.py::test_file_example_with_PDFExt_Object", "stix2/test/test_observed_data.py::test_file_example_with_RasterImageExt_Object", "stix2/test/test_observed_data.py::test_file_example_with_WindowsPEBinaryExt", "stix2/test/test_observed_data.py::test_file_example_encryption_error", "stix2/test/test_observed_data.py::test_ip4_address_example", "stix2/test/test_observed_data.py::test_ip4_address_example_cidr", "stix2/test/test_observed_data.py::test_ip6_address_example", "stix2/test/test_observed_data.py::test_mac_address_example", "stix2/test/test_observed_data.py::test_network_traffic_example", "stix2/test/test_observed_data.py::test_network_traffic_http_request_example", "stix2/test/test_observed_data.py::test_network_traffic_icmp_example", "stix2/test/test_observed_data.py::test_network_traffic_socket_example", "stix2/test/test_observed_data.py::test_network_traffic_tcp_example", "stix2/test/test_observed_data.py::test_mutex_example", "stix2/test/test_observed_data.py::test_process_example", "stix2/test/test_observed_data.py::test_process_example_empty_error", "stix2/test/test_observed_data.py::test_process_example_empty_with_extensions", "stix2/test/test_observed_data.py::test_process_example_windows_process_ext", "stix2/test/test_observed_data.py::test_process_example_windows_process_ext_empty", "stix2/test/test_observed_data.py::test_process_example_extensions_empty", 
"stix2/test/test_observed_data.py::test_process_example_with_WindowsProcessExt_Object", "stix2/test/test_observed_data.py::test_process_example_with_WindowsServiceExt", "stix2/test/test_observed_data.py::test_process_example_with_WindowsProcessServiceExt", "stix2/test/test_observed_data.py::test_software_example", "stix2/test/test_observed_data.py::test_url_example", "stix2/test/test_observed_data.py::test_user_account_example", "stix2/test/test_observed_data.py::test_user_account_unix_account_ext_example", "stix2/test/test_observed_data.py::test_windows_registry_key_example", "stix2/test/test_observed_data.py::test_x509_certificate_example" ]
[]
BSD 3-Clause "New" or "Revised" License
2,181
[ "stix2/base.py" ]
[ "stix2/base.py" ]
pre-commit__pre-commit-hooks-266
2f1e5e2abf7c3be32557ad1d70aea3be5c69c354
2018-02-19 20:57:17
e80813e7e9bceeb263a4329341f2681074fa725a
diff --git a/pre_commit_hooks/no_commit_to_branch.py b/pre_commit_hooks/no_commit_to_branch.py index 22ee95e..0c75217 100644 --- a/pre_commit_hooks/no_commit_to_branch.py +++ b/pre_commit_hooks/no_commit_to_branch.py @@ -1,21 +1,25 @@ from __future__ import print_function import argparse -import sys +from pre_commit_hooks.util import CalledProcessError from pre_commit_hooks.util import cmd_output def is_on_branch(protected): - branch = cmd_output('git', 'symbolic-ref', 'HEAD') + try: + branch = cmd_output('git', 'symbolic-ref', 'HEAD') + except CalledProcessError: + return False chunks = branch.strip().split('/') return '/'.join(chunks[2:]) == protected -def main(argv=[]): +def main(argv=None): parser = argparse.ArgumentParser() parser.add_argument( - '-b', '--branch', default='master', help='branch to disallow commits to', + '-b', '--branch', default='master', + help='branch to disallow commits to', ) args = parser.parse_args(argv) @@ -23,4 +27,4 @@ def main(argv=[]): if __name__ == '__main__': - sys.exit(main(sys.argv)) + exit(main())
no-commit-to-branch on CI As a safety net, we've started running pre-commit on CI, e.g. ``` git diff --name-only "$GIT_COMMIT"^..."$GIT_COMMIT" | xargs pre-commit run --files ``` one of our projects uses `no-commit-to-branch` and we just noticed this behavior on CI: ``` Don't commit to branch...................................................Failed hookid: no-commit-to-branch Traceback (most recent call last): File "/var/lib/jenkins/.cache/pre-commit/repoVUdrA6/py_env-python2.7/bin/no-commit-to-branch", line 11, in <module> load_entry_point('pre-commit-hooks==1.1.0', 'console_scripts', 'no-commit-to-branch')() File "/var/lib/jenkins/.cache/pre-commit/repoVUdrA6/py_env-python2.7/local/lib/python2.7/site-packages/pre_commit_hooks/no_commit_to_branch.py", line 22, in main return int(is_on_branch(args.branch)) File "/var/lib/jenkins/.cache/pre-commit/repoVUdrA6/py_env-python2.7/local/lib/python2.7/site-packages/pre_commit_hooks/no_commit_to_branch.py", line 10, in is_on_branch branch = cmd_output('git', 'symbolic-ref', 'HEAD') File "/var/lib/jenkins/.cache/pre-commit/repoVUdrA6/py_env-python2.7/local/lib/python2.7/site-packages/pre_commit_hooks/util.py", line 28, in cmd_output raise CalledProcessError(cmd, retcode, proc.returncode, stdout, stderr) pre_commit_hooks.util.CalledProcessError: (('git', 'symbolic-ref', 'HEAD'), 0, 128, u'', u'fatal: ref HEAD is not a symbolic ref\n') ``` most likely this is not really an issue with `pre-commit` but with the way our Jenkins jobs are set up (?). I worked around this by using `SKIP=no-commit-to-branch pre-commit run` but just thought I would let you know.
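The root cause in the traceback above is a detached `HEAD`: CI systems typically check out a bare commit, so `git symbolic-ref HEAD` exits with status 128 instead of printing a ref. A minimal sketch of a defensive check (Python 3 assumed; the helper name `current_branch_or_none` is made up for illustration and is not part of pre-commit-hooks):

```python
import subprocess


def current_branch_or_none():
    """Return the current branch name, or None when HEAD is detached.

    `git symbolic-ref HEAD` prints e.g. `refs/heads/master` when on a
    branch, and exits with status 128 on a detached HEAD (the CI case
    described above).
    """
    proc = subprocess.run(
        ('git', 'symbolic-ref', 'HEAD'),
        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
    )
    if proc.returncode != 0:  # detached HEAD: there is no branch to protect
        return None
    # Strip the `refs/heads/` prefix; keeps slashes in names like feature/x.
    return proc.stdout.decode().strip().split('/', 2)[-1]


def is_on_branch(protected):
    return current_branch_or_none() == protected
```

Treating the detached case as "not on the protected branch" makes the hook pass on CI, which matches the fix in the patch above.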
pre-commit/pre-commit-hooks
diff --git a/tests/check_no_commit_to_branch_test.py b/tests/check_no_commit_to_branch_test.py index 99af938..7e39256 100644 --- a/tests/check_no_commit_to_branch_test.py +++ b/tests/check_no_commit_to_branch_test.py @@ -29,19 +29,22 @@ def test_master_branch(temp_git_dir): assert is_on_branch('master') is True -def test_main_b_call(temp_git_dir): - with temp_git_dir.as_cwd(): - cmd_output('git', 'checkout', '-b', 'other') - assert main(['-b', 'other']) == 1 - - def test_main_branch_call(temp_git_dir): with temp_git_dir.as_cwd(): cmd_output('git', 'checkout', '-b', 'other') - assert main(['--branch', 'other']) == 1 + assert main(('--branch', 'other')) == 1 def test_main_default_call(temp_git_dir): with temp_git_dir.as_cwd(): cmd_output('git', 'checkout', '-b', 'anotherbranch') - assert main() == 0 + assert main(()) == 0 + + +def test_not_on_a_branch(temp_git_dir): + with temp_git_dir.as_cwd(): + cmd_output('git', 'commit', '--no-gpg-sign', '--allow-empty', '-m1') + head = cmd_output('git', 'rev-parse', 'HEAD').strip() + cmd_output('git', 'checkout', head) + # we're not on a branch! + assert main(()) == 0
{ "commit_name": "head_commit", "failed_lite_validators": [], "has_test_patch": true, "is_lite": true, "llm_score": { "difficulty_score": 0, "issue_text_score": 0, "test_score": 0 }, "num_modified_files": 1 }
1.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "coverage" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": null, "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 autopep8==2.0.4 certifi==2021.5.30 coverage==6.2 flake8==2.5.5 importlib-metadata==4.8.3 iniconfig==1.1.1 mccabe==0.4.0 packaging==21.3 pep8==1.7.1 pluggy==1.0.0 -e git+https://github.com/pre-commit/pre-commit-hooks.git@2f1e5e2abf7c3be32557ad1d70aea3be5c69c354#egg=pre_commit_hooks py==1.11.0 pycodestyle==2.10.0 pyflakes==1.0.0 pyparsing==3.1.4 pytest==7.0.1 PyYAML==6.0.1 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: pre-commit-hooks channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - autopep8==2.0.4 - coverage==6.2 - flake8==2.5.5 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - mccabe==0.4.0 - packaging==21.3 - pep8==1.7.1 - pluggy==1.0.0 - py==1.11.0 - pycodestyle==2.10.0 - pyflakes==1.0.0 - pyparsing==3.1.4 - pytest==7.0.1 - pyyaml==6.0.1 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/pre-commit-hooks
[ "tests/check_no_commit_to_branch_test.py::test_not_on_a_branch" ]
[]
[ "tests/check_no_commit_to_branch_test.py::test_other_branch", "tests/check_no_commit_to_branch_test.py::test_multi_branch", "tests/check_no_commit_to_branch_test.py::test_multi_branch_fail", "tests/check_no_commit_to_branch_test.py::test_master_branch", "tests/check_no_commit_to_branch_test.py::test_main_branch_call", "tests/check_no_commit_to_branch_test.py::test_main_default_call" ]
[]
MIT License
2,182
[ "pre_commit_hooks/no_commit_to_branch.py" ]
[ "pre_commit_hooks/no_commit_to_branch.py" ]
innolitics__hdat-16
3b3b8250e98a9ffe8bb583b7f7024c4d0cbc07bc
2018-02-19 21:30:17
3b3b8250e98a9ffe8bb583b7f7024c4d0cbc07bc
diff --git a/README.md b/README.md index 5714ec4..b296333 100644 --- a/README.md +++ b/README.md @@ -103,9 +103,9 @@ A casespec is a string that selects one or more test cases. A casespec may spec Here are several casespecs along with the test cases they would select: -- `` Selects all test cases in all suites. -- `a` Selects all cases in the test suite with id "a". -- `a/b` Selects test case with id "b" in the suite with id "b". +"" - Selects all test cases in all suites. +"a" - Selects all cases in the test suite with id "a". +"a/b" - Selects test case with id "b" in the suite with id "b". # Resultspecs @@ -113,10 +113,10 @@ A resultspec is a string that selects one or more test results. A result spec m Here are several resultspec along with the test cases they would select: -- `` Selects the most recent result for every test case in every test suite. -- `a` Selects the most recent results for every test case in the test suite with id "a". -- `a/b` Selects the most recent result for the test case with id "b" in the test suite with id "a". -- `a/b/c` Selects the test result with id "c" for the test case with id "b" in the test suite with id "a". -- `a/b/~0` Selects the most recent result for the test case with id "b" in the test suite with id "a". -- `a/b/~1` Selects the previous result for the test case with id "b" in the test suite with id "a". -- `a/b/~4` Selects the 4 test older than the previous result for the test case with id "b" in the test suite with id "a". +"" - Selects the most recent result for every test case in every test suite. +"a" - Selects the most recent results for every test case in the test suite with id "a". +"a/b" - Selects the most recent result for the test case with id "b" in the test suite with id "a". +"a/b/c" - Selects the test result with id "c" for the test case with id "b" in the test suite with id "a". +"a/b/~0" - Selects the most recent result for the test case with id "b" in the test suite with id "a". +"a/b/~1" - Selects the previous result for the test case with id "b" in the test suite with id "a". +"a/b/~4" - Selects the 4 test older than the previous result for the test case with id "b" in the test suite with id "a". 
diff --git a/hdat/hdat_cli.py b/hdat/hdat_cli.py index a995af4..6ac5ca7 100644 --- a/hdat/hdat_cli.py +++ b/hdat/hdat_cli.py @@ -11,10 +11,13 @@ def parse_arguments(arguments): parser = argparse.ArgumentParser(prog='hdat') subparsers = parser.add_subparsers(dest='command', metavar='<command>') + list_help = 'list available cases, collected in the current working dir' + list_parser = subparsers.add_parser('list', help=list_help) + list_parser.add_argument('casespecs', nargs='*', default=[''], metavar='<case>') + run_help = 'run cases, store results in archive, compare against goldens' run_parser = subparsers.add_parser('run', help=run_help) run_parser.add_argument('casespecs', nargs='*', default=[''], metavar='<case>') - run_parser.add_argument('--collect-only', default=False, action='store_true') show_help = 'visualize a single result' show_parser = subparsers.add_parser('show', help=show_help) @@ -53,7 +56,7 @@ def hdat_cli(arguments, suites, golden_store, archive, git_info): if args.command is None: parse_arguments(['-h']) - if args.command == 'run' and args.collect_only: + if args.command == 'list': cases = resolve_casespecs(suites, args.casespecs) print("\n".join(['{}/{}'.format(suite_id, case_id) for suite_id, case_id in cases])) elif args.command == 'run': diff --git a/hdat/main.py b/hdat/main.py index b35ba0b..23f4828 100755 --- a/hdat/main.py +++ b/hdat/main.py @@ -11,6 +11,7 @@ from hdat.store import Archive, GoldenStore def main(): cwd = os.getcwd() + sys.path.append(cwd) try: git_info = git_info_from_directory(cwd) diff --git a/hdat/suite.py b/hdat/suite.py index a45c80c..f67d04d 100644 --- a/hdat/suite.py +++ b/hdat/suite.py @@ -1,6 +1,9 @@ -import pydoc +import importlib +import sys +import inspect +import os -from .util import print_error, find_here_or_in_parents, AbortError +from .util import print_error, AbortError class Suite: @@ -10,7 +13,7 @@ class Suite: Is responsible for collecting, running, checking, and visualizing the results of running the algorithm against its test cases. 
''' - def cases(self): + def collect(self): ''' Collect all of the cases for this suite, and return them as a dict-like mapping where the keys are the "case ids" and the values are the "case @@ -71,24 +74,26 @@ def collect_suites(directory): def _collect_suite_classes(directory): - suites_filename = find_here_or_in_parents(directory, '.hdattsuites') - if suites_filename is None: - raise AbortError('Unable to locate a ".hdattsuites" file') + hdat_module_suffix = '_hdat.py' + hdat_suite_class = Suite suite_classes = [] - with open(suites_filename, 'r') as suites_file: - for line in suites_file: - class_location = line.strip() - try: - test_suite_class = pydoc.locate(class_location) - except pydoc.ErrorDuringImport as e: - print_error(e) - test_suite_class = None - - if test_suite_class is None: - msg = 'Unable to import test suite "{}"' - raise AbortError(msg.format(class_location)) - else: - suite_classes.append(test_suite_class) - + for root, dirs, files in os.walk(directory, topdown=True): + # prevent os.walk from going into hidden dirs + dirs[:] = [subdir for subdir in dirs if not subdir.startswith('.')] + for filename in files: + if filename.endswith(hdat_module_suffix): + module_name = filename.strip(".py") + + module_path = (os.path.relpath(root, start=directory)) + if module_path == '.': + module_spec = module_name + else: + module_spec = os.path.join(module_path, '').replace(os.path.sep, '.') + module_name + + importlib.import_module(module_spec) + classes = inspect.getmembers(sys.modules[module_spec], predicate=inspect.isclass) + for name, value in classes: + if hdat_suite_class in inspect.getmro(value) and hdat_suite_class != value: + suite_classes.append(value) return suite_classes
Find a way to avoid needing `.hdatsuites`. As usual, it would be good to write out your design here so we can discuss it before implementing.
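For context, the patch above replaces the registry file with a filename convention: any `*_hdat.py` module is imported and scanned for `Suite` subclasses. A rough sketch of the introspection half of that design, assuming `hdat.suite.Suite` is importable and leaving the directory walk out:

```python
import importlib
import inspect

from hdat.suite import Suite  # base class all hdat suites derive from


def discover_suite_classes(module_specs):
    """Collect Suite subclasses from explicitly named modules.

    module_specs: dotted module names such as 'my_algo.cases_hdat',
    typically found by scanning a tree for the '*_hdat.py' convention.
    """
    classes = []
    for spec in module_specs:
        module = importlib.import_module(spec)
        for _, obj in inspect.getmembers(module, inspect.isclass):
            # Keep strict subclasses only, never the Suite base itself.
            if issubclass(obj, Suite) and obj is not Suite:
                classes.append(obj)
    return classes
```

This is only a sketch of the idea under those assumptions; the merged implementation also guards against hidden directories and computes module specs from relative paths.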
innolitics/hdat
diff --git a/tests/conftest.py b/tests/conftest.py index 8374aa9..a6fe0b2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,11 +1,9 @@ import tempfile -from collections import OrderedDict import pytest from hdat.store import GoldenStore, Archive -from hdat.suite import Suite -from hdat.resultspec import print_resultspec +from test_suite_hdat import BasicSuiteA, BasicSuiteB @pytest.fixture @@ -22,44 +20,6 @@ def tmp_archive(): tmp_directory.cleanup() -class BaseSuite(Suite): - def check(self, old, new): - return old == new, 'Looks good!' - - def run(self, case_input): - return case_input, {} - - def show(self, result): - raise NotImplementedError('showing "{}"'.format( - print_resultspec(result) - )) - - def diff(self, golden_result, result): - raise NotImplementedError('diffing "{}" and "{}"'.format( - print_resultspec(golden_result), - print_resultspec(result) - )) - - -class BasicSuiteA(BaseSuite): - id = 'a' - - def collect(self): - return OrderedDict([ - ('1', 10), - ('2', 20), - ]) - - -class BasicSuiteB(BaseSuite): - id = 'b' - - def collect(self): - return { - '3': 30, - } - - @pytest.fixture def basic_suite_a(): return BasicSuiteA diff --git a/tests/main_test.py b/tests/main_test.py index 399dd97..16e9363 100644 --- a/tests/main_test.py +++ b/tests/main_test.py @@ -1,7 +1,9 @@ import pytest +import os from hdat.hdat_cli import hdat_cli from hdat.util import AbortError +from hdat.suite import collect_suites @pytest.fixture @@ -41,3 +43,8 @@ class TestMainRun: hdat_cli_with_mocks(['diff', 'a/1/r1', 'a/1/101_r2']) assert 'diffing "a/1/r1" and "a/1/101_r2"' in str(e) + + def test_collect_suites(self): + test_path = os.path.dirname(__file__) + suites = collect_suites(test_path) + assert suites.keys() == set(['BaseSuite', 'a', 'b']) diff --git a/tests/test_suite_hdat.py b/tests/test_suite_hdat.py new file mode 100644 index 0000000..582eb7e --- /dev/null +++ b/tests/test_suite_hdat.py @@ -0,0 +1,44 @@ +from hdat.suite import Suite +from collections import OrderedDict +from hdat.resultspec import print_resultspec + + +class BaseSuite(Suite): + def check(self, old, new): + return old == new, 'Looks good!' + + def run(self, case_input): + return case_input, {} + + def collect(self): + return OrderedDict() + + def show(self, result): + raise NotImplementedError('showing "{}"'.format( + print_resultspec(result) + )) + + def diff(self, golden_result, result): + raise NotImplementedError('diffing "{}" and "{}"'.format( + print_resultspec(golden_result), + print_resultspec(result) + )) + + +class BasicSuiteA(BaseSuite): + id = 'a' + + def collect(self): + return OrderedDict([ + ('1', 10), + ('2', 20), + ]) + + +class BasicSuiteB(BaseSuite): + id = 'b' + + def collect(self): + return { + '3': 30, + }
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_short_problem_statement", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 1, "issue_text_score": 3, "test_score": 3 }, "num_modified_files": 4 }
unknown
{ "env_vars": null, "env_yml_path": [], "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [], "python": "3.6", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
alabaster==0.7.13 attrs==22.2.0 Babel==2.11.0 build==0.9.0 certifi==2021.5.30 charset-normalizer==2.0.12 check-manifest==0.48 colorama==0.4.5 docutils==0.18.1 gitdb==4.0.9 GitPython==3.1.18 -e git+https://github.com/innolitics/hdat.git@3b3b8250e98a9ffe8bb583b7f7024c4d0cbc07bc#egg=hdat idna==3.10 imagesize==1.4.1 importlib-metadata==4.8.3 iniconfig==1.1.1 Jinja2==3.0.3 livereload==2.6.3 MarkupSafe==2.0.1 mock==5.2.0 packaging==21.3 pep517==0.13.1 pluggy==1.0.0 py==1.11.0 Pygments==2.14.0 pyparsing==3.1.4 pytest==7.0.1 pytz==2025.2 requests==2.27.1 six==1.17.0 smmap==5.0.0 snowballstemmer==2.2.0 Sphinx==5.3.0 sphinx-autobuild==2021.3.14 sphinxcontrib-applehelp==1.0.2 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 tomli==1.2.3 tornado==6.1 typing_extensions==4.1.1 urllib3==1.26.20 zipp==3.6.0
name: hdat channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - alabaster==0.7.13 - attrs==22.2.0 - babel==2.11.0 - build==0.9.0 - charset-normalizer==2.0.12 - check-manifest==0.48 - colorama==0.4.5 - docutils==0.18.1 - gitdb==4.0.9 - gitpython==3.1.18 - idna==3.10 - imagesize==1.4.1 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jinja2==3.0.3 - livereload==2.6.3 - markupsafe==2.0.1 - mock==5.2.0 - packaging==21.3 - pep517==0.13.1 - pluggy==1.0.0 - py==1.11.0 - pygments==2.14.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytz==2025.2 - requests==2.27.1 - six==1.17.0 - smmap==5.0.0 - snowballstemmer==2.2.0 - sphinx==5.3.0 - sphinx-autobuild==2021.3.14 - sphinxcontrib-applehelp==1.0.2 - sphinxcontrib-devhelp==1.0.2 - sphinxcontrib-htmlhelp==2.0.0 - sphinxcontrib-jsmath==1.0.1 - sphinxcontrib-qthelp==1.0.3 - sphinxcontrib-serializinghtml==1.1.5 - tomli==1.2.3 - tornado==6.1 - typing-extensions==4.1.1 - urllib3==1.26.20 - zipp==3.6.0 prefix: /opt/conda/envs/hdat
[ "tests/main_test.py::TestMainRun::test_collect_suites" ]
[]
[ "tests/main_test.py::TestMainRun::test_run_all_verify_all_rerun", "tests/main_test.py::TestMainRun::test_show_most_recent", "tests/main_test.py::TestMainRun::test_diff" ]
[]
MIT License
2,183
[ "hdat/hdat_cli.py", "hdat/main.py", "README.md", "hdat/suite.py" ]
[ "hdat/hdat_cli.py", "hdat/main.py", "README.md", "hdat/suite.py" ]
great-expectations__great_expectations-191
f4b15d20a092fcbef690506f89bddec84b8140ff
2018-02-20 06:38:49
f4b15d20a092fcbef690506f89bddec84b8140ff
diff --git a/great_expectations/dataset/base.py b/great_expectations/dataset/base.py index 7b6b568bf..d75e3c0e6 100644 --- a/great_expectations/dataset/base.py +++ b/great_expectations/dataset/base.py @@ -606,18 +606,38 @@ If you wish to change this behavior, please set discard_failed_expectations, dis warnings.warn("WARNING: No great_expectations version found in configuration object.") for expectation in expectations_config['expectations']: - expectation_method = getattr(self, expectation['expectation_type']) + try: + expectation_method = getattr(self, expectation['expectation_type']) - if output_format is not None: - expectation['kwargs'].update({"output_format": output_format}) + if output_format is not None: + expectation['kwargs'].update({"output_format": output_format}) - if include_config is not None: - expectation['kwargs'].update({"include_config": include_config}) + if include_config is not None: + expectation['kwargs'].update({"include_config": include_config}) - result = expectation_method( - catch_exceptions=catch_exceptions, - **expectation['kwargs'] - ) + result = expectation_method( + catch_exceptions=catch_exceptions, + **expectation['kwargs'] + ) + + except AttributeError as err: + if catch_exceptions: + raised_exception = True + exception_traceback = traceback.format_exc() + + if output_format != "BOOLEAN_ONLY": + result = { + "success": False, + "expectation_type": expectation['expectation_type'], + "kwargs": expectation['kwargs'], + "raised_exception": raised_exception, + "exception_traceback": exception_traceback, + } + else: + result = False + + else: + raise(err) if output_format != "BOOLEAN_ONLY": results.append(
`Dataset.validate` `catch_exceptions` parameter fails if the `expectation_type` itself is unknown. Replication code: ``` import great_expectations as ge my_df = ge.dataset.PandasDataSet({"x":range(10)}) my_df.append_expectation({'expectation_type':'foobar', 'kwargs':{}}) my_df.validate() ``` Produces: ``` In [13]: my_df.validate() --------------------------------------------------------------------------- AttributeError Traceback (most recent call last) <ipython-input-13-78271cde7254> in <module>() ----> 1 my_df.validate() /Users/abe/Documents/superconductive/tools/great_expectations/great_expectations/dataset/base.pyc in validate(self, expectations_config, catch_exceptions, output_format, include_config, only_return_failures) 607 608 for expectation in expectations_config['expectations']: --> 609 expectation_method = getattr(self, expectation['expectation_type']) 610 611 if output_format is not None: /Users/abe/anaconda2/lib/python2.7/site-packages/pandas/core/generic.pyc in __getattr__(self, name) 3612 if name in self._info_axis: 3613 return self[name] -> 3614 return object.__getattribute__(self, name) 3615 3616 def __setattr__(self, name, value): AttributeError: 'PandasDataSet' object has no attribute 'foobar' ``` I would have expected this to produce something like: ``` { "results": [ { "raised_exception": true, "exception_traceback": "Traceback (most recent call last):\n File \".../great_expectations/great_expectations/dataset/base.py\", line 113, in wrapper\n return_obj = func(self, **expectation_args)\nValueError: Dataset does not have an expectation method 'foobar'\n", "expectation_type": "foobar", "success": false, "kwargs": {} } ] } ```
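The crash comes from the unguarded `getattr` dispatch inside `validate`; wrapping that lookup is enough to turn an unknown `expectation_type` into the structured failure shown above. A minimal standalone sketch of the guard (the function name `run_one_expectation` is illustrative, not part of great_expectations):

```python
import traceback


def run_one_expectation(dataset, expectation, catch_exceptions=True):
    """Dispatch a single expectation, mapping an unknown type to a
    failed result dict instead of an uncaught AttributeError."""
    try:
        method = getattr(dataset, expectation['expectation_type'])
        return method(catch_exceptions=catch_exceptions,
                      **expectation['kwargs'])
    except AttributeError:
        if not catch_exceptions:
            raise  # preserve the old behavior when not catching
        return {
            'success': False,
            'expectation_type': expectation['expectation_type'],
            'kwargs': expectation['kwargs'],
            'raised_exception': True,
            'exception_traceback': traceback.format_exc(),
        }
```

This mirrors the result shape the issue expects and the behavior the patch above adds to `great_expectations/dataset/base.py`.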
great-expectations/great_expectations
diff --git a/tests/test_dataset.py b/tests/test_dataset.py index 39e9407be..eb6870e02 100644 --- a/tests/test_dataset.py +++ b/tests/test_dataset.py @@ -994,5 +994,20 @@ class TestDataset(unittest.TestCase): self.assertEqual(str(w[0].message), "WARNING: This configuration object was built using a different version of great_expectations than is currently validating it.") + def test_catch_exceptions_with_bad_expectation_type(self): + my_df = ge.dataset.PandasDataSet({"x":range(10)}) + my_df.append_expectation({'expectation_type':'foobar', 'kwargs':{}}) + result = my_df.validate(catch_exceptions=True) + + self.assertEqual(result["results"][1]["success"], False) + self.assertEqual(result["results"][1]["expectation_type"], "foobar") + self.assertEqual(result["results"][1]["kwargs"], {}) + self.assertEqual(result["results"][1]["raised_exception"], True) + assert "AttributeError: \'PandasDataSet\' object has no attribute \'foobar\'" in result["results"][1]["exception_traceback"] + + with self.assertRaises(AttributeError) as context: + result = my_df.validate(catch_exceptions=False) + + if __name__ == "__main__": unittest.main()
{ "commit_name": "head_commit", "failed_lite_validators": [ "has_pytest_match_arg" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 0 }, "num_modified_files": 1 }
0.3
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest" ], "pre_install": [ "apt-get update", "apt-get install -y gcc" ], "python": "3.6", "reqs_path": [ "requirements.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
argh==0.27.2 attrs==22.2.0 certifi==2021.5.30 -e git+https://github.com/great-expectations/great_expectations.git@f4b15d20a092fcbef690506f89bddec84b8140ff#egg=great_expectations importlib-metadata==4.8.3 iniconfig==1.1.1 jsonschema==3.2.0 numpy==1.19.5 packaging==21.3 pandas==1.1.5 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pyrsistent==0.18.0 pytest==7.0.1 python-dateutil==2.9.0.post0 pytz==2025.2 scipy==1.5.4 six==1.17.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: great_expectations channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - argh==0.27.2 - attrs==22.2.0 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - jsonschema==3.2.0 - numpy==1.19.5 - packaging==21.3 - pandas==1.1.5 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pyrsistent==0.18.0 - pytest==7.0.1 - python-dateutil==2.9.0.post0 - pytz==2025.2 - scipy==1.5.4 - six==1.17.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/great_expectations
[ "tests/test_dataset.py::TestDataset::test_catch_exceptions_with_bad_expectation_type" ]
[ "tests/test_dataset.py::TestDataset::test_test_column_aggregate_expectation_function" ]
[ "tests/test_dataset.py::TestDataset::test_calc_map_expectation_success", "tests/test_dataset.py::TestDataset::test_dataset", "tests/test_dataset.py::TestDataset::test_expectation_meta", "tests/test_dataset.py::TestDataset::test_find_expectations", "tests/test_dataset.py::TestDataset::test_format_column_map_output", "tests/test_dataset.py::TestDataset::test_get_and_save_expectation_config", "tests/test_dataset.py::TestDataset::test_meta_version_warning", "tests/test_dataset.py::TestDataset::test_remove_expectation", "tests/test_dataset.py::TestDataset::test_set_default_expectation_argument", "tests/test_dataset.py::TestDataset::test_test_column_map_expectation_function", "tests/test_dataset.py::TestDataset::test_test_expectation_function" ]
[]
Apache License 2.0
2,184
[ "great_expectations/dataset/base.py" ]
[ "great_expectations/dataset/base.py" ]
imageio__imageio-311
0c7eea3babaaefc049e93db937155c477fc5ca7e
2018-02-20 12:23:25
a22145a9ebbd3aa3bc742911e052ed1903381486
coveralls: [![Coverage Status](https://coveralls.io/builds/15605546/badge)](https://coveralls.io/builds/15605546) Coverage increased (+0.02%) to 93.076% when pulling **aea557ffd9eaf68be110b7d8142c906b2b4e8891 on scipy** into **0c7eea3babaaefc049e93db937155c477fc5ca7e on master**.

almarklein: Ping @leycec @untom, it would be great if you could take a look.
diff --git a/README.md b/README.md index 1df3bc2..da3a1fe 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ [![Build Status](https://ci.appveyor.com/api/projects/status/4wjqg4o5r2q53iwt/branch/master?svg=true)](https://ci.appveyor.com/project/almarklein/imageio/branch/master) [![Build Status](https://travis-ci.org/imageio/imageio.svg?branch=master)](https://travis-ci.org/imageio/imageio) [![Coverage Status](https://coveralls.io/repos/imageio/imageio/badge.png?branch=master)](https://coveralls.io/r/imageio/imageio?branch=master) -[![Documentation Status](https://readthedocs.org/projects/imageio/badge/?version=latest)](https://imageio.readthedocs.org) +[![Documentation Status](https://readthedocs.org/projects/imageio/badge/?version=latest)](https://imageio.readthedocs.io) Website: http://imageio.github.io @@ -20,7 +20,7 @@ Python 2.7 and 3.4+, and is easy to install. <h2>Example</h2> Here's a minimal example of how to use imageio. See the docs for -<a href='http://imageio.readthedocs.org/en/latest/examples.html'>more examples</a>. +<a href='http://imageio.readthedocs.io/en/latest/examples.html'>more examples</a>. <pre> >>> import imageio >>> im = imageio.imread('imageio:chelsea.png') # read a standard image @@ -37,18 +37,18 @@ As a user, you just have to remember a handfull of functions: <li>mimread() and mimwrite() - for image series (animations)</li> <li>volread() and volwrite() - for volumetric image data</li> <li>get_reader() and get_writer() - for more control (e.g. streaming)</li> - <li>See the <a href='http://imageio.readthedocs.org/en/latest/userapi.html'>user api</a> for more information</li> + <li>See the <a href='http://imageio.readthedocs.io/en/latest/userapi.html'>user api</a> for more information</li> </ul> <h2>Features</h2> <ul> <li>Simple interface via a consise set of functions.</li> - <li>Easy to <a href='http://imageio.readthedocs.org/en/latest/installation.html'>install</a> using conda or pip.</li> - <li>Few dependencies (only Numpy).</li> + <li>Easy to <a href='http://imageio.readthedocs.io/en/latest/installation.html'>install</a> using conda or pip.</li> + <li>Few dependencies (only Numpy and Pillow).</li> <li>Pure Python, runs on Python 2.7, 3.4+, and Pypy</li> <li>Cross platform, runs on Windows, Linux, OS X (Raspberry Pi planned)</li> - <li>Lots of supported <a href='http://imageio.readthedocs.org/en/latest/formats.html'>formats</a>.</li> + <li>Lots of supported <a href='http://imageio.readthedocs.io/en/latest/formats.html'>formats</a>.</li> <li>Can read from file names, file objects, zipfiles, http/ftp, and raw bytes.</li> <li>Easy to extend using plugins.</li> <li>Code quality is maintained with many tests and continuous integration.</li> @@ -68,8 +68,8 @@ a way to download these with one function call, and prompts the user to do so when needed. The download is cached in your appdata directory, this keeps imageio light and scalable. </p><p> -We plan to provide a wide range of image formats. Also scientific -formats. Any help in implementing more formats is very welcome! +Imageio provides a wide range of image formats, including scientific +formats. Any help with implementing more formats is very welcome! </p><p> The codebase adheres to (a subset of) the PEP8 style guides. We strive for maximum test coverage (100% for the core, >95% for each plugin). @@ -107,7 +107,7 @@ with one function call: Imageio was based out of the frustration that many libraries that needed to read or write image data produced their own functionality for IO. 
PIL did not meet the needs very well, and libraries like scikit-image -need to be able to deal with scientific formats. I felt there was a +need to be able to deal with scientific formats. There was a need for a good image io library, which is an easy dependency, easy to maintain, and scalable to exotic file formats. </p><p> @@ -117,13 +117,12 @@ We created a simple but powerful core, a clean user API, and a proper plugin system. </p><p> The purpose of imageio is to support reading and writing of image data. -We're not processing images, you should use scikit-image for that. Imageio +We're not processing images, you should use e.g. scikit-image for that. Imageio should be easy to install and be lightweight. Imageio's plugin system makes it possible to scale the number of supported formats and still -keep a low footprint. +keep a small footprint. </p><p> -It is impossible for one person to implement and maintain a wide variety -of formats. My hope is to form a group of developers, who each maintain +It is our hope to form a group of developers, whom each maintain one or more plugins. In that way, the burder of each developer is low, and together we can make imageio into a really useful library! </p> diff --git a/appveyor.yml b/appveyor.yml index 8882185..1344274 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -5,12 +5,8 @@ environment: matrix: - - PYTHON: "C:\\Python35-conda64" - PYTHON_VERSION: "3.5" - PYTHON_ARCH: "64" - - - PYTHON: "C:\\Python27-conda64" - PYTHON_VERSION: "2.7" + - PYTHON: "C:\\Python36-conda64" + PYTHON_VERSION: "3.6" PYTHON_ARCH: "64" install: diff --git a/docs/conf.py b/docs/conf.py index 7d4f5d3..5f1bafc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,7 +50,7 @@ master_doc = 'index' # General information about the project. project = u'imageio' -copyright = u'1014-2018, imageio contributors' +copyright = u'2014-2018, imageio contributors' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/docs/drop27.rst b/docs/drop27.rst new file mode 100644 index 0000000..14ebd0b --- /dev/null +++ b/docs/drop27.rst @@ -0,0 +1,18 @@ +Plan for dropping support for Python 2.7 +======================================== + +Starting on January 1, 2020, imageio will stop supporting Python 2.7. +The latest release that still supports Python 2.7 will remain available +on Pypi and conda-forge. A branch will be made that may be used to +continue supporting 2.7, but one should not expect (free) contributions +from the imageio developers there. + +For more information on the scientific Python ecosystem's transition +to Python3 only, see the python3-statement_. + +For more information on porting your code to run on Python 3, see the +python3-howto_. + +.. _python3-statement: http://www.python3statement.org/ + +.. _python3-howto: https://docs.python.org/3/howto/pyporting.html diff --git a/docs/installation.rst b/docs/installation.rst index 22cd3e0..d1d7888 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -2,7 +2,7 @@ Installing imageio ================== Imageio is written in pure Python, so installation is easy. -Imageio works on Python 2.7 and 3.4+. It also works on Pypy. +Imageio works on Python `2.7 <drop27.html>`_ and 3.4+. It also works on Pypy. Imageio depends on Numpy and Pillow. For some formats, imageio needs additional libraries/executables (e.g. ffmpeg), which imageio helps you download and store in a folder in your application data. 
@@ -14,5 +14,8 @@ To install imageio, use one of the following methods: * Good old ``python setup.py install`` For developers, we provide a simple mechanism to allow importing -imageio from the cloned repository. See the file ``imageio.proxy.io`` for +imageio from the cloned repository. See the file ``imageio.proxy.py`` for details. + +After installation, checkout the +:doc:`examples <examples>` and :doc:`user api <userapi>`. diff --git a/docs/scipy.rst b/docs/scipy.rst new file mode 100644 index 0000000..9f3c355 --- /dev/null +++ b/docs/scipy.rst @@ -0,0 +1,28 @@ +Transitioning from Scipy's imread +================================= + +Scipy is `deprecating <https://scipy.github.io/devdocs/release.1.0.0.html#backwards-incompatible-changes>`_ +their image I/O functionality. + +This document is intended to help people coming from +`Scipy <https://docs.scipy.org/doc/scipy/reference/generated/scipy.misc.imread.html>`_ +to adapt to Imageio's :func:`imread <imageio.imread>` function. +We recommend reading the :doc:`user api <userapi>` and checkout some +:doc:`examples <examples>` to get a feel of imageio. + +Imageio makes use of variety of plugins to support reading images (and volumes/movies) +from many different formats. Fortunately, Pillow is the main plugin for common images, +which is the same library as used by Scipy's ``imread``. Note that Imageio +automatically selects a plugin based on the image to read (unless a format is +explicitly specified), but uses Pillow where possible. + +In short terms: For images previously read by Scipy's imread, imageio should +generally use Pillow as well, and imageio provides the same functionality as Scipy +in these cases. But keep in mind: + + * Instead of ``mode``, use the ``pilmode`` keyword argument. + * Instead of ``flatten``, use the ``as_gray`` keyword argument. + * The documentation for the above arguments is not on :func:`imread <imageio.imread>`, + but on the docs of the individual formats, e.g. :doc:`PNG <format_png-pil>`. + * Imageio's functions all return numpy arrays, albeit as a subclass (so that + meta data can be attached). This subclass is called ``Image``. diff --git a/docs/sec_gettingstarted.rst b/docs/sec_gettingstarted.rst index 60bc20a..182d714 100644 --- a/docs/sec_gettingstarted.rst +++ b/docs/sec_gettingstarted.rst @@ -7,4 +7,5 @@ Getting started Installation <installation> Usage examples <examples> + Transitioning from Scipy <scipy> Release notes <releasenotes> diff --git a/imageio/core/format.py b/imageio/core/format.py index 2167816..1bac071 100644 --- a/imageio/core/format.py +++ b/imageio/core/format.py @@ -588,7 +588,10 @@ class FormatManager(object): return format else: # Maybe the user meant to specify an extension - return self['.'+name.lower()] + try: + return self['.'+name.lower()] + except IndexError: + pass # Fail using original name below # Nothing found ... raise IndexError('No format known by name %s.' % name) diff --git a/imageio/core/functions.py b/imageio/core/functions.py index 581bc9e..51fcb56 100644 --- a/imageio/core/functions.py +++ b/imageio/core/functions.py @@ -198,6 +198,10 @@ def imread(uri, format=None, **kwargs): to see what arguments are available for a particular format. 
""" + if 'mode' in kwargs: + raise TypeError('Invalid keyword argument "mode", ' + 'perhaps you mean "pilmode"?') + # Get reader and read first reader = read(uri, format, 'i', **kwargs) with reader: diff --git a/imageio/core/util.py b/imageio/core/util.py index fc613d5..0cdd39c 100644 --- a/imageio/core/util.py +++ b/imageio/core/util.py @@ -13,7 +13,7 @@ import re import struct import sys import time -from warnings import warn +from logging import warning as warn # Make pkg_resources optional if setuptools is not available try: @@ -53,6 +53,12 @@ def urlopen(*args, **kwargs): return urlopen(*args, **kwargs) +def _precision_warn(p1, p2, extra=''): + t = ('Lossy conversion from {} to {}. {} Convert image to {} prior to ' + 'saving to suppress this warning.') + warn(t.format(p1, p2, extra, p2)) + + def image_as_uint(im, bitdepth=None): """ Convert the given image to uint (default: uint8) @@ -73,29 +79,26 @@ def image_as_uint(im, bitdepth=None): out_type = np.uint16 else: raise ValueError('Bitdepth must be either 8 or 16') - dtype_str = str(im.dtype) + dtype_str1 = str(im.dtype) + dtype_str2 = out_type.__name__ if ((im.dtype == np.uint8 and bitdepth == 8) or (im.dtype == np.uint16 and bitdepth == 16)): # Already the correct format? Return as-is return im - if (dtype_str.startswith('float') and + if (dtype_str1.startswith('float') and np.nanmin(im) >= 0 and np.nanmax(im) <= 1): - warn('Lossy conversion from {} to {}, range [0, 1]'.format( - dtype_str, out_type.__name__)) - im = im.astype(np.float64) * (np.power(2.0, bitdepth)-1) + _precision_warn(dtype_str1, dtype_str2, 'Range [0, 1].') + im = im.astype(np.float64) * (np.power(2.0, bitdepth)-1) + 0.499999999 elif im.dtype == np.uint16 and bitdepth == 8: - warn('Lossy conversion from uint16 to uint8, ' - 'losing 8 bits of resolution') + _precision_warn(dtype_str1, dtype_str2, 'Losing 8 bits of resolution.') im = np.right_shift(im, 8) elif im.dtype == np.uint32: - warn('Lossy conversion from uint32 to {}, ' - 'losing {} bits of resolution'.format(out_type.__name__, - 32-bitdepth)) + _precision_warn(dtype_str1, dtype_str2, + 'Losing {} bits of resolution.'.format(32-bitdepth)) im = np.right_shift(im, 32-bitdepth) elif im.dtype == np.uint64: - warn('Lossy conversion from uint64 to {}, ' - 'losing {} bits of resolution'.format(out_type.__name__, - 64-bitdepth)) + _precision_warn(dtype_str1, dtype_str2, + 'Losing {} bits of resolution.'.format(64-bitdepth,)) im = np.right_shift(im, 64-bitdepth) else: mi = np.nanmin(im) @@ -106,12 +109,12 @@ def image_as_uint(im, bitdepth=None): raise ValueError('Maximum image value is not finite') if ma == mi: raise ValueError('Max value == min value, ambiguous given dtype') - warn('Conversion from {} to {}, ' - 'range [{}, {}]'.format(dtype_str, out_type.__name__, mi, ma)) + _precision_warn(dtype_str1, dtype_str2, + 'Range [{}, {}].'.format(mi, ma)) # Now make float copy before we scale im = im.astype('float64') # Scale the values between 0 and 1 then multiply by the max value - im = (im - mi) / (ma - mi) * (np.power(2.0, bitdepth)-1) + im = (im - mi) / (ma - mi) * (np.power(2.0, bitdepth)-1) + 0.499999999 assert np.nanmin(im) >= 0 assert np.nanmax(im) < np.power(2.0, bitdepth) return im.astype(out_type) diff --git a/imageio/plugins/_freeimage.py b/imageio/plugins/_freeimage.py index d52a367..9e66a7d 100644 --- a/imageio/plugins/_freeimage.py +++ b/imageio/plugins/_freeimage.py @@ -18,7 +18,7 @@ import os import sys import ctypes import threading -from logging import warn +from logging import warning as warn import 
numpy from ..core import (get_remote_file, load_lib, Dict, resource_dirs, diff --git a/imageio/plugins/pillow.py b/imageio/plugins/pillow.py index 709e7b5..5c1fae3 100644 --- a/imageio/plugins/pillow.py +++ b/imageio/plugins/pillow.py @@ -19,6 +19,38 @@ from .pillow_info import pillow_formats, pillow_docs # todo: Pillow ImageGrab module supports grabbing the screen on Win and OSX. +GENERIC_DOCS = """ + Parameters for reading + ---------------------- + + pilmode : str + From the Pillow documentation: + + * 'L' (8-bit pixels, grayscale) + * 'P' (8-bit pixels, mapped to any other mode using a color palette) + * 'RGB' (3x8-bit pixels, true color) + * 'RGBA' (4x8-bit pixels, true color with transparency mask) + * 'CMYK' (4x8-bit pixels, color separation) + * 'YCbCr' (3x8-bit pixels, color video format) + * 'I' (32-bit signed integer pixels) + * 'F' (32-bit floating point pixels) + + PIL also provides limited support for a few special modes, including + 'LA' ('L' with alpha), 'RGBX' (true color with padding) and 'RGBa' + (true color with premultiplied alpha). + + When translating a color image to grayscale (mode 'L', 'I' or 'F'), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 + as_gray : bool + If True, the image is converted using mode 'F'. When `mode` is + not None and `as_gray` is True, the image is first converted + according to `mode`, and the result is then "flattened" using + mode 'F'. +""" + + class PillowFormat(Format): """ Base format class for Pillow formats. @@ -78,29 +110,36 @@ class PillowFormat(Format): return True class Reader(Format.Reader): - - def _open(self, **kwargs): + + def _open(self, pilmode=None, as_gray=False): Image = self.format._init_pillow() try: factory, accept = Image.OPEN[self.format.plugin_id] except KeyError: raise RuntimeError('Format %s cannot read images.' 
% self.format.name) - self._fp = self.request.get_file() + self._fp = self._get_file() self._im = factory(self._fp, '') if hasattr(Image, '_decompression_bomb_check'): Image._decompression_bomb_check(self._im.size) pil_try_read(self._im) - self._grayscale = _palette_is_grayscale(self._im) + # Store args + self._kwargs = dict(mode=pilmode, as_gray=as_gray, + is_gray=_palette_is_grayscale(self._im)) # Set length self._length = 1 if hasattr(self._im, 'n_frames'): self._length = self._im.n_frames + def _get_file(self): + self._we_own_fp = False + return self.request.get_file() + def _close(self): - if hasattr(self._im, 'close'): # see issue #216 - self._im.close() - # request object handled closing the _fp + save_pillow_close(self._im) + if self._we_own_fp: + self._fp.close() + # else: request object handles closing the _fp def _get_length(self): return self._length @@ -122,7 +161,7 @@ class PillowFormat(Format): i += 1 self._seek(i) self._im.getdata()[0] - im = pil_get_frame(self._im, self._grayscale) + im = pil_get_frame(self._im, **self._kwargs) return im, self._im.info def _get_meta_data(self, index): @@ -132,7 +171,7 @@ class PillowFormat(Format): class Writer(Format.Writer): - def _open(self, **kwargs): + def _open(self): Image = self.format._init_pillow() try: self._save_func = Image.SAVE[self.format.plugin_id] @@ -141,7 +180,6 @@ class PillowFormat(Format): self.format.name) self._fp = self.request.get_file() self._meta = {} - self._meta.update(kwargs) self._written = False def _close(self): @@ -160,7 +198,7 @@ class PillowFormat(Format): if 'bits' in self._meta: img = img.quantize() # Make it a P image, so bits arg is used img.save(self._fp, format=self.format.plugin_id, **self._meta) - img.close() + save_pillow_close(img) def set_meta_data(self, meta): self._meta.update(meta) @@ -175,6 +213,31 @@ class PNGFormat(PillowFormat): ---------------------- ignoregamma : bool Avoid gamma correction. Default False. + pilmode : str + From the Pillow documentation: + + * 'L' (8-bit pixels, grayscale) + * 'P' (8-bit pixels, mapped to any other mode using a color palette) + * 'RGB' (3x8-bit pixels, true color) + * 'RGBA' (4x8-bit pixels, true color with transparency mask) + * 'CMYK' (4x8-bit pixels, color separation) + * 'YCbCr' (3x8-bit pixels, color video format) + * 'I' (32-bit signed integer pixels) + * 'F' (32-bit floating point pixels) + + PIL also provides limited support for a few special modes, including + 'LA' ('L' with alpha), 'RGBX' (true color with padding) and 'RGBa' + (true color with premultiplied alpha). + + When translating a color image to grayscale (mode 'L', 'I' or 'F'), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 + as_gray : bool + If True, the image is converted using mode 'F'. When `mode` is + not None and `as_gray` is True, the image is first converted + according to `mode`, and the result is then "flattened" using + mode 'F'. Parameters for saving --------------------- @@ -206,12 +269,12 @@ class PNGFormat(PillowFormat): bits. In this case, given as a number between 1-256. dictionary (experimental): dict Set the ZLIB encoder dictionary. 
- """ class Reader(PillowFormat.Reader): - def _open(self, ignoregamma=False): - return PillowFormat.Reader._open(self) + def _open(self, pilmode=None, as_gray=False, ignoregamma=False): + return PillowFormat.Reader._open(self, + pilmode=pilmode, as_gray=as_gray) def _get_data(self, index): im, info = PillowFormat.Reader._get_data(self, index) @@ -223,7 +286,7 @@ class PNGFormat(PillowFormat): else: scale = float(65536 if im.dtype == np.uint16 else 255) gain = 1.0 - im = ((im / scale) ** gamma) * scale * gain + im[:] = ((im / scale) ** gamma) * scale * gain + 0.4999 return im, info # -- @@ -257,7 +320,8 @@ class PNGFormat(PillowFormat): if key not in ok_keys: raise TypeError('Invalid arg for PNG writer: %r' % key) - return PillowFormat.Writer._open(self, **kwargs) + PillowFormat.Writer._open(self) + self._meta.update(kwargs) def _append_data(self, im, meta): if str(im.dtype) == 'uint16' and (im.ndim == 2 or @@ -277,6 +341,31 @@ class JPEGFormat(PillowFormat): ---------------------- exifrotate : bool Automatically rotate the image according to exif flag. Default True. + pilmode : str + From the Pillow documentation: + + * 'L' (8-bit pixels, grayscale) + * 'P' (8-bit pixels, mapped to any other mode using a color palette) + * 'RGB' (3x8-bit pixels, true color) + * 'RGBA' (4x8-bit pixels, true color with transparency mask) + * 'CMYK' (4x8-bit pixels, color separation) + * 'YCbCr' (3x8-bit pixels, color video format) + * 'I' (32-bit signed integer pixels) + * 'F' (32-bit floating point pixels) + + PIL also provides limited support for a few special modes, including + 'LA' ('L' with alpha), 'RGBX' (true color with padding) and 'RGBa' + (true color with premultiplied alpha). + + When translating a color image to grayscale (mode 'L', 'I' or 'F'), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 + as_gray : bool + If True, the image is converted using mode 'F'. When `mode` is + not None and `as_gray` is True, the image is first converted + according to `mode`, and the result is then "flattened" using + mode 'F'. Parameters for saving --------------------- @@ -289,7 +378,7 @@ class JPEGFormat(PillowFormat): optimize : bool On saving, compute optimal Huffman coding tables (can reduce a few percent of file size). Default False. - dpi : tuplw of int + dpi : tuple of int The pixel density, ``(x,y)``. icc_profile : object If present and true, the image is stored with the provided ICC profile. 
@@ -304,8 +393,18 @@ class JPEGFormat(PillowFormat): """ class Reader(PillowFormat.Reader): - def _open(self, exifrotate=True): - return PillowFormat.Reader._open(self) + def _open(self, pilmode=None, as_gray=False, exifrotate=True): + return PillowFormat.Reader._open(self, + pilmode=pilmode, as_gray=as_gray) + + def _get_file(self): + # Pillow uses seek for JPG, so we cannot directly stream from web + if self.request.filename.startswith(('http://', 'https://')): + self._we_own_fp = True + return open(self.request.get_local_filename(), 'rb') + else: + self._we_own_fp = False + return self.request.get_file() def _get_data(self, index): im, info = PillowFormat.Reader._get_data(self, index) @@ -360,7 +459,8 @@ class JPEGFormat(PillowFormat): kwargs['progressive'] = bool(progressive) kwargs['optimize'] = bool(progressive) - return PillowFormat.Writer._open(self, **kwargs) + PillowFormat.Writer._open(self) + self._meta.update(kwargs) def _append_data(self, im, meta): if im.ndim == 3 and im.shape[-1] == 4: @@ -370,6 +470,13 @@ class JPEGFormat(PillowFormat): return +def save_pillow_close(im): + # see issue #216 and #300 + if hasattr(im, 'close'): + if hasattr(getattr(im, 'fp', None), 'close'): + im.close() + + ## Func from skimage # This cells contains code from scikit-image, in particular from @@ -383,7 +490,7 @@ def pil_try_read(im): # this will raise an IOError if the file is not readable im.getdata()[0] except IOError as e: - site = "http://pillow.readthedocs.org/en/latest/installation.html" + site = "http://pillow.readthedocs.io/en/latest/installation.html" site += "#external-libraries" pillow_error_message = str(e) error_message = ('Could not load "%s" \n' @@ -406,68 +513,91 @@ def _palette_is_grayscale(pil_image): return np.allclose(np.diff(valid_palette), 0) -def pil_get_frame(im, grayscale, dtype=None): +def pil_get_frame(im, is_gray=None, as_gray=None, mode=None, dtype=None): + """ + is_gray: Whether the image *is* gray (by inspecting its palette). + as_gray: Whether the resulting image must be converted to gaey. + mode: The mode to convert to. + """ + + if is_gray is None: + is_gray = _palette_is_grayscale(im) + frame = im - if im.format == 'PNG' and im.mode == 'I' and dtype is None: - dtype = 'uint16' - - if im.mode == 'P': - if grayscale is None: - grayscale = _palette_is_grayscale(im) - - if grayscale: - frame = im.convert('L') + # Convert ... + if mode is not None: + # Mode is explicitly given ... + if mode != im.mode: + frame = im.convert(mode) + elif as_gray: + pass # don't do any auto-conversions (but do the explit one above) + elif im.mode == 'P' and is_gray: + # Paletted images that are already gray by their palette + # are converted so that the resulting numpy array is 2D. + frame = im.convert('L') + elif im.mode == 'P': + # Paletted images are converted to RGB/RGBA. We jump some loops to make + # this work well. + if im.info.get('transparency', None) is not None: + # Let Pillow apply the transparency, see issue #210 and #246 + frame = im.convert('RGBA') + elif im.palette.mode in ('RGB', 'RGBA'): + # We can do this ourselves. Pillow seems to sometimes screw + # this up if a multi-gif has a pallete for each frame ... + # Create palette array + p = np.frombuffer(im.palette.getdata()[1], np.uint8) + # Shape it. 
+ nchannels = len(im.palette.mode) + p.shape = -1, nchannels + if p.shape[1] == 3: + p = np.column_stack((p, 255*np.ones(p.shape[0], p.dtype))) + # Apply palette + frame_paletted = np.array(im, np.uint8) + try: + frame = p[frame_paletted] + except Exception: + # Ok, let PIL do it. The introduction of the branch that + # tests `im.info['transparency']` should make this happen + # much less often, but let's keep it, to be safe. + frame = im.convert('RGBA') else: - - if im.info.get('transparency', None) is not None: - # Let Pillow apply the transparency, see issue #210 and #246 + # Let Pillow do it. Unlinke skimage, we always convert + # to RGBA; palettes can be RGBA. + if True: # im.format == 'PNG' and 'transparency' in im.info: frame = im.convert('RGBA') - elif im.palette.mode in ('RGB', 'RGBA'): - # We can do this ourselves. Pillow seems to sometimes screw - # this up if a multi-gif has a pallete for each frame ... - # Create palette array - p = np.frombuffer(im.palette.getdata()[1], np.uint8) - # Shape it. - nchannels = len(im.palette.mode) - p.shape = -1, nchannels - if p.shape[1] == 3: - p = np.column_stack((p, 255*np.ones(p.shape[0], p.dtype))) - # Apply palette - frame_paletted = np.array(im, np.uint8) - try: - frame = p[frame_paletted] - except Exception: - # Ok, let PIL do it. The introduction of the branch that - # tests `im.info['transparency']` should make this happen - # much less often, but let's keep it, to be safe. - frame = im.convert('RGBA') else: - # Let Pillow do it. Unlinke skimage, we always convert - # to RGBA; palettes can be RGBA. - if True: # im.format == 'PNG' and 'transparency' in im.info: - frame = im.convert('RGBA') - else: - frame = im.convert('RGB') - - elif im.mode == '1': - frame = im.convert('L') - + frame = im.convert('RGB') elif 'A' in im.mode: frame = im.convert('RGBA') - elif im.mode == 'CMYK': frame = im.convert('RGB') - + + # Apply a post-convert if necessary + if as_gray: + frame = frame.convert('F') # Scipy compat + elif not isinstance(frame, np.ndarray) and frame.mode == '1': + # Workaround for crash in PIL. When im is 1-bit, the call array(im) + # can cause a segfault, or generate garbage. See + # https://github.com/scipy/scipy/issues/2138 and + # https://github.com/python-pillow/Pillow/issues/350. + # + # This converts im from a 1-bit image to an 8-bit image. + frame = frame.convert('L') + + # Convert to numpy array if im.mode.startswith('I;16'): + # e.g. 
in16 PNG's shape = im.size dtype = '>u2' if im.mode.endswith('B') else '<u2' if 'S' in im.mode: dtype = dtype.replace('u', 'i') frame = np.fromstring(frame.tobytes(), dtype) frame.shape = shape[::-1] - else: + # Use uint16 for PNG's in mode I + if im.format == 'PNG' and im.mode == 'I' and dtype is None: + dtype = 'uint16' frame = np.array(frame, dtype=dtype) return frame @@ -533,7 +663,7 @@ def register_pillow_formats(): format = FormatCls(id + '-PIL', summary, ext, FormatCls._modes) format._plugin_id = id if FormatCls is PillowFormat or not FormatCls.__doc__: - format.__doc__ = pillow_docs[id] + format.__doc__ = pillow_docs[id] + GENERIC_DOCS formats.add_format(format) diff --git a/imageio/plugins/pillow_info.py b/imageio/plugins/pillow_info.py index a799307..3967df2 100644 --- a/imageio/plugins/pillow_info.py +++ b/imageio/plugins/pillow_info.py @@ -64,7 +64,7 @@ def generate_info(): # pragma: no cover # Fill in the blancs for id in ids: if id in docs: - docs[id] = '*This is a copy from the Pillow docs.*\n\n' + docs[id] + docs[id] = '*From the Pillow docs:*\n\n' + docs[id] else: docs[id] = 'No docs for %s.' % id print('no docs for', id) @@ -150,7 +150,7 @@ pillow_formats = [ pillow_docs = { 'BMP': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads and writes Windows and OS/2 BMP files containing ``1``, ``L``, ``P``, @@ -164,7 +164,7 @@ u"""*This is a copy from the Pillow docs.* Set to ``bmp_rle`` if the file is run-length encoded. """, 'BUFR': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* .. versionadded:: Pillow 1.1.3 @@ -175,14 +175,14 @@ u"""*This is a copy from the Pillow docs.* :py:func:`PIL.BufrStubImagePlugin.register_handler`. """, 'CUR': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* CUR is used to store cursors on Windows. The CUR decoder reads the largest available cursor. Animated cursors are not supported. """, 'DCX': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* DCX is a container file format for PCX files, defined by Intel. The DCX format @@ -194,7 +194,7 @@ u"""*This is a copy from the Pillow docs.* """, 'DDS': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* DDS is a popular container texture format used in video games and natively @@ -207,7 +207,7 @@ u"""*This is a copy from the Pillow docs.* 'DIB': u"""No docs for DIB.""", 'EPS': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL identifies EPS files containing image data, and can read files that contain @@ -233,7 +233,7 @@ u"""*This is a copy from the Pillow docs.* im.size #(200,200) """, 'FITS': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* .. versionadded:: Pillow 1.1.5 @@ -246,7 +246,7 @@ u"""*This is a copy from the Pillow docs.* 'FLI': u"""No docs for FLI.""", 'FPX': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads Kodak FlashPix files. In the current version, only the highest @@ -260,7 +260,7 @@ u"""*This is a copy from the Pillow docs.* README for details. """, 'FTEX': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* .. versionadded:: Pillow 3.2.0 @@ -270,7 +270,7 @@ u"""*This is a copy from the Pillow docs.* per file, in the compressed and uncompressed formats. """, 'GBR': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* The GBR decoder reads GIMP brush files, version 1 and 2. 
@@ -299,7 +299,7 @@ u"""*This is a copy from the Pillow docs.* transparent. """, 'GIF': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads GIF87a and GIF89a versions of the GIF file format. The library writes @@ -353,6 +353,7 @@ u"""*This is a copy from the Pillow docs.* **append_images** A list of images to append as additional frames. Each of the images in the list can be single or multiframe images. + This is currently only supported for GIF, PDF, TIFF, and WebP. **duration** The display duration of each frame of the multiframe gif, in @@ -367,13 +368,24 @@ u"""*This is a copy from the Pillow docs.* eliminating unused colors. This is only useful if the palette can be compressed to the next smaller power of 2 elements. - **palette** + **palette** Use the specified palette for the saved image. The palette should be a bytes or bytearray object containing the palette entries in RGBRGB... form. It should be no more than 768 bytes. Alternately, the palette can be passed in as an :py:class:`PIL.ImagePalette.ImagePalette` object. + **disposal** + Indicates the way in which the graphic is to be treated after being displayed. + + * 0 - No disposal specified. + * 1 - Do not dispose. + * 2 - Restore to background color. + * 3 - Restore to previous content. + + Pass a single integer for a constant disposal, or a list or tuple + to set the disposal for each frame separately. + Reading local images ~~~~~~~~~~~~~~~~~~~~ @@ -392,7 +404,7 @@ u"""*This is a copy from the Pillow docs.* im.tile = [(tag, (0, 0) + im.size, offset, extra)] """, 'GRIB': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* .. versionadded:: Pillow 1.1.5 @@ -407,7 +419,7 @@ u"""*This is a copy from the Pillow docs.* :py:func:`PIL.GribStubImagePlugin.register_handler`. """, 'HDF5': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* .. versionadded:: Pillow 1.1.5 @@ -418,7 +430,7 @@ u"""*This is a copy from the Pillow docs.* :py:func:`PIL.Hdf5StubImagePlugin.register_handler`. """, 'ICNS': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads and (macOS only) writes macOS ``.icns`` files. By default, the @@ -438,7 +450,7 @@ u"""*This is a copy from the Pillow docs.* :py:attr:`~PIL.Image.Image.size` will be ``(1024, 1024)``). """, 'ICO': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* ICO is used to store icons on Windows. The largest available icon is read. @@ -448,8 +460,8 @@ u"""*This is a copy from the Pillow docs.* **sizes** A list of sizes including in this ico file; these are a 2-tuple, ``(width, height)``; Default to ``[(16, 16), (24, 24), (32, 32), (48, 48), - (64, 64), (128, 128), (255, 255)]``. Any sizes bigger than the original - size or 255 will be ignored. + (64, 64), (128, 128), (256, 256)]``. Any sizes bigger than the original + size or 256 will be ignored. IM ^^ @@ -463,7 +475,7 @@ u"""*This is a copy from the Pillow docs.* 'IM': u"""No docs for IM.""", 'IMT': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads Image Tools images containing ``L`` data. @@ -471,7 +483,7 @@ u"""*This is a copy from the Pillow docs.* 'IPTC': u"""No docs for IPTC.""", 'JPEG': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads JPEG, JFIF, and Adobe JPEG files containing ``L``, ``RGB``, or @@ -556,11 +568,11 @@ u"""*This is a copy from the Pillow docs.* If present, sets the subsampling for the encoder. 
* ``keep``: Only valid for JPEG files, will retain the original image setting. - * ``4:4:4``, ``4:2:2``, ``4:1:1``: Specific sampling values + * ``4:4:4``, ``4:2:2``, ``4:2:0``: Specific sampling values * ``-1``: equivalent to ``keep`` * ``0``: equivalent to ``4:4:4`` * ``1``: equivalent to ``4:2:2`` - * ``2``: equivalent to ``4:1:1`` + * ``2``: equivalent to ``4:2:0`` **qtables** If present, sets the qtables for the encoder. This is listed as an @@ -582,7 +594,7 @@ u"""*This is a copy from the Pillow docs.* details. """, 'JPEG2000': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* .. versionadded:: Pillow 2.4.0 @@ -671,27 +683,29 @@ u"""*This is a copy from the Pillow docs.* ``_imaging`` DLL). """, 'MCIDAS': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL identifies and reads 8-bit McIdas area files. """, 'MIC': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL identifies and reads Microsoft Image Composer (MIC) files. When opened, the first sprite in the file is loaded. You can use :py:meth:`~file.seek` and :py:meth:`~file.tell` to read other sprites from the file. + + Note that there may be an embedded gamma of 2.2 in MIC files. """, 'MPEG': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL identifies MPEG files. """, 'MPO': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* Pillow identifies and reads Multi Picture Object (MPO) files, loading the primary @@ -700,14 +714,14 @@ u"""*This is a copy from the Pillow docs.* zero-indexed and random access is supported. """, 'MSP': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL identifies and reads MSP files from Windows 1 and 2. The library writes uncompressed (Windows 1) versions of this format. """, 'PCD': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads PhotoCD files containing ``RGB`` data. This only reads the 768x512 @@ -715,13 +729,13 @@ u"""*This is a copy from the Pillow docs.* encoding. """, 'PCX': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads and writes PCX files containing ``1``, ``L``, ``P``, or ``RGB`` data. """, 'PIXAR': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL provides limited support for PIXAR raster files. The library can identify @@ -730,7 +744,7 @@ u"""*This is a copy from the Pillow docs.* The format code is ``PIXAR``. """, 'PNG': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL identifies, reads, and writes PNG files containing ``1``, ``L``, ``P``, @@ -739,9 +753,22 @@ u"""*This is a copy from the Pillow docs.* The :py:meth:`~PIL.Image.Image.write` method sets the following :py:attr:`~PIL.Image.Image.info` properties, when appropriate: + **chromaticity** + The chromaticity points, as an 8 tuple of floats. (``White Point + X``, ``White Point Y``, ``Red X``, ``Red Y``, ``Green X``, ``Green + Y``, ``Blue X``, ``Blue Y``) + **gamma** Gamma, given as a floating point number. + **srgb** + The sRGB rendering intent as an integer. + + * 0 Perceptual + * 1 Relative Colorimetric + * 2 Saturation + * 3 Absolute Colorimetric + **transparency** For ``P`` images: Either the palette index for full transparent pixels, or a byte string with alpha values for each palette entry. @@ -802,28 +829,28 @@ u"""*This is a copy from the Pillow docs.* documentation for details. 
""", 'PPM': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads and writes PBM, PGM and PPM files containing ``1``, ``L`` or ``RGB`` data. """, 'PSD': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL identifies and reads PSD files written by Adobe Photoshop 2.5 and 3.0. """, 'SGI': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* Pillow reads and writes uncompressed ``L``, ``RGB``, and ``RGBA`` files. """, 'SPIDER': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads and writes SPIDER image files of 32-bit floating point data @@ -866,22 +893,27 @@ u"""*This is a copy from the Pillow docs.* 'SUN': u"""No docs for SUN.""", 'TGA': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads 24- and 32-bit uncompressed and run-length encoded TGA files. """, 'TIFF': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* - PIL reads and writes TIFF files. It can read both striped and tiled images, - pixel and plane interleaved multi-band images, and either uncompressed, or - Packbits, LZW, or JPEG compressed images. + Pillow reads and writes TIFF files. It can read both striped and tiled + images, pixel and plane interleaved multi-band images. If you have + libtiff and its headers installed, PIL can read and write many kinds + of compressed TIFF files. If not, PIL will only read and write + uncompressed files. + + .. note:: - If you have libtiff and its headers installed, PIL can read and write many more - kinds of compressed TIFF files. If not, PIL will always write uncompressed - files. + Beginning in version 5.0.0, Pillow requires libtiff to read or + write compressed files. Prior to that release, Pillow had buggy + support for reading Packbits, LZW and JPEG compressed TIFFs + without using libtiff. The :py:meth:`~PIL.Image.Image.write` method sets the following :py:attr:`~PIL.Image.Image.info` properties: @@ -1000,7 +1032,7 @@ u"""*This is a copy from the Pillow docs.* """, 'WMF': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL can identify playable WMF files. @@ -1032,13 +1064,13 @@ u"""*This is a copy from the Pillow docs.* im = Image.open("sample.wmf")""", 'XBM': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads and writes X bitmap files (mode ``1``). """, 'XPM': -u"""*This is a copy from the Pillow docs.* +u"""*From the Pillow docs:* PIL reads X pixmap files (mode ``P``) with 256 colors or less. diff --git a/setup.py b/setup.py index 578ec64..405ab4b 100644 --- a/setup.py +++ b/setup.py @@ -73,7 +73,7 @@ for line in open(initFile).readlines(): elif docStatus == 1: docStatus = 2 if docStatus == 1: - __doc__ += line + __doc__ += line.rstrip() + '\n' # Template for long description. __doc__ gets inserted here long_description = """ @@ -85,7 +85,7 @@ long_description = """ __doc__ -Release notes: http://imageio.readthedocs.org/en/latest/releasenotes.html +Release notes: http://imageio.readthedocs.io/en/latest/releasenotes.html Example: @@ -97,8 +97,8 @@ Example: (512, 512, 3) >>> imageio.imwrite('astronaut-gray.jpg', im[:, :, 0]) -See the `user API <http://imageio.readthedocs.org/en/latest/userapi.html>`_ -or `examples <http://imageio.readthedocs.org/en/latest/examples.html>`_ +See the `user API <http://imageio.readthedocs.io/en/latest/userapi.html>`_ +or `examples <http://imageio.readthedocs.io/en/latest/examples.html>`_ for more information. 
""" diff --git a/tasks/docs.py b/tasks/docs.py index 252b96d..7d24344 100644 --- a/tasks/docs.py +++ b/tasks/docs.py @@ -44,6 +44,7 @@ def sphinx_build(src_dir, build_dir): ] if sphinx.version_info > (1, 7): + import sphinx.cmd.build ret = sphinx.cmd.build.build_main(cmd) else: ret = sphinx.build_main(['sphinx-build'] + cmd) diff --git a/tasks/install_python.ps1 b/tasks/install_python.ps1 index 6ec6634..652aa91 100644 --- a/tasks/install_python.ps1 +++ b/tasks/install_python.ps1 @@ -96,10 +96,10 @@ function InstallPip ($python_home) { function DownloadMiniconda ($python_version, $platform_suffix) { $webclient = New-Object System.Net.WebClient - if ($python_version -eq "3.5") { - $filename = "Miniconda3-4.0.5-Windows-" + $platform_suffix + ".exe" + if ($python_version -eq "3.6") { + $filename = "Miniconda3-4.4.10-Windows-" + $platform_suffix + ".exe" } else { - $filename = "Miniconda2-4.0.5-Windows-" + $platform_suffix + ".exe" + $filename = "Miniconda2-4.4.10-Windows-" + $platform_suffix + ".exe" } $url = $MINICONDA_URL + $filename
[Feature Request] Partial Conformance with the SciPy Image I/O API The long-heralded release of [SciPy 1.0.0](https://scipy.github.io/devdocs/release.1.0.0.html) has landed. Sixteen years was a hard wait, but good things come to those who wait... *for sixteen years.* ## The Inevitable Bad News Well, mostly good things. SciPy version: * 1.0.0 [officially deprecates](https://scipy.github.io/devdocs/release.1.0.0.html#backwards-incompatible-changes) *all* image I/O functionality residing in the `scipy.ndimage` and `scipy.misc` packages – including the commonly called `scipy.ndimage.imread()` and `scipy.ndimage.imwrite()` functions. * 1.2.0 plans to **permanently remove** this functionality. *Boom!* Since [our downstream biophysics simulator](https://gitlab.com/betse/betse) heavily depends upon this functionality, SciPy 1.0.0 is more of a bad than a good thing for us. <sup>Yet again, progress only brings pain.</sup> [SciPy documentation](https://docs.scipy.org/doc/scipy/reference/generated/scipy.misc.imread.html) explicitly recommends `imageio` as a suitable replacement. Since the `imageio` API is *considerably* more bare bones than the equivalent SciPy API, however, `imageio` may prove to be a less-than-suitable replacement for some end users – mostly, us. ## The Overly Optimistic Hope Ideally, the `imageio` API could be marginally improved to provide at least *partial* support for several of the most commonly used features of SciPy's image I/O API. This includes: * **Pillow-based mode conversion.** The [`scipy.ndimage.imread()`](https://docs.scipy.org/doc/scipy/reference/generated/scipy.misc.imread.html) function, for example, provides an optional `mode` string parameter permitting external callers to specify a desired target colorspace (defaulting to `None`). If non-`None` and different from the image's current `mode`, this parameter is passed as is to the `PIL.Image.convert()` method. This is two lines of Python. So, this is good. * **Grayscale flattening.** Again, the [`scipy.ndimage.imread()`](https://docs.scipy.org/doc/scipy/reference/generated/scipy.misc.imread.html) function provides an optional `flatten` boolean parameter permitting external callers to reduce non-grayscale images to grayscale (defaulting to `False`). If `True`, the `mode` parameter is coerced to `F`. Why not `L`? I have no idea and doubt SciPy developers do either. I also resent the overly ambiguous name of this boolean, when something resembling `grayscale` or `as_gray` would have sown less confusion. This also is two lines of Python. So, at least that is good. This isn't simply syntactic sugar. This is critical functionality of general interest. Well, O.K... `flatten` is blatantly syntactic sugar. But `mode` isn't. External callers have *no* explicit access to the `PIL.Image` instance internally instantiated by this function. These optional parameters are the only means callers have of intervening in the conversion process. Doing so after the fact is non-trivial (or perhaps even infeasible). I accept that `imageio` is not, will never be, and *should* never be an image processor. `imageio` is a pure image I/O framework – just as Code Jesus intended. Still... the thin line between image I/O and image processing is an easy Rubicon to cross, especially when Pillow already performs all of the heavy lifting on our behalf. ## Bold Details for a Bright Future SciPy's image I/O is unconditionally implemented in terms of Pillow. 
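To make the "two lines of Python" described above concrete, here is a minimal, hypothetical sketch of how `scipy.ndimage.imread`'s `mode` and `flatten` handling could sit on top of Pillow. This is an illustrative reconstruction of the behavior as described in this request, not SciPy's actual source; the function name is invented.

```python
import numpy as np
from PIL import Image

def imread_scipy_style(path, flatten=False, mode=None):
    """Illustrative re-creation of scipy.ndimage.imread's conversion
    logic as described above (hypothetical, not SciPy's source)."""
    im = Image.open(path)
    if flatten:
        # Grayscale flattening: SciPy coerces the target mode to 32-bit
        # floating-point luminance ('F'), not 8-bit 'L'.
        mode = 'F'
    if mode is not None and im.mode != mode:
        # Pillow-based mode conversion: the caller's mode string is handed
        # straight to PIL.Image.convert().
        im = im.convert(mode)
    return np.asarray(im)
```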
`imageio`, on the other hand, is conditionally implemented in terms of Pillow and whatever else it happens to scrounge from the local filesystem (e.g., FreeImage). Presumably, this conditionality prevents the optional `mode` and `flatten` parameters from being directly handled by the `imageio.imread()` function. I grok that. Nonetheless, would it be feasible to permit these parameters to be:

* Passed by callers as variadic keyword arguments accepted by `imageio.imread()`.
* Passed by Pillow plugins invoked by `imageio.imread()` to the `PIL.Image.convert()` method, munging a `True` value for the `flatten` key into a `mode` value of either `L` or `F`.
* Processed in some fashion by the non-Pillow plugins.

Hopefully, these parameters won't simply be silently ignored, but I'll take what I can get.

The `imageio.plugins.pillow.pil_get_frame()` function appears to offer similar functionality, albeit in a manner *not* exposed to external callers. Or is it? My modest (read: *awful*) `grep` skills failed me here.

Alternatively, would exposing the `PIL.Image` instance internally instantiated by Pillow plugins via some public variable or getter method of the `imageio.core.format.Reader` class be feasible? I half-suspect this class *already* exposes `PIL.Image` instances, but (again) I shamefully failed to `grep` the exact variable or method for doing so.

## The End of a Dream

Thus ends this excruciating feature request. Thanks for the long years of gruelling work. May there be many more to come! 🌞

imageio.imread with a JPG image on Spyder 3.2.5 and Python 3.6: debug message "Error closing: 'NoneType' object has no attribute 'close'"

When running `imageio.imread` to import a JPG image with DEBUG logging enabled, I get the following on Spyder 3.2.5 and Python 3.6:

```python
import imageio
import logging
logging.basicConfig(level=logging.DEBUG)
a = imageio.imread('picture.jpg')
```

`DEBUG:PIL.Image:Error closing: 'NoneType' object has no attribute 'close'`

The picture imports fine.
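As a hedged aside that is not part of the original report: the message comes from Pillow's own `PIL.Image` logger, so if one assumes it is harmless noise (the image does import fine), raising that logger's threshold hides it while keeping DEBUG output from everything else. The filename `picture.jpg` is just the placeholder carried over from the report.

```python
import logging

import imageio

logging.basicConfig(level=logging.DEBUG)
# Pillow logs through the 'PIL' logger hierarchy; raising its level hides
# the "Error closing" DEBUG message while leaving DEBUG output from other
# libraries untouched.
logging.getLogger('PIL').setLevel(logging.WARNING)

a = imageio.imread('picture.jpg')  # placeholder path from the report
```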
imageio/imageio
diff --git a/tests/test_core.py b/tests/test_core.py index 707569f..d5b1c2f 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -487,7 +487,14 @@ def test_util_image_as_uint(): (np.array([0, 1], 'float64'), 16, np.uint16([0, 65535])), (np.array([-1.0, 1.0], 'float16'), 16, np.uint16([0, 65535])), (np.array([-1.0, 1.0], 'float32'), 16, np.uint16([0, 65535])), - (np.array([-1.0, 1.0], 'float64'), 16, np.uint16([0, 65535])),) + (np.array([-1.0, 1.0], 'float64'), 16, np.uint16([0, 65535])), + # Rounding + (np.array([1.4/255, 1.6/255], 'float32'), 8, np.uint8([1, 2])), + (np.array([254.4/255, 254.6/255], 'float32'), 8, np.uint8([254, 255])), + (np.array([1.4/65535, 1.6/65535], 'float32'), 16, np.uint16([1, 2])), + (np.array([65534.4/65535, 65534.6/65535], 'float32'), 16, + np.uint16([65534, 65535])), # noqa + ) for tup in test_arrays: res = core.image_as_uint(tup[0], bitdepth=tup[1]) diff --git a/tests/test_meta.py b/tests/test_meta.py index f35626c..6623869 100644 --- a/tests/test_meta.py +++ b/tests/test_meta.py @@ -143,7 +143,8 @@ def test_import_dependencies(): else: print(modname, mod.__file__) - # Check that only imageio is left + # Check that only imageio is left (Windows needs a little help) + extra_modules.difference_update(['pythoncom', 'pywintypes', 'win32com']) assert extra_modules == {'imageio'} diff --git a/tests/test_pillow.py b/tests/test_pillow.py index 9c6c510..c0c558a 100644 --- a/tests/test_pillow.py +++ b/tests/test_pillow.py @@ -329,6 +329,46 @@ def test_images_with_transparency(): assert im.shape == (24, 30, 4) +def test_regression_302(): + # When using gamma correction, the result should keep the same dtype + need_internet() + + fname = get_remote_file('images/kodim03.png') + im = imageio.imread(fname) + assert im.shape == (512, 768, 3) and im.dtype == 'uint8' + + +def test_scipy_imread_compat(): + # https://docs.scipy.org/doc/scipy/reference/generated/scipy.misc.imread.html + # https://github.com/scipy/scipy/blob/41a3e69ca3141d8bf996bccb5eca5fc7bbc21a51/scipy/misc/pilutil.py#L111 + + im = imageio.imread('imageio:chelsea.png') + assert im.shape == (300, 451, 3) and im.dtype == 'uint8' + + # Scipy users may default to using "mode", but our getreader() already has + # a "mode" argument, so they should use pilmode instead. + try: + im = imageio.imread('imageio:chelsea.png', mode='L') + except TypeError as err: + assert 'pilmode' in str(err) + + im = imageio.imread('imageio:chelsea.png', pilmode='RGBA') + assert im.shape == (300, 451, 4) and im.dtype == 'uint8' + + im = imageio.imread('imageio:chelsea.png', pilmode='L') + assert im.shape == (300, 451) and im.dtype == 'uint8' + + im = imageio.imread('imageio:chelsea.png', pilmode='F') + assert im.shape == (300, 451) and im.dtype == 'float32' + + im = imageio.imread('imageio:chelsea.png', as_gray=True) + assert im.shape == (300, 451) and im.dtype == 'float32' + + # Force using pillow (but really, Pillow's imageio's first choice! Except + # for tiff) + im = imageio.imread('imageio:chelsea.png', 'PNG-PIL') + + if __name__ == '__main__': # test_png() # test_animated_gif()
{ "commit_name": "merge_commit", "failed_lite_validators": [ "has_hyperlinks", "has_media", "has_added_files", "has_many_modified_files", "has_many_hunks" ], "has_test_patch": true, "is_lite": false, "llm_score": { "difficulty_score": 2, "issue_text_score": 1, "test_score": 3 }, "num_modified_files": 14 }
2.2
{ "env_vars": null, "env_yml_path": null, "install": "pip install -e .[dev]", "log_parser": "parse_log_pytest", "no_use_env": null, "packages": "requirements.txt", "pip_packages": [ "pytest", "pytest-cov", "invoke" ], "pre_install": [ "apt-get update", "apt-get install -y libfreeimage3" ], "python": "3.6", "reqs_path": [ "requirements/base.txt" ], "test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning" }
attrs==22.2.0 certifi==2021.5.30 coverage==6.2 -e git+https://github.com/imageio/imageio.git@0c7eea3babaaefc049e93db937155c477fc5ca7e#egg=imageio importlib-metadata==4.8.3 iniconfig==1.1.1 invoke==2.2.0 numpy==1.19.5 packaging==21.3 Pillow==8.4.0 pluggy==1.0.0 py==1.11.0 pyparsing==3.1.4 pytest==7.0.1 pytest-cov==4.0.0 tomli==1.2.3 typing_extensions==4.1.1 zipp==3.6.0
name: imageio channels: - defaults - https://repo.anaconda.com/pkgs/main - https://repo.anaconda.com/pkgs/r - conda-forge dependencies: - _libgcc_mutex=0.1=main - _openmp_mutex=5.1=1_gnu - ca-certificates=2025.2.25=h06a4308_0 - certifi=2021.5.30=py36h06a4308_0 - ld_impl_linux-64=2.40=h12ee557_0 - libffi=3.3=he6710b0_2 - libgcc-ng=11.2.0=h1234567_1 - libgomp=11.2.0=h1234567_1 - libstdcxx-ng=11.2.0=h1234567_1 - ncurses=6.4=h6a678d5_0 - openssl=1.1.1w=h7f8727e_0 - pip=21.2.2=py36h06a4308_0 - python=3.6.13=h12debd9_1 - readline=8.2=h5eee18b_0 - setuptools=58.0.4=py36h06a4308_0 - sqlite=3.45.3=h5eee18b_0 - tk=8.6.14=h39e8969_0 - wheel=0.37.1=pyhd3eb1b0_0 - xz=5.6.4=h5eee18b_1 - zlib=1.2.13=h5eee18b_1 - pip: - attrs==22.2.0 - coverage==6.2 - importlib-metadata==4.8.3 - iniconfig==1.1.1 - invoke==2.2.0 - numpy==1.19.5 - packaging==21.3 - pillow==8.4.0 - pluggy==1.0.0 - py==1.11.0 - pyparsing==3.1.4 - pytest==7.0.1 - pytest-cov==4.0.0 - tomli==1.2.3 - typing-extensions==4.1.1 - zipp==3.6.0 prefix: /opt/conda/envs/imageio
[ "tests/test_core.py::test_util_image_as_uint", "tests/test_pillow.py::test_regression_302", "tests/test_pillow.py::test_scipy_imread_compat" ]
[ "tests/test_core.py::test_findlib2", "tests/test_meta.py::test_import_dependencies" ]
[ "tests/test_core.py::test_fetching", "tests/test_core.py::test_request", "tests/test_core.py::test_request_read_sources", "tests/test_core.py::test_request_save_sources", "tests/test_core.py::test_request_file_no_seek", "tests/test_core.py::test_util_imagelist", "tests/test_core.py::test_util_image", "tests/test_core.py::test_util_dict", "tests/test_core.py::test_util_get_platform", "tests/test_core.py::test_util_asarray", "tests/test_core.py::test_util_progres_bar", "tests/test_core.py::test_util_has_has_module", "tests/test_core.py::test_functions", "tests/test_core.py::test_example_plugin", "tests/test_meta.py::test_namespace", "tests/test_meta.py::test_import_nothing", "tests/test_meta.py::test_import_modules", "tests/test_pillow.py::test_pillow_format", "tests/test_pillow.py::test_png", "tests/test_pillow.py::test_png_remote", "tests/test_pillow.py::test_jpg", "tests/test_pillow.py::test_jpg_more", "tests/test_pillow.py::test_gif", "tests/test_pillow.py::test_animated_gif", "tests/test_pillow.py::test_images_with_transparency" ]
[]
BSD 2-Clause "Simplified" License
2,185
[ "docs/conf.py", "tasks/install_python.ps1", "docs/installation.rst", "imageio/core/functions.py", "imageio/plugins/pillow.py", "setup.py", "docs/drop27.rst", "imageio/plugins/pillow_info.py", "docs/scipy.rst", "README.md", "imageio/core/util.py", "imageio/plugins/_freeimage.py", "imageio/core/format.py", "appveyor.yml", "tasks/docs.py", "docs/sec_gettingstarted.rst" ]
[ "docs/conf.py", "tasks/install_python.ps1", "docs/installation.rst", "imageio/core/functions.py", "imageio/plugins/pillow.py", "setup.py", "docs/drop27.rst", "imageio/plugins/pillow_info.py", "docs/scipy.rst", "README.md", "imageio/core/util.py", "imageio/plugins/_freeimage.py", "imageio/core/format.py", "appveyor.yml", "tasks/docs.py", "docs/sec_gettingstarted.rst" ]