KennethEnevoldsen committed ca7fb7b (1 parent: 2a1b9d7)

added tests for duplicates


the new tests currently do not pass, so they are disabled for now

src/tests/conftest.py CHANGED
@@ -4,8 +4,19 @@ from typing import Any
 import pytest
 import yaml

+from tests.readme_parsing import read_frontmatter_and_body
+
 root_path = Path(__file__).parent.parent.parent

+main_readme = root_path / "README.md"
+
+frontmatter, _ = read_frontmatter_and_body(main_readme)
+DATASET_NAMES = [
+    cfg["config_name"]
+    for cfg in frontmatter["configs"]
+    if cfg["config_name"] != "default"
+]
+

 @pytest.fixture()
 def repo_path() -> Path:
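
For context, DATASET_NAMES is built from the configs list in the main README's YAML frontmatter. A minimal sketch of how the comprehension above behaves, using hypothetical config names (the real names come from README.md via read_frontmatter_and_body):

# Hypothetical parsed frontmatter; the real structure is returned by
# read_frontmatter_and_body(main_readme).
frontmatter = {
    "configs": [
        {"config_name": "default"},
        {"config_name": "dataset_a"},
        {"config_name": "dataset_b"},
    ]
}

DATASET_NAMES = [
    cfg["config_name"]
    for cfg in frontmatter["configs"]
    if cfg["config_name"] != "default"  # the aggregate "default" config is excluded
]
# DATASET_NAMES == ["dataset_a", "dataset_b"]
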
src/tests/test_dataset_schema.py CHANGED
@@ -7,18 +7,9 @@ from datasets import load_dataset
 from pydantic import AfterValidator, BaseModel, BeforeValidator
 from typing_extensions import Annotated

-from .conftest import root_path
+from .conftest import DATASET_NAMES
 from .readme_parsing import get_tag_idx, read_frontmatter_and_body

-main_readme = root_path / "README.md"
-
-frontmatter, _ = read_frontmatter_and_body(main_readme)
-DATASET_NAMES = [
-    cfg["config_name"]
-    for cfg in frontmatter["configs"]
-    if cfg["config_name"] != "default"
-]
-

 def ensure_tuple(created: str | tuple) -> tuple:
     if isinstance(created, str):
src/tests/test_duplicates.py ADDED
@@ -0,0 +1,24 @@
+from pathlib import Path
+from typing import cast
+
+import pytest
+from datasets import Dataset, load_dataset
+
+from .conftest import DATASET_NAMES
+
+
+@pytest.mark.skip("This currently fails for a number of datasets")  # TODO: fix this
+@pytest.mark.parametrize("dataset_name", DATASET_NAMES)
+def test_no_within_data_duplicates(repo_path: Path, dataset_name: str):
+    ds = load_dataset(str(repo_path.resolve()), dataset_name, split="train")
+    ds = cast(Dataset, ds)
+
+    assert len(set(ds["text"])) == len(ds)
+
+
+@pytest.mark.skip("This currently fails (see test above)")  # TODO: fix this
+def test_no_data_duplicates(repo_path: Path):
+    ds = load_dataset(str(repo_path.resolve()), split="train")
+    ds = cast(Dataset, ds)
+
+    assert len(set(ds["text"])) == len(ds)
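
The skipped tests only assert that no duplicates exist; fixing them requires knowing which texts are duplicated. A minimal sketch (not part of this commit) for listing the offending texts in a single config, reusing the same load_dataset call as the tests above:

from collections import Counter
from pathlib import Path
from typing import cast

from datasets import Dataset, load_dataset


def find_duplicate_texts(repo_path: Path, dataset_name: str) -> list[str]:
    # Load the train split for one config, as test_no_within_data_duplicates does.
    ds = load_dataset(str(repo_path.resolve()), dataset_name, split="train")
    ds = cast(Dataset, ds)
    # Count every text and keep only those that appear more than once.
    counts = Counter(ds["text"])
    return [text for text, n in counts.items() if n > 1]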