Datasets:
tau
/

Modalities:
Text
Libraries:
Datasets
yuvalkirstain committed on
Commit
c6ded99
1 Parent(s): a6a875e
Files changed (6) hide show
  1. configs/arxiv.py +9 -7
  2. configs/fs.py +14 -7
  3. configs/scrolls.py +7 -7
  4. configs/super_glue.py +36 -0
  5. debug.py +2 -2
  6. fs.py +17 -0
configs/arxiv.py CHANGED
@@ -1,3 +1,5 @@
 
 
1
  from configs.fs import FSConfig
2
 
3
 
@@ -6,30 +8,30 @@ class ArxivConfig(FSConfig):
6
  super().__init__(**kwargs)
7
 
8
  @property
9
- def id_key(self):
10
  return "article_id"
11
 
12
  @property
13
- def source_key(self):
14
  return "article_text"
15
 
16
  @property
17
- def target_key(self):
18
  return "abstract_text"
19
 
20
  @property
21
- def train_file(self):
22
  return "train.txt"
23
 
24
  @property
25
- def validation_file(self):
26
  return "val.txt"
27
 
28
  @property
29
- def test_file(self):
30
  return "test.txt"
31
 
32
- def process(self, example):
33
  example[self.source_key] = " ".join(example[self.source_key])
34
  example[self.target_key] = " ".join(example[self.target_key]).replace("<S>", "").replace("</S>", "")
35
  del example["labels"]
 
1
+ from typing import NoReturn
2
+
3
  from configs.fs import FSConfig
4
 
5
 
 
8
  super().__init__(**kwargs)
9
 
10
  @property
11
+ def id_key(self) -> str:
12
  return "article_id"
13
 
14
  @property
15
+ def source_key(self) -> str:
16
  return "article_text"
17
 
18
  @property
19
+ def target_key(self) -> str:
20
  return "abstract_text"
21
 
22
  @property
23
+ def train_file(self) -> str:
24
  return "train.txt"
25
 
26
  @property
27
+ def validation_file(self) -> str:
28
  return "val.txt"
29
 
30
  @property
31
+ def test_file(self) -> str:
32
  return "test.txt"
33
 
34
+ def process(self, example) -> NoReturn:
35
  example[self.source_key] = " ".join(example[self.source_key])
36
  example[self.target_key] = " ".join(example[self.target_key]).replace("<S>", "").replace("</S>", "")
37
  del example["labels"]
configs/fs.py CHANGED
@@ -1,4 +1,5 @@
1
  from abc import abstractmethod
 
2
 
3
  import datasets
4
 
@@ -21,39 +22,45 @@ class FSConfig(datasets.BuilderConfig):
21
  """
22
  super(FSConfig, self).__init__(version=datasets.Version("1.0.0"), **kwargs)
23
  self.features = [self.id_key, self.source_key, self.target_key] + additional_features
 
 
24
  self.data_url = data_url
25
  self.citation = citation
26
  self.url = url
27
 
28
  @property
29
  @abstractmethod
30
- def id_key(self):
31
  pass
32
 
33
  @property
34
  @abstractmethod
35
- def train_file(self):
36
  pass
37
 
38
  @property
39
  @abstractmethod
40
- def validation_file(self):
41
  pass
42
 
43
  @property
44
  @abstractmethod
45
- def test_file(self):
46
  pass
47
 
48
  @property
49
  @abstractmethod
50
- def source_key(self):
51
  pass
52
 
 
 
 
 
53
  @property
54
  @abstractmethod
55
- def target_key(self):
56
  pass
57
 
58
- def process(self, example):
59
  pass
 
1
  from abc import abstractmethod
2
+ from typing import Optional, NoReturn, Union
3
 
4
  import datasets
5
 
 
22
  """
23
  super(FSConfig, self).__init__(version=datasets.Version("1.0.0"), **kwargs)
24
  self.features = [self.id_key, self.source_key, self.target_key] + additional_features
25
+ if self.question_key:
26
+ self.features += [self.question_key]
27
  self.data_url = data_url
28
  self.citation = citation
29
  self.url = url
30
 
31
  @property
32
  @abstractmethod
33
+ def id_key(self) -> str:
34
  pass
35
 
36
  @property
37
  @abstractmethod
38
+ def train_file(self) -> str:
39
  pass
40
 
41
  @property
42
  @abstractmethod
43
+ def validation_file(self) -> str:
44
  pass
45
 
46
  @property
47
  @abstractmethod
48
+ def test_file(self) -> str:
49
  pass
50
 
51
  @property
52
  @abstractmethod
53
+ def source_key(self) -> str:
54
  pass
55
 
56
+ @property
57
+ def question_key(self) -> Union[str, None]:
58
+ return None
59
+
60
  @property
61
  @abstractmethod
62
+ def target_key(self) -> str:
63
  pass
64
 
65
+ def process(self, example) -> NoReturn:
66
  pass
configs/scrolls.py CHANGED
@@ -6,25 +6,25 @@ class ScrollsConfig(FSConfig):
6
  super().__init__(**kwargs)
7
 
8
  @property
9
- def source_key(self):
10
  return "input"
11
 
12
  @property
13
- def target_key(self):
14
  return "output"
15
 
16
  @property
17
- def train_file(self):
18
  return "train.jsonl"
19
 
20
  @property
21
- def validation_file(self):
22
  return "validation.jsonl"
23
 
24
  @property
25
- def test_file(self):
26
  return "test.jsonl"
27
 
28
  @property
29
- def id_key(self):
30
- return "pid"
 
6
  super().__init__(**kwargs)
7
 
8
  @property
9
+ def source_key(self) -> str:
10
  return "input"
11
 
12
  @property
13
+ def target_key(self) -> str:
14
  return "output"
15
 
16
  @property
17
+ def train_file(self) -> str:
18
  return "train.jsonl"
19
 
20
  @property
21
+ def validation_file(self) -> str:
22
  return "validation.jsonl"
23
 
24
  @property
25
+ def test_file(self) -> str:
26
  return "test.jsonl"
27
 
28
  @property
29
+ def id_key(self) -> str:
30
+ return "pid"
configs/super_glue.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional, Union
2
+
3
+ from configs.fs import FSConfig
4
+
5
+
6
+ class BoolQConfig(FSConfig):
7
+ def __init__(self, **kwargs):
8
+ super().__init__(**kwargs)
9
+
10
+ @property
11
+ def id_key(self) -> str:
12
+ return "idx"
13
+
14
+ @property
15
+ def source_key(self) -> str:
16
+ return "passage"
17
+
18
+ @property
19
+ def question_key(self) -> Union[str, None]:
20
+ return "question"
21
+
22
+ @property
23
+ def target_key(self) -> str:
24
+ return "label"
25
+
26
+ @property
27
+ def train_file(self) -> str:
28
+ return "train.jsonl"
29
+
30
+ @property
31
+ def validation_file(self) -> str:
32
+ return "val.jsonl"
33
+
34
+ @property
35
+ def test_file(self) -> str:
36
+ return "test.jsonl"
debug.py CHANGED
@@ -2,9 +2,9 @@ from transformers import AutoTokenizer
2
  from datasets import load_dataset
3
 
4
  def main():
5
- tokenizer = AutoTokenizer.from_pretrained("t5-base")
6
  # dataset = load_dataset("tau/fs",name="summ_screen_fd", max_source_length=512, tokenizer=tokenizer, prompt="Summary:")
7
- ssfd_debug = load_dataset("/Users/yuvalkirstain/repos/fs", name="summ_screen_fd_debug")
 
8
  # arxiv_debug = load_dataset("/Users/yuvalkirstain/repos/fs", name="arxiv_debug", max_source_length=512,
9
  # tokenizer=tokenizer, prompt="Summarize the above:")
10
 
 
2
  from datasets import load_dataset
3
 
4
  def main():
 
5
  # dataset = load_dataset("tau/fs",name="summ_screen_fd", max_source_length=512, tokenizer=tokenizer, prompt="Summary:")
6
+ ssfd_debug = load_dataset("/Users/yuvalkirstain/repos/fs", name="summ_screen_fd")
7
+ x = 5
8
  # arxiv_debug = load_dataset("/Users/yuvalkirstain/repos/fs", name="arxiv_debug", max_source_length=512,
9
  # tokenizer=tokenizer, prompt="Summarize the above:")
10
 
fs.py CHANGED
@@ -10,6 +10,7 @@ from citations_and_descriptions import (
10
  )
11
  from configs.arxiv import ArxivConfig
12
  from configs.scrolls import ScrollsConfig
 
13
 
14
 
15
  class FS(datasets.GeneratorBasedBuilder):
@@ -40,6 +41,14 @@ class FS(datasets.GeneratorBasedBuilder):
40
  citation=_ARXIV_DESCRIPTION,
41
  url="https://github.com/armancohan/long-summarization"
42
  ),
 
 
 
 
 
 
 
 
43
  ]
44
 
45
  def _info(self):
@@ -54,6 +63,8 @@ class FS(datasets.GeneratorBasedBuilder):
54
 
55
  def _split_generators(self, dl_manager):
56
  dl_dir = dl_manager.download_and_extract(self.config.data_url)
 
 
57
 
58
  data_files = {} if self.config.data_files is not None else None
59
  if data_files is not None:
@@ -86,4 +97,10 @@ class FS(datasets.GeneratorBasedBuilder):
86
  for line in f:
87
  row = json.loads(line)
88
  self.config.process(row)
 
 
89
  yield row[self.config.id_key], row
 
 
 
 
 
10
  )
11
  from configs.arxiv import ArxivConfig
12
  from configs.scrolls import ScrollsConfig
13
+ from configs.super_glue import BoolQConfig
14
 
15
 
16
  class FS(datasets.GeneratorBasedBuilder):
 
41
  citation=_ARXIV_DESCRIPTION,
42
  url="https://github.com/armancohan/long-summarization"
43
  ),
44
+ BoolQConfig(
45
+ additional_features=[],
46
+ name="boolq",
47
+ description="", # TODO
48
+ data_url="https://dl.fbaipublicfiles.com/glue/superglue/data/v2/BoolQ.zip",
49
+ citation=_ARXIV_DESCRIPTION,
50
+ url="" # TODO
51
+ )
52
  ]
53
 
54
  def _info(self):
 
63
 
64
  def _split_generators(self, dl_manager):
65
  dl_dir = dl_manager.download_and_extract(self.config.data_url)
66
+ task_name = _get_task_name_from_data_url(self.config.data_url)
67
+ dl_dir = os.path.join(dl_dir, task_name)
68
 
69
  data_files = {} if self.config.data_files is not None else None
70
  if data_files is not None:
 
97
  for line in f:
98
  row = json.loads(line)
99
  self.config.process(row)
100
+ if self.config.target_key not in row:
101
+ row[self.config.target_key] = None
102
  yield row[self.config.id_key], row
103
+
104
+
105
+ def _get_task_name_from_data_url(data_url):
106
+ return data_url.split("/")[-1].split(".")[0]