Datasets: CZLC /

Modalities: Text
Formats: json
Languages: Czech
Libraries: Datasets, pandas
mfajcik committed · verified
Commit 2c7b81b · 1 parent: be5834e

Upload 3 files
Files changed (3)
  1. convert_ner_court_decisions.py +7 -3
  2. test.jsonl +0 -0
  3. train.jsonl +0 -0
convert_ner_court_decisions.py CHANGED

@@ -16,7 +16,7 @@ def proc_dataset(dataset, max_text_length=512):
             before_tokens = random.randint(0, int(allowed_tokens * 0.8))
             after_tokens = allowed_tokens - before_tokens
             target_text = (" ".join(text.split(" ")[:20]) + " ... "
-                           + text[entity["start"] - before_tokens:entity["end"] + after_tokens])
+                           + " ".join(text.split(" ")[entity["start"] - before_tokens:entity["end"] + after_tokens]))
             r.append({
                 "label": entity["category_str"],
                 "answers": [entity["content"]],
@@ -28,8 +28,8 @@ def proc_dataset(dataset, max_text_length=512):
 d = load_dataset("fewshot-goes-multilingual/cs_czech-court-decisions-ner")
 train = list(d['train'])
 random.shuffle(train)
-new_dataset_train = proc_dataset(train[75:])
-dataset_test_ftrain = proc_dataset(train[:75])
+new_dataset_train = proc_dataset(train[60:])
+dataset_test_ftrain = proc_dataset(train[:60])
 dataset_val = proc_dataset(d['validation'])
 dataset_test = proc_dataset(d['test'])
 
@@ -41,6 +41,10 @@ random.shuffle(new_dataset_test)
 os.makedirs(".data/hf_dataset/ner_court_decisions", exist_ok=True)
 import jsonlines
 
+# print dataset lengths
+print("train", len(new_dataset_train))
+print("test", len(new_dataset_test))
+
 with jsonlines.open(".data/hf_dataset/ner_court_decisions/test.jsonl", "w") as f:
     f.write_all(new_dataset_test)
 with jsonlines.open(".data/hf_dataset/ner_court_decisions/train.jsonl", "w") as f:
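For context on the first hunk, here is a minimal standalone sketch of the windowing step as it reads after the change: a 20-word document prefix is kept, followed by a word-level slice around the entity instead of the earlier character-level slice. The helper name entity_window, the toy usage record, and the derivation of allowed_tokens (which is computed above the hunk and not visible in the diff) are assumptions; the slice indices mirror the script and are not clamped at zero.

import random

def entity_window(text, entity, max_text_length=512):
    # Whitespace tokenisation, matching text.split(" ") in the script.
    words = text.split(" ")
    # Assumed budget: word limit minus the 20-word prefix (the real
    # allowed_tokens is defined above the hunk and is not shown here).
    allowed_tokens = max_text_length - 20
    # Randomly split the budget into words kept before / after the entity.
    before_tokens = random.randint(0, int(allowed_tokens * 0.8))
    after_tokens = allowed_tokens - before_tokens
    # New behaviour from the '+' line: slice the word list, not the raw string.
    return (" ".join(words[:20]) + " ... "
            + " ".join(words[entity["start"] - before_tokens:
                             entity["end"] + after_tokens]))

# Hypothetical usage on a toy 1000-word text with an entity at word positions 500-502.
example = {"start": 500, "end": 503, "category_str": "PER", "content": "Jan Novák"}
print(entity_window(" ".join(f"w{i}" for i in range(1000)), example))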
test.jsonl CHANGED
The diff for this file is too large to render. See raw diff
 
train.jsonl CHANGED
The diff for this file is too large to render. See raw diff
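Since the diffs for test.jsonl and train.jsonl are too large to render, one way to sanity-check the regenerated splits is to read back the files the script writes, using the same jsonlines library. Only the fields visible in the hunk ("label" and "answers") are assumed below; any other keys in the records are not shown in the diff.

import jsonlines

# Read back the splits written by convert_ner_court_decisions.py.
for split in ("train", "test"):
    path = f".data/hf_dataset/ner_court_decisions/{split}.jsonl"
    with jsonlines.open(path) as reader:
        records = list(reader)
    # Each record carries at least the fields visible in the diff:
    # "label" (entity category) and "answers" (entity surface forms).
    print(split, len(records), records[0]["label"], records[0]["answers"])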