init
Browse files- .gitattributes +43 -0
- data/{tweet_nerd → tweet_nerd_new}/test.jsonl +0 -0
- data/{tweet_nerd → tweet_nerd_new}/test_1.jsonl +0 -0
- data/{tweet_nerd → tweet_nerd_new}/test_2.jsonl +0 -0
- data/{tweet_nerd → tweet_nerd_new}/test_3.jsonl +0 -0
- data/{tweet_nerd → tweet_nerd_new}/test_4.jsonl +0 -0
- data/{tweet_nerd → tweet_nerd_new}/train.jsonl +0 -0
- data/{tweet_nerd → tweet_nerd_new}/validation.jsonl +0 -0
- data/{tweet_nerd_test0_seed0 → tweet_nerd_new_test0_seed0}/test.jsonl +0 -0
- data/{tweet_nerd_test0_seed0 → tweet_nerd_new_test0_seed0}/train.jsonl +0 -0
- data/{tweet_nerd_test0_seed0 → tweet_nerd_new_test0_seed0}/validation.jsonl +0 -0
- data/{tweet_nerd_test0_seed1 → tweet_nerd_new_test0_seed1}/test.jsonl +0 -0
- data/{tweet_nerd_test0_seed1 → tweet_nerd_new_test0_seed1}/train.jsonl +0 -0
- data/{tweet_nerd_test0_seed1 → tweet_nerd_new_test0_seed1}/validation.jsonl +0 -0
- data/{tweet_nerd_test0_seed2 → tweet_nerd_new_test0_seed2}/test.jsonl +0 -0
- data/{tweet_nerd_test0_seed2 → tweet_nerd_new_test0_seed2}/train.jsonl +0 -0
- data/{tweet_nerd_test0_seed2 → tweet_nerd_new_test0_seed2}/validation.jsonl +0 -0
- data/{tweet_nerd_test1_seed0 → tweet_nerd_new_test1_seed0}/test.jsonl +0 -0
- data/{tweet_nerd_test1_seed0 → tweet_nerd_new_test1_seed0}/train.jsonl +0 -0
- data/{tweet_nerd_test1_seed0 → tweet_nerd_new_test1_seed0}/validation.jsonl +0 -0
- data/{tweet_nerd_test1_seed1 → tweet_nerd_new_test1_seed1}/test.jsonl +0 -0
- data/{tweet_nerd_test1_seed1 → tweet_nerd_new_test1_seed1}/train.jsonl +0 -0
- data/{tweet_nerd_test1_seed1 → tweet_nerd_new_test1_seed1}/validation.jsonl +0 -0
- data/{tweet_nerd_test1_seed2 → tweet_nerd_new_test1_seed2}/test.jsonl +0 -0
- data/{tweet_nerd_test1_seed2 → tweet_nerd_new_test1_seed2}/train.jsonl +0 -0
- data/{tweet_nerd_test1_seed2 → tweet_nerd_new_test1_seed2}/validation.jsonl +0 -0
- data/{tweet_nerd_test2_seed0 → tweet_nerd_new_test2_seed0}/test.jsonl +0 -0
- data/{tweet_nerd_test2_seed0 → tweet_nerd_new_test2_seed0}/train.jsonl +0 -0
- data/{tweet_nerd_test2_seed0 → tweet_nerd_new_test2_seed0}/validation.jsonl +0 -0
- data/{tweet_nerd_test2_seed1 → tweet_nerd_new_test2_seed1}/test.jsonl +0 -0
- data/{tweet_nerd_test2_seed1 → tweet_nerd_new_test2_seed1}/train.jsonl +0 -0
- data/{tweet_nerd_test2_seed1 → tweet_nerd_new_test2_seed1}/validation.jsonl +0 -0
- data/{tweet_nerd_test2_seed2 → tweet_nerd_new_test2_seed2}/test.jsonl +0 -0
- data/{tweet_nerd_test2_seed2 → tweet_nerd_new_test2_seed2}/train.jsonl +0 -0
- data/{tweet_nerd_test2_seed2 → tweet_nerd_new_test2_seed2}/validation.jsonl +0 -0
- data/{tweet_nerd_test3_seed0 → tweet_nerd_new_test3_seed0}/test.jsonl +0 -0
- data/{tweet_nerd_test3_seed0 → tweet_nerd_new_test3_seed0}/train.jsonl +0 -0
- data/{tweet_nerd_test3_seed0 → tweet_nerd_new_test3_seed0}/validation.jsonl +0 -0
- data/{tweet_nerd_test3_seed1 → tweet_nerd_new_test3_seed1}/test.jsonl +0 -0
- data/{tweet_nerd_test3_seed1 → tweet_nerd_new_test3_seed1}/train.jsonl +0 -0
- data/{tweet_nerd_test3_seed1 → tweet_nerd_new_test3_seed1}/validation.jsonl +0 -0
- data/{tweet_nerd_test3_seed2 → tweet_nerd_new_test3_seed2}/test.jsonl +0 -0
- data/{tweet_nerd_test3_seed2 → tweet_nerd_new_test3_seed2}/train.jsonl +0 -0
- data/{tweet_nerd_test3_seed2 → tweet_nerd_new_test3_seed2}/validation.jsonl +0 -0
- process/tweet_nerd.py +12 -12
- tweet_temporal_shift.py +2 -2
.gitattributes
CHANGED
@@ -386,3 +386,46 @@ data/tweet_ner_test1_seed1/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
|
386 |
data/tweet_ner_test2_seed1/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
387 |
data/tweet_ner_test3_seed0/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
388 |
data/tweet_ner_test3_seed1/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
386 |
data/tweet_ner_test2_seed1/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
387 |
data/tweet_ner_test3_seed0/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
388 |
data/tweet_ner_test3_seed1/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
389 |
+
data/tweet_nerd_new/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
390 |
+
data/tweet_nerd_new_test0_seed2/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
391 |
+
data/tweet_nerd_new_test2_seed0/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
392 |
+
data/tweet_nerd_new_test1_seed2/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
393 |
+
data/tweet_nerd_new_test1_seed2/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
394 |
+
data/tweet_nerd_new_test2_seed0/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
395 |
+
data/tweet_nerd_new/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
396 |
+
data/tweet_nerd_new/test_4.jsonl filter=lfs diff=lfs merge=lfs -text
|
397 |
+
data/tweet_nerd_new_test0_seed0/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
398 |
+
data/tweet_nerd_new_test0_seed0/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
399 |
+
data/tweet_nerd_new_test1_seed0/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
400 |
+
data/tweet_nerd_new_test3_seed1/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
401 |
+
data/tweet_nerd_new_test3_seed1/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
402 |
+
data/tweet_nerd_new/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
403 |
+
data/tweet_nerd_new_test0_seed0/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
404 |
+
data/tweet_nerd_new_test0_seed2/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
405 |
+
data/tweet_nerd_new_test2_seed1/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
406 |
+
data/tweet_nerd_new/test_3.jsonl filter=lfs diff=lfs merge=lfs -text
|
407 |
+
data/tweet_nerd_new_test1_seed0/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
408 |
+
data/tweet_nerd_new_test1_seed1/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
409 |
+
data/tweet_nerd_new_test1_seed2/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
410 |
+
data/tweet_nerd_new_test2_seed1/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
411 |
+
data/tweet_nerd_new_test3_seed2/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
412 |
+
data/tweet_nerd_new_test3_seed2/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
413 |
+
data/tweet_nerd_new/test_1.jsonl filter=lfs diff=lfs merge=lfs -text
|
414 |
+
data/tweet_nerd_new/test_2.jsonl filter=lfs diff=lfs merge=lfs -text
|
415 |
+
data/tweet_nerd_new_test0_seed2/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
416 |
+
data/tweet_nerd_new_test2_seed1/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
417 |
+
data/tweet_nerd_new_test2_seed2/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
418 |
+
data/tweet_nerd_new_test0_seed1/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
419 |
+
data/tweet_nerd_new_test1_seed0/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
420 |
+
data/tweet_nerd_new_test2_seed0/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
421 |
+
data/tweet_nerd_new_test3_seed0/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
422 |
+
data/tweet_nerd_new_test1_seed1/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
423 |
+
data/tweet_nerd_new_test1_seed1/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
424 |
+
data/tweet_nerd_new_test3_seed1/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
425 |
+
data/tweet_nerd_new_test3_seed0/validation.jsonl filter=lfs diff=lfs merge=lfs -text
|
426 |
+
data/tweet_nerd_new_test3_seed2/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
427 |
+
data/tweet_nerd_new_test0_seed1/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
428 |
+
data/tweet_nerd_new_test0_seed1/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
429 |
+
data/tweet_nerd_new_test2_seed2/test.jsonl filter=lfs diff=lfs merge=lfs -text
|
430 |
+
data/tweet_nerd_new_test2_seed2/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
431 |
+
data/tweet_nerd_new_test3_seed0/train.jsonl filter=lfs diff=lfs merge=lfs -text
|
data/{tweet_nerd → tweet_nerd_new}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd → tweet_nerd_new}/test_1.jsonl
RENAMED
File without changes
|
data/{tweet_nerd → tweet_nerd_new}/test_2.jsonl
RENAMED
File without changes
|
data/{tweet_nerd → tweet_nerd_new}/test_3.jsonl
RENAMED
File without changes
|
data/{tweet_nerd → tweet_nerd_new}/test_4.jsonl
RENAMED
File without changes
|
data/{tweet_nerd → tweet_nerd_new}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd → tweet_nerd_new}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test0_seed0 → tweet_nerd_new_test0_seed0}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test0_seed0 → tweet_nerd_new_test0_seed0}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test0_seed0 → tweet_nerd_new_test0_seed0}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test0_seed1 → tweet_nerd_new_test0_seed1}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test0_seed1 → tweet_nerd_new_test0_seed1}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test0_seed1 → tweet_nerd_new_test0_seed1}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test0_seed2 → tweet_nerd_new_test0_seed2}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test0_seed2 → tweet_nerd_new_test0_seed2}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test0_seed2 → tweet_nerd_new_test0_seed2}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test1_seed0 → tweet_nerd_new_test1_seed0}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test1_seed0 → tweet_nerd_new_test1_seed0}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test1_seed0 → tweet_nerd_new_test1_seed0}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test1_seed1 → tweet_nerd_new_test1_seed1}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test1_seed1 → tweet_nerd_new_test1_seed1}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test1_seed1 → tweet_nerd_new_test1_seed1}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test1_seed2 → tweet_nerd_new_test1_seed2}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test1_seed2 → tweet_nerd_new_test1_seed2}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test1_seed2 → tweet_nerd_new_test1_seed2}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test2_seed0 → tweet_nerd_new_test2_seed0}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test2_seed0 → tweet_nerd_new_test2_seed0}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test2_seed0 → tweet_nerd_new_test2_seed0}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test2_seed1 → tweet_nerd_new_test2_seed1}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test2_seed1 → tweet_nerd_new_test2_seed1}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test2_seed1 → tweet_nerd_new_test2_seed1}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test2_seed2 → tweet_nerd_new_test2_seed2}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test2_seed2 → tweet_nerd_new_test2_seed2}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test2_seed2 → tweet_nerd_new_test2_seed2}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test3_seed0 → tweet_nerd_new_test3_seed0}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test3_seed0 → tweet_nerd_new_test3_seed0}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test3_seed0 → tweet_nerd_new_test3_seed0}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test3_seed1 → tweet_nerd_new_test3_seed1}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test3_seed1 → tweet_nerd_new_test3_seed1}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test3_seed1 → tweet_nerd_new_test3_seed1}/validation.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test3_seed2 → tweet_nerd_new_test3_seed2}/test.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test3_seed2 → tweet_nerd_new_test3_seed2}/train.jsonl
RENAMED
File without changes
|
data/{tweet_nerd_test3_seed2 → tweet_nerd_new_test3_seed2}/validation.jsonl
RENAMED
File without changes
|
process/tweet_nerd.py
CHANGED
@@ -48,20 +48,20 @@ test_1 = test[:n_test]
|
|
48 |
test_2 = test[n_test:n_test*2]
|
49 |
test_3 = test[n_test*2:n_test*3]
|
50 |
test_4 = test[n_test*3:]
|
51 |
-
os.makedirs("data/tweet_nerd", exist_ok=True)
|
52 |
-
with open("data/tweet_nerd/test.jsonl", "w") as f:
|
53 |
f.write("\n".join([json.dumps(i) for i in test]))
|
54 |
-
with open("data/tweet_nerd/test_1.jsonl", "w") as f:
|
55 |
f.write("\n".join([json.dumps(i) for i in test_1]))
|
56 |
-
with open("data/tweet_nerd/test_2.jsonl", "w") as f:
|
57 |
f.write("\n".join([json.dumps(i) for i in test_2]))
|
58 |
-
with open("data/tweet_nerd/test_3.jsonl", "w") as f:
|
59 |
f.write("\n".join([json.dumps(i) for i in test_3]))
|
60 |
-
with open("data/tweet_nerd/test_4.jsonl", "w") as f:
|
61 |
f.write("\n".join([json.dumps(i) for i in test_4]))
|
62 |
-
with open("data/tweet_nerd/train.jsonl", "w") as f:
|
63 |
f.write("\n".join([json.dumps(i) for i in train]))
|
64 |
-
with open("data/tweet_nerd/validation.jsonl", "w") as f:
|
65 |
f.write("\n".join([json.dumps(i) for i in validation]))
|
66 |
|
67 |
|
@@ -83,13 +83,13 @@ for n, _test in enumerate([
|
|
83 |
test_1 + test_2 + test_4,
|
84 |
test_1 + test_2 + test_3]):
|
85 |
for s in range(3):
|
86 |
-
os.makedirs(f"data/tweet_nerd_test{n}_seed{s}", exist_ok=True)
|
87 |
_train, _valid = sampler(_test, s)
|
88 |
-
with open(f"data/tweet_nerd_test{n}_seed{s}/train.jsonl", "w") as f:
|
89 |
f.write("\n".join([json.dumps(i) for i in _train]))
|
90 |
-
with open(f"data/tweet_nerd_test{n}_seed{s}/validation.jsonl", "w") as f:
|
91 |
f.write("\n".join([json.dumps(i) for i in _valid]))
|
92 |
-
with open(f"data/tweet_nerd_test{n}_seed{s}/test.jsonl", "w") as f:
|
93 |
f.write("\n".join([json.dumps(i) for i in id2test[n]]))
|
94 |
|
95 |
|
|
|
48 |
test_2 = test[n_test:n_test*2]
|
49 |
test_3 = test[n_test*2:n_test*3]
|
50 |
test_4 = test[n_test*3:]
|
51 |
+
os.makedirs("data/tweet_nerd_new", exist_ok=True)
|
52 |
+
with open("data/tweet_nerd_new/test.jsonl", "w") as f:
|
53 |
f.write("\n".join([json.dumps(i) for i in test]))
|
54 |
+
with open("data/tweet_nerd_new/test_1.jsonl", "w") as f:
|
55 |
f.write("\n".join([json.dumps(i) for i in test_1]))
|
56 |
+
with open("data/tweet_nerd_new/test_2.jsonl", "w") as f:
|
57 |
f.write("\n".join([json.dumps(i) for i in test_2]))
|
58 |
+
with open("data/tweet_nerd_new/test_3.jsonl", "w") as f:
|
59 |
f.write("\n".join([json.dumps(i) for i in test_3]))
|
60 |
+
with open("data/tweet_nerd_new/test_4.jsonl", "w") as f:
|
61 |
f.write("\n".join([json.dumps(i) for i in test_4]))
|
62 |
+
with open("data/tweet_nerd_new/train.jsonl", "w") as f:
|
63 |
f.write("\n".join([json.dumps(i) for i in train]))
|
64 |
+
with open("data/tweet_nerd_new/validation.jsonl", "w") as f:
|
65 |
f.write("\n".join([json.dumps(i) for i in validation]))
|
66 |
|
67 |
|
|
|
83 |
test_1 + test_2 + test_4,
|
84 |
test_1 + test_2 + test_3]):
|
85 |
for s in range(3):
|
86 |
+
os.makedirs(f"data/tweet_nerd_new_test{n}_seed{s}", exist_ok=True)
|
87 |
_train, _valid = sampler(_test, s)
|
88 |
+
with open(f"data/tweet_nerd_new_test{n}_seed{s}/train.jsonl", "w") as f:
|
89 |
f.write("\n".join([json.dumps(i) for i in _train]))
|
90 |
+
with open(f"data/tweet_nerd_new_test{n}_seed{s}/validation.jsonl", "w") as f:
|
91 |
f.write("\n".join([json.dumps(i) for i in _valid]))
|
92 |
+
with open(f"data/tweet_nerd_new_test{n}_seed{s}/test.jsonl", "w") as f:
|
93 |
f.write("\n".join([json.dumps(i) for i in id2test[n]]))
|
94 |
|
95 |
|
tweet_temporal_shift.py
CHANGED
@@ -2,7 +2,7 @@
|
|
2 |
import json
|
3 |
import datasets
|
4 |
|
5 |
-
_VERSION = "1.0.1"
|
6 |
_TWEET_TEMPORAL_DESCRIPTION = """"""
|
7 |
_TWEET_TEMPORAL_CITATION = """"""
|
8 |
_TWEET_TOPIC_DESCRIPTION = """
|
@@ -129,7 +129,7 @@ class TweetTemporalShift(datasets.GeneratorBasedBuilder):
|
|
129 |
description=_TWEET_NERD_DESCRIPTION,
|
130 |
citation=_TWEET_NERD_CITATION,
|
131 |
features=["gold_label_binary", "target", "text", "definition", "text_start", "text_end", "date"],
|
132 |
-
data_url=f"{_ROOT_URL}/tweet_nerd",
|
133 |
),
|
134 |
]
|
135 |
for s in range(3):
|
|
|
2 |
import json
|
3 |
import datasets
|
4 |
|
5 |
+
_VERSION = "1.0.2"
|
6 |
_TWEET_TEMPORAL_DESCRIPTION = """"""
|
7 |
_TWEET_TEMPORAL_CITATION = """"""
|
8 |
_TWEET_TOPIC_DESCRIPTION = """
|
|
|
129 |
description=_TWEET_NERD_DESCRIPTION,
|
130 |
citation=_TWEET_NERD_CITATION,
|
131 |
features=["gold_label_binary", "target", "text", "definition", "text_start", "text_end", "date"],
|
132 |
+
data_url=f"{_ROOT_URL}/tweet_nerd_new",
|
133 |
),
|
134 |
]
|
135 |
for s in range(3):
|