from transformers import AutoTokenizer
from datasets import load_dataset


def main():
    # Load the summ_screen_fd config from a local clone of the tau/fs
    # dataset repo; swap the local path for "tau/fs" to load from the Hub.
    # dataset = load_dataset("tau/fs", name="summ_screen_fd", max_source_length=512, tokenizer=tokenizer, prompt="Summary:")
    ssfd_debug = load_dataset("/Users/yuvalkirstain/repos/fs", name="summ_screen_fd")
    print(ssfd_debug)  # inspect the loaded splits
    # arxiv_debug = load_dataset("/Users/yuvalkirstain/repos/fs", name="arxiv_debug", max_source_length=512,
    #                            tokenizer=tokenizer, prompt="Summarize the above:")
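
# A minimal sketch of how the commented-out Hub variants above might be
# invoked, assuming the tau/fs builder accepts max_source_length, tokenizer,
# and prompt as config kwargs forwarded by load_dataset (as those comments
# suggest). load_from_hub_example and the "t5-small" tokenizer are
# illustrative choices, not part of the original script.
def load_from_hub_example():
    tokenizer = AutoTokenizer.from_pretrained("t5-small")
    return load_dataset(
        "tau/fs",
        name="summ_screen_fd",
        max_source_length=512,
        tokenizer=tokenizer,
        prompt="Summary:",
    )
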

if __name__ == '__main__':
    main()