# tau/fs/debug.py
from datasets import load_dataset
from transformers import AutoTokenizer  # used only by the commented-out variants below


def main():
    # dataset = load_dataset("tau/fs", name="summ_screen_fd", max_source_length=512,
    #                        tokenizer=tokenizer, prompt="Summary:")
    ssfd_debug = load_dataset("/Users/yuvalkirstain/repos/fs", name="summ_screen_fd")
    x = 5  # no-op assignment; a convenient place to set a debugger breakpoint after loading
    # arxiv_debug = load_dataset("/Users/yuvalkirstain/repos/fs", name="arxiv_debug", max_source_length=512,
    #                            tokenizer=tokenizer, prompt="Summarize the above:")


if __name__ == '__main__':
    main()
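
# A minimal sketch of how the loaded DatasetDict could be inspected instead of
# stopping at the breakpoint line above; `inspect` is a hypothetical helper, and
# the splits and columns it prints depend on the summ_screen_fd config, which
# this script does not pin down.
def inspect(dataset_dict):
    # Each entry of a DatasetDict maps a split name to a Dataset.
    for split, split_dataset in dataset_dict.items():
        print(split, len(split_dataset), split_dataset.column_names)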