from transformers import AutoTokenizer
from datasets import load_dataset
def main(path: str = "/Users/yuvalkirstain/repos/fs", max_source_length: int = 512):
    """Load the debug splits of the ``summ_screen_fd`` and ``arxiv`` datasets.

    Uses a T5 tokenizer; the extra keyword arguments (``max_source_length``,
    ``tokenizer``, ``prompt``) are forwarded by ``load_dataset`` to the local
    dataset builder script at *path*.

    Args:
        path: Filesystem path (or hub id) of the dataset repository.
            Defaults to the original hard-coded local checkout.
        max_source_length: Maximum tokenized source length passed to the
            dataset builder.

    Returns:
        Tuple of (summ_screen_fd_debug, arxiv_debug) dataset objects.
    """
    tokenizer = AutoTokenizer.from_pretrained("t5-base")
    ssfd_debug = load_dataset(
        path,
        name="summ_screen_fd_debug",
        max_source_length=max_source_length,
        tokenizer=tokenizer,
    )
    arxiv_debug = load_dataset(
        path,
        name="arxiv_debug",
        max_source_length=max_source_length,
        tokenizer=tokenizer,
        prompt="Summarize the above:",
    )
    return ssfd_debug, arxiv_debug
# Script entry point: run the debug dataset loading when executed directly.
if __name__ == "__main__":
    main()