Update app.py
app.py
CHANGED
@@ -45,7 +45,7 @@ encoded_sent = tokenizer.encode(
     #return_tensors = 'pt', # Return pytorch tensors.
 )
 #tkns = tokenized_sub_sentence
-indexed_tokens = tokenizer.convert_tokens_to_ids(tokenizer.tokenize(str(text)))
+indexed_tokens = tokenizer.convert_tokens_to_ids(tokenizer.tokenize(str(text)))#le.convert_tokens_to_ids(tkns)
 segments_ids = [0] * len(indexed_tokens)

 tokens_tensor = torch.tensor([indexed_tokens]).to(device)
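For context, the changed line is the tokenize-then-index step of a typical BERT inference path. Below is a minimal, self-contained sketch of how that step usually fits with the surrounding lines (segment ids, tensor construction, forward pass). It assumes a bert-base-uncased checkpoint and the Hugging Face transformers API; the Space's actual model and surrounding code are not shown in this hunk.

import torch
from transformers import BertTokenizer, BertModel

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Hypothetical checkpoint; the Space's real model is not visible in the diff.
tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
model = BertModel.from_pretrained("bert-base-uncased").to(device)
model.eval()

text = "Example sentence to encode."

# Same two-step path as the changed line: tokenize, then map tokens to vocabulary ids.
indexed_tokens = tokenizer.convert_tokens_to_ids(tokenizer.tokenize(str(text)))
segments_ids = [0] * len(indexed_tokens)  # single-segment input, so all zeros

tokens_tensor = torch.tensor([indexed_tokens]).to(device)
segments_tensor = torch.tensor([segments_ids]).to(device)

with torch.no_grad():
    outputs = model(tokens_tensor, token_type_ids=segments_tensor)
    last_hidden_state = outputs.last_hidden_state  # shape: (1, seq_len, hidden_size)

print(last_hidden_state.shape)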