huseinzol05 committed
Update README.md
README.md CHANGED
@@ -282,7 +282,7 @@ my_array = np.reshape(my_array, (2, 5)) # menjadi array 2D dengan 2 baris dan 5
 ```
 Itulah beberapa operasi dasar numpy. Anda dapat menemukan dokumentasi resmi numpy di https://numpy.org/doc/stable/.
 """
-input_ids = tokenizer.encode(f'terjemah ke Melayu: {
+input_ids = tokenizer.encode(f'terjemah ke Melayu: {code_indon}{tokenizer.eos_token}', return_tensors = 'pt')
 outputs = model.generate(input_ids, max_length = 1024)
 outputs = [i for i in outputs[0] if i not in all_special_ids]
 print(tokenizer.decode(outputs, spaces_between_special_tokens = False))
@@ -340,7 +340,7 @@ my_array = np.reshape(my_array, (2, 5)) # menjadi array 2D dengan 2 baris dan 5
 ```
 Itulah beberapa operasi dasar numpy. Anda dapat menemukan dokumentasi resmi numpy di https://numpy.org/doc/stable/.
 """
-input_ids = tokenizer.encode(f'terjemah ke Jawi: {code_indon}{tokenizer.eos_token}', return_tensors = 'pt')
+input_ids = tokenizer.encode(f'terjemah ke Jawi: {code_indon}\n{tokenizer.eos_token}', return_tensors = 'pt')
 outputs = model.generate(input_ids, max_length = 1024)
 outputs = [i for i in outputs[0] if i not in all_special_ids]
 print(tokenizer.decode(outputs, spaces_between_special_tokens = False))
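For readers reconstructing the surrounding README, below is a minimal self-contained sketch of the snippet these hunks edit. The checkpoint name, the `all_special_ids` values, and the shortened `code_indon` string are stand-in assumptions for illustration; the README defines the real ones earlier, outside this diff.

```python
# Minimal sketch of the README snippet this commit fixes.
from transformers import AutoTokenizer, T5ForConditionalGeneration

# Placeholder checkpoint (assumption): substitute the model this README documents.
checkpoint = 'mesolitica/nanot5-small-malaysian-cased'
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = T5ForConditionalGeneration.from_pretrained(checkpoint)

all_special_ids = [0, 1, 2]  # assumed T5 special token ids: pad, eos, unk
code_indon = 'Itulah beberapa operasi dasar numpy.'  # stand-in for the full Indonesian numpy tutorial string

# Hunk 1: translate the Indonesian text to Malay.
input_ids = tokenizer.encode(f'terjemah ke Melayu: {code_indon}{tokenizer.eos_token}', return_tensors='pt')
outputs = model.generate(input_ids, max_length=1024)
outputs = [i for i in outputs[0] if i not in all_special_ids]  # drop special tokens
print(tokenizer.decode(outputs, spaces_between_special_tokens=False))

# Hunk 2: translate to Jawi script, with a newline before the eos token.
input_ids = tokenizer.encode(f'terjemah ke Jawi: {code_indon}\n{tokenizer.eos_token}', return_tensors='pt')
outputs = model.generate(input_ids, max_length=1024)
outputs = [i for i in outputs[0] if i not in all_special_ids]
print(tokenizer.decode(outputs, spaces_between_special_tokens=False))
```

As the diff shows, the substantive changes are completing the truncated `encode` line in the Melayu example and inserting `\n` between `code_indon` and the eos token in the Jawi prompt; the surrounding `generate`/`decode` lines are unchanged context.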