|
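# ESPnet2 language-model training configuration (annotated); "null" marks options left unset.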
config: null
print_config: false
log_level: INFO
dry_run: false
iterator_type: sequence
output_dir: exp/lm_train_bpe150
ngpu: 1
seed: 0
num_workers: 1
num_att_plot: 3
|
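# Distributed-training options. This is a single-GPU run (ngpu: 1), so they stay at their unset defaults.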
dist_backend: nccl
dist_init_method: env://
dist_world_size: null
dist_rank: null
local_rank: 0
dist_master_addr: null
dist_master_port: null
dist_launcher: null
multiprocessing_distributed: false
|
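# cuDNN settings: deterministic kernels on, benchmark autotuning off, favoring reproducibility over speed.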
cudnn_enabled: true
cudnn_benchmark: false
cudnn_deterministic: true
|
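# Statistics collection is disabled: this config drives the training run itself, not the preceding shape/stats stage.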
collect_stats: false
write_collected_feats: false
|
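# Schedule and checkpointing: 40 epochs, no early-stopping patience; checkpoints are ranked by each
# (split, metric, min/max) triple in best_model_criterion and the 10 best are kept.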
max_epoch: 40
patience: null
val_scheduler_criterion:
- valid
- loss
early_stopping_criterion:
- valid
- loss
- min
best_model_criterion:
-   - train
    - loss
    - min
-   - valid
    - loss
    - min
-   - train
    - acc
    - max
-   - valid
    - acc
    - max
keep_nbest_models:
- 10
|
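# Gradient handling: clip the gradient norm at 5.0, with grad_clip_type: 2.0 selecting the L2 norm;
# no gradient noise, no accumulation.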
grad_clip: 5.0
grad_clip_type: 2.0
grad_noise: false
accum_grad: 1
|
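# Run-time behavior: resume from an existing checkpoint, train in float32 without AMP,
# log to TensorBoard (Weights & Biases off), and start from scratch (no pretrained weights).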
no_forward_run: false
resume: true
train_dtype: float32
use_amp: false
log_interval: null
unused_parameters: false
use_tensorboard: true
use_wandb: false
wandb_project: null
wandb_id: null
pretrain_path: null
init_param: []
|
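# Batching: "folded" batches of up to 20 sentences, sorted longest-first and folded at 150 tokens;
# the chunk_* keys belong to the chunk iterator and are inactive under iterator_type: sequence.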
num_iters_per_epoch: null
batch_size: 20
valid_batch_size: null
batch_bins: 1000000
valid_batch_bins: null
train_shape_file:
- exp/lm_stats_bpe150/train/text_shape.bpe
valid_shape_file:
- exp/lm_stats_bpe150/valid/text_shape.bpe
batch_type: folded
valid_batch_type: null
fold_length:
- 150
sort_in_batch: descending
sort_batch: descending
multiple_iterator: false
chunk_length: 500
chunk_shift_ratio: 0.5
num_cache_chunks: 1024
|
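# Data sources, each a (path, name, type) triple: LM training text plus the es_dev transcripts for validation.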
train_data_path_and_name_and_type:
-   - dump/raw/lm_train.txt
    - text
    - text
valid_data_path_and_name_and_type:
-   - dump/raw/es_dev/text
    - text
    - text
allow_variable_data_keys: false
max_cache_size: 0.0
max_cache_fd: 32
valid_max_cache_size: null
|
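# Optimization: Adadelta with library-default hyperparameters (empty optim_conf) and no LR scheduler.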
optim: adadelta
optim_conf: {}
scheduler: null
scheduler_conf: {}
|
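# Vocabulary: 150 BPE units from a SentencePiece unigram model; "▁" marks a word boundary,
# and the subwords reflect the Spanish training text.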
token_list:
- <blank>
- <unk>
- ▁
- s
- n
- r
- o
- a
- ▁de
- e
- l
- ▁a
- u
- ▁y
- ▁que
- ra
- ta
- do
- ▁la
- i
- ▁en
- re
- to
- ▁el
- d
- p
- da
- la
- c
- b
- t
- ro
- ó
- en
- ri
- g
- ba
- ▁se
- os
- er
- te
- ▁con
- ci
- ▁es
- es
- ▁no
- ▁su
- h
- ti
- é
- mo
- á
- ▁ca
- ▁ha
- na
- ▁los
- lo
- í
- ía
- de
- me
- ca
- ▁al
- le
- ce
- v
- ma
- nte
- ▁di
- ▁ma
- ▁por
- y
- di
- m
- ▁pa
- sa
- ▁si
- ▁pe
- gu
- z
- ▁mi
- ▁co
- ▁me
- ▁o
- ▁e
- ▁un
- tra
- ▁re
- li
- ▁f
- co
- ▁á
- ndo
- se
- mi
- ga
- ni
- ▁cu
- ▁le
- jo
- ▁ve
- mp
- bi
- f
- va
- ▁mu
- go
- ▁so
- ñ
- tu
- si
- ▁lo
- ▁pu
- ▁vi
- ▁b
- ▁las
- ▁c
- ▁sa
- za
- ▁del
- ▁po
- ▁in
- vi
- ▁te
- tro
- cia
- ▁una
- qui
- pi
- que
- ja
- pa
- ▁para
- cu
- pe
- ▁como
- ▁esta
- ve
- je
- lle
- x
- ú
- j
- q
- ''''
- k
- w
- ü
- '-'
- <sos/eos>
|
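# Model and preprocessing: BPE tokenization via the recipe's SentencePiece model;
# ignore_id: 0 is presumably the <blank>/padding index excluded from the loss.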
init: null
model_conf:
    ignore_id: 0
use_preprocessor: true
token_type: bpe
bpemodel: data/token_list/bpe_unigram150/bpe.model
non_linguistic_symbols: null
cleaner: null
g2p: null
|
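# LM architecture: seq_rnn selects ESPnet2's sequential RNN language model, with empty lm_conf keeping its defaults.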
lm: seq_rnn
lm_conf: {}
|
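# Bookkeeping: arguments the trainer requires, plus the resolved distributed flag.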
required:
- output_dir
- token_list
distributed: false