{ "src_vocab_size": 20000, "src_vocab": "en-ko/src.eole.vocab", "n_sample": 0, "tensorboard_log_dir": "tensorboard", "overwrite": true, "tgt_vocab": "en-ko/tgt.eole.vocab", "valid_metrics": [ "BLEU" ], "seed": 1234, "save_data": "en-ko/data_spm", "tensorboard_log_dir_dated": "tensorboard/Feb-26_17-05-11", "report_every": 100, "transforms": [ "sentencepiece", "filtertoolong" ], "share_vocab": false, "vocab_size_multiple": 8, "tensorboard": true, "tgt_vocab_size": 20000, "training": { "bucket_size": 128000, "gpu_ranks": [ 0 ], "learning_rate": 2.0, "valid_batch_size": 8192, "model_path": "en-ko/model", "decay_method": "noam", "optim": "pagedadamw8bit", "average_decay": 0.0001, "compute_dtype": "torch.bfloat16", "normalization": "tokens", "label_smoothing": 0.1, "world_size": 1, "adam_beta2": 0.998, "accum_steps": [ 0 ], "batch_size_multiple": 8, "attention_dropout": [ 0.1 ], "batch_size": 8192, "valid_steps": 500, "keep_checkpoint": 4, "num_workers": 0, "save_checkpoint_steps": 500, "accum_count": [ 16 ], "max_grad_norm": 2.0, "train_steps": 100000, "prefetch_factor": 100, "warmup_steps": 10000, "dropout_steps": [ 0 ], "batch_type": "tokens", "dropout": [ 0.1 ], "param_init_method": "xavier_uniform" }, "transforms_configs": { "filtertoolong": { "src_seq_length": 256, "tgt_seq_length": 256 }, "sentencepiece": { "tgt_subword_model": "${MODEL_PATH}/tgt.spm.model", "src_subword_model": "${MODEL_PATH}/src.spm.model" } }, "model": { "add_qkvbias": false, "add_estimator": false, "position_encoding_type": "SinusoidalInterleaved", "heads": 8, "mlp_activation_fn": "gelu", "add_ffnbias": true, "share_decoder_embeddings": true, "share_embeddings": false, "architecture": "transformer", "hidden_size": 1024, "transformer_ff": 4096, "layer_norm": "standard", "norm_eps": 1e-06, "encoder": { "norm_eps": 1e-06, "encoder_type": "transformer", "add_ffnbias": true, "add_qkvbias": false, "n_positions": null, "hidden_size": 1024, "transformer_ff": 4096, "layer_norm": "standard", "rope_config": null, "position_encoding_type": "SinusoidalInterleaved", "heads": 8, "mlp_activation_fn": "gelu", "layers": 8, "src_word_vec_size": 1024 }, "embeddings": { "position_encoding_type": "SinusoidalInterleaved", "word_vec_size": 1024, "tgt_word_vec_size": 1024, "src_word_vec_size": 1024 }, "decoder": { "norm_eps": 1e-06, "add_ffnbias": true, "tgt_word_vec_size": 1024, "add_qkvbias": false, "n_positions": null, "decoder_type": "transformer", "hidden_size": 1024, "transformer_ff": 4096, "layer_norm": "standard", "rope_config": null, "position_encoding_type": "SinusoidalInterleaved", "heads": 8, "mlp_activation_fn": "gelu", "layers": 2 } }, "data": { "corpus_1": { "path_align": null, "path_src": "en-ko/train.cleaned.src", "transforms": [ "sentencepiece", "filtertoolong" ], "path_tgt": "en-ko/train.cleaned.tgt" }, "valid": { "path_align": null, "path_src": "en-ko/dev.src", "transforms": [ "sentencepiece", "filtertoolong" ], "path_tgt": "en-ko/dev.tgt" } } }