From dfba5ad90fd148195be276b60427f64b33b9d64c Mon Sep 17 00:00:00 2001 From: Pramod Kaushik Mudrakarta <27796676+pramodkaushik@users.noreply.github.com> Date: Wed, 7 Mar 2018 19:58:06 -0600 Subject: [PATCH] Update WMT16 standard hparams "num_encoder_layers" and "num_decoder_layers" are set in nmt.create_hparams() (Line 595 in nmt.py), but not updated after calling nmt.misc_utils.maybe_parse_standard_hparams() (Line 497 in nmt.py). Fixes the issue (https://github.com/tensorflow/nmt/issues/264) where loading the pretrained checkpoint resulted in an error. --- nmt/standard_hparams/wmt16.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nmt/standard_hparams/wmt16.json b/nmt/standard_hparams/wmt16.json index 8c1cb3fb0..afa38f91b 100644 --- a/nmt/standard_hparams/wmt16.json +++ b/nmt/standard_hparams/wmt16.json @@ -14,6 +14,8 @@ "metrics": ["bleu"], "num_buckets": 5, "num_layers": 4, + "num_encoder_layers": 4, + "num_decoder_layers": 4, "num_train_steps": 340000, "decay_scheme": "luong10", "num_units": 1024,