From d17c29a2144c5a0101750d8aecbe09e2e0afc328 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=81ukasz=20Ka=C5=BAmierczak?=
Date: Mon, 11 Nov 2019 22:29:32 +0100
Subject: [PATCH] fix: remove appending variables/variables to the tfpath

---
 albert_english_pytorch/model/modeling_albert.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/albert_english_pytorch/model/modeling_albert.py b/albert_english_pytorch/model/modeling_albert.py
index 12b9f12..837661d 100644
--- a/albert_english_pytorch/model/modeling_albert.py
+++ b/albert_english_pytorch/model/modeling_albert.py
@@ -47,7 +47,6 @@ def load_tf_weights_in_albert(model, config, tf_checkpoint_path):
         raise
     tf_path = os.path.abspath(tf_checkpoint_path)
     logger.info("Converting TensorFlow checkpoint from {}".format(tf_path))
-    tf_path = tf_path + "/variables/variables"
     # Load weights from TF model
     init_vars = tf.train.list_variables(tf_path)
     names = []
@@ -482,7 +481,7 @@ def _init_weights(self, module):
     .. _`torch.nn.Module`:
         https://pytorch.org/docs/stable/nn.html#module
     Parameters:
-        config (:class:`~transformers.BertConfig`): Model configuration class with all the parameters of the model.
+        config (:class:`~transformers.BertConfig`): Model configuration class with all the parameters of the model.
             Initializing with a config file does not load the weights associated with the model, only the configuration.
             Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
 """
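
Note: a minimal sketch of the calling side after this change, under the assumption of a hypothetical checkpoint prefix ("albert_base/model.ckpt-best"); with the hard-coded "/variables/variables" suffix gone, tf.train.list_variables receives the checkpoint path exactly as supplied to load_tf_weights_in_albert.

    import tensorflow as tf

    # Hypothetical checkpoint prefix; any valid TF checkpoint prefix or directory works.
    tf_checkpoint_path = "albert_base/model.ckpt-best"

    # After the patch, the variables are listed from this path directly,
    # rather than from tf_checkpoint_path + "/variables/variables".
    for name, shape in tf.train.list_variables(tf_checkpoint_path):
        print(name, shape)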