From f3ed3c00f57e60989a8aa51e30269e2d97e20339 Mon Sep 17 00:00:00 2001
From: slaren
Date: Wed, 29 Nov 2023 19:28:07 +0100
Subject: [PATCH] convert.py : fix llama/llama2 conversion due to vocab_size=-1

---
 convert.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/convert.py b/convert.py
index 3ad836ce0ec1d..6e95d6cb37e79 100755
--- a/convert.py
+++ b/convert.py
@@ -267,7 +267,7 @@ def loadOriginalParamsJson(model: LazyModel, config_path: Path) -> Params:
             n_ctx = 2048
 
         return Params(
-            n_vocab = config.get("vocab_size", model["tok_embeddings.weight"].shape[0]),
+            n_vocab = model["tok_embeddings.weight"].shape[0],
             n_embd  = config["dim"],
             n_layer = config["n_layers"],
             n_ctx   = n_ctx,
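
Note on the change: `config.get("vocab_size", fallback)` only returns the fallback when the key is missing, not when it is present with a sentinel value such as -1 (as the subject line indicates for llama/llama2 params.json). Reading n_vocab from the first dimension of the token embedding tensor sidesteps that. Below is a minimal, standalone Python sketch of the pitfall; the config dict and embedding shape are illustrative values, not taken from an actual checkpoint.

    # dict.get() only falls back when the key is absent, not when its value is -1.
    config = {"dim": 4096, "n_layers": 32, "vocab_size": -1}  # params.json-style dict
    tok_embeddings_shape = (32000, 4096)                      # (n_vocab, n_embd), example values

    # Old behaviour: "vocab_size" exists, so the fallback is never used -> n_vocab = -1
    n_vocab_old = config.get("vocab_size", tok_embeddings_shape[0])
    assert n_vocab_old == -1

    # New behaviour: take n_vocab from the embedding tensor's first dimension
    n_vocab_new = tok_embeddings_shape[0]
    assert n_vocab_new == 32000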