convert.py : fix llama/llama2 conversion due to vocab_size=-1 (#4258)

This commit is contained in:
slaren 2023-11-30 22:42:23 +01:00 committed by GitHub
parent 954e22858c
commit f4d973cecb
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@ -267,7 +267,7 @@ class Params:
n_ctx = 2048
return Params(
-n_vocab = config.get("vocab_size", model["tok_embeddings.weight"].shape[0]),
+n_vocab = model["tok_embeddings.weight"].shape[0],
n_embd = config["dim"],
n_layer = config["n_layers"],
n_ctx = n_ctx,