Fix for KeyError on Loading LLaMA (#1978)

Author: Jun Gao
Date: 2023-12-10 07:59:57 +08:00
Committed by: GitHub
Parent: c85b80c2b6
Commit: 3a8c2381f7


@@ -322,6 +322,10 @@ class LlamaForCausalLM(nn.Module):
                 model_name_or_path, cache_dir, load_format, revision):
             if "rotary_emb.inv_freq" in name:
                 continue
+            if "rotary_emb.cos_cached" in name:
+                continue
+            if "rotary_emb.sin_cached" in name:
+                continue
             for (param_name, weight_name, shard_id) in stacked_params_mapping:
                 if weight_name not in name:
                     continue
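
For context: some LLaMA checkpoints serialize the rotary embedding's cos_cached and sin_cached tensors alongside the weights (older Hugging Face transformers releases registered them as persistent buffers, so they can end up in the state dict). vLLM recomputes these buffers itself and has no matching entries in the loader's parameter dict, so looking those names up raises the KeyError this commit fixes. Below is a minimal, self-contained sketch of the pattern, using a stand-in module and a hypothetical checkpoint dict rather than the actual vLLM loader:

# Minimal sketch of the failure mode (hypothetical names; the real loader
# lives in vllm/model_executor/models/llama.py).
import torch
import torch.nn as nn

model = nn.Linear(4, 4)                       # stand-in for the LLaMA module tree
params_dict = dict(model.named_parameters())  # real parameters only, no cached buffers

checkpoint = {
    "weight": torch.zeros(4, 4),
    "bias": torch.zeros(4),
    "rotary_emb.cos_cached": torch.zeros(8),  # extra buffer present in some checkpoints
}

for name, loaded_weight in checkpoint.items():
    if ("rotary_emb.inv_freq" in name
            or "rotary_emb.cos_cached" in name
            or "rotary_emb.sin_cached" in name):
        continue                              # skip buffers that vLLM recomputes itself
    param = params_dict[name]                 # without the skip, this raises KeyError
    param.data.copy_(loaded_weight)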