When running the "Upload model to Artifacts" section of "/train-and-deploy-gptj/deployment/deploy_gptj_DJLModel.ipynb" I get the following error when trying to create the 'model_config':
RuntimeError Traceback (most recent call last)
in
40 with smart_open(load_path, "rb") as f:
41 buffer = io.BytesIO(f.read())
---> 42 model.load_state_dict(torch.load(buffer))
43
44 tokenizer = AutoTokenizer.from_pretrained(model_id)
/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py in load_state_dict(self, state_dict, strict)
1602
1603 if len(error_msgs) > 0:
-> 1604 raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
1605 self.__class__.__name__, "\n\t".join(error_msgs)))
1606 return _IncompatibleKeys(missing_keys, unexpected_keys)
RuntimeError: Error(s) in loading state_dict for GPTJForCausalLM:
Unexpected key(s) in state_dict: "transformer.h.0.attn.bias", "transformer.h.0.attn.masked_bias", "transformer.h.1.attn.bias", "transformer.h.1.attn.masked_bias", "transformer.h.2.attn.bias", "transformer.h.2.attn.masked_bias", "transformer.h.3.attn.bias", "transformer.h.3.attn.masked_bias", "transformer.h.4.attn.bias", "transformer.h.4.attn.masked_bias", "transformer.h.5.attn.bias", "transformer.h.5.attn.masked_bias", "transformer.h.6.attn.bias", "transformer.h.6.attn.masked_bias", "transformer.h.7.attn.bias", "transformer.h.7.attn.masked_bias", "transformer.h.8.attn.bias", "transformer.h.8.attn.masked_bias", "transformer.h.9.attn.bias", "transformer.h.9.attn.masked_bias", "transformer.h.10.attn.bias", "transformer.h.10.attn.masked_bias", "transformer.h.11.attn.bias", "transformer.h.11.attn.masked_bias", "transformer.h.12.attn.bias", "transformer.h.12.attn.masked_bias", "transformer.h.13.attn.bias", "transformer.h.13.attn.masked_bias", "transformer.h.14.attn.bias", "transformer.h.14.attn.masked_bias", "transformer.h.15.attn.bias", "transformer.h.15.attn.masked_bias", "transformer.h.16.attn.bias", "transformer.h.16.attn.masked_bias", "transformer.h.17.attn.bias", "transformer.h.17.attn.masked_bias", "transformer.h.18.attn.bias", "transformer.h.18.attn.masked_bias", "transformer.h.19.attn.bias", "transformer.h.19.attn.masked_bias", "transformer.h.20.attn.bias", "transformer.h.20.attn.masked_bias", "transformer.h.21.attn.bias", "transformer.h.21.attn.masked_bias", "transformer.h.22.attn.bias", "transformer.h.22.attn.masked_bias", "transformer.h.23.attn.bias", "transformer.h.23.attn.masked_bias", "transformer.h.24.attn.bias", "transformer.h.24.attn.masked_bias", "transformer.h.25.attn.bias", "transformer.h.25.attn.masked_bias", "transformer.h.26.attn.bias", "transformer.h.26.attn.masked_bias", "transformer.h.27.attn.bias", "transformer.h.27.attn.masked_bias", "transformer.h.28.attn.bias", "transformer.h.28.attn.masked_bias", "transformer.h.29.attn.bias", 
"transformer.h.29.attn.masked_bias", "transformer.h.30.attn.bias", "transformer.h.30.attn.masked_bias", "transformer.h.31.attn.bias", "transformer.h.31.attn.masked_bias", "transformer.h.32.attn.bias", "transformer.h.32.attn.masked_bias", "transformer.h.33.attn.bias", "transformer.h.33.attn.masked_bias", "transformer.h.34.attn.bias", "transformer.h.34.attn.masked_bias", "transformer.h.35.attn.bias", "transformer.h.35.attn.masked_bias", "transformer.h.36.attn.bias", "transformer.h.36.attn.masked_bias", "transformer.h.37.attn.bias", "transformer.h.37.attn.masked_bias", "transformer.h.38.attn.bias", "transformer.h.38.attn.masked_bias", "transformer.h.39.attn.bias", "transformer.h.39.attn.masked_bias", "transformer.h.40.attn.bias", "transformer.h.40.attn.masked_bias", "transformer.h.41.attn.bias", "transformer.h.41.attn.masked_bias", "transformer.h.42.attn.bias", "transformer.h.42.attn.masked_bias", "transformer.h.43.attn.bias", "transformer.h.43.attn.masked_bias", "transformer.h.44.attn.bias", "transformer.h.44.attn.masked_bias", "transformer.h.45.attn.bias", "transformer.h.45.attn.masked_bias", "transformer.h.46.attn.bias", "transformer.h.46.attn.masked_bias", "transformer.h.47.attn.bias", "transformer.h.47.attn.masked_bias".
How can this be fixed?