Traceback (most recent call last):
File "/disk2/xiaoming/Github/ChatHxk/chinese_alpaca_2_7b_16k_hf/hxk_demo.py", line 345, in gentask
ret = self.mfunc(callback=_callback, **self.kwargs)
File "/disk2/xiaoming/Github/ChatHxk/chinese_alpaca_2_7b_16k_hf/hxk_demo.py", line 545, in generate_with_callback
model.generate(**kwargs)
File "/home/xiaoming/miniconda3/envs/bigmodel/lib/python3.9/site-packages/torch/utils/_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "/home/xiaoming/miniconda3/envs/bigmodel/lib/python3.9/site-packages/transformers/generation/utils.py", line 1525, in generate
return self.sample(
File "/home/xiaoming/miniconda3/envs/bigmodel/lib/python3.9/site-packages/transformers/generation/utils.py", line 2622, in sample
outputs = self(
File "/home/xiaoming/miniconda3/envs/bigmodel/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
return forward_call(*args, **kwargs)
File "/home/xiaoming/miniconda3/envs/bigmodel/lib/python3.9/site-packages/transformers/models/llama/modeling_llama.py", line 1183, in forward
outputs = self.model(
File "/home/xiaoming/miniconda3/envs/bigmodel/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
return forward_call(*args, **kwargs)
File "/home/xiaoming/miniconda3/envs/bigmodel/lib/python3.9/site-packages/transformers/models/llama/modeling_llama.py", line 1070, in forward
layer_outputs = decoder_layer(
File "/home/xiaoming/miniconda3/envs/bigmodel/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
return forward_call(*args, **kwargs)
File "/home/xiaoming/miniconda3/envs/bigmodel/lib/python3.9/site-packages/transformers/models/llama/modeling_llama.py", line 798, in forward
hidden_states, self_attn_weights, present_key_value = self.self_attn(
File "/home/xiaoming/miniconda3/envs/bigmodel/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
return forward_call(*args, **kwargs)
File "/disk2/xiaoming/Github/ChatHxk/chinese_alpaca_2_7b_16k_hf/attn_and_long_ctx_patches.py", line 53, in xformers_forward
past_kv_len = past_key_value[0].shape[-2]
File "/home/xiaoming/miniconda3/envs/bigmodel/lib/python3.9/site-packages/transformers/cache_utils.py", line 78, in __getitem__
raise KeyError(f"Cache only has {len(self)} layers, attempted to access layer with index {layer_idx}")
KeyError: 'Cache only has 0 layers, attempted to access layer with index 0'

Cause: a transformers version incompatibility. Starting with transformers 4.36, past_key_values is passed as a Cache object (DynamicCache by default) instead of the legacy tuple of per-layer (key, value) tensors, so the access past_key_value[0].shape[-2] in the custom attn_and_long_ctx_patches.py goes through Cache.__getitem__ and raises a KeyError while the cache is still empty.
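A minimal reproduction of the failing access, assuming transformers >= 4.36 (where DynamicCache is the default cache class and the error message in the traceback comes from its __getitem__):

```python
from transformers.cache_utils import DynamicCache

cache = DynamicCache()  # fresh cache: no layers have been written yet
print(len(cache))       # 0
kv = cache[0]           # KeyError: 'Cache only has 0 layers, attempted to access layer with index 0'
```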

Fix: pin transformers to a pre-4.36 release, which still uses the legacy tuple cache:

pip install transformers==4.35.0
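If downgrading is not an option, the patch itself could be made version-aware. Below is a sketch of a hypothetical helper (get_past_kv_len is not part of the original patch) that reads the cached sequence length under either API; Cache.get_seq_length exists in transformers >= 4.36 and returns 0 for an empty cache rather than raising:

```python
def get_past_kv_len(past_key_value, layer_idx=0):
    """Return the number of cached tokens for one layer under either cache API."""
    if past_key_value is None:
        return 0
    if hasattr(past_key_value, "get_seq_length"):
        # transformers >= 4.36: past_key_value is a Cache object;
        # get_seq_length returns 0 on an empty cache instead of raising KeyError
        return past_key_value.get_seq_length(layer_idx)
    # transformers <= 4.35: per-layer (key, value) tuple, where the key
    # tensor has shape (batch, num_heads, seq_len, head_dim)
    return past_key_value[0].shape[-2]
```

With such a helper, line 53 of xformers_forward in the patch would become past_kv_len = get_past_kv_len(past_key_value), though this sketch has not been tested against the actual patched code path.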
