  File "E:\ComfyUI-aki-v1.4\custom_nodes\comfyui_slk_joy_caption_two\joy_caption_two_node.py", line 407, in generate
    generate_ids = text_model.generate(input_ids, inputs_embeds=input_embeds, attention_mask=attention_mask,
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\utils\_contextlib.py", line 116, in decorate_context
    return func(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\transformers\generation\utils.py", line 2215, in generate
    result = self._sample(
             ^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\transformers\generation\utils.py", line 3206, in _sample
    outputs = self(**model_inputs, return_dict=True)
              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1740, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1751, in _call_impl
    return forward_call(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\transformers\models\llama\modeling_llama.py", line 1190, in forward
    outputs = self.model(
              ^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1740, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1751, in _call_impl
    return forward_call(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\transformers\models\llama\modeling_llama.py", line 945, in forward
    layer_outputs = decoder_layer(
                    ^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1740, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1751, in _call_impl
    return forward_call(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\transformers\models\llama\modeling_llama.py", line 676, in forward
    hidden_states, self_attn_weights, present_key_value = self.self_attn(
                                                          ^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1740, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1751, in _call_impl
    return forward_call(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\transformers\models\llama\modeling_llama.py", line 559, in forward
    query_states = self.q_proj(hidden_states)
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1740, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1751, in _call_impl
    return forward_call(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\peft\tuners\lora\bnb.py", line 467, in forward
    result = self.base_layer(x, *args, **kwargs)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1740, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\nn\modules\module.py", line 1751, in _call_impl
    return forward_call(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\bitsandbytes\nn\modules.py", line 484, in forward
    return bnb.matmul_4bit(x, self.weight.t(),
                           bias=bias, quant_state=self.weight.quant_state).to(inp_dtype)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\bitsandbytes\autograd\_functions.py", line 533, in matmul_4bit
    return MatMul4Bit.apply(A, B, out, bias, quant_state)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\torch\autograd\function.py", line 575, in apply
    return super().apply(*args, **kwargs)  # type: ignore[misc]
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\bitsandbytes\autograd\_functions.py", line 462, in forward
    output = torch.nn.functional.linear(A, F.dequantize_4bit(B, quant_state).to(A.dtype).t(), bias)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\bitsandbytes\functional.py", line 1352, in dequantize_4bit
    absmax = dequantize_blockwise(quant_state.absmax, quant_state.state2)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\bitsandbytes\functional.py", line 1043, in dequantize_blockwise
    lib.cdequantize_blockwise_fp32(*args)
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\site-packages\bitsandbytes\cextension.py", line 46, in __getattr__
    return getattr(self._lib, item)
           ^^^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\ctypes\__init__.py", line 389, in __getattr__
    func = self.__getitem__(name)
           ^^^^^^^^^^^^^^^^^^^^^^
  File "E:\ComfyUI-aki-v1.4\python\Lib\ctypes\__init__.py", line 394, in __getitem__
    func = self._FuncPtr((name_or_ordinal, self))
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
AttributeError: function 'cdequantize_blockwise_fp32' not found

2025-04-17T10:03:29.386108 - Prompt executed in 0.46 seconds
```

## Attached Workflow

Please make sure that workflow does not contain any sensitive information such as API keys or passwords.

```
Workflow too large.
Please manually upload the workflow from local file system.
```

## Additional Context

(Please add any additional context or steps to reproduce the error here)