Open
Description
I tried to upgrade 1.5 to 1.6, but it failed with an `object has no attribute 'reshape_heads_to_batch_dim'` error on Hugging Face Spaces.
I just cloned the Space (fffiloni/LatentSync), replaced the repo id with ByteDance/LatentSync-1.6, and changed the config path to stage2_512.yaml (copied from the 1.6 repo).
Traceback (most recent call last):
File "/usr/local/lib/python3.10/site-packages/spaces/zero/wrappers.py", line 256, in thread_wrapper
res = future.result()
File "/usr/local/lib/python3.10/concurrent/futures/_base.py", line 451, in result
return self.__get_result()
File "/usr/local/lib/python3.10/concurrent/futures/_base.py", line 403, in __get_result
raise self._exception
File "/usr/local/lib/python3.10/concurrent/futures/thread.py", line 58, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/user/app/app.py", line 173, in main
pipeline(
File "/usr/local/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
File "/home/user/app/latentsync/pipelines/lipsync_pipeline.py", line 422, in __call__
noise_pred = self.unet(latent_model_input, t, encoder_hidden_states=audio_embeds).sample
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
File "/home/user/app/latentsync/models/unet.py", line 418, in forward
sample, res_samples = downsample_block(
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
File "/home/user/app/latentsync/models/unet_blocks.py", line 484, in forward
motion_module(hidden_states, temb, encoder_hidden_states=encoder_hidden_states)
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
File "/home/user/app/latentsync/models/motion_module.py", line 77, in forward
hidden_states = self.temporal_transformer(hidden_states, encoder_hidden_states, attention_mask)
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
File "/home/user/app/latentsync/models/motion_module.py", line 147, in forward
hidden_states = block(
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
File "/home/user/app/latentsync/models/motion_module.py", line 214, in forward
attention_block(
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
File "/home/user/app/latentsync/models/motion_module.py", line 294, in forward
query = self.reshape_heads_to_batch_dim(query)
File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1931, in __getattr__
raise AttributeError(
AttributeError: 'VersatileAttention' object has no attribute 'reshape_heads_to_batch_dim'
Metadata
Metadata
Assignees
Labels
No labels