We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent d2653bc · commit ca5982b (copy full SHA: ca5982b)
optimum/exporters/executorch/integrations.py
@@ -64,7 +64,9 @@ def export(
         example_cache_position = (
             cache_position if cache_position is not None else torch.arange(seq_length, dtype=torch.long)
         )
-        seq_len_dim = torch.export.Dim("seq_length_dim", max=128 - 1)
+        seq_len_dim = torch.export.Dim(
+            "seq_length_dim", max=min(self.metadata["get_max_seq_len"], max_cache_len) - 1
+        )
         dynamic_shapes = {"input_ids": {1: seq_len_dim}, "cache_position": {0: seq_len_dim}}
         strict = parse(torch.__version__) != parse(
             "2.7.0"
0 commit comments