Skip to content

Commit ca5982b

Browse files
committed
set upper bound of seq_len dynamically based on the user's config
1 parent d2653bc commit ca5982b

File tree

1 file changed

+3
-1
lines changed

1 file changed

+3
-1
lines changed

optimum/exporters/executorch/integrations.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,9 @@ def export(
         example_cache_position = (
             cache_position if cache_position is not None else torch.arange(seq_length, dtype=torch.long)
         )
-        seq_len_dim = torch.export.Dim("seq_length_dim", max=128 - 1)
+        seq_len_dim = torch.export.Dim(
+            "seq_length_dim", max=min(self.metadata["get_max_seq_len"], max_cache_len) - 1
+        )
         dynamic_shapes = {"input_ids": {1: seq_len_dim}, "cache_position": {0: seq_len_dim}}
         strict = parse(torch.__version__) != parse(
             "2.7.0"

0 commit comments

Comments (0)