
Commit 4dcf7d1

set upper bound of seq_len dynamically based on user's config
1 parent d2653bc commit 4dcf7d1

File tree

1 file changed (+1, -1 lines)


optimum/exporters/executorch/integrations.py

Lines changed: 1 addition & 1 deletion

@@ -64,7 +64,7 @@ def export(
         example_cache_position = (
             cache_position if cache_position is not None else torch.arange(seq_length, dtype=torch.long)
         )
-        seq_len_dim = torch.export.Dim("seq_length_dim", max=128 - 1)
+        seq_len_dim = torch.export.Dim("seq_length_dim", max=min(self.metadata["get_max_seq_len"], max_cache_len) - 1)
         dynamic_shapes = {"input_ids": {1: seq_len_dim}, "cache_position": {0: seq_len_dim}}
         strict = parse(torch.__version__) != parse(
             "2.7.0"

0 commit comments
