Commit 556c7ed

Truncate max_tokens if it exceeds context length
1 parent fb2c5f7 commit 556c7ed

File tree: 1 file changed (+8, −1)

llama_cpp/llama.py

Lines changed: 8 additions & 1 deletion
@@ -811,9 +811,16 @@ def _create_completion(
         if self.verbose:
             llama_cpp.llama_reset_timings(self.ctx)
 
-        if len(prompt_tokens) + max_tokens > self._n_ctx:
+        if len(prompt_tokens) > self._n_ctx:
             raise ValueError(f"Requested tokens exceed context window of {self._n_ctx}")
 
+        # Truncate max_tokens if requested tokens would exceed the context window
+        max_tokens = (
+            max_tokens
+            if max_tokens + len(prompt_tokens) < self._n_ctx
+            else (self._n_ctx - len(prompt_tokens))
+        )
+
         if stop != []:
             stop_sequences = [s.encode("utf-8") for s in stop]
         else:
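
The old check raised a ValueError whenever the prompt plus max_tokens exceeded the context window, even when the prompt alone would have fit; the new code reserves the error for oversized prompts and clamps max_tokens to whatever room remains. A minimal standalone sketch of the clamping logic (clamp_max_tokens is a hypothetical helper name for illustration, not part of llama_cpp's API):

def clamp_max_tokens(n_prompt: int, max_tokens: int, n_ctx: int) -> int:
    """Clamp max_tokens so prompt + completion fit in an n_ctx-token window."""
    if n_prompt > n_ctx:
        # Mirrors the ValueError raised in _create_completion above.
        raise ValueError(f"Requested tokens exceed context window of {n_ctx}")
    if max_tokens + n_prompt < n_ctx:
        return max_tokens
    return n_ctx - n_prompt

# A 2048-token window with a 2000-token prompt leaves room for 48 tokens,
# so a request for 256 completion tokens is truncated to 48.
assert clamp_max_tokens(2000, 256, 2048) == 48
# When everything fits, max_tokens passes through unchanged.
assert clamp_max_tokens(100, 256, 2048) == 256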
