diff options
author | Nate Sesti <sestinj@gmail.com> | 2023-09-02 11:52:16 -0700 |
---|---|---|
committer | Nate Sesti <sestinj@gmail.com> | 2023-09-02 11:52:16 -0700 |
commit | 90590ab4e06fbc3fa721f73a4a922136946a756f (patch) | |
tree | dd5c774191711fff2f549d5aac6bce3a705101bb /continuedev | |
parent | af4944c80c1d442886abf179502f2aff8761ab49 (diff) | |
download | sncontinue-90590ab4e06fbc3fa721f73a4a922136946a756f.tar.gz sncontinue-90590ab4e06fbc3fa721f73a4a922136946a756f.tar.bz2 sncontinue-90590ab4e06fbc3fa721f73a4a922136946a756f.zip |
fix: :bug: llamacpp fix indexing max_tokens
Diffstat (limited to 'continuedev')
-rw-r--r-- | continuedev/src/continuedev/libs/llm/llamacpp.py | 2 |
1 file changed, 1 insertion, 1 deletion
diff --git a/continuedev/src/continuedev/libs/llm/llamacpp.py b/continuedev/src/continuedev/libs/llm/llamacpp.py index 84eb85fd..72c766c0 100644 --- a/continuedev/src/continuedev/libs/llm/llamacpp.py +++ b/continuedev/src/continuedev/libs/llm/llamacpp.py @@ -71,7 +71,7 @@ class LlamaCpp(LLM): self.model, with_history, self.context_length, - args["max_tokens"], + args["n_predict"] if "n_predict" in args else 1024, prompt, functions=args.get("functions", None), system_message=self.system_message, |