author    Nate Sesti <sestinj@gmail.com>  2023-09-29 12:12:17 -0700
committer Nate Sesti <sestinj@gmail.com>  2023-09-29 12:12:17 -0700
commit    ff2a3978a1e2c95a4e288b56411bf0c32b86757b (patch)
tree      a95db68a4590490ea0ecce1d81519e83327231d6
parent    c867cd40342d44901cf5277ded25f5dc5aaa4326 (diff)
feat: :sparkles: add max_tokens option to LLM class
-rw-r--r--  continuedev/src/continuedev/libs/llm/__init__.py | 10
1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/continuedev/src/continuedev/libs/llm/__init__.py b/continuedev/src/continuedev/libs/llm/__init__.py
index 28f614c7..e6a90ef7 100644
--- a/continuedev/src/continuedev/libs/llm/__init__.py
+++ b/continuedev/src/continuedev/libs/llm/__init__.py
@@ -71,6 +71,10 @@ class LLM(ContinueBaseModel):
         ..., description="The name of the model to be used (e.g. gpt-4, codellama)"
     )
 
+    max_tokens: int = Field(
+        DEFAULT_MAX_TOKENS, description="The maximum number of tokens to generate."
+    )
+
     stop_tokens: Optional[List[str]] = Field(
         None, description="Tokens that will stop the completion."
     )
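
For readers skimming the hunk above, a minimal sketch of what the new field gives you. DEFAULT_MAX_TOKENS is a stand-in value here (the real constant is defined elsewhere in the module), and the class is reduced to just the fields visible in the diff:

    from typing import List, Optional
    from pydantic import BaseModel, Field

    DEFAULT_MAX_TOKENS = 1024  # assumed value; stands in for the module-level constant the diff references

    class LLMSketch(BaseModel):
        model: str = Field(
            ..., description="The name of the model to be used (e.g. gpt-4, codellama)"
        )
        max_tokens: int = Field(
            DEFAULT_MAX_TOKENS, description="The maximum number of tokens to generate."
        )
        stop_tokens: Optional[List[str]] = Field(
            None, description="Tokens that will stop the completion."
        )

    print(LLMSketch(model="gpt-4").max_tokens)                       # 1024 (falls back to the default)
    print(LLMSketch(model="codellama", max_tokens=2048).max_tokens)  # 2048 (per-instance override)
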
@@ -237,7 +241,7 @@ class LLM(ContinueBaseModel):
             presence_penalty=presence_penalty,
             frequency_penalty=frequency_penalty,
             stop=stop or self.stop_tokens,
-            max_tokens=max_tokens,
+            max_tokens=max_tokens or self.max_tokens,
             functions=functions,
         )
@@ -288,7 +292,7 @@ class LLM(ContinueBaseModel):
             presence_penalty=presence_penalty,
             frequency_penalty=frequency_penalty,
             stop=stop or self.stop_tokens,
-            max_tokens=max_tokens,
+            max_tokens=max_tokens or self.max_tokens,
             functions=functions,
         )
@@ -337,7 +341,7 @@ class LLM(ContinueBaseModel):
             presence_penalty=presence_penalty,
             frequency_penalty=frequency_penalty,
             stop=stop or self.stop_tokens,
-            max_tokens=max_tokens,
+            max_tokens=max_tokens or self.max_tokens,
             functions=functions,
         )
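
The three call-site hunks all apply the same fallback: a per-call max_tokens argument takes precedence, and None falls back to the new instance-level default. A minimal sketch of that precedence (the helper name and signature are illustrative, not taken from the repository):

    from typing import Optional

    def resolve_max_tokens(per_call: Optional[int], instance_default: int) -> int:
        # Mirrors the `max_tokens or self.max_tokens` expression in the diff:
        # a truthy per-call value wins, otherwise the instance default is used.
        return per_call or instance_default

    assert resolve_max_tokens(None, 1024) == 1024  # no per-call value -> instance default
    assert resolve_max_tokens(256, 1024) == 256    # explicit per-call value wins

Because the fallback uses `or` rather than an explicit None check, an explicit max_tokens=0 would also fall back to the default; that is harmless here, since a zero-token completion is never useful.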