From 490d30e11f7f7cee2b6b8ac2dd48c55dacffa36d Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Mon, 31 Jul 2023 01:23:17 -0700
Subject: docs: :memo: update documentation for LLMs in config.py

---
 continuedev/src/continuedev/libs/llm/anthropic.py | 2 +-
 continuedev/src/continuedev/libs/llm/ggml.py      | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

(limited to 'continuedev/src')

diff --git a/continuedev/src/continuedev/libs/llm/anthropic.py b/continuedev/src/continuedev/libs/llm/anthropic.py
index b01a84cd..ec1b7e40 100644
--- a/continuedev/src/continuedev/libs/llm/anthropic.py
+++ b/continuedev/src/continuedev/libs/llm/anthropic.py
@@ -9,7 +9,7 @@ from ..util.count_tokens import compile_chat_messages, DEFAULT_ARGS, count_token
 
 
 class AnthropicLLM(LLM):
-    model: str
+    model: str = "claude-2"
 
     requires_api_key: str = "ANTHROPIC_API_KEY"
     _async_client: AsyncAnthropic = None
diff --git a/continuedev/src/continuedev/libs/llm/ggml.py b/continuedev/src/continuedev/libs/llm/ggml.py
index 2b56a51c..7742e8c3 100644
--- a/continuedev/src/continuedev/libs/llm/ggml.py
+++ b/continuedev/src/continuedev/libs/llm/ggml.py
@@ -13,7 +13,7 @@ SERVER_URL = "http://localhost:8000"
 
 
 class GGML(LLM):
     # this is model-specific
-    max_context_length: int
+    max_context_length: int = 2048
 
     _client_session: aiohttp.ClientSession = None
-- 
cgit v1.2.3-70-g09d2
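
Note (not part of the patch): a minimal standalone sketch of what these two one-line changes do. Giving a pydantic-style field a default value turns a previously required config entry into an optional one. The classes below are simplified stand-ins, not the actual Continue LLM subclasses; only the annotated fields mirror the diff, and the base-class behavior is assumed to follow ordinary pydantic validation.

from pydantic import BaseModel, ValidationError


class AnthropicLLMBefore(BaseModel):
    # As in the old anthropic.py: `model` has no default, so it must be set in config.py.
    model: str
    requires_api_key: str = "ANTHROPIC_API_KEY"


class AnthropicLLMAfter(BaseModel):
    # As in the patched anthropic.py: `model` falls back to "claude-2" when omitted.
    model: str = "claude-2"
    requires_api_key: str = "ANTHROPIC_API_KEY"


try:
    AnthropicLLMBefore()                      # missing `model` -> ValidationError
except ValidationError as err:
    print("before the patch:", err.errors()[0]["msg"])

print("after the patch:", AnthropicLLMAfter().model)   # -> claude-2

The same reasoning applies to GGML: `max_context_length` now defaults to 2048 instead of being a field the user has to supply in config.py.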