From a7c57e1d1e4a0eff3e4b598f8bf0448ea6068353 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Tue, 18 Jul 2023 22:14:39 -0700
Subject: pass through anthropic system message

---
 continuedev/src/continuedev/libs/llm/anthropic.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/continuedev/src/continuedev/libs/llm/anthropic.py b/continuedev/src/continuedev/libs/llm/anthropic.py
index 566f7150..c82895c6 100644
--- a/continuedev/src/continuedev/libs/llm/anthropic.py
+++ b/continuedev/src/continuedev/libs/llm/anthropic.py
@@ -73,7 +73,7 @@ class AnthropicLLM(LLM):
         args = self._transform_args(args)
 
         messages = compile_chat_messages(
-            args["model"], messages, args["max_tokens_to_sample"], functions=args.get("functions", None))
+            args["model"], messages, args["max_tokens_to_sample"], functions=args.get("functions", None), system_message=self.system_message)
         async for chunk in await self.async_client.completions.create(
             prompt=self.__messages_to_prompt(messages),
             **args
@@ -88,7 +88,7 @@ class AnthropicLLM(LLM):
         args = self._transform_args(args)
 
         messages = compile_chat_messages(
-            args["model"], with_history, args["max_tokens_to_sample"], prompt, functions=None)
+            args["model"], with_history, args["max_tokens_to_sample"], prompt, functions=None, system_message=self.system_message)
         resp = (await self.async_client.completions.create(
             prompt=self.__messages_to_prompt(messages),
             **args
-- 
cgit v1.2.3-70-g09d2
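
For illustration, a minimal sketch of what this change enables: the system message is now forwarded into message compilation so it lands at the front of the compiled history before the prompt is rendered. The helper names below mirror those in the patch (compile_chat_messages, a __messages_to_prompt-style renderer), but the bodies are simplified assumptions for demonstration, not the repository's actual implementations.

from typing import Dict, List, Optional

def compile_chat_messages(
    model: str,
    msgs: Optional[List[Dict[str, str]]],
    max_tokens: int,
    prompt: Optional[str] = None,
    functions: Optional[list] = None,
    system_message: Optional[str] = None,
) -> List[Dict[str, str]]:
    # Hypothetical stand-in: prepend the system message (if any),
    # then the prior history, then an optional trailing user prompt.
    compiled: List[Dict[str, str]] = []
    if system_message:
        compiled.append({"role": "system", "content": system_message})
    compiled.extend(msgs or [])
    if prompt is not None:
        compiled.append({"role": "user", "content": prompt})
    return compiled

def messages_to_prompt(messages: List[Dict[str, str]]) -> str:
    # Simplified Anthropic-completions-style rendering: non-assistant
    # turns (including the system message) become "Human:" turns.
    parts = []
    for msg in messages:
        tag = "Assistant" if msg["role"] == "assistant" else "Human"
        parts.append(f"\n\n{tag}: {msg['content']}")
    return "".join(parts) + "\n\nAssistant:"

if __name__ == "__main__":
    msgs = compile_chat_messages(
        "claude-2",
        [{"role": "user", "content": "Hi"}],
        1024,
        system_message="Answer in one sentence.",
    )
    print(messages_to_prompt(msgs))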