summary refs log tree commit diff
path: root/continuedev
diff options
context:
space:
mode:
Diffstat (limited to 'continuedev')
-rw-r--r--  continuedev/src/continuedev/libs/llm/openai.py  2
-rw-r--r--  continuedev/src/continuedev/libs/util/count_tokens.py  3
2 files changed, 3 insertions, 2 deletions
diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py
index 96a4ab71..a0773c1d 100644
--- a/continuedev/src/continuedev/libs/llm/openai.py
+++ b/continuedev/src/continuedev/libs/llm/openai.py
@@ -81,7 +81,7 @@ class OpenAI(LLM):
del args["functions"]
messages = compile_chat_messages(
- args["model"], messages, args["max_tokens"], functions=args.get("functions", None), system_message=self.system_message)
+ args["model"], messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message)
self.write_log(f"Prompt: \n\n{format_chat_messages(messages)}")
completion = ""
async for chunk in await openai.ChatCompletion.acreate(
diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py
index 6e0a3b88..cea91470 100644
--- a/continuedev/src/continuedev/libs/util/count_tokens.py
+++ b/continuedev/src/continuedev/libs/util/count_tokens.py
@@ -105,7 +105,8 @@ def compile_chat_messages(model: str, msgs: Union[List[ChatMessage], None], max_
"""
The total number of tokens is system_message + sum(msgs) + functions + prompt after it is converted to a message
"""
- msgs_copy = msgs.copy() if msgs is not None else []
+ msgs_copy = [msg.copy(deep=True)
+ for msg in msgs] if msgs is not None else []
if prompt is not None:
prompt_msg = ChatMessage(role="user", content=prompt, summary=prompt)