author     Nate Sesti <sestinj@gmail.com>  2023-07-20 16:30:30 -0700
committer  Nate Sesti <sestinj@gmail.com>  2023-07-20 16:30:30 -0700
commit     00efacfc3df025f359a8aac86dad8b273d5fd350 (patch)
tree       7a29aee9ac4bca2a14c14c0e2b1dd04c1cbaab71 /continuedev/src
parent     67b1e77e9dc2134e63a0e2d87524db2260ad817a (diff)
deep copy
Diffstat (limited to 'continuedev/src')
-rw-r--r--  continuedev/src/continuedev/libs/llm/openai.py         2
-rw-r--r--  continuedev/src/continuedev/libs/util/count_tokens.py  3
2 files changed, 3 insertions, 2 deletions
diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py
index 96a4ab71..a0773c1d 100644
--- a/continuedev/src/continuedev/libs/llm/openai.py
+++ b/continuedev/src/continuedev/libs/llm/openai.py
@@ -81,7 +81,7 @@ class OpenAI(LLM):
del args["functions"]
messages = compile_chat_messages(
- args["model"], messages, args["max_tokens"], functions=args.get("functions", None), system_message=self.system_message)
+ args["model"], messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message)
self.write_log(f"Prompt: \n\n{format_chat_messages(messages)}")
completion = ""
async for chunk in await openai.ChatCompletion.acreate(
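Note on the hunk above: the new `None` fills the positional slot between `args["max_tokens"]` and the keyword arguments of `compile_chat_messages`. The hunk header of the second file below truncates the signature, so the parameter's name is not visible here; the nearby `if prompt is not None:` context suggests it is a prompt argument. A hypothetical sketch of the call shape under that assumption (not the project's actual signature):

# Hypothetical reconstruction of the call shape; the real parameter name is
# truncated in the hunk header below, so "prompt" here is an assumption.
def compile_chat_messages(model, msgs, max_tokens, prompt=None,
                          functions=None, system_message=None):
    ...

# Call site before the change: the slot after max_tokens is left to its default.
compile_chat_messages("gpt-3.5-turbo", [], 1024,
                      functions=None, system_message=None)

# Call site after the change: the slot is passed explicitly as None.
compile_chat_messages("gpt-3.5-turbo", [], 1024, None,
                      functions=None, system_message=None)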
diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py
index 6e0a3b88..cea91470 100644
--- a/continuedev/src/continuedev/libs/util/count_tokens.py
+++ b/continuedev/src/continuedev/libs/util/count_tokens.py
@@ -105,7 +105,8 @@ def compile_chat_messages(model: str, msgs: Union[List[ChatMessage], None], max_
"""
The total number of tokens is system_message + sum(msgs) + functions + prompt after it is converted to a message
"""
- msgs_copy = msgs.copy() if msgs is not None else []
+ msgs_copy = [msg.copy(deep=True)
+ for msg in msgs] if msgs is not None else []
if prompt is not None:
prompt_msg = ChatMessage(role="user", content=prompt, summary=prompt)
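The commit message ("deep copy") refers to this second hunk: `msgs.copy()` duplicates only the outer list, so the ChatMessage objects inside remain shared, and any later in-place edit to a message (for example, pruning content to fit the token budget) would also alter the caller's copy. `msg.copy(deep=True)` is the pydantic BaseModel.copy API, which ChatMessage appears to be built on. A minimal sketch of the difference, using a stand-in model rather than the project's actual ChatMessage:

# Minimal sketch of why a per-message deep copy matters.
# "ChatMessage" below is a stand-in pydantic model, not the project's definition.
from pydantic import BaseModel


class ChatMessage(BaseModel):
    role: str
    content: str


msgs = [ChatMessage(role="user", content="a fairly long prompt")]

# Shallow copy: the list is new, but both lists point at the same objects.
shallow = msgs.copy()
shallow[0].content = "truncated"
print(msgs[0].content)   # "truncated" -- the caller's message was mutated

# Deep copy per message: mutations stay local to the copy.
msgs = [ChatMessage(role="user", content="a fairly long prompt")]
deep = [m.copy(deep=True) for m in msgs]
deep[0].content = "truncated"
print(msgs[0].content)   # "a fairly long prompt" -- original untouched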