summaryrefslogtreecommitdiff
path: root/continuedev/src
diff options
context:
space:
mode:
authorNate Sesti <sestinj@gmail.com>2023-06-26 11:09:32 -0700
committerNate Sesti <sestinj@gmail.com>2023-06-26 11:09:32 -0700
commita2a6f4547b591c90a62c830b92a7b3920bb13b9f (patch)
tree110d62417cde19a94a3f5c92449ab4367749ef12 /continuedev/src
parent79f65715664b8b0300c35c14e058d318d7c79295 (diff)
parentb9ab8d7cb67ff5f242c42575e06f4a7325ae9b47 (diff)
download	sncontinue-a2a6f4547b591c90a62c830b92a7b3920bb13b9f.tar.gz
sncontinue-a2a6f4547b591c90a62c830b92a7b3920bb13b9f.tar.bz2
sncontinue-a2a6f4547b591c90a62c830b92a7b3920bb13b9f.zip
Merge branch 'main' of https://github.com/continuedev/continue
Diffstat (limited to 'continuedev/src')
-rw-r--r--continuedev/src/continuedev/core/main.py6
-rw-r--r--continuedev/src/continuedev/libs/llm/proxy_server.py2
-rw-r--r--continuedev/src/continuedev/libs/util/count_tokens.py4
3 files changed, 6 insertions, 6 deletions
diff --git a/continuedev/src/continuedev/core/main.py b/continuedev/src/continuedev/core/main.py
index b9ae9eba..1d2b0cad 100644
--- a/continuedev/src/continuedev/core/main.py
+++ b/continuedev/src/continuedev/core/main.py
@@ -17,11 +17,11 @@ class FunctionCall(ContinueBaseModel):
class ChatMessage(ContinueBaseModel):
role: ChatMessageRole
- content: str | None
- name: str | None
+ content: Union[str, None] = None
+ name: Union[str, None] = None
# A summary for pruning chat context to fit context window. Often the Step name.
summary: str
- function_call: FunctionCall | None = None
+ function_call: Union[FunctionCall, None] = None
def to_dict(self, with_functions: bool) -> Dict:
d = self.dict()
diff --git a/continuedev/src/continuedev/libs/llm/proxy_server.py b/continuedev/src/continuedev/libs/llm/proxy_server.py
index 5389218a..9fe6e811 100644
--- a/continuedev/src/continuedev/libs/llm/proxy_server.py
+++ b/continuedev/src/continuedev/libs/llm/proxy_server.py
@@ -42,7 +42,7 @@ class ProxyServer(LLM):
except:
raise Exception(await resp.text())
- async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, Generator[Any | List | Dict, None, None]]:
+ async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, Generator[Union[Any, List, Dict], None, None]]:
args = self.default_args | kwargs
messages = compile_chat_messages(
self.default_model, messages, None, with_functions=args["model"].endswith("0613"))
diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py
index addafcff..154af5e1 100644
--- a/continuedev/src/continuedev/libs/util/count_tokens.py
+++ b/continuedev/src/continuedev/libs/util/count_tokens.py
@@ -21,7 +21,7 @@ def encoding_for_model(model: str):
return tiktoken.encoding_for_model(aliases.get(model, model))
-def count_tokens(model: str, text: str | None):
+def count_tokens(model: str, text: Union[str, None]):
if text is None:
return 0
encoding = encoding_for_model(model)
@@ -74,7 +74,7 @@ def prune_chat_history(model: str, chat_history: List[ChatMessage], max_tokens:
return chat_history
-def compile_chat_messages(model: str, msgs: List[ChatMessage], prompt: str | None = None, with_functions: bool = False, system_message: Union[str, None] = None) -> List[Dict]:
+def compile_chat_messages(model: str, msgs: List[ChatMessage], prompt: Union[str, None] = None, with_functions: bool = False, system_message: Union[str, None] = None) -> List[Dict]:
prompt_tokens = count_tokens(model, prompt)
msgs = prune_chat_history(model,
msgs, MAX_TOKENS_FOR_MODEL[model], prompt_tokens + 1000 + count_tokens(model, system_message))