diff options
author | Ty Dunn <ty@tydunn.com> | 2023-06-26 10:55:42 -0700 |
---|---|---|
committer | Ty Dunn <ty@tydunn.com> | 2023-06-26 10:55:42 -0700 |
commit | e21cde260ad518d83a9811d11362d91a47f0b0c5 (patch) | |
tree | 6ff3433a47930f970d1c14d667c3989ccaf09f98 | |
parent | 4a8515f7fe55144c2460740b8c3d5ab5cf0cc525 (diff) | |
download | sncontinue-e21cde260ad518d83a9811d11362d91a47f0b0c5.tar.gz sncontinue-e21cde260ad518d83a9811d11362d91a47f0b0c5.tar.bz2 sncontinue-e21cde260ad518d83a9811d11362d91a47f0b0c5.zip |
working on 3.9 again
-rw-r--r-- | continuedev/src/continuedev/core/main.py | 6 | ||||
-rw-r--r-- | continuedev/src/continuedev/libs/llm/proxy_server.py | 2 | ||||
-rw-r--r-- | continuedev/src/continuedev/libs/util/count_tokens.py | 4 |
3 files changed, 6 insertions, 6 deletions
diff --git a/continuedev/src/continuedev/core/main.py b/continuedev/src/continuedev/core/main.py
index b9ae9eba..1d2b0cad 100644
--- a/continuedev/src/continuedev/core/main.py
+++ b/continuedev/src/continuedev/core/main.py
@@ -17,11 +17,11 @@ class FunctionCall(ContinueBaseModel):
 
 class ChatMessage(ContinueBaseModel):
     role: ChatMessageRole
-    content: str | None
-    name: str | None
+    content: Union[str, None] = None
+    name: Union[str, None] = None
     # A summary for pruning chat context to fit context window. Often the Step name.
     summary: str
-    function_call: FunctionCall | None = None
+    function_call: Union[FunctionCall, None] = None
 
     def to_dict(self, with_functions: bool) -> Dict:
         d = self.dict()
diff --git a/continuedev/src/continuedev/libs/llm/proxy_server.py b/continuedev/src/continuedev/libs/llm/proxy_server.py
index 5389218a..9fe6e811 100644
--- a/continuedev/src/continuedev/libs/llm/proxy_server.py
+++ b/continuedev/src/continuedev/libs/llm/proxy_server.py
@@ -42,7 +42,7 @@ class ProxyServer(LLM):
             except:
                 raise Exception(await resp.text())
 
-    async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, Generator[Any | List | Dict, None, None]]:
+    async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, Generator[Union[Any, List, Dict], None, None]]:
         args = self.default_args | kwargs
         messages = compile_chat_messages(
             self.default_model, messages, None, with_functions=args["model"].endswith("0613"))
diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py
index addafcff..154af5e1 100644
--- a/continuedev/src/continuedev/libs/util/count_tokens.py
+++ b/continuedev/src/continuedev/libs/util/count_tokens.py
@@ -21,7 +21,7 @@ def encoding_for_model(model: str):
     return tiktoken.encoding_for_model(aliases.get(model, model))
 
 
-def count_tokens(model: str, text: str | None):
+def count_tokens(model: str, text: Union[str, None]):
     if text is None:
         return 0
     encoding = encoding_for_model(model)
@@ -74,7 +74,7 @@ def prune_chat_history(model: str, chat_history: List[ChatMessage], max_tokens:
     return chat_history
 
 
-def compile_chat_messages(model: str, msgs: List[ChatMessage], prompt: str | None = None, with_functions: bool = False, system_message: Union[str, None] = None) -> List[Dict]:
+def compile_chat_messages(model: str, msgs: List[ChatMessage], prompt: Union[str, None] = None, with_functions: bool = False, system_message: Union[str, None] = None) -> List[Dict]:
     prompt_tokens = count_tokens(model, prompt)
     msgs = prune_chat_history(model, msgs, MAX_TOKENS_FOR_MODEL[model],
                               prompt_tokens + 1000 + count_tokens(model, system_message))