author    | Nate Sesti <sestinj@gmail.com> | 2023-08-30 23:02:28 -0700
committer | Nate Sesti <sestinj@gmail.com> | 2023-08-30 23:02:28 -0700
commit    | cbb30aa3cb0600d83fc5a5d3cbf60621883a4d00 (patch)
tree      | 16562eb52060a1f03a327d46c2aeb45015c73500 /continuedev
parent    | 28f5d7bedab05a8b061e4e7ee9055a5403786bbc (diff)
don't url decode ollama
Diffstat (limited to 'continuedev')
-rw-r--r-- | continuedev/src/continuedev/libs/llm/ollama.py    | 17
-rw-r--r-- | continuedev/src/continuedev/plugins/steps/chat.py | 29
2 files changed, 29 insertions, 17 deletions
diff --git a/continuedev/src/continuedev/libs/llm/ollama.py b/continuedev/src/continuedev/libs/llm/ollama.py
index 240d922b..03300435 100644
--- a/continuedev/src/continuedev/libs/llm/ollama.py
+++ b/continuedev/src/continuedev/libs/llm/ollama.py
@@ -117,7 +117,8 @@ class Ollama(LLM):
         )
         prompt = llama2_template_messages(messages)
 
-        self.write_log(f"Prompt: {prompt}")
+        self.write_log(f"Prompt:\n{prompt}")
+        completion = ""
         async with self._client_session.post(
             f"{self.server_url}/api/generate",
             json={
@@ -127,8 +128,6 @@ class Ollama(LLM):
                 "options": {"temperature": args["temperature"]},
             },
         ) as resp:
-            # This is streaming application/json instaed of text/event-stream
-            url_decode_buffer = ""
             async for line in resp.content.iter_chunks():
                 if line[1]:
                     json_chunk = line[0].decode("utf-8")
@@ -137,18 +136,12 @@ class Ollama(LLM):
                     if chunk.strip() != "":
                         j = json.loads(chunk)
                         if "response" in j:
-                            url_decode_buffer += j["response"]
-                            if (
-                                "&" in url_decode_buffer
-                                and url_decode_buffer.index("&")
-                                > len(url_decode_buffer) - 5
-                            ):
-                                continue
                             yield {
                                 "role": "assistant",
-                                "content": urllib.parse.unquote(url_decode_buffer),
+                                "content": j["response"],
                             }
-                            url_decode_buffer = ""
+                            completion += j["response"]
+        self.write_log(f"Completion:\n{completion}")
 
     async def complete(
         self, prompt: str, with_history: List[ChatMessage] = None, **kwargs
diff --git a/continuedev/src/continuedev/plugins/steps/chat.py b/continuedev/src/continuedev/plugins/steps/chat.py
index cbd94fe2..0b82d30b 100644
--- a/continuedev/src/continuedev/plugins/steps/chat.py
+++ b/continuedev/src/continuedev/plugins/steps/chat.py
@@ -24,6 +24,12 @@ openai.api_key = OPENAI_API_KEY
 FREE_USAGE_STEP_NAME = "Please enter OpenAI API key"
 
 
+def add_ellipsis(text: str, max_length: int = 200) -> str:
+    if len(text) > max_length:
+        return text[: max_length - 3] + "..."
+    return text
+
+
 class SimpleChatStep(Step):
     name: str = "Generating Response..."
     manage_own_chat_context: bool = True
@@ -91,13 +97,26 @@ class SimpleChatStep(Step):
 
             if "content" in chunk:
                 self.description += chunk["content"]
+
+                # HTML unencode
+                # end_size = len(chunk["content"]) - 6
+                # if "&" in self.description[-end_size:]:
+                #     self.description = self.description[:-end_size] + html.unescape(
+                #         self.description[-end_size:]
+                #     )
+
                 await sdk.update_ui()
 
-        self.name = remove_quotes_and_escapes(
-            await sdk.models.medium.complete(
-                f'"{self.description}"\n\nPlease write a short title summarizing the message quoted above. Use no more than 10 words:',
-                max_tokens=20,
-            )
+        self.name = "Generating title..."
+        await sdk.update_ui()
+        self.name = add_ellipsis(
+            remove_quotes_and_escapes(
+                await sdk.models.medium.complete(
+                    f'"{self.description}"\n\nPlease write a short title summarizing the message quoted above. Use no more than 10 words:',
+                    max_tokens=20,
+                )
+            ),
+            200,
         )
 
         self.chat_context.append(
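The ollama.py change removes the leftover URL-decoding workaround: Ollama's /api/generate endpoint streams newline-delimited JSON objects whose "response" field already contains plain text, so each chunk can be yielded directly and accumulated for the completion log. Below is a minimal, self-contained sketch of that parsing loop, not the exact code in continuedev/libs/llm/ollama.py; the server URL and the "llama2" model name are assumptions for illustration.

```python
# Minimal sketch of streaming from Ollama's /api/generate endpoint.
# Assumes a local Ollama server at http://localhost:11434 serving "llama2";
# illustrative only, not the code used by the Ollama class in this repo.
import asyncio
import json

import aiohttp


async def stream_generate(prompt: str, server_url: str = "http://localhost:11434") -> str:
    completion = ""
    async with aiohttp.ClientSession() as session:
        async with session.post(
            f"{server_url}/api/generate",
            json={"prompt": prompt, "model": "llama2"},
        ) as resp:
            # The endpoint streams newline-delimited JSON, e.g.
            # {"model":"llama2","response":"Hello","done":false}
            async for raw_line in resp.content:
                line = raw_line.decode("utf-8").strip()
                if not line:
                    continue
                chunk = json.loads(line)
                if "response" in chunk:
                    # The text is already plain UTF-8; no urllib.parse.unquote needed.
                    completion += chunk["response"]
                if chunk.get("done"):
                    break
    return completion


if __name__ == "__main__":
    print(asyncio.run(stream_generate("Say hello in one sentence.")))
```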
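On the chat.py side, the step title is now generated after streaming finishes (with an interim "Generating title..." label) and is truncated by the new add_ellipsis helper so an overly long model response cannot blow up the title. A quick usage sketch of the helper as added in the diff; the sample inputs are illustrative:

```python
def add_ellipsis(text: str, max_length: int = 200) -> str:
    # Truncate to max_length characters total, reserving room for "..."
    if len(text) > max_length:
        return text[: max_length - 3] + "..."
    return text


# Illustrative values, not taken from the diff:
print(add_ellipsis("short title"))              # -> "short title"
print(add_ellipsis("x" * 300, max_length=10))   # -> "xxxxxxx..."
```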