From 19050f83228b3e7f08a6aacd5bdd1804a8315e4a Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 27 Aug 2023 11:07:38 -0700
Subject: fix: :bug: urldecode ollama responses, make edit faster

---
 continuedev/src/continuedev/libs/llm/ollama.py         | 7 ++++---
 continuedev/src/continuedev/plugins/steps/core/core.py | 9 +++++++--
 continuedev/src/continuedev/server/gui.py              | 5 ++---
 3 files changed, 13 insertions(+), 8 deletions(-)

diff --git a/continuedev/src/continuedev/libs/llm/ollama.py b/continuedev/src/continuedev/libs/llm/ollama.py
index 0eb738dc..5647b702 100644
--- a/continuedev/src/continuedev/libs/llm/ollama.py
+++ b/continuedev/src/continuedev/libs/llm/ollama.py
@@ -1,4 +1,5 @@
 import json
+import urllib.parse
 from textwrap import dedent
 from typing import Any, Coroutine, Dict, Generator, List, Union
@@ -100,7 +101,7 @@ class Ollama(LLM):
                 if chunk.strip() != "":
                     j = json.loads(chunk)
                     if "response" in j:
-                        yield j["response"]
+                        yield urllib.parse.unquote(j["response"])
             except:
                 raise Exception(str(line[0]))
 
@@ -138,7 +139,7 @@ class Ollama(LLM):
                     if "response" in j:
                         yield {
                             "role": "assistant",
-                            "content": j["response"],
+                            "content": urllib.parse.unquote(j["response"]),
                         }
             except:
                 raise Exception(str(line[0]))
@@ -164,7 +165,7 @@ class Ollama(LLM):
                 if chunk.strip() != "":
                     j = json.loads(chunk)
                     if "response" in j:
-                        completion += j["response"]
+                        completion += urllib.parse.unquote(j["response"])
             except:
                 raise Exception(str(line[0]))
 
diff --git a/continuedev/src/continuedev/plugins/steps/core/core.py b/continuedev/src/continuedev/plugins/steps/core/core.py
index 8fa73f7c..fe4b8a61 100644
--- a/continuedev/src/continuedev/plugins/steps/core/core.py
+++ b/continuedev/src/continuedev/plugins/steps/core/core.py
@@ -8,8 +8,9 @@
 from pydantic import validator
 from ....core.main import ChatMessage, ContinueCustomException, Step
 from ....core.observation import Observation, TextObservation, UserInputObservation
-from ....libs.llm.ggml import GGML
+from ....libs.llm.anthropic import AnthropicLLM
 from ....libs.llm.maybe_proxy_openai import MaybeProxyOpenAI
+from ....libs.llm.openai import OpenAI
 from ....libs.util.count_tokens import DEFAULT_MAX_TOKENS
 from ....libs.util.strings import (
     dedent_and_get_common_whitespace,
@@ -638,7 +639,11 @@ Please output the code to be inserted at the cursor in order to fulfill the user
             repeating_file_suffix = False
             line_below_highlighted_range = file_suffix.lstrip().split("\n")[0]
 
-        if isinstance(model_to_use, GGML):
+        if not (
+            isinstance(model_to_use, OpenAI)
+            or isinstance(model_to_use, MaybeProxyOpenAI)
+            or isinstance(model_to_use, AnthropicLLM)
+        ):
             messages = [
                 ChatMessage(
                     role="user",
diff --git a/continuedev/src/continuedev/server/gui.py b/continuedev/src/continuedev/server/gui.py
index 55a5f3b4..51dad8ed 100644
--- a/continuedev/src/continuedev/server/gui.py
+++ b/continuedev/src/continuedev/server/gui.py
@@ -262,13 +262,12 @@ class GUIProtocolServer(AbstractGUIProtocolServer):
             # Set and start the default model if didn't already exist from unused
             if models.default is None:
                 models.default = MODEL_CLASSES[model_class](**model)
-                await self.session.autopilot.continue_sdk.start_model(
-                    models.default
-                )
 
             await self.session.autopilot.continue_sdk.run_step(
                 SetupModelStep(model_class=model_class)
            )
 
+            await self.session.autopilot.continue_sdk.start_model(models.default)
+
             models_args = {}
             for role in ALL_MODEL_ROLES:
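
Note on the ollama.py change: the commit subject indicates the streamed
"response" field was arriving percent-encoded, so each chunk is now passed
through urllib.parse.unquote before being yielded or accumulated. unquote is
standard-library and leaves strings without '%' escapes untouched, so the
decode is safe even when a chunk is already plain text:

    >>> import urllib.parse
    >>> urllib.parse.unquote("def%20foo():%0A")
    'def foo():\n'
    >>> urllib.parse.unquote("plain text")  # no escapes: returned unchanged
    'plain text'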
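
For readers without the surrounding code, here is a minimal, self-contained
sketch of consuming a newline-delimited JSON completion stream the way the
patched generator does. aiohttp, the endpoint URL, and the model name are
assumptions for illustration only; the actual file may use a different HTTP
client:

    import json
    import urllib.parse

    import aiohttp

    async def stream_completion(prompt: str):
        # Hypothetical endpoint and model; Ollama's default generate API
        # is assumed here purely for illustration.
        async with aiohttp.ClientSession() as session:
            async with session.post(
                "http://localhost:11434/api/generate",
                json={"model": "llama2", "prompt": prompt},
            ) as resp:
                async for raw_line in resp.content:  # one JSON object per line
                    chunk = raw_line.decode("utf-8").strip()
                    if chunk != "":
                        j = json.loads(chunk)
                        if "response" in j:
                            # Percent-decode, mirroring this commit's fix.
                            yield urllib.parse.unquote(j["response"])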
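
On the core.py change: the edit step previously special-cased GGML, and now
takes the raw-prompt path for every model that is not OpenAI,
MaybeProxyOpenAI, or AnthropicLLM, which presumably accounts for the "make
edit faster" half of the subject. An equivalent, slightly tighter form of the
new condition (isinstance accepts a tuple of types; the class names are the
ones imported by the patch):

    if not isinstance(model_to_use, (OpenAI, MaybeProxyOpenAI, AnthropicLLM)):
        ...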