author     Nate Sesti <sestinj@gmail.com>    2023-07-17 13:33:29 -0700
committer  Nate Sesti <sestinj@gmail.com>    2023-07-17 13:33:29 -0700
commit     05d665e65aaef62254a4da9a7a381f9984ff0db5 (patch)
tree       9b5c08baa5c7c1da051e4109ae34fb8a141c2754 /continuedev/src/continuedev/core
parent     868e0b7ef5357b89186119c3c2fa8bd427b8db30 (diff)
parent     6e95cb64cd5b2e2d55200bf979106f18d395bb97 (diff)
Merge branch 'main' of https://github.com/continuedev/continue into anthropic
Diffstat (limited to 'continuedev/src/continuedev/core')
-rw-r--r--  continuedev/src/continuedev/core/autopilot.py  19
-rw-r--r--  continuedev/src/continuedev/core/config.py     11
-rw-r--r--  continuedev/src/continuedev/core/policy.py       2
-rw-r--r--  continuedev/src/continuedev/core/sdk.py         15
4 files changed, 40 insertions(+), 7 deletions(-)
diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py
index 0696c360..4e177ac9 100644
--- a/continuedev/src/continuedev/core/autopilot.py
+++ b/continuedev/src/continuedev/core/autopilot.py
@@ -36,7 +36,7 @@ def get_error_title(e: Exception) -> str:
elif isinstance(e, openai_errors.APIConnectionError):
return "The request failed. Please check your internet connection and try again. If this issue persists, you can use our API key for free by going to VS Code settings and changing the value of continue.OPENAI_API_KEY to \"\""
elif isinstance(e, openai_errors.InvalidRequestError):
- return 'Your API key does not have access to GPT-4. You can use ours for free by going to VS Code settings and changing the value of continue.OPENAI_API_KEY to ""'
+ return 'Invalid request sent to OpenAI. Please try again.'
elif e.__str__().startswith("Cannot connect to host"):
return "The request failed. Please check your internet connection and try again."
return e.__str__() or e.__repr__()
@@ -166,6 +166,22 @@ class Autopilot(ContinueBaseModel):
if not any(map(lambda x: x.editing, self._highlighted_ranges)):
self._highlighted_ranges[0].editing = True
+ def _disambiguate_highlighted_ranges(self):
+ """If any files have the same name, also display their folder name"""
+ name_counts = {}
+ for rif in self._highlighted_ranges:
+ if rif.display_name in name_counts:
+ name_counts[rif.display_name] += 1
+ else:
+ name_counts[rif.display_name] = 1
+
+ for rif in self._highlighted_ranges:
+ if name_counts[rif.display_name] > 1:
+ rif.display_name = os.path.join(
+ os.path.basename(os.path.dirname(rif.range.filepath)), rif.display_name)
+ else:
+ rif.display_name = os.path.basename(rif.range.filepath)
+
async def handle_highlighted_code(self, range_in_files: List[RangeInFileWithContents]):
# Filter out rifs from ~/.continue/diffs folder
range_in_files = [
@@ -211,6 +227,7 @@ class Autopilot(ContinueBaseModel):
) for rif in range_in_files]
self._make_sure_is_editing_range()
+ self._disambiguate_highlighted_ranges()
await self.update_subscribers()
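
The new _disambiguate_highlighted_ranges helper makes two passes over the highlighted ranges: it first counts how many ranges share a display name, then prefixes duplicates with their parent folder name while unique entries fall back to the bare file name. A minimal standalone sketch of the same two-pass idea (the function and variable names here are illustrative, not taken from the repository):

    import os
    from typing import List

    def disambiguate(display_names: List[str], filepaths: List[str]) -> List[str]:
        # First pass: count how many highlighted ranges share each display name.
        counts = {}
        for name in display_names:
            counts[name] = counts.get(name, 0) + 1

        # Second pass: prefix duplicates with their parent folder name;
        # unique entries keep just the file name.
        result = []
        for name, path in zip(display_names, filepaths):
            if counts[name] > 1:
                result.append(os.path.join(
                    os.path.basename(os.path.dirname(path)), name))
            else:
                result.append(os.path.basename(path))
        return result

    # Two files both named utils.py become "core/utils.py" and "libs/utils.py".
    print(disambiguate(
        ["utils.py", "utils.py", "main.py"],
        ["/repo/core/utils.py", "/repo/libs/utils.py", "/repo/main.py"]))
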
diff --git a/continuedev/src/continuedev/core/config.py b/continuedev/src/continuedev/core/config.py
index 05ba48c6..6af0878d 100644
--- a/continuedev/src/continuedev/core/config.py
+++ b/continuedev/src/continuedev/core/config.py
@@ -67,16 +67,21 @@ DEFAULT_SLASH_COMMANDS = [
]
+class AzureInfo(BaseModel):
+ endpoint: str
+ engine: str
+ api_version: str
+
+
class ContinueConfig(BaseModel):
"""
A pydantic class for the continue config file.
"""
steps_on_startup: Optional[Dict[str, Dict]] = {}
disallowed_steps: Optional[List[str]] = []
- server_url: Optional[str] = None
allow_anonymous_telemetry: Optional[bool] = True
default_model: Literal["gpt-3.5-turbo", "gpt-3.5-turbo-16k",
- "gpt-4", "claude-2"] = 'gpt-4'
+ "gpt-4", "claude-2", "ggml"] = 'gpt-4'
custom_commands: Optional[List[CustomCommand]] = [CustomCommand(
name="test",
description="This is an example custom command. Use /config to edit it and create more",
@@ -85,6 +90,8 @@ class ContinueConfig(BaseModel):
slash_commands: Optional[List[SlashCommand]] = DEFAULT_SLASH_COMMANDS
on_traceback: Optional[List[OnTracebackSteps]] = [
OnTracebackSteps(step_name="DefaultOnTracebackStep")]
+ system_message: Optional[str] = None
+ azure_openai_info: Optional[AzureInfo] = None
# Want to force these to be the slash commands for now
@validator('slash_commands', pre=True)
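
config.py gains an AzureInfo model plus two new ContinueConfig fields (system_message and azure_openai_info), drops server_url, and adds "ggml" to the allowed default_model values. A hypothetical snippet showing how a user config might populate the new fields; only the field names come from the diff above, and the endpoint, engine, and api_version values are placeholders:

    # Hypothetical values; construction style and placeholders are assumptions.
    config = ContinueConfig(
        default_model="gpt-4",
        system_message="Answer concisely and prefer Python examples.",
        azure_openai_info=AzureInfo(
            endpoint="https://my-resource.openai.azure.com",
            engine="my-gpt4-deployment",
            api_version="2023-05-15",
        ),
    )
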
diff --git a/continuedev/src/continuedev/core/policy.py b/continuedev/src/continuedev/core/policy.py
index bc897357..d007c92b 100644
--- a/continuedev/src/continuedev/core/policy.py
+++ b/continuedev/src/continuedev/core/policy.py
@@ -58,7 +58,7 @@ class DemoPolicy(Policy):
if history.get_current() is None:
return (
MessageStep(name="Welcome to Continue", message=dedent("""\
- - Highlight code and ask a question or give instructions
+ - Highlight code section and ask a question or give instructions
- Use `cmd+m` (Mac) / `ctrl+m` (Windows) to open Continue
- Use `/help` to ask questions about how to use Continue""")) >>
WelcomeStep() >>
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index 28487600..d3501f08 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -12,6 +12,7 @@ from ..models.filesystem import RangeInFile
from ..libs.llm.hf_inference_api import HuggingFaceInferenceAPI
from ..libs.llm.openai import OpenAI
from ..libs.llm.anthropic import Anthropic
+from ..libs.llm.ggml import GGML
from .observation import Observation
from ..server.ide_protocol import AbstractIdeProtocolServer
from .main import Context, ContinueCustomException, History, Step, ChatMessage
@@ -34,10 +35,12 @@ MODEL_PROVIDER_TO_ENV_VAR = {
class Models:
provider_keys: Dict[ModelProvider, str] = {}
model_providers: List[ModelProvider]
+ system_message: str
def __init__(self, sdk: "ContinueSDK", model_providers: List[ModelProvider]):
self.sdk = sdk
self.model_providers = model_providers
+ self.system_message = sdk.config.system_message
@classmethod
async def create(cls, sdk: "ContinueSDK", with_providers: List[ModelProvider] = ["openai"]) -> "Models":
@@ -56,12 +59,12 @@ class Models:
def __load_openai_model(self, model: str) -> OpenAI:
api_key = self.provider_keys["openai"]
if api_key == "":
- return ProxyServer(self.sdk.ide.unique_id, model)
- return OpenAI(api_key=api_key, default_model=model)
+ return ProxyServer(self.sdk.ide.unique_id, model, system_message=self.system_message)
+ return OpenAI(api_key=api_key, default_model=model, system_message=self.system_message, azure_info=self.sdk.config.azure_openai_info)
def __load_hf_inference_api_model(self, model: str) -> HuggingFaceInferenceAPI:
api_key = self.provider_keys["hf_inference_api"]
- return HuggingFaceInferenceAPI(api_key=api_key, model=model)
+ return HuggingFaceInferenceAPI(api_key=api_key, model=model, system_message=self.system_message)
def __load_anthropic_model(self, model: str) -> Anthropic:
api_key = self.provider_keys["anthropic"]
@@ -91,6 +94,10 @@ class Models:
def gpt4(self):
return self.__load_openai_model("gpt-4")
+ @cached_property
+ def ggml(self):
+ return GGML(system_message=self.system_message)
+
def __model_from_name(self, model_name: str):
if model_name == "starcoder":
return self.starcoder
@@ -102,6 +109,8 @@ class Models:
return self.gpt4
elif model_name == "claude-2":
return self.claude2
+ elif model_name == "ggml":
+ return self.ggml
else:
raise Exception(f"Unknown model {model_name}")
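
sdk.py threads config.system_message into every provider constructor and wires up the new GGML backend, so setting default_model="ggml" in the config routes through __model_from_name to a GGML instance carrying the same system message. A simplified sketch of that dispatch, assuming only the constructor signatures visible in this diff (the real Models class is async and also resolves provider API keys):

    # Sketch only; class and attribute names here are illustrative.
    class ModelsSketch:
        def __init__(self, config):
            # Read the system message once and reuse it for every provider.
            self.system_message = config.system_message
            self.openai_api_key = "sk-..."  # placeholder

        def model_from_name(self, model_name: str):
            if model_name == "ggml":
                return GGML(system_message=self.system_message)
            elif model_name == "gpt-4":
                return OpenAI(api_key=self.openai_api_key, default_model="gpt-4",
                              system_message=self.system_message)
            raise Exception(f"Unknown model {model_name}")
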