author     Nate Sesti <sestinj@gmail.com>    2023-07-24 01:31:54 -0700
committer  Nate Sesti <sestinj@gmail.com>    2023-07-24 01:31:54 -0700
commit     96b3f2c15de5f7cdab646323b65a92aeeb08ae17 (patch)
tree       0c6235df9885079de82e5c392042eb2c7aae088c /continuedev
parent     dac960348a938552de4a6fcfaff32e517e6ebcb1 (diff)
parent     6efe8ce9db21f1991dc1b5cc68657f419afca825 (diff)
Merge branch 'main' into config-py
Diffstat (limited to 'continuedev')
-rw-r--r--  continuedev/README.md | 22
-rw-r--r--  continuedev/src/continuedev/core/autopilot.py | 6
-rw-r--r--  continuedev/src/continuedev/core/policy.py | 13
-rw-r--r--  continuedev/src/continuedev/core/sdk.py | 5
-rw-r--r--  continuedev/src/continuedev/libs/llm/__init__.py | 6
-rw-r--r--  continuedev/src/continuedev/libs/llm/anthropic.py | 6
-rw-r--r--  continuedev/src/continuedev/libs/llm/ggml.py | 6
-rw-r--r--  continuedev/src/continuedev/libs/llm/hf_inference_api.py | 2
-rw-r--r--  continuedev/src/continuedev/libs/llm/openai.py | 8
-rw-r--r--  continuedev/src/continuedev/libs/llm/proxy_server.py | 35
-rw-r--r--  continuedev/src/continuedev/libs/util/commonregex.py | 138
-rw-r--r--  continuedev/src/continuedev/libs/util/count_tokens.py | 19
-rw-r--r--  continuedev/src/continuedev/libs/util/telemetry.py | 7
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/README.md (renamed from continuedev/src/continuedev/recipes/AddTransformRecipe/README.md) | 0
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/dlt_transform_docs.md (renamed from continuedev/src/continuedev/recipes/AddTransformRecipe/dlt_transform_docs.md) | 0
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/main.py (renamed from continuedev/src/continuedev/recipes/AddTransformRecipe/main.py) | 8
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/steps.py (renamed from continuedev/src/continuedev/recipes/AddTransformRecipe/steps.py) | 12
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/ContinueRecipeRecipe/README.md (renamed from continuedev/src/continuedev/recipes/ContinueRecipeRecipe/README.md) | 0
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/ContinueRecipeRecipe/main.py (renamed from continuedev/src/continuedev/recipes/ContinueRecipeRecipe/main.py) | 7
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/README.md (renamed from continuedev/src/continuedev/recipes/CreatePipelineRecipe/README.md) | 0
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/main.py (renamed from continuedev/src/continuedev/recipes/CreatePipelineRecipe/main.py) | 8
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/steps.py (renamed from continuedev/src/continuedev/recipes/CreatePipelineRecipe/steps.py) | 14
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/README.md (renamed from continuedev/src/continuedev/recipes/DDtoBQRecipe/README.md) | 0
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/dlt_duckdb_to_bigquery_docs.md (renamed from continuedev/src/continuedev/recipes/DDtoBQRecipe/dlt_duckdb_to_bigquery_docs.md) | 0
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/main.py (renamed from continuedev/src/continuedev/recipes/DDtoBQRecipe/main.py) | 7
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/steps.py (renamed from continuedev/src/continuedev/recipes/DDtoBQRecipe/steps.py) | 18
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/README.md (renamed from continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/README.md) | 0
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/main.py (renamed from continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/main.py) | 9
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/steps.py (renamed from continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/steps.py) | 21
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/README.md (renamed from continuedev/src/continuedev/recipes/README.md) | 2
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/TemplateRecipe/README.md (renamed from continuedev/src/continuedev/recipes/TemplateRecipe/README.md) | 0
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/TemplateRecipe/main.py (renamed from continuedev/src/continuedev/recipes/TemplateRecipe/main.py) | 4
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/WritePytestsRecipe/README.md (renamed from continuedev/src/continuedev/recipes/WritePytestsRecipe/README.md) | 0
-rw-r--r--  continuedev/src/continuedev/plugins/recipes/WritePytestsRecipe/main.py (renamed from continuedev/src/continuedev/recipes/WritePytestsRecipe/main.py) | 5
-rw-r--r--  continuedev/src/continuedev/plugins/steps/README.md | 50
-rw-r--r--  continuedev/src/continuedev/plugins/steps/__init__.py (renamed from continuedev/src/continuedev/steps/__init__.py) | 0
-rw-r--r--  continuedev/src/continuedev/plugins/steps/chat.py (renamed from continuedev/src/continuedev/steps/chat.py) | 8
-rw-r--r--  continuedev/src/continuedev/plugins/steps/chroma.py (renamed from continuedev/src/continuedev/steps/chroma.py) | 8
-rw-r--r--  continuedev/src/continuedev/plugins/steps/clear_history.py (renamed from continuedev/src/continuedev/steps/clear_history.py) | 4
-rw-r--r--  continuedev/src/continuedev/plugins/steps/comment_code.py (renamed from continuedev/src/continuedev/steps/comment_code.py) | 2
-rw-r--r--  continuedev/src/continuedev/plugins/steps/core/core.py (renamed from continuedev/src/continuedev/steps/core/core.py) | 32
-rw-r--r--  continuedev/src/continuedev/plugins/steps/custom_command.py (renamed from continuedev/src/continuedev/steps/custom_command.py) | 6
-rw-r--r--  continuedev/src/continuedev/plugins/steps/draft/abstract_method.py (renamed from continuedev/src/continuedev/steps/draft/abstract_method.py) | 0
-rw-r--r--  continuedev/src/continuedev/plugins/steps/draft/migration.py (renamed from continuedev/src/continuedev/steps/draft/migration.py) | 2
-rw-r--r--  continuedev/src/continuedev/plugins/steps/draft/redux.py (renamed from continuedev/src/continuedev/steps/draft/redux.py) | 8
-rw-r--r--  continuedev/src/continuedev/plugins/steps/draft/typeorm.py (renamed from continuedev/src/continuedev/steps/draft/typeorm.py) | 4
-rw-r--r--  continuedev/src/continuedev/plugins/steps/feedback.py (renamed from continuedev/src/continuedev/steps/feedback.py) | 8
-rw-r--r--  continuedev/src/continuedev/plugins/steps/find_and_replace.py (renamed from continuedev/src/continuedev/steps/find_and_replace.py) | 6
-rw-r--r--  continuedev/src/continuedev/plugins/steps/help.py (renamed from continuedev/src/continuedev/steps/help.py) | 6
-rw-r--r--  continuedev/src/continuedev/plugins/steps/input/nl_multiselect.py (renamed from continuedev/src/continuedev/steps/input/nl_multiselect.py) | 4
-rw-r--r--  continuedev/src/continuedev/plugins/steps/main.py (renamed from continuedev/src/continuedev/steps/main.py) | 26
-rw-r--r--  continuedev/src/continuedev/plugins/steps/on_traceback.py (renamed from continuedev/src/continuedev/steps/on_traceback.py) | 4
-rw-r--r--  continuedev/src/continuedev/plugins/steps/open_config.py (renamed from continuedev/src/continuedev/steps/open_config.py) | 4
-rw-r--r--  continuedev/src/continuedev/plugins/steps/react.py (renamed from continuedev/src/continuedev/steps/react.py) | 5
-rw-r--r--  continuedev/src/continuedev/plugins/steps/search_directory.py (renamed from continuedev/src/continuedev/steps/search_directory.py) | 10
-rw-r--r--  continuedev/src/continuedev/plugins/steps/steps_on_startup.py | 17
-rw-r--r--  continuedev/src/continuedev/plugins/steps/welcome.py (renamed from continuedev/src/continuedev/steps/welcome.py) | 7
-rw-r--r--  continuedev/src/continuedev/server/session_manager.py | 4
-rw-r--r--  continuedev/src/continuedev/steps/steps_on_startup.py | 22
59 files changed, 412 insertions, 223 deletions
diff --git a/continuedev/README.md b/continuedev/README.md
index 528cf75a..d3ead8ec 100644
--- a/continuedev/README.md
+++ b/continuedev/README.md
@@ -1,19 +1,29 @@
# Continue PyPI Package
-This package contains the [Continue](https://github.com/continuedev.com/continue) server and core classes needed to build your own recipes.
+This package contains the [Continue](https://github.com/continuedev/continue) server and core classes needed to build your own recipes.
Continue is a Python library for automating repetitive sequences of software development tasks using language models. Using our VS Code extension, you can build, run, and refine these recipes as they natively interact with your codebase. Read the docs [here](https://continue.dev/docs) or download the VS Code extension [here](https://marketplace.visualstudio.com/items?itemName=Continue.continue).
## Continue Server
-The Continue server acts as a bridge between the Continue React app and your IDE, running your recipes and acting on the codebase.
+The Continue server acts as a bridge between the Continue React app and your IDE, running your recipes and acting on the codebase.
Start it by running the following commands:
+
1. `cd continuedev`
2. Make sure packages are installed with `poetry install`
-3. `poetry shell`
+ - If poetry is not installed, you can install it with
+ ```bash
+ curl -sSL https://install.python-poetry.org | python3 -
+ ```
+ (official instructions [here](https://python-poetry.org/docs/#installing-with-the-official-installer))
+3. `poetry shell` to activate the virtual environment
4. `cd ..`
-5. `python3 -m continuedev.src.continuedev.server.main`
+5. `python3 -m continuedev.src.continuedev.server.main` to start the server
+
+Once you've validated that this works, you'll often want to use a debugger, in which case we've provided a launch configuration for VS Code in `.vscode/launch.json`. To start the debugger in VS Code, ensure that the workspace directory is the root of the `continue` repo, then press F5.
+
+> Note: To start the debugger, you'll have to select the poetry Python interpreter (`/path-to-poetry-venv/bin/python3`) in the bottom right of the VS Code window. If you don't see this, you may have to install the [Python extension](https://marketplace.visualstudio.com/items?itemName=ms-python.python).
## Scripts
@@ -29,6 +39,8 @@ See the `src/continuedev/libs/steps` folder for examples of writing a Continue s
Open a [new GitHub Issue](https://github.com/continuedev/continue/issues/new) or comment on [an existing one](https://github.com/continuedev/continue/issues). Let us know what you would like to contribute, and we will help you make it happen!
+For a more detailed contributing guide, see [CONTRIBUTING.md](../CONTRIBUTING.md).
+
## Install from source
#### 1. Clone this repo
@@ -60,4 +72,4 @@ cd continue/extension/scripts && python3 install_from_source.py
- [Continue GUI README](./extension/react-app/): learn about the React app that lets users interact with the server and is placed adjacent to the text editor in any supported IDE
- [Schema README](./schema): learn about the JSON Schema types generated from Pydantic models, which we use across the `continuedev/` and `extension/` directories
- [Continue Docs README](./docs): learn how our [docs](https://continue.dev/docs) are written and built
-- [How to debug the VS Code Extension README](./extension/src/README.md): learn how to set up the VS Code extension, so you can debug it \ No newline at end of file
+- [How to debug the VS Code Extension README](./extension/src/README.md): learn how to set up the VS Code extension, so you can debug it
diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py
index 4dff2c6d..003962c6 100644
--- a/continuedev/src/continuedev/core/autopilot.py
+++ b/continuedev/src/continuedev/core/autopilot.py
@@ -15,8 +15,8 @@ from ..libs.context_providers.highlighted_code_context_provider import Highlight
from ..server.ide_protocol import AbstractIdeProtocolServer
from ..libs.util.queue import AsyncSubscriptionQueue
from ..models.main import ContinueBaseModel
-from .main import Context, ContinueCustomException, Policy, History, FullState, Step, HistoryNode
-from ..steps.core.core import ReversibleStep, ManualEditStep, UserInputStep
+from .main import Context, ContinueCustomException, HighlightedRangeContext, Policy, History, FullState, Step, HistoryNode
+from ..plugins.steps.core.core import ReversibleStep, ManualEditStep, UserInputStep
from ..libs.util.telemetry import capture_event
from .sdk import ContinueSDK
from ..libs.util.traceback_parsers import get_python_traceback, get_javascript_traceback
@@ -39,6 +39,8 @@ def get_error_title(e: Exception) -> str:
return "The request failed. Please check your internet connection and try again. If this issue persists, you can use our API key for free by going to VS Code settings and changing the value of continue.OPENAI_API_KEY to \"\""
elif isinstance(e, openai_errors.InvalidRequestError):
return 'Invalid request sent to OpenAI. Please try again.'
+ elif "rate_limit_ip_middleware" in e.__str__():
+ return 'You have reached your limit for free usage of our token. You can continue using Continue by entering your own OpenAI API key in VS Code settings.'
elif e.__str__().startswith("Cannot connect to host"):
return "The request failed. Please check your internet connection and try again."
return e.__str__() or e.__repr__()
diff --git a/continuedev/src/continuedev/core/policy.py b/continuedev/src/continuedev/core/policy.py
index 53e482fa..1c87cfeb 100644
--- a/continuedev/src/continuedev/core/policy.py
+++ b/continuedev/src/continuedev/core/policy.py
@@ -1,14 +1,15 @@
from textwrap import dedent
from typing import Union
-from ..steps.chat import SimpleChatStep
-from ..steps.welcome import WelcomeStep
+from ..plugins.steps.chat import SimpleChatStep
+from ..plugins.steps.welcome import WelcomeStep
from .config import ContinueConfig
-from ..steps.steps_on_startup import StepsOnStartupStep
+from ..plugins.steps.steps_on_startup import StepsOnStartupStep
from .main import Step, History, Policy
from .observation import UserInputObservation
-from ..steps.core.core import MessageStep
-from ..steps.custom_command import CustomCommandStep
+from ..plugins.steps.core.core import MessageStep
+from ..plugins.steps.custom_command import CustomCommandStep
+from ..plugins.steps.main import EditHighlightedCodeStep
def parse_slash_command(inp: str, config: ContinueConfig) -> Union[None, Step]:
@@ -43,7 +44,7 @@ def parse_custom_command(inp: str, config: ContinueConfig) -> Union[None, Step]:
return None
-class DemoPolicy(Policy):
+class DefaultPolicy(Policy):
ran_code_last: bool = False
def next(self, config: ContinueConfig, history: History) -> Step:
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index 59f33707..f925f20f 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -3,8 +3,7 @@ from functools import cached_property
from typing import Coroutine, Dict, Union
import os
-
-from ..steps.core.core import DefaultModelEditCodeStep
+from ..plugins.steps.core.core import DefaultModelEditCodeStep
from ..models.main import Range
from .context import ContextItem
from .abstract_sdk import AbstractContinueSDK
@@ -18,7 +17,7 @@ from ..libs.llm.ggml import GGML
from .observation import Observation
from ..server.ide_protocol import AbstractIdeProtocolServer
from .main import Context, ContinueCustomException, History, HistoryNode, Step, ChatMessage
-from ..steps.core.core import *
+from ..plugins.steps.core.core import *
from ..libs.llm.proxy_server import ProxyServer
diff --git a/continuedev/src/continuedev/libs/llm/__init__.py b/continuedev/src/continuedev/libs/llm/__init__.py
index 4c4de213..2766db4b 100644
--- a/continuedev/src/continuedev/libs/llm/__init__.py
+++ b/continuedev/src/continuedev/libs/llm/__init__.py
@@ -9,15 +9,15 @@ from pydantic import BaseModel
class LLM(ABC):
system_message: Union[str, None] = None
- async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
+ async def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, str]:
"""Return the completion of the text with the given temperature."""
raise NotImplementedError
- def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ def stream_complete(self, prompt, with_history: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
"""Stream the completion through generator."""
raise NotImplementedError
- async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ async def stream_chat(self, messages: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
"""Stream the chat through generator."""
raise NotImplementedError
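
The signature change above (a default `with_history`/`messages` of `None` instead of `[]`) is repeated across every LLM subclass below. It avoids Python's shared-mutable-default pitfall: a list literal used as a default value is created once when the function is defined and then reused by every call. A minimal, self-contained sketch of the pitfall and the fix (the helper names are illustrative, not part of the Continue API):

```python
from typing import List, Optional


def append_history_buggy(msg: str, history: List[str] = []) -> List[str]:
    # The default list is created once and shared by every call.
    history.append(msg)
    return history


def append_history_fixed(msg: str, history: Optional[List[str]] = None) -> List[str]:
    # Create a fresh list per call when the caller passes nothing.
    history = history if history is not None else []
    history.append(msg)
    return history


print(append_history_buggy("a"))  # ['a']
print(append_history_buggy("b"))  # ['a', 'b']  <- state leaked between calls
print(append_history_fixed("a"))  # ['a']
print(append_history_fixed("b"))  # ['b']
```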
diff --git a/continuedev/src/continuedev/libs/llm/anthropic.py b/continuedev/src/continuedev/libs/llm/anthropic.py
index c82895c6..625d4e57 100644
--- a/continuedev/src/continuedev/libs/llm/anthropic.py
+++ b/continuedev/src/continuedev/libs/llm/anthropic.py
@@ -54,7 +54,7 @@ class AnthropicLLM(LLM):
prompt += AI_PROMPT
return prompt
- async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ async def stream_complete(self, prompt, with_history: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = self.default_args.copy()
args.update(kwargs)
args["stream"] = True
@@ -66,7 +66,7 @@ class AnthropicLLM(LLM):
):
yield chunk.completion
- async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ async def stream_chat(self, messages: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = self.default_args.copy()
args.update(kwargs)
args["stream"] = True
@@ -83,7 +83,7 @@ class AnthropicLLM(LLM):
"content": chunk.completion
}
- async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
+ async def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, str]:
args = {**self.default_args, **kwargs}
args = self._transform_args(args)
diff --git a/continuedev/src/continuedev/libs/llm/ggml.py b/continuedev/src/continuedev/libs/llm/ggml.py
index 6007fdb4..4889a556 100644
--- a/continuedev/src/continuedev/libs/llm/ggml.py
+++ b/continuedev/src/continuedev/libs/llm/ggml.py
@@ -26,7 +26,7 @@ class GGML(LLM):
def count_tokens(self, text: str):
return count_tokens(self.name, text)
- async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ async def stream_complete(self, prompt, with_history: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = self.default_args.copy()
args.update(kwargs)
args["stream"] = True
@@ -47,7 +47,7 @@ class GGML(LLM):
except:
raise Exception(str(line))
- async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ async def stream_chat(self, messages: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
self.name, messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message)
@@ -72,7 +72,7 @@ class GGML(LLM):
except:
raise Exception(str(line[0]))
- async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
+ async def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, str]:
args = {**self.default_args, **kwargs}
async with aiohttp.ClientSession() as session:
diff --git a/continuedev/src/continuedev/libs/llm/hf_inference_api.py b/continuedev/src/continuedev/libs/llm/hf_inference_api.py
index 7e11fbbe..36f03270 100644
--- a/continuedev/src/continuedev/libs/llm/hf_inference_api.py
+++ b/continuedev/src/continuedev/libs/llm/hf_inference_api.py
@@ -16,7 +16,7 @@ class HuggingFaceInferenceAPI(LLM):
self.model = model
self.system_message = system_message # TODO: Nothing being done with this
- def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs):
+ def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs):
"""Return the completion of the text with the given temperature."""
API_URL = f"https://api-inference.huggingface.co/models/{self.model}"
headers = {
diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py
index 64bb39a2..a0773c1d 100644
--- a/continuedev/src/continuedev/libs/llm/openai.py
+++ b/continuedev/src/continuedev/libs/llm/openai.py
@@ -42,7 +42,7 @@ class OpenAI(LLM):
def count_tokens(self, text: str):
return count_tokens(self.default_model, text)
- async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ async def stream_complete(self, prompt, with_history: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = self.default_args.copy()
args.update(kwargs)
args["stream"] = True
@@ -72,7 +72,7 @@ class OpenAI(LLM):
self.write_log(f"Completion:\n\n{completion}")
- async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ async def stream_chat(self, messages: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = self.default_args.copy()
args.update(kwargs)
args["stream"] = True
@@ -81,7 +81,7 @@ class OpenAI(LLM):
del args["functions"]
messages = compile_chat_messages(
- args["model"], messages, args["max_tokens"], functions=args.get("functions", None), system_message=self.system_message)
+ args["model"], messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message)
self.write_log(f"Prompt: \n\n{format_chat_messages(messages)}")
completion = ""
async for chunk in await openai.ChatCompletion.acreate(
@@ -93,7 +93,7 @@ class OpenAI(LLM):
completion += chunk.choices[0].delta.content
self.write_log(f"Completion: \n\n{completion}")
- async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
+ async def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, str]:
args = {**self.default_args, **kwargs}
if args["model"] in CHAT_MODELS:
diff --git a/continuedev/src/continuedev/libs/llm/proxy_server.py b/continuedev/src/continuedev/libs/llm/proxy_server.py
index 46ab19ee..17694afe 100644
--- a/continuedev/src/continuedev/libs/llm/proxy_server.py
+++ b/continuedev/src/continuedev/libs/llm/proxy_server.py
@@ -36,8 +36,12 @@ class ProxyServer(LLM):
def count_tokens(self, text: str):
return count_tokens(self.default_model, text)
+
+ def get_headers(self):
+ # headers with unique id
+ return {"unique_id": self.unique_id}
- async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
+ async def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, str]:
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
@@ -46,17 +50,16 @@ class ProxyServer(LLM):
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/complete", json={
"messages": messages,
- "unique_id": self.unique_id,
**args
- }) as resp:
- try:
- response_text = await resp.text()
- self.write_log(f"Completion: \n\n{response_text}")
- return response_text
- except:
+ }, headers=self.get_headers()) as resp:
+ if resp.status != 200:
raise Exception(await resp.text())
- async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, Generator[Union[Any, List, Dict], None, None]]:
+ response_text = await resp.text()
+ self.write_log(f"Completion: \n\n{response_text}")
+ return response_text
+
+ async def stream_chat(self, messages: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, Generator[Union[Any, List, Dict], None, None]]:
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
args["model"], messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message)
@@ -65,11 +68,12 @@ class ProxyServer(LLM):
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/stream_chat", json={
"messages": messages,
- "unique_id": self.unique_id,
**args
- }) as resp:
+ }, headers=self.get_headers()) as resp:
# This is streaming application/json instead of text/event-stream
completion = ""
+ if resp.status != 200:
+ raise Exception(await resp.text())
async for line in resp.content.iter_chunks():
if line[1]:
try:
@@ -85,10 +89,12 @@ class ProxyServer(LLM):
except Exception as e:
capture_event(self.unique_id, "proxy_server_parse_error", {
"error_title": "Proxy server stream_chat parsing failed", "error_message": '\n'.join(traceback.format_exception(e))})
+ else:
+ break
self.write_log(f"Completion: \n\n{completion}")
- async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ async def stream_complete(self, prompt, with_history: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
self.default_model, with_history, args["max_tokens"], prompt, functions=args.get("functions", None), system_message=self.system_message)
@@ -97,10 +103,11 @@ class ProxyServer(LLM):
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/stream_complete", json={
"messages": messages,
- "unique_id": self.unique_id,
**args
- }) as resp:
+ }, headers=self.get_headers()) as resp:
completion = ""
+ if resp.status != 200:
+ raise Exception(await resp.text())
async for line in resp.content.iter_any():
if line:
try:
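
Three changes recur in the `ProxyServer` methods above: the client's `unique_id` moves from the JSON body into a request header built by `get_headers()`, non-200 responses now raise immediately with the response text, and the body is only read after that check. A minimal sketch of the request shape, assuming a placeholder `SERVER_URL` and a bare `unique_id` string rather than the full class:

```python
import aiohttp

SERVER_URL = "https://example-proxy.continue.dev"  # placeholder, not the real endpoint


async def post_complete(messages: list, unique_id: str, **args) -> str:
    async with aiohttp.ClientSession() as session:
        async with session.post(
            f"{SERVER_URL}/complete",
            json={"messages": messages, **args},   # unique_id no longer in the body
            headers={"unique_id": unique_id},      # ...it is sent as a header instead
        ) as resp:
            if resp.status != 200:                 # fail fast on error responses
                raise Exception(await resp.text())
            return await resp.text()
```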
diff --git a/continuedev/src/continuedev/libs/util/commonregex.py b/continuedev/src/continuedev/libs/util/commonregex.py
new file mode 100644
index 00000000..55da7fc0
--- /dev/null
+++ b/continuedev/src/continuedev/libs/util/commonregex.py
@@ -0,0 +1,138 @@
+# coding: utf-8
+import json
+import re
+from typing import Any, Dict
+
+date = re.compile(
+ '(?:(?<!\:)(?<!\:\d)[0-3]?\d(?:st|nd|rd|th)?\s+(?:of\s+)?(?:jan\.?|january|feb\.?|february|mar\.?|march|apr\.?|april|may|jun\.?|june|jul\.?|july|aug\.?|august|sep\.?|september|oct\.?|october|nov\.?|november|dec\.?|december)|(?:jan\.?|january|feb\.?|february|mar\.?|march|apr\.?|april|may|jun\.?|june|jul\.?|july|aug\.?|august|sep\.?|september|oct\.?|october|nov\.?|november|dec\.?|december)\s+(?<!\:)(?<!\:\d)[0-3]?\d(?:st|nd|rd|th)?)(?:\,)?\s*(?:\d{4})?|[0-3]?\d[-\./][0-3]?\d[-\./]\d{2,4}', re.IGNORECASE)
+time = re.compile(
+ '\d{1,2}:\d{2} ?(?:[ap]\.?m\.?)?|\d[ap]\.?m\.?', re.IGNORECASE)
+phone = re.compile(
+ '''((?:(?<![\d-])(?:\+?\d{1,3}[-.\s*]?)?(?:\(?\d{3}\)?[-.\s*]?)?\d{3}[-.\s*]?\d{4}(?![\d-]))|(?:(?<![\d-])(?:(?:\(\+?\d{2}\))|(?:\+?\d{2}))\s*\d{2}\s*\d{3}\s*\d{4}(?![\d-])))''')
+phones_with_exts = re.compile(
+ '((?:(?:\+?1\s*(?:[.-]\s*)?)?(?:\(\s*(?:[2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9])\s*\)|(?:[2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9]))\s*(?:[.-]\s*)?)?(?:[2-9]1[02-9]|[2-9][02-9]1|[2-9][02-9]{2})\s*(?:[.-]\s*)?(?:[0-9]{4})(?:\s*(?:#|x\.?|ext\.?|extension)\s*(?:\d+)?))', re.IGNORECASE)
+link = re.compile('(?i)((?:https?://|www\d{0,3}[.])?[a-z0-9.\-]+[.](?:(?:international)|(?:construction)|(?:contractors)|(?:enterprises)|(?:photography)|(?:immobilien)|(?:management)|(?:technology)|(?:directory)|(?:education)|(?:equipment)|(?:institute)|(?:marketing)|(?:solutions)|(?:builders)|(?:clothing)|(?:computer)|(?:democrat)|(?:diamonds)|(?:graphics)|(?:holdings)|(?:lighting)|(?:plumbing)|(?:training)|(?:ventures)|(?:academy)|(?:careers)|(?:company)|(?:domains)|(?:florist)|(?:gallery)|(?:guitars)|(?:holiday)|(?:kitchen)|(?:recipes)|(?:shiksha)|(?:singles)|(?:support)|(?:systems)|(?:agency)|(?:berlin)|(?:camera)|(?:center)|(?:coffee)|(?:estate)|(?:kaufen)|(?:luxury)|(?:monash)|(?:museum)|(?:photos)|(?:repair)|(?:social)|(?:tattoo)|(?:travel)|(?:viajes)|(?:voyage)|(?:build)|(?:cheap)|(?:codes)|(?:dance)|(?:email)|(?:glass)|(?:house)|(?:ninja)|(?:photo)|(?:shoes)|(?:solar)|(?:today)|(?:aero)|(?:arpa)|(?:asia)|(?:bike)|(?:buzz)|(?:camp)|(?:club)|(?:coop)|(?:farm)|(?:gift)|(?:guru)|(?:info)|(?:jobs)|(?:kiwi)|(?:land)|(?:limo)|(?:link)|(?:menu)|(?:mobi)|(?:moda)|(?:name)|(?:pics)|(?:pink)|(?:post)|(?:rich)|(?:ruhr)|(?:sexy)|(?:tips)|(?:wang)|(?:wien)|(?:zone)|(?:biz)|(?:cab)|(?:cat)|(?:ceo)|(?:com)|(?:edu)|(?:gov)|(?:int)|(?:mil)|(?:net)|(?:onl)|(?:org)|(?:pro)|(?:red)|(?:tel)|(?:uno)|(?:xxx)|(?:ac)|(?:ad)|(?:ae)|(?:af)|(?:ag)|(?:ai)|(?:al)|(?:am)|(?:an)|(?:ao)|(?:aq)|(?:ar)|(?:as)|(?:at)|(?:au)|(?:aw)|(?:ax)|(?:az)|(?:ba)|(?:bb)|(?:bd)|(?:be)|(?:bf)|(?:bg)|(?:bh)|(?:bi)|(?:bj)|(?:bm)|(?:bn)|(?:bo)|(?:br)|(?:bs)|(?:bt)|(?:bv)|(?:bw)|(?:by)|(?:bz)|(?:ca)|(?:cc)|(?:cd)|(?:cf)|(?:cg)|(?:ch)|(?:ci)|(?:ck)|(?:cl)|(?:cm)|(?:cn)|(?:co)|(?:cr)|(?:cu)|(?:cv)|(?:cw)|(?:cx)|(?:cy)|(?:cz)|(?:de)|(?:dj)|(?:dk)|(?:dm)|(?:do)|(?:dz)|(?:ec)|(?:ee)|(?:eg)|(?:er)|(?:es)|(?:et)|(?:eu)|(?:fi)|(?:fj)|(?:fk)|(?:fm)|(?:fo)|(?:fr)|(?:ga)|(?:gb)|(?:gd)|(?:ge)|(?:gf)|(?:gg)|(?:gh)|(?:gi)|(?:gl)|(?:gm)|(?:gn)|(?:gp)|(?:gq)|(?:gr)|(?:gs)|(?:gt)|(?:gu)|(?:gw)|(?:gy)|(?:hk)|(?:hm)|(?:hn)|(?:hr)|(?:ht)|(?:hu)|(?:id)|(?:ie)|(?:il)|(?:im)|(?:in)|(?:io)|(?:iq)|(?:ir)|(?:is)|(?:it)|(?:je)|(?:jm)|(?:jo)|(?:jp)|(?:ke)|(?:kg)|(?:kh)|(?:ki)|(?:km)|(?:kn)|(?:kp)|(?:kr)|(?:kw)|(?:ky)|(?:kz)|(?:la)|(?:lb)|(?:lc)|(?:li)|(?:lk)|(?:lr)|(?:ls)|(?:lt)|(?:lu)|(?:lv)|(?:ly)|(?:ma)|(?:mc)|(?:md)|(?:me)|(?:mg)|(?:mh)|(?:mk)|(?:ml)|(?:mm)|(?:mn)|(?:mo)|(?:mp)|(?:mq)|(?:mr)|(?:ms)|(?:mt)|(?:mu)|(?:mv)|(?:mw)|(?:mx)|(?:my)|(?:mz)|(?:na)|(?:nc)|(?:ne)|(?:nf)|(?:ng)|(?:ni)|(?:nl)|(?:no)|(?:np)|(?:nr)|(?:nu)|(?:nz)|(?:om)|(?:pa)|(?:pe)|(?:pf)|(?:pg)|(?:ph)|(?:pk)|(?:pl)|(?:pm)|(?:pn)|(?:pr)|(?:ps)|(?:pt)|(?:pw)|(?:py)|(?:qa)|(?:re)|(?:ro)|(?:rs)|(?:ru)|(?:rw)|(?:sa)|(?:sb)|(?:sc)|(?:sd)|(?:se)|(?:sg)|(?:sh)|(?:si)|(?:sj)|(?:sk)|(?:sl)|(?:sm)|(?:sn)|(?:so)|(?:sr)|(?:st)|(?:su)|(?:sv)|(?:sx)|(?:sy)|(?:sz)|(?:tc)|(?:td)|(?:tf)|(?:tg)|(?:th)|(?:tj)|(?:tk)|(?:tl)|(?:tm)|(?:tn)|(?:to)|(?:tp)|(?:tr)|(?:tt)|(?:tv)|(?:tw)|(?:tz)|(?:ua)|(?:ug)|(?:uk)|(?:us)|(?:uy)|(?:uz)|(?:va)|(?:vc)|(?:ve)|(?:vg)|(?:vi)|(?:vn)|(?:vu)|(?:wf)|(?:ws)|(?:ye)|(?:yt)|(?:za)|(?:zm)|(?:zw))(?:/[^\s()<>]+[^\s`!()\[\]{};:\'".,<>?\xab\xbb\u201c\u201d\u2018\u2019])?)', re.IGNORECASE)
+email = re.compile(
+ "([a-z0-9!#$%&'*+\/=?^_`{|.}~-]+@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)", re.IGNORECASE)
+ip = re.compile('(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)', re.IGNORECASE)
+ipv6 = re.compile(
+ '\s*(?!.*::.*::)(?:(?!:)|:(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)){3})\s*', re.VERBOSE | re.IGNORECASE | re.DOTALL)
+price = re.compile('[$]\s?[+-]?[0-9]{1,3}(?:(?:,?[0-9]{3}))*(?:\.[0-9]{1,2})?')
+hex_color = re.compile('(#(?:[0-9a-fA-F]{8})|#(?:[0-9a-fA-F]{3}){1,2})\\b')
+credit_card = re.compile('((?:(?:\\d{4}[- ]?){3}\\d{4}|\\d{15,16}))(?![\\d])')
+btc_address = re.compile(
+ '(?<![a-km-zA-HJ-NP-Z0-9])[13][a-km-zA-HJ-NP-Z0-9]{26,33}(?![a-km-zA-HJ-NP-Z0-9])')
+street_address = re.compile(
+ '\d{1,4} [\w\s]{1,20}(?:street|st|avenue|ave|road|rd|highway|hwy|square|sq|trail|trl|drive|dr|court|ct|park|parkway|pkwy|circle|cir|boulevard|blvd)\W?(?=\s|$)', re.IGNORECASE)
+zip_code = re.compile(r'\b\d{5}(?:[-\s]\d{4})?\b')
+po_box = re.compile(r'P\.? ?O\.? Box \d+', re.IGNORECASE)
+ssn = re.compile(
+ '(?!000|666|333)0*(?:[0-6][0-9][0-9]|[0-7][0-6][0-9]|[0-7][0-7][0-2])[- ](?!00)[0-9]{2}[- ](?!0000)[0-9]{4}')
+win_absolute_filepath = re.compile(
+ r'^(?:[a-zA-Z]\:|\\\\[\w\.]+\\[\w.$]+)\\(?:[\w]+\\)*\w([\w.])+', re.IGNORECASE)
+unix_absolute_filepath = re.compile(
+ r'^\/(?:[\/\w]+\/)*\w([\w.])+', re.IGNORECASE)
+
+regexes = {
+ "win_absolute_filepath": win_absolute_filepath,
+ "unix_absolute_filepath": unix_absolute_filepath,
+ "dates": date,
+ "times": time,
+ "phones": phone,
+ "phones_with_exts": phones_with_exts,
+ "links": link,
+ "emails": email,
+ "ips": ip,
+ "ipv6s": ipv6,
+ "prices": price,
+ "hex_colors": hex_color,
+ "credit_cards": credit_card,
+ "btc_addresses": btc_address,
+ "street_addresses": street_address,
+ "zip_codes": zip_code,
+ "po_boxes": po_box,
+ "ssn_number": ssn,
+}
+
+placeholders = {
+ "win_absolute_filepath": "<FILEPATH>",
+ "unix_absolute_filepath": "<FILEPATH>",
+ "dates": "<DATE>",
+ "times": "<TIME>",
+ "phones": "<PHONE>",
+ "phones_with_exts": "<PHONE_WITH_EXT>",
+ "links": "<LINK>",
+ "emails": "<EMAIL>",
+ "ips": "<IP>",
+ "ipv6s": "<IPV6>",
+ "prices": "<PRICE>",
+ "hex_colors": "<HEX_COLOR>",
+ "credit_cards": "<CREDIT_CARD>",
+ "btc_addresses": "<BTC_ADDRESS>",
+ "street_addresses": "<STREET_ADDRESS>",
+ "zip_codes": "<ZIP_CODE>",
+ "po_boxes": "<PO_BOX>",
+ "ssn_number": "<SSN>",
+}
+
+
+class regex:
+
+ def __init__(self, obj, regex):
+ self.obj = obj
+ self.regex = regex
+
+ def __call__(self, *args):
+ def regex_method(text=None):
+ return [x.strip() for x in self.regex.findall(text or self.obj.text)]
+ return regex_method
+
+
+class CommonRegex(object):
+
+ def __init__(self, text=""):
+ self.text = text
+
+ for k, v in list(regexes.items()):
+ setattr(self, k, regex(self, v)(self))
+
+ if text:
+ for key in list(regexes.keys()):
+ method = getattr(self, key)
+ setattr(self, key, method())
+
+
+pii_parser = CommonRegex()
+
+
+def clean_pii_from_str(text: str):
+ """Replace personally identifiable information (PII) with placeholders."""
+ for regex_name, regex in list(regexes.items()):
+ placeholder = placeholders[regex_name]
+ text = regex.sub(placeholder, text)
+
+ return text
+
+
+def clean_pii_from_any(v: Any) -> Any:
+ """Replace personally identifiable information (PII) with placeholders. Not guaranteed to return same type as input."""
+ if isinstance(v, str):
+ return clean_pii_from_str(v)
+ elif isinstance(v, dict):
+ cleaned_dict = {}
+ for key, value in v.items():
+ cleaned_dict[key] = clean_pii_from_any(value)
+ return cleaned_dict
+ elif isinstance(v, list):
+ return [clean_pii_from_any(x) for x in v]
+ else:
+ # Try to convert to string
+ try:
+ orig_text = str(v)
+ cleaned_text = clean_pii_from_str(orig_text)
+ if orig_text != cleaned_text:
+ return cleaned_text
+ else:
+ return v
+ except:
+ return v
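
The new `commonregex.py` module is a best-effort PII scrubber used by telemetry below: each named pattern has a matching placeholder, `clean_pii_from_str` substitutes matches in a string, and `clean_pii_from_any` recurses through dicts and lists. A small usage sketch (the sample values are illustrative, and it assumes the package is importable from the repo root as described in the README above):

```python
from continuedev.src.continuedev.libs.util.commonregex import (
    clean_pii_from_any,
    clean_pii_from_str,
)

print(clean_pii_from_str("request from 192.168.1.10 failed"))
# -> "request from <IP> failed"

print(clean_pii_from_any({"config": "/home/jane/secret/notes", "attempts": 3}))
# -> {"config": "<FILEPATH>", "attempts": 3}
```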
diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py
index 987aa722..c58ae499 100644
--- a/continuedev/src/continuedev/libs/util/count_tokens.py
+++ b/continuedev/src/continuedev/libs/util/count_tokens.py
@@ -73,9 +73,9 @@ def prune_chat_history(model: str, chat_history: List[ChatMessage], max_tokens:
message = chat_history.pop(0)
total_tokens -= count_tokens(model, message.content)
- # 3. Truncate message in the last 5
+ # 3. Truncate message in the last 5, except last 1
i = 0
- while total_tokens > max_tokens and len(chat_history) > 0 and i < len(chat_history):
+ while total_tokens > max_tokens and len(chat_history) > 0 and i < len(chat_history) - 1:
message = chat_history[i]
total_tokens -= count_tokens(model, message.content)
total_tokens += count_tokens(model, message.summary)
@@ -101,13 +101,16 @@ def prune_chat_history(model: str, chat_history: List[ChatMessage], max_tokens:
TOKEN_BUFFER_FOR_SAFETY = 100
-def compile_chat_messages(model: str, msgs: List[ChatMessage], max_tokens: int, prompt: Union[str, None] = None, functions: Union[List, None] = None, system_message: Union[str, None] = None) -> List[Dict]:
+def compile_chat_messages(model: str, msgs: Union[List[ChatMessage], None], max_tokens: int, prompt: Union[str, None] = None, functions: Union[List, None] = None, system_message: Union[str, None] = None) -> List[Dict]:
"""
The total number of tokens is system_message + sum(msgs) + functions + prompt after it is converted to a message
"""
+ msgs_copy = [msg.copy(deep=True)
+ for msg in msgs] if msgs is not None else []
+
if prompt is not None:
prompt_msg = ChatMessage(role="user", content=prompt, summary=prompt)
- msgs += [prompt_msg]
+ msgs_copy += [prompt_msg]
if system_message is not None:
# NOTE: System message takes second precedence to user prompt, so it is placed just before
@@ -116,7 +119,7 @@ def compile_chat_messages(model: str, msgs: List[ChatMessage], max_tokens: int,
system_chat_msg = ChatMessage(
role="system", content=rendered_system_message, summary=rendered_system_message)
# insert at second-to-last position
- msgs.insert(-1, system_chat_msg)
+ msgs_copy.insert(-1, system_chat_msg)
# Add tokens from functions
function_tokens = 0
@@ -124,11 +127,11 @@ def compile_chat_messages(model: str, msgs: List[ChatMessage], max_tokens: int,
for function in functions:
function_tokens += count_tokens(model, json.dumps(function))
- msgs = prune_chat_history(
- model, msgs, MAX_TOKENS_FOR_MODEL[model], function_tokens + max_tokens + TOKEN_BUFFER_FOR_SAFETY)
+ msgs_copy = prune_chat_history(
+ model, msgs_copy, MAX_TOKENS_FOR_MODEL[model], function_tokens + max_tokens + TOKEN_BUFFER_FOR_SAFETY)
history = [msg.to_dict(with_functions=functions is not None)
- for msg in msgs]
+ for msg in msgs_copy]
# Move system message back to start
if system_message is not None and len(history) >= 2 and history[-2]["role"] == "system":
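
The `compile_chat_messages` change above works on `msgs_copy` (deep copies of the incoming messages) so that appending the prompt, inserting the system message, and pruning no longer mutate the caller's chat history, and it tolerates `msgs=None` now that callers may omit the history. A small sketch of the defensive-copy pattern, independent of the Continue types:

```python
import copy
from typing import Dict, List, Optional


def compile_messages(msgs: Optional[List[Dict]], prompt: Optional[str] = None) -> List[Dict]:
    # Copy first so the caller's list is never modified in place.
    msgs_copy = copy.deepcopy(msgs) if msgs is not None else []
    if prompt is not None:
        msgs_copy.append({"role": "user", "content": prompt})
    return msgs_copy


history = [{"role": "user", "content": "hi"}]
compiled = compile_messages(history, prompt="summarize the above")
assert len(history) == 1      # original history untouched
assert len(compiled) == 2     # the copy carries the appended prompt
```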
diff --git a/continuedev/src/continuedev/libs/util/telemetry.py b/continuedev/src/continuedev/libs/util/telemetry.py
index bd9fde9d..17735dce 100644
--- a/continuedev/src/continuedev/libs/util/telemetry.py
+++ b/continuedev/src/continuedev/libs/util/telemetry.py
@@ -3,6 +3,7 @@ from posthog import Posthog
from ...core.config import load_config
import os
from dotenv import load_dotenv
+from .commonregex import clean_pii_from_any
load_dotenv()
in_codespaces = os.getenv("CODESPACES") == "true"
@@ -13,10 +14,14 @@ posthog = Posthog('phc_JS6XFROuNbhJtVCEdTSYk6gl5ArRrTNMpCcguAXlSPs',
def capture_event(unique_id: str, event_name: str, event_properties: Any):
+ # Return early if telemetry is disabled
config = load_config('.continue/config.json')
if not config.allow_anonymous_telemetry:
return
if in_codespaces:
event_properties['codespaces'] = True
- posthog.capture(unique_id, event_name, event_properties)
+
+ # Send event to PostHog
+ posthog.capture(unique_id, event_name,
+ clean_pii_from_any(event_properties))
diff --git a/continuedev/src/continuedev/recipes/AddTransformRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/README.md
index d735e0cd..d735e0cd 100644
--- a/continuedev/src/continuedev/recipes/AddTransformRecipe/README.md
+++ b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/README.md
diff --git a/continuedev/src/continuedev/recipes/AddTransformRecipe/dlt_transform_docs.md b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/dlt_transform_docs.md
index 658b285f..658b285f 100644
--- a/continuedev/src/continuedev/recipes/AddTransformRecipe/dlt_transform_docs.md
+++ b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/dlt_transform_docs.md
diff --git a/continuedev/src/continuedev/recipes/AddTransformRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/main.py
index fdd343f5..5d242f7c 100644
--- a/continuedev/src/continuedev/recipes/AddTransformRecipe/main.py
+++ b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/main.py
@@ -1,9 +1,9 @@
from textwrap import dedent
-from ...core.main import Step
-from ...core.sdk import ContinueSDK
-from ...steps.core.core import WaitForUserInputStep
-from ...steps.core.core import MessageStep
+from ....core.main import Step
+from ....core.sdk import ContinueSDK
+from ....plugins.steps.core.core import WaitForUserInputStep
+from ....plugins.steps.core.core import MessageStep
from .steps import SetUpChessPipelineStep, AddTransformStep
diff --git a/continuedev/src/continuedev/recipes/AddTransformRecipe/steps.py b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/steps.py
index 9744146c..8c6446da 100644
--- a/continuedev/src/continuedev/recipes/AddTransformRecipe/steps.py
+++ b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/steps.py
@@ -1,14 +1,10 @@
import os
from textwrap import dedent
-from ...models.main import Range
-from ...models.filesystem import RangeInFile
-from ...steps.core.core import MessageStep
-from ...core.sdk import Models
-from ...core.observation import DictObservation
-from ...models.filesystem_edit import AddFile
-from ...core.main import Step
-from ...core.sdk import ContinueSDK
+from ....plugins.steps.core.core import MessageStep
+from ....core.sdk import Models
+from ....core.main import Step
+from ....core.sdk import ContinueSDK
AI_ASSISTED_STRING = "(✨ AI-Assisted ✨)"
diff --git a/continuedev/src/continuedev/recipes/ContinueRecipeRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/ContinueRecipeRecipe/README.md
index df66104f..df66104f 100644
--- a/continuedev/src/continuedev/recipes/ContinueRecipeRecipe/README.md
+++ b/continuedev/src/continuedev/plugins/recipes/ContinueRecipeRecipe/README.md
diff --git a/continuedev/src/continuedev/recipes/ContinueRecipeRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/ContinueRecipeRecipe/main.py
index 953fb0c2..c0f9e7e3 100644
--- a/continuedev/src/continuedev/recipes/ContinueRecipeRecipe/main.py
+++ b/continuedev/src/continuedev/plugins/recipes/ContinueRecipeRecipe/main.py
@@ -1,8 +1,7 @@
from textwrap import dedent
-from ...models.filesystem import RangeInFile
-from ...steps.main import EditHighlightedCodeStep
-from ...core.main import Step
-from ...core.sdk import ContinueSDK
+from ....plugins.steps.main import EditHighlightedCodeStep
+from ....core.main import Step
+from ....core.sdk import ContinueSDK
class ContinueStepStep(Step):
diff --git a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/README.md
index e69de29b..e69de29b 100644
--- a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/README.md
+++ b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/README.md
diff --git a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/main.py
index 55ef107b..84363e02 100644
--- a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/main.py
+++ b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/main.py
@@ -1,9 +1,9 @@
from textwrap import dedent
-from ...core.sdk import ContinueSDK
-from ...core.main import Step
-from ...steps.core.core import WaitForUserInputStep
-from ...steps.core.core import MessageStep
+from ....core.sdk import ContinueSDK
+from ....core.main import Step
+from ....plugins.steps.core.core import WaitForUserInputStep
+from ....plugins.steps.core.core import MessageStep
from .steps import SetupPipelineStep, ValidatePipelineStep, RunQueryStep
diff --git a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/steps.py b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/steps.py
index 60218ef9..433e309e 100644
--- a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/steps.py
+++ b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/steps.py
@@ -1,15 +1,13 @@
import os
-import subprocess
from textwrap import dedent
import time
-from ...models.main import Range
-from ...models.filesystem import RangeInFile
-from ...steps.core.core import MessageStep
-from ...core.observation import DictObservation, InternalErrorObservation
-from ...models.filesystem_edit import AddFile, FileEdit
-from ...core.main import Step
-from ...core.sdk import ContinueSDK, Models
+from ....models.main import Range
+from ....models.filesystem import RangeInFile
+from ....plugins.steps.core.core import MessageStep
+from ....models.filesystem_edit import AddFile, FileEdit
+from ....core.main import Step
+from ....core.sdk import ContinueSDK, Models
AI_ASSISTED_STRING = "(✨ AI-Assisted ✨)"
diff --git a/continuedev/src/continuedev/recipes/DDtoBQRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/README.md
index c4981e56..c4981e56 100644
--- a/continuedev/src/continuedev/recipes/DDtoBQRecipe/README.md
+++ b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/README.md
diff --git a/continuedev/src/continuedev/recipes/DDtoBQRecipe/dlt_duckdb_to_bigquery_docs.md b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/dlt_duckdb_to_bigquery_docs.md
index eb68e117..eb68e117 100644
--- a/continuedev/src/continuedev/recipes/DDtoBQRecipe/dlt_duckdb_to_bigquery_docs.md
+++ b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/dlt_duckdb_to_bigquery_docs.md
diff --git a/continuedev/src/continuedev/recipes/DDtoBQRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/main.py
index 1ae84310..5b6aa8f0 100644
--- a/continuedev/src/continuedev/recipes/DDtoBQRecipe/main.py
+++ b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/main.py
@@ -1,9 +1,8 @@
from textwrap import dedent
-from ...core.main import Step
-from ...core.sdk import ContinueSDK
-from ...steps.core.core import WaitForUserInputStep
-from ...steps.core.core import MessageStep
+from ....core.main import Step
+from ....core.sdk import ContinueSDK
+from ....plugins.steps.core.core import MessageStep
from .steps import SetUpChessPipelineStep, SwitchDestinationStep, LoadDataStep
# Based on the following guide:
diff --git a/continuedev/src/continuedev/recipes/DDtoBQRecipe/steps.py b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/steps.py
index df414e2e..767936b8 100644
--- a/continuedev/src/continuedev/recipes/DDtoBQRecipe/steps.py
+++ b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/steps.py
@@ -1,17 +1,11 @@
import os
-import subprocess
from textwrap import dedent
-import time
-
-from ...steps.find_and_replace import FindAndReplaceStep
-from ...models.main import Range
-from ...models.filesystem import RangeInFile
-from ...steps.core.core import MessageStep
-from ...core.sdk import Models
-from ...core.observation import DictObservation, InternalErrorObservation
-from ...models.filesystem_edit import AddFile, FileEdit
-from ...core.main import Step
-from ...core.sdk import ContinueSDK
+
+from ....plugins.steps.find_and_replace import FindAndReplaceStep
+from ....plugins.steps.core.core import MessageStep
+from ....core.sdk import Models
+from ....core.main import Step
+from ....core.sdk import ContinueSDK
AI_ASSISTED_STRING = "(✨ AI-Assisted ✨)"
diff --git a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/README.md
index e69de29b..e69de29b 100644
--- a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/README.md
+++ b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/README.md
diff --git a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/main.py
index 2a3e3566..54cba45f 100644
--- a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/main.py
+++ b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/main.py
@@ -1,10 +1,9 @@
from textwrap import dedent
-from ...steps.input.nl_multiselect import NLMultiselectStep
-from ...core.main import Step
-from ...core.sdk import ContinueSDK
-from ...steps.core.core import WaitForUserInputStep
-from ...steps.core.core import MessageStep
+from ....plugins.steps.input.nl_multiselect import NLMultiselectStep
+from ....core.main import Step
+from ....core.sdk import ContinueSDK
+from ....plugins.steps.core.core import MessageStep
from .steps import SetupPipelineStep, DeployAirflowStep, RunPipelineStep
diff --git a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/steps.py b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/steps.py
index d9bdbc0a..83067d52 100644
--- a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/steps.py
+++ b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/steps.py
@@ -1,18 +1,11 @@
import os
-import subprocess
from textwrap import dedent
-import time
-
-from ...steps.core.core import WaitForUserInputStep
-from ...models.main import Range
-from ...models.filesystem import RangeInFile
-from ...steps.core.core import MessageStep
-from ...core.sdk import Models
-from ...core.observation import DictObservation, InternalErrorObservation
-from ...models.filesystem_edit import AddFile, FileEdit
-from ...core.main import Step
-from ...core.sdk import ContinueSDK
-from ...steps.find_and_replace import FindAndReplaceStep
+
+from ....plugins.steps.core.core import MessageStep
+from ....core.sdk import Models
+from ....core.main import Step
+from ....core.sdk import ContinueSDK
+from ....plugins.steps.find_and_replace import FindAndReplaceStep
AI_ASSISTED_STRING = "(✨ AI-Assisted ✨)"
@@ -93,5 +86,3 @@ class DeployAirflowStep(Step):
# Tell the user to check the schedule and fill in owner, email, other default_args
await sdk.run_step(MessageStep(message="Fill in the owner, email, and other default_args in the DAG file with your own personal information. Then the DAG will be ready to run!", name="Fill in default_args"))
-
- # Run the DAG locally ??
diff --git a/continuedev/src/continuedev/recipes/README.md b/continuedev/src/continuedev/plugins/recipes/README.md
index d5a006fb..9860b0e2 100644
--- a/continuedev/src/continuedev/recipes/README.md
+++ b/continuedev/src/continuedev/plugins/recipes/README.md
@@ -1,5 +1,7 @@
# This is a collaborative collection of Continue recipes
+A recipe is technically just a [Step](../steps/README.md), but is intended to be more complex, composed of multiple sub-steps.
+
Recipes here will automatically be made available in the [Continue VS Code extension](https://marketplace.visualstudio.com/items?itemName=Continue.continue).
The `recipes` folder contains all recipes, each with the same structure. **If you wish to create your own recipe, please do the following:**
diff --git a/continuedev/src/continuedev/recipes/TemplateRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/TemplateRecipe/README.md
index 91d1123b..91d1123b 100644
--- a/continuedev/src/continuedev/recipes/TemplateRecipe/README.md
+++ b/continuedev/src/continuedev/plugins/recipes/TemplateRecipe/README.md
diff --git a/continuedev/src/continuedev/recipes/TemplateRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/TemplateRecipe/main.py
index 16132cfd..197abe85 100644
--- a/continuedev/src/continuedev/recipes/TemplateRecipe/main.py
+++ b/continuedev/src/continuedev/plugins/recipes/TemplateRecipe/main.py
@@ -1,5 +1,7 @@
from typing import Coroutine
-from continuedev.core import Step, ContinueSDK, Observation, Models
+from ....core.main import Step, Observation
+from ....core.sdk import ContinueSDK
+from ....core.sdk import Models
class TemplateRecipe(Step):
diff --git a/continuedev/src/continuedev/recipes/WritePytestsRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/WritePytestsRecipe/README.md
index 5ce33ecb..5ce33ecb 100644
--- a/continuedev/src/continuedev/recipes/WritePytestsRecipe/README.md
+++ b/continuedev/src/continuedev/plugins/recipes/WritePytestsRecipe/README.md
diff --git a/continuedev/src/continuedev/recipes/WritePytestsRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/WritePytestsRecipe/main.py
index c7a65fa6..6ef5ffd6 100644
--- a/continuedev/src/continuedev/recipes/WritePytestsRecipe/main.py
+++ b/continuedev/src/continuedev/plugins/recipes/WritePytestsRecipe/main.py
@@ -1,7 +1,8 @@
from textwrap import dedent
from typing import Union
-from ...models.filesystem_edit import AddDirectory, AddFile
-from ...core.main import Step, ContinueSDK
+from ....models.filesystem_edit import AddDirectory, AddFile
+from ....core.main import Step
+from ....core.sdk import ContinueSDK
import os
diff --git a/continuedev/src/continuedev/plugins/steps/README.md b/continuedev/src/continuedev/plugins/steps/README.md
new file mode 100644
index 00000000..12073835
--- /dev/null
+++ b/continuedev/src/continuedev/plugins/steps/README.md
@@ -0,0 +1,50 @@
+# Steps
+
+Steps are the composable unit of action in Continue. They define a `run` method which has access to the entire `ContinueSDK`, allowing you to take actions inside the IDE, call language models, and more. In this folder you can find a number of good examples.
+
+## How to write a step
+
+a. Start by creating a subclass of `Step`
+
+You should first consider what the parameters of your step will be. These are defined as attributes on the Pydantic class. For example, if you wanted a "filepath" attribute, it would look like this:
+
+```python
+class HelloWorldStep(Step):
+ filepath: str
+ ...
+```
+
+b. Next, write the `run` method
+
+This method takes the ContinueSDK as a parameter, giving you all the tools you need to write your steps (if it's missing something, let us know and we'll add it!). You can write any code inside the run method; this is what will happen when your step is run, line for line. As an example, here's a step that will open a file and append "Hello World!":
+
+```python
+class HelloWorldStep(Step):
+ filepath: str
+
+ async def run(self, sdk: ContinueSDK):
+ await sdk.ide.setFileOpen(self.filepath)
+ await sdk.append_to_file(self.filepath, "Hello World!")
+```
+
+c. Finally, every Step is displayed with a description of what it has done
+
+If you'd like to override the default description of your step, which is just the class name, then implement the `describe` method. You can:
+
+- Return a static string
+- Store state in a class attribute during the run method (prefix it with a double underscore, which signifies through Pydantic that it is internal state rather than a parameter of the Step), and then read it in the describe method.
+- Use state in conjunction with the `models` parameter of the describe method to autogenerate a description with a language model. For example, if you'd used an attribute called `__code_written` to store a string representing some code that was written, you could implement describe as `return models.gpt35.complete(f"{self.__code_written}\n\nSummarize the changes made in the above code.")`.
+
+Here's an example:
+
+```python
+class HelloWorldStep(Step):
+ filepath: str
+
+ async def run(self, sdk: ContinueSDK):
+ await sdk.ide.setFileOpen(self.filepath)
+ await sdk.append_to_file(self.filepath, "Hello World!")
+
+ def describe(self, models: Models):
+ return f"Appended 'Hello World!' to {self.filepath}"
+```
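
Putting the second and third bullets together, a hypothetical sketch of a step that stores internal state in a double-underscore attribute and hands it to `models.gpt35.complete` in `describe` (the class name and attribute are illustrative, and it assumes the Step base class allows assigning private attributes as the bullets above describe):

```python
class RefactorFileStep(Step):
    filepath: str

    # Double underscore marks internal state, not a Step parameter.
    __code_written: str = ""

    async def run(self, sdk: ContinueSDK):
        await sdk.ide.setFileOpen(self.filepath)
        new_code = "# TODO: refactor this module\n"
        await sdk.append_to_file(self.filepath, new_code)
        self.__code_written = new_code

    def describe(self, models: Models):
        return models.gpt35.complete(
            f"{self.__code_written}\n\nSummarize the changes made in the above code."
        )
```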
diff --git a/continuedev/src/continuedev/steps/__init__.py b/continuedev/src/continuedev/plugins/steps/__init__.py
index 8b137891..8b137891 100644
--- a/continuedev/src/continuedev/steps/__init__.py
+++ b/continuedev/src/continuedev/plugins/steps/__init__.py
diff --git a/continuedev/src/continuedev/steps/chat.py b/continuedev/src/continuedev/plugins/steps/chat.py
index aade1ea1..2c662459 100644
--- a/continuedev/src/continuedev/steps/chat.py
+++ b/continuedev/src/continuedev/plugins/steps/chat.py
@@ -3,12 +3,12 @@ from typing import Any, Coroutine, List
from pydantic import Field
-from ..libs.util.strings import remove_quotes_and_escapes
+from ...libs.util.strings import remove_quotes_and_escapes
from .main import EditHighlightedCodeStep
from .core.core import MessageStep
-from ..core.main import FunctionCall, Models
-from ..core.main import ChatMessage, Step, step_to_json_schema
-from ..core.sdk import ContinueSDK
+from ...core.main import FunctionCall, Models
+from ...core.main import ChatMessage, Step, step_to_json_schema
+from ...core.sdk import ContinueSDK
import openai
import os
from dotenv import load_dotenv
diff --git a/continuedev/src/continuedev/steps/chroma.py b/continuedev/src/continuedev/plugins/steps/chroma.py
index 9d085981..dbe8363e 100644
--- a/continuedev/src/continuedev/steps/chroma.py
+++ b/continuedev/src/continuedev/plugins/steps/chroma.py
@@ -1,10 +1,10 @@
from textwrap import dedent
from typing import Coroutine, Union
-from ..core.observation import Observation, TextObservation
-from ..core.main import Step
-from ..core.sdk import ContinueSDK
+from ...core.observation import Observation, TextObservation
+from ...core.main import Step
+from ...core.sdk import ContinueSDK
from .core.core import EditFileStep
-from ..libs.chroma.query import ChromaIndexManager
+from ...libs.chroma.query import ChromaIndexManager
from .core.core import EditFileStep
diff --git a/continuedev/src/continuedev/steps/clear_history.py b/continuedev/src/continuedev/plugins/steps/clear_history.py
index a875c6d3..8f21518b 100644
--- a/continuedev/src/continuedev/steps/clear_history.py
+++ b/continuedev/src/continuedev/plugins/steps/clear_history.py
@@ -1,5 +1,5 @@
-from ..core.main import Step
-from ..core.sdk import ContinueSDK
+from ...core.main import Step
+from ...core.sdk import ContinueSDK
class ClearHistoryStep(Step):
diff --git a/continuedev/src/continuedev/steps/comment_code.py b/continuedev/src/continuedev/plugins/steps/comment_code.py
index aa17e62c..3e34ab52 100644
--- a/continuedev/src/continuedev/steps/comment_code.py
+++ b/continuedev/src/continuedev/plugins/steps/comment_code.py
@@ -1,4 +1,4 @@
-from ..core.main import ContinueSDK, Models, Step
+from ...core.main import ContinueSDK, Models, Step
from .main import EditHighlightedCodeStep
diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/plugins/steps/core/core.py
index 4afc36e8..5a81e5ee 100644
--- a/continuedev/src/continuedev/steps/core/core.py
+++ b/continuedev/src/continuedev/plugins/steps/core/core.py
@@ -5,15 +5,15 @@ import difflib
from textwrap import dedent
from typing import Coroutine, List, Literal, Union
-from ...libs.llm.ggml import GGML
-from ...models.main import Range
-from ...libs.llm.prompt_utils import MarkdownStyleEncoderDecoder
-from ...models.filesystem_edit import EditDiff, FileEdit, FileEditWithFullContents, FileSystemEdit
-from ...models.filesystem import FileSystem, RangeInFile, RangeInFileWithContents
-from ...core.observation import Observation, TextObservation, TracebackObservation, UserInputObservation
-from ...core.main import ChatMessage, ContinueCustomException, Step, SequentialStep
-from ...libs.util.count_tokens import MAX_TOKENS_FOR_MODEL, DEFAULT_MAX_TOKENS
-from ...libs.util.strings import dedent_and_get_common_whitespace, remove_quotes_and_escapes
+from ....libs.llm.ggml import GGML
+from ....models.main import Range
+from ....libs.llm.prompt_utils import MarkdownStyleEncoderDecoder
+from ....models.filesystem_edit import EditDiff, FileEdit, FileEditWithFullContents, FileSystemEdit
+from ....models.filesystem import FileSystem, RangeInFile, RangeInFileWithContents
+from ....core.observation import Observation, TextObservation, TracebackObservation, UserInputObservation
+from ....core.main import ChatMessage, ContinueCustomException, Step, SequentialStep
+from ....libs.util.count_tokens import MAX_TOKENS_FOR_MODEL, DEFAULT_MAX_TOKENS
+from ....libs.util.strings import dedent_and_get_common_whitespace, remove_quotes_and_escapes
import difflib
@@ -220,13 +220,13 @@ class DefaultModelEditCodeStep(Step):
if total_tokens < MAX_TOKENS_FOR_MODEL[model_to_use.name]:
break
- if total_tokens > MAX_TOKENS_FOR_MODEL[model_to_use.name]:
- while cur_start_line < max_start_line:
- cur_start_line += 1
- total_tokens -= model_to_use.count_tokens(
- full_file_contents_lst[cur_end_line])
- if total_tokens < MAX_TOKENS_FOR_MODEL[model_to_use.name]:
- break
+ if total_tokens > MAX_TOKENS_FOR_MODEL[model_to_use.name]:
+ while cur_start_line < max_start_line:
+ cur_start_line += 1
+ total_tokens -= model_to_use.count_tokens(
+ full_file_contents_lst[cur_start_line])
+ if total_tokens < MAX_TOKENS_FOR_MODEL[model_to_use.name]:
+ break
# Now use the found start/end lines to get the prefix and suffix strings
file_prefix = "\n".join(
diff --git a/continuedev/src/continuedev/steps/custom_command.py b/continuedev/src/continuedev/plugins/steps/custom_command.py
index 375900c1..1491a975 100644
--- a/continuedev/src/continuedev/steps/custom_command.py
+++ b/continuedev/src/continuedev/plugins/steps/custom_command.py
@@ -1,6 +1,6 @@
-from ..libs.util.templating import render_templated_string
-from ..core.main import Models, Step
-from ..core.sdk import ContinueSDK
+from ...libs.util.templating import render_templated_string
+from ...core.main import Step
+from ...core.sdk import ContinueSDK
from ..steps.chat import SimpleChatStep
diff --git a/continuedev/src/continuedev/steps/draft/abstract_method.py b/continuedev/src/continuedev/plugins/steps/draft/abstract_method.py
index f3131c4b..f3131c4b 100644
--- a/continuedev/src/continuedev/steps/draft/abstract_method.py
+++ b/continuedev/src/continuedev/plugins/steps/draft/abstract_method.py
diff --git a/continuedev/src/continuedev/steps/draft/migration.py b/continuedev/src/continuedev/plugins/steps/draft/migration.py
index f3b36b5e..a76d491b 100644
--- a/continuedev/src/continuedev/steps/draft/migration.py
+++ b/continuedev/src/continuedev/plugins/steps/draft/migration.py
@@ -1,7 +1,7 @@
# When an edit is made to an existing class or a new sqlalchemy class is created,
# this should be kicked off.
-from ...core.main import Step
+from ....core.main import Step
class MigrationStep(Step):
diff --git a/continuedev/src/continuedev/steps/draft/redux.py b/continuedev/src/continuedev/plugins/steps/draft/redux.py
index 17506316..30c8fdbb 100644
--- a/continuedev/src/continuedev/steps/draft/redux.py
+++ b/continuedev/src/continuedev/plugins/steps/draft/redux.py
@@ -1,5 +1,5 @@
-from ...core.main import Step
-from ...core.sdk import ContinueSDK
+from ....core.main import Step
+from ....core.sdk import ContinueSDK
from ..core.core import EditFileStep
@@ -25,14 +25,14 @@ class EditReduxStateStep(Step):
sdk.run_step(EditFileStep(
filepath=selector_filename,
prompt=f"Edit the selector to add a new property for {self.description}. The store looks like this: {store_file_contents}"
- )
+ ))
# Reducer
reducer_filename = ""
sdk.run_step(EditFileStep(
filepath=reducer_filename,
prompt=f"Edit the reducer to add a new property for {self.description}. The store looks like this: {store_file_contents}"
-
+ ))
"""
Starts with implementing selector
1. RootStore
diff --git a/continuedev/src/continuedev/steps/draft/typeorm.py b/continuedev/src/continuedev/plugins/steps/draft/typeorm.py
index 153c855f..d06a6fb4 100644
--- a/continuedev/src/continuedev/steps/draft/typeorm.py
+++ b/continuedev/src/continuedev/plugins/steps/draft/typeorm.py
@@ -1,6 +1,6 @@
from textwrap import dedent
-from ...core.main import Step
-from ...core.sdk import ContinueSDK
+from ....core.main import Step
+from ....core.sdk import ContinueSDK
class CreateTableStep(Step):
diff --git a/continuedev/src/continuedev/steps/feedback.py b/continuedev/src/continuedev/plugins/steps/feedback.py
index 6f6a9b15..119e3112 100644
--- a/continuedev/src/continuedev/steps/feedback.py
+++ b/continuedev/src/continuedev/plugins/steps/feedback.py
@@ -1,8 +1,8 @@
from typing import Coroutine
-from ..core.main import Models
-from ..core.main import Step
-from ..core.sdk import ContinueSDK
-from ..libs.util.telemetry import capture_event
+from ...core.main import Models
+from ...core.main import Step
+from ...core.sdk import ContinueSDK
+from ...libs.util.telemetry import capture_event
class FeedbackStep(Step):
diff --git a/continuedev/src/continuedev/steps/find_and_replace.py b/continuedev/src/continuedev/plugins/steps/find_and_replace.py
index 690872c0..a2c9c44e 100644
--- a/continuedev/src/continuedev/steps/find_and_replace.py
+++ b/continuedev/src/continuedev/plugins/steps/find_and_replace.py
@@ -1,6 +1,6 @@
-from ..models.filesystem_edit import FileEdit, Range
-from ..core.main import Models, Step
-from ..core.sdk import ContinueSDK
+from ...models.filesystem_edit import FileEdit, Range
+from ...core.main import Models, Step
+from ...core.sdk import ContinueSDK
class FindAndReplaceStep(Step):
diff --git a/continuedev/src/continuedev/steps/help.py b/continuedev/src/continuedev/plugins/steps/help.py
index ba1e6087..5111c7cf 100644
--- a/continuedev/src/continuedev/steps/help.py
+++ b/continuedev/src/continuedev/plugins/steps/help.py
@@ -1,7 +1,7 @@
from textwrap import dedent
-from ..core.main import ChatMessage, Step
-from ..core.sdk import ContinueSDK
-from ..libs.util.telemetry import capture_event
+from ...core.main import ChatMessage, Step
+from ...core.sdk import ContinueSDK
+from ...libs.util.telemetry import capture_event
help = dedent("""\
Continue is an open-source coding autopilot. It is a VS Code extension that brings the power of ChatGPT to your IDE.
diff --git a/continuedev/src/continuedev/steps/input/nl_multiselect.py b/continuedev/src/continuedev/plugins/steps/input/nl_multiselect.py
index aee22866..b54d394a 100644
--- a/continuedev/src/continuedev/steps/input/nl_multiselect.py
+++ b/continuedev/src/continuedev/plugins/steps/input/nl_multiselect.py
@@ -1,7 +1,7 @@
from typing import List, Union
from ..core.core import WaitForUserInputStep
-from ...core.main import Step
-from ...core.sdk import ContinueSDK
+from ....core.main import Step
+from ....core.sdk import ContinueSDK
class NLMultiselectStep(Step):
diff --git a/continuedev/src/continuedev/steps/main.py b/continuedev/src/continuedev/plugins/steps/main.py
index ce7cbc60..30117c55 100644
--- a/continuedev/src/continuedev/steps/main.py
+++ b/continuedev/src/continuedev/plugins/steps/main.py
@@ -1,21 +1,18 @@
import os
from typing import Coroutine, List, Union
-
+from textwrap import dedent
from pydantic import BaseModel, Field
-from ..libs.llm import LLM
-from ..models.main import Traceback, Range
-from ..models.filesystem_edit import EditDiff, FileEdit
-from ..models.filesystem import RangeInFile, RangeInFileWithContents
-from ..core.observation import Observation, TextObservation, TracebackObservation
-from ..libs.llm.prompt_utils import MarkdownStyleEncoderDecoder
-from textwrap import dedent
-from ..core.main import ContinueCustomException, Step
-from ..core.sdk import ContinueSDK, Models
-from ..core.observation import Observation
-import subprocess
+from ...models.main import Traceback, Range
+from ...models.filesystem_edit import EditDiff, FileEdit
+from ...models.filesystem import RangeInFile, RangeInFileWithContents
+from ...core.observation import Observation
+from ...libs.llm.prompt_utils import MarkdownStyleEncoderDecoder
+from ...core.main import ContinueCustomException, Step
+from ...core.sdk import ContinueSDK, Models
+from ...core.observation import Observation
from .core.core import DefaultModelEditCodeStep
-from ..libs.util.calculate_diff import calculate_diff2
+from ...libs.util.calculate_diff import calculate_diff2
class SetupContinueWorkspaceStep(Step):
@@ -303,8 +300,7 @@ class SolveTracebackStep(Step):
range_in_files.append(
RangeInFile.from_entire_file(frame.filepath, content))
- await sdk.run_step(EditCodeStep(
- range_in_files=range_in_files, prompt=prompt))
+ await sdk.run_step(DefaultModelEditCodeStep(range_in_files=range_in_files, user_input=prompt))
return None
diff --git a/continuedev/src/continuedev/steps/on_traceback.py b/continuedev/src/continuedev/plugins/steps/on_traceback.py
index efb4c703..e99f212d 100644
--- a/continuedev/src/continuedev/steps/on_traceback.py
+++ b/continuedev/src/continuedev/plugins/steps/on_traceback.py
@@ -1,8 +1,8 @@
import os
from .core.core import UserInputStep
-from ..core.main import ChatMessage, Step
-from ..core.sdk import ContinueSDK
+from ...core.main import ChatMessage, Step
+from ...core.sdk import ContinueSDK
from .chat import SimpleChatStep
diff --git a/continuedev/src/continuedev/steps/open_config.py b/continuedev/src/continuedev/plugins/steps/open_config.py
index af55a95a..d950c26f 100644
--- a/continuedev/src/continuedev/steps/open_config.py
+++ b/continuedev/src/continuedev/plugins/steps/open_config.py
@@ -1,6 +1,6 @@
from textwrap import dedent
-from ..core.main import Step
-from ..core.sdk import ContinueSDK
+from ...core.main import Step
+from ...core.sdk import ContinueSDK
import os
diff --git a/continuedev/src/continuedev/steps/react.py b/continuedev/src/continuedev/plugins/steps/react.py
index cddb8b42..8b2e7c2e 100644
--- a/continuedev/src/continuedev/steps/react.py
+++ b/continuedev/src/continuedev/plugins/steps/react.py
@@ -1,8 +1,7 @@
from textwrap import dedent
from typing import List, Union, Tuple
-from ..core.main import Step
-from ..core.sdk import ContinueSDK
-from .core.core import MessageStep
+from ...core.main import Step
+from ...core.sdk import ContinueSDK
class NLDecisionStep(Step):
diff --git a/continuedev/src/continuedev/steps/search_directory.py b/continuedev/src/continuedev/plugins/steps/search_directory.py
index bfb97630..7d02d6fa 100644
--- a/continuedev/src/continuedev/steps/search_directory.py
+++ b/continuedev/src/continuedev/plugins/steps/search_directory.py
@@ -2,11 +2,11 @@ import asyncio
from textwrap import dedent
from typing import List, Union
-from ..models.filesystem import RangeInFile
-from ..models.main import Range
-from ..core.main import Step
-from ..core.sdk import ContinueSDK
-from ..libs.util.create_async_task import create_async_task
+from ...models.filesystem import RangeInFile
+from ...models.main import Range
+from ...core.main import Step
+from ...core.sdk import ContinueSDK
+from ...libs.util.create_async_task import create_async_task
import os
import re
diff --git a/continuedev/src/continuedev/plugins/steps/steps_on_startup.py b/continuedev/src/continuedev/plugins/steps/steps_on_startup.py
new file mode 100644
index 00000000..19d62d30
--- /dev/null
+++ b/continuedev/src/continuedev/plugins/steps/steps_on_startup.py
@@ -0,0 +1,17 @@
+from ...core.main import Step
+from ...core.sdk import Models, ContinueSDK
+from ...libs.util.step_name_to_steps import get_step_from_name
+
+
+class StepsOnStartupStep(Step):
+ hide: bool = True
+
+ async def describe(self, models: Models):
+ return "Running steps on startup"
+
+ async def run(self, sdk: ContinueSDK):
+ steps_on_startup = sdk.config.steps_on_startup
+
+ for step_name, step_params in steps_on_startup.items():
+ step = get_step_from_name(step_name, step_params)
+ await sdk.run_step(step)
diff --git a/continuedev/src/continuedev/steps/welcome.py b/continuedev/src/continuedev/plugins/steps/welcome.py
index 2dece649..df3e9a8a 100644
--- a/continuedev/src/continuedev/steps/welcome.py
+++ b/continuedev/src/continuedev/plugins/steps/welcome.py
@@ -1,9 +1,10 @@
from textwrap import dedent
-from ..models.filesystem_edit import AddFile
-from ..core.main import Step
-from ..core.sdk import ContinueSDK, Models
import os
+from ...models.filesystem_edit import AddFile
+from ...core.main import Step
+from ...core.sdk import ContinueSDK, Models
+
class WelcomeStep(Step):
name: str = "Welcome to Continue!"
diff --git a/continuedev/src/continuedev/server/session_manager.py b/continuedev/src/continuedev/server/session_manager.py
index 96daf92c..3136f1bf 100644
--- a/continuedev/src/continuedev/server/session_manager.py
+++ b/continuedev/src/continuedev/server/session_manager.py
@@ -7,7 +7,7 @@ import json
from ..libs.util.paths import getSessionFilePath, getSessionsFolderPath
from ..models.filesystem_edit import FileEditWithFullContents
from ..libs.constants.main import CONTINUE_SESSIONS_FOLDER
-from ..core.policy import DemoPolicy
+from ..core.policy import DefaultPolicy
from ..core.main import FullState
from ..core.autopilot import Autopilot
from .ide_protocol import AbstractIdeProtocolServer
@@ -65,7 +65,7 @@ class SessionManager:
full_state = FullState(**json.load(f))
autopilot = await DemoAutopilot.create(
- policy=DemoPolicy(), ide=ide, full_state=full_state)
+ policy=DefaultPolicy(), ide=ide, full_state=full_state)
session_id = session_id or str(uuid4())
ide.session_id = session_id
session = Session(session_id=session_id, autopilot=autopilot)
diff --git a/continuedev/src/continuedev/steps/steps_on_startup.py b/continuedev/src/continuedev/steps/steps_on_startup.py
deleted file mode 100644
index 318c28df..00000000
--- a/continuedev/src/continuedev/steps/steps_on_startup.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from ..core.main import Step
-from ..core.sdk import Models, ContinueSDK
-from .main import UserInputStep
-from ..recipes.CreatePipelineRecipe.main import CreatePipelineRecipe
-from ..recipes.DDtoBQRecipe.main import DDtoBQRecipe
-from ..recipes.DeployPipelineAirflowRecipe.main import DeployPipelineAirflowRecipe
-from ..recipes.DDtoBQRecipe.main import DDtoBQRecipe
-from ..recipes.AddTransformRecipe.main import AddTransformRecipe
-
-
-class StepsOnStartupStep(Step):
- hide: bool = True
-
- async def describe(self, models: Models):
- return "Running steps on startup"
-
- async def run(self, sdk: ContinueSDK):
- steps_on_startup = sdk.config.steps_on_startup
-
- for step_type in steps_on_startup:
- step = step_type()
- await sdk.run_step(step)