diff options
80 files changed, 815 insertions, 458 deletions
@@ -131,7 +131,7 @@ dmypy.json **/node_modules **/out -**/.vscode +**/.vscode/settings.json notes.txt cached_embeddings.pkl .ruff_cache diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..cc7b1ce4 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,17 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Python: Module", + "type": "python", + "request": "launch", + "module": "continuedev.src.continuedev.server.main", + "args": ["--port", "8001"], + "justMyCode": false, + "subProcess": false + } + ] +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7e49dc2d..f7166411 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,56 +2,87 @@ ## Table of Contents -- [Continue Architecture](#continue-architecture) -- [Core Concepts](#core-concepts) +- [β€οΈ Ways to Contribute](#β€οΈ-ways-to-contribute) + - [π Report Bugs](#π-report-bugs) + - [β¨ Suggest Enhancements](#β¨-suggest-enhancements) + - [π Updating / Improving Documentation](#π-updating--improving-documentation) + - [π§βπ» Contributing Code](#π§βπ»-contributing-code) + - [Setup Development Environment](#setting-up-the-development-environment) + - [Writing Steps](#writing-steps) + - [Writing Context Providers](#writing-context-providers) +- [π Continue Architecture](#π-continue-architecture) + - [Continue VS Code Client](#continue-vs-code-client) + - [Continue IDE Websockets Protocol](#continue-ide-websockets-protocol) + - [Continue GUI Websockets Protocol](#continue-gui-websockets-protocol) +- [βοΈ Core Concepts](#βοΈ-core-concepts) - [Step](#step) -- [Continue VS Code Client](#continue-vs-code-client) -- [Continue IDE Websockets Protocol](#continue-ide-websockets-protocol) -- [Continue GUI Websockets Protocol](#continue-gui-websockets-protocol) -- [Ways to 
Contribute](#ways-to-contribute) - - [Report Bugs](#report-bugs) - - [Suggest Enhancements](#suggest-enhancements) - - [Updating / Improving Documentation](#updating--improving-documentation) + - [Autopilot](#autopilot) + - [Observation](#observation) + - [Policy](#policy) -## Continue Architecture +# β€οΈ Ways to Contribute -Continue consists of 3 components, designed so that Continue can easily be extended to work in any IDE: +## π Report Bugs -1. **Continue Server** - The Continue Server is responsible for keeping state, running the autopilot loop which takes actions, and communicating between the IDE and GUI. +If you find a bug, please [create an issue](https://github.com/continuedev/continue/issues) to report it! A great bug report includes: -2. **Continue IDE Client** - The Continue IDE Client is a plugin for the IDE which implements the Continue IDE Protocol. This allows the server to request actions to be taken within the IDE, for example if `sdk.ide.setFileOpen("main.py")` is called on the server, it will communicate over websocketes with the IDE, which will open the file `main.py`. The first IDE Client we have built is for VS Code, but we plan to build clients for other IDEs in the future. The IDE Client must 1. implement the websockets protocol, as is done [here](./extension/src/continueIdeClient.ts) for VS Code and 2. launch the Continue Server, like [here](./extension/src/activation/environmentSetup.ts), and 3. display the Continue GUI in a sidebar, like [here](./extension/src/debugPanel.ts). +- A description of the bug +- Steps to reproduce +- What you expected to happen +- What actually happened +- Screenshots or videos -3. **Continue GUI** - The Continue GUI is a React application that gives the user control over Continue. It displays the history of Steps, shows what context is included in the current Step, and lets the users enter natural language or slash commands to initiate new Steps. 
The GUI communicates with the Continue Server over its own websocket connection +## β¨ Suggest Enhancements -It is important that the IDE Client and GUI never communicate except when the IDE Client initially sets up the GUI. This ensures that the server is the source-of-truth for state, and that we can easily extend Continue to work in other IDEs. +Continue is quickly adding features, and we'd love to hear which are the most important to you. The best ways to suggest an enhancement are -![Continue Architecture](https://continue.dev/docs/assets/images/continue-architecture-146a90742e25f6524452c74fe44fa2a0.png) +- Create an issue -## Core Concepts + - First, check whether a similar proposal has already been made + - If not, [create an issue](https://github.com/continuedev/continue/issues) + - Please describe the enhancement in as much detail as you can, and why it would be useful -All of Continue's logic happens inside of the server, and it is built around a few core concepts. Most of these are Pydantic Models defined in [core/main.py](./continuedev/src/continuedev/core/main.py). +- Join the [Continue Discord](https://discord.gg/NWtdYexhMs) and tell us about your idea in the `#feedback` channel -### `Step` +## π Updating / Improving Documentation -Everything in Continue is a "Step". The `Step` class defines 2 methods: +Continue is continuously improving, but a feature isn't complete until it is reflected in the documentation! If you see something out-of-date or missing, you can help by clicking "Edit this page" at the bottom of any page on [continue.dev/docs](https://continue.dev/docs). -1. `async def run(self, sdk: ContinueSDK) -> Coroutine[Observation, None, None]` - This method defines what happens when the Step is run. It has access to the Continue SDK, which lets you take actions in the IDE, call LLMs, run nested Steps, and more. Optionally, a Step can return an `Observation` object, which a `Policy` can use to make decisions about what to do next. 
+## π§βπ» Contributing Code -2. `async def describe(self, models: Models) -> Coroutine[str, None, None]` - After each Step is run, this method is called to asynchronously generate a summary title for the step. A `Models` object is passed so that you have access to LLMs to summarize for you. +### Setting up the Development Environment -Steps are designed to be composable, so that you can easily build new Steps by combining existing ones. And because they are Pydantic models, they can instantly be used as tools useable by an LLM, for example with OpenAI's function-calling functionality (see [ChatWithFunctions](./continuedev/src/continuedev/steps/chat.py) for an example of this). +There are different levels of setup necessary depending on which part of Continue you are developing. For all of them, first clone the repo: -Some of the most commonly used Steps are: +```bash +git clone https://github.com/continuedev/continue +``` -- [`SimpleChatStep`](./continuedev/src/continuedev/steps/chat.py) - This is the default Step that is run when the user enters natural language input. It takes the user's input and runs it through the default LLM, then displays the result in the GUI. +If editing only the server (`/continuedev` directory), see the directions in [continuedev/README.md](./continuedev/README.md) to set up the Python server. Once it is running on localhost:8001, you can connect your existing VS Code extension by going to VS Code settings, searching for "Continue: Server URL", and setting it to "http://localhost:8001". -- [`EditHighlightedCodeStep`](./continuedev/src/continuedev/steps/core/core.py) - This is the Step run when a user highlights code, enters natural language, and presses CMD/CTRL+ENTER, or uses the slash command '/edit'. It opens a side-by-side diff editor, where updated code is streamed to fulfil the user's request. 
+If editing the VS Code extension (`/extension` directory) or GUI (`/extension/react-app`), you can follow the instructions in [`extension/DEV_README.md`](./extension/DEV_README.md) to set up the VS Code extension and GUI in development mode. -### `Autopilot` +### Writing Steps -### `Observation` +A Step can be used as a custom slash command, or called otherwise in a `Policy`. See the [steps README](./continuedev/src/continuedev/steps/README.md) to learn how to write a Step. -### `Policy` +### Writing Context Providers + +A `ContextProvider` is a Continue plugin that lets type '@' to quickly select documents as context for the language model. The simplest way to create a `ContextProvider` is to implement the `provide_context_items` method. You can find a great example of this in [GitHubIssuesContextProvider](./continuedev/src/continuedev/libs/context_providers/github_issues.py), which allows you to search GitHub Issues in a repo. + +## π Continue Architecture + +Continue consists of 3 components, designed so that Continue can easily be extended to work in any IDE: + +1. **Continue Server** - The Continue Server is responsible for keeping state, running the autopilot loop which takes actions, and communicating between the IDE and GUI. + +2. **Continue IDE Client** - The Continue IDE Client is a plugin for the IDE which implements the Continue IDE Protocol. This allows the server to request actions to be taken within the IDE, for example if `sdk.ide.setFileOpen("main.py")` is called on the server, it will communicate over websocketes with the IDE, which will open the file `main.py`. The first IDE Client we have built is for VS Code, but we plan to build clients for other IDEs in the future. The IDE Client must 1. implement the websockets protocol, as is done [here](./extension/src/continueIdeClient.ts) for VS Code and 2. launch the Continue Server, like [here](./extension/src/activation/environmentSetup.ts), and 3. 
display the Continue GUI in a sidebar, like [here](./extension/src/debugPanel.ts). + +3. **Continue GUI** - The Continue GUI is a React application that gives the user control over Continue. It displays the history of Steps, shows what context is included in the current Step, and lets the users enter natural language or slash commands to initiate new Steps. The GUI communicates with the Continue Server over its own websocket connection + +It is important that the IDE Client and GUI never communicate except when the IDE Client initially sets up the GUI. This ensures that the server is the source-of-truth for state, and that we can easily extend Continue to work in other IDEs. + +![Continue Architecture](https://continue.dev/docs/assets/images/continue-architecture-146a90742e25f6524452c74fe44fa2a0.png) ### Continue VS Code Client @@ -77,18 +108,40 @@ When state is updated on the server, we currently send the entirety of the objec - `active`, whether the autopilot is currently running a step. Displayed as a loader while step is running. - `user_input_queue`, the queue of user inputs that have not yet been processed due to waiting for previous Steps to complete. Displayed below the `active` loader until popped from the queue. - `default_model`, the default model used for completions. Displayed as a toggleable button on the bottom of the GUI. -- `highlighted_ranges`, the ranges of code that have been selected to include as context. Displayed just above the main text input. +- `selected_context_items`, the ranges of code and other items (like GitHub Issues, files, etc...) that have been selected to include as context. Displayed just above the main text input. - `slash_commands`, the list of available slash commands. Displayed in the main text input dropdown. - `adding_highlighted_code`, whether highlighting of new code for context is locked. Displayed as a button adjacent to `highlighted_ranges`. 
Updates are sent with `await sdk.update_ui()` when needed explicitly or `await autopilot.update_subscribers()` automatically between each Step. The GUI can listen for state updates with `ContinueGUIClientProtocol.onStateUpdate()`. -## Ways to Contribute +## βοΈ Core Concepts + +All of Continue's logic happens inside of the server, and it is built around a few core concepts. Most of these are Pydantic Models defined in [core/main.py](./continuedev/src/continuedev/core/main.py). + +### `Step` + +Everything in Continue is a "Step". The `Step` class defines 2 methods: -### Report Bugs +1. `async def run(self, sdk: ContinueSDK) -> Coroutine[Observation, None, None]` - This method defines what happens when the Step is run. It has access to the Continue SDK, which lets you take actions in the IDE, call LLMs, run nested Steps, and more. Optionally, a Step can return an `Observation` object, which a `Policy` can use to make decisions about what to do next. -### Suggest Enhancements +2. `async def describe(self, models: Models) -> Coroutine[str, None, None]` - After each Step is run, this method is called to asynchronously generate a summary title for the step. A `Models` object is passed so that you have access to LLMs to summarize for you. + +Steps are designed to be composable, so that you can easily build new Steps by combining existing ones. And because they are Pydantic models, they can instantly be used as tools useable by an LLM, for example with OpenAI's function-calling functionality (see [ChatWithFunctions](./continuedev/src/continuedev/steps/chat.py) for an example of this). -### Updating / Improving Documentation +Some of the most commonly used Steps are: + +- [`SimpleChatStep`](./continuedev/src/continuedev/steps/chat.py) - This is the default Step that is run when the user enters natural language input. It takes the user's input and runs it through the default LLM, then displays the result in the GUI. 
+ +- [`EditHighlightedCodeStep`](./continuedev/src/continuedev/steps/core/core.py) - This is the Step run when a user highlights code, enters natural language, and presses CMD/CTRL+ENTER, or uses the slash command '/edit'. It opens a side-by-side diff editor, where updated code is streamed to fulfil the user's request. + +### `Autopilot` + +In [autopilot.py](./continuedev/src/continuedev/core/autopilot.py), we define the `Autopilot` class, which is the central entity responsible for keeping track of state and running the input/action loop. + +### `Observation` + +An `Observation` is a simple Pydantic model that can be used as a trigger to run a `Step`. For example, if running one `Step` results in an error, this can be returned as an `Observation` that can be used to trigger a `Step` that fixes the error. This is not being used frequently in the codebase right now, but we plan to use it as the basis of various "hooks" that will aid in the development of agents acting within the IDE. + +### `Policy` -Continue is continuously improving, but a feature isn't complete until it is reflected in the documentation! +A `Policy` implements a method `def next(self, config: ContinueConfig, history: History) -> Step`, which decides which `Step` the `Autopilot` should run next. The default policy is defined in [policy.py](./continuedev/src/continuedev/core/policy.py) and runs `SimpleChatStep` by default, or a slash command when the input begins with '/'. It also displays a welcome message at the beginning of each session. If interested in developing agents that autonomously take longer sequences of actions in the IDE, the `Policy` class is the place to start. @@ -44,6 +44,16 @@ Let Continue build the scaffolding of Python scripts, React components, and more ### [Download for VS Code](https://marketplace.visualstudio.com/items?itemName=Continue.continue) +## Install + +Continue requires that you have Python 3.8 or greater. 
If you do not, please [install](https://python.org) it + +If your Continue server is not setting up, please check the console logs: +1. `cmd+shift+p` (MacOS) / `ctrl+shift+p` (Windows) +2. Search for and then select "Developer: Toggle Developer Tools" +3. Select `Console` +4. Read the console logs + ## OpenAI API Key New users can try out Continue with GPT-4 using a proxy server that securely makes calls to OpenAI using our API key. Continue should just work the first time you install the extension in VS Code. diff --git a/continuedev/README.md b/continuedev/README.md index 528cf75a..d3ead8ec 100644 --- a/continuedev/README.md +++ b/continuedev/README.md @@ -1,19 +1,29 @@ # Continue PyPI Package -This package contains the [Continue](https://github.com/continuedev.com/continue) server and core classes needed to build your own recipes. +This package contains the [Continue](https://github.com/continuedev/continue) server and core classes needed to build your own recipes. Continue is a Python library for automating repetitive sequences of software development tasks using language models. Using our VS Code extension, you can build, run, and refine these recipes as they natively interact with your codebase. Read the docs [here](https://continue.dev/docs) or download the VS Code extension [here](https://marketplace.visualstudio.com/items?itemName=Continue.continue). ## Continue Server -The Continue server acts as a bridge between the Continue React app and your IDE, running your recipes and acting on the codebase. +The Continue server acts as a bridge between the Continue React app and your IDE, running your recipes and acting on the codebase. Start it by running the following commands: + 1. `cd continuedev` 2. Make sure packages are installed with `poetry install` -3. 
`poetry shell` + - If poetry is not installed, you can install with + ```bash + curl -sSL https://install.python-poetry.org | python3 - + ``` + (official instructions [here](https://python-poetry.org/docs/#installing-with-the-official-installer)) +3. `poetry shell` to activate the virtual environment 4. `cd ..` -5. `python3 -m continuedev.src.continuedev.server.main` +5. `python3 -m continuedev.src.continuedev.server.main` to start the server + +Once you've validated that this works, you'll often want to use a debugger, in which case we've provided a launch configuration for VS Code in `.vscode/launch.json`. To start the debugger in VS Code, ensure that the workspace directory is the root of the `continue` repo, then press F5. + +> Note: To start the debugger, you'll have to select the poetry Python interpreter (`/path-to-poetry-venv/bin/python3`) in the bottom right of the VS Code window. If you don't see this, you may have to install the [Python extension](https://marketplace.visualstudio.com/items?itemName=ms-python.python). ## Scripts @@ -29,6 +39,8 @@ See the `src/continuedev/libs/steps` folder for examples of writing a Continue s Open a [new GitHub Issue](https://github.com/continuedev/continue/issues/new) or comment on [an existing one](https://github.com/continuedev/continue/issues). Let us know what you would like to contribute, and we will help you make it happen! +For more a more detailed contributing guide, see [CONTRIBUTING.md](../CONTRIBUTING.md). + ## Install from source #### 1. 
Clone this repo @@ -60,4 +72,4 @@ cd continue/extension/scripts && python3 install_from_source.py - [Continue GUI README](./extension/react-app/): learn about the React app that lets users interact with the server and is placed adjacent to the text editor in any suppported IDE - [Schema README](./schema): learn about the JSON Schema types generated from Pydantic models, which we use across the `continuedev/` and `extension/` directories - [Continue Docs README](./docs): learn how our [docs](https://continue.dev/docs) are written and built -- [How to debug the VS Code Extension README](./extension/src/README.md): learn how to set up the VS Code extension, so you can debug it
\ No newline at end of file +- [How to debug the VS Code Extension README](./extension/src/README.md): learn how to set up the VS Code extension, so you can debug it diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py index 4dff2c6d..003962c6 100644 --- a/continuedev/src/continuedev/core/autopilot.py +++ b/continuedev/src/continuedev/core/autopilot.py @@ -15,8 +15,8 @@ from ..libs.context_providers.highlighted_code_context_provider import Highlight from ..server.ide_protocol import AbstractIdeProtocolServer from ..libs.util.queue import AsyncSubscriptionQueue from ..models.main import ContinueBaseModel -from .main import Context, ContinueCustomException, Policy, History, FullState, Step, HistoryNode -from ..steps.core.core import ReversibleStep, ManualEditStep, UserInputStep +from .main import Context, ContinueCustomException, HighlightedRangeContext, Policy, History, FullState, Step, HistoryNode +from ..plugins.steps.core.core import ReversibleStep, ManualEditStep, UserInputStep from ..libs.util.telemetry import capture_event from .sdk import ContinueSDK from ..libs.util.traceback_parsers import get_python_traceback, get_javascript_traceback @@ -39,6 +39,8 @@ def get_error_title(e: Exception) -> str: return "The request failed. Please check your internet connection and try again. If this issue persists, you can use our API key for free by going to VS Code settings and changing the value of continue.OPENAI_API_KEY to \"\"" elif isinstance(e, openai_errors.InvalidRequestError): return 'Invalid request sent to OpenAI. Please try again.' + elif "rate_limit_ip_middleware" in e.__str__(): + return 'You have reached your limit for free usage of our token. You can continue using Continue by entering your own OpenAI API key in VS Code settings.' elif e.__str__().startswith("Cannot connect to host"): return "The request failed. Please check your internet connection and try again." 
return e.__str__() or e.__repr__() diff --git a/continuedev/src/continuedev/core/policy.py b/continuedev/src/continuedev/core/policy.py index 53e482fa..1c87cfeb 100644 --- a/continuedev/src/continuedev/core/policy.py +++ b/continuedev/src/continuedev/core/policy.py @@ -1,14 +1,15 @@ from textwrap import dedent from typing import Union -from ..steps.chat import SimpleChatStep -from ..steps.welcome import WelcomeStep +from ..plugins.steps.chat import SimpleChatStep +from ..plugins.steps.welcome import WelcomeStep from .config import ContinueConfig -from ..steps.steps_on_startup import StepsOnStartupStep +from ..plugins.steps.steps_on_startup import StepsOnStartupStep from .main import Step, History, Policy from .observation import UserInputObservation -from ..steps.core.core import MessageStep -from ..steps.custom_command import CustomCommandStep +from ..plugins.steps.core.core import MessageStep +from ..plugins.steps.custom_command import CustomCommandStep +from ..plugins.steps.main import EditHighlightedCodeStep def parse_slash_command(inp: str, config: ContinueConfig) -> Union[None, Step]: @@ -43,7 +44,7 @@ def parse_custom_command(inp: str, config: ContinueConfig) -> Union[None, Step]: return None -class DemoPolicy(Policy): +class DefaultPolicy(Policy): ran_code_last: bool = False def next(self, config: ContinueConfig, history: History) -> Step: diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py index 59f33707..f925f20f 100644 --- a/continuedev/src/continuedev/core/sdk.py +++ b/continuedev/src/continuedev/core/sdk.py @@ -3,8 +3,7 @@ from functools import cached_property from typing import Coroutine, Dict, Union import os - -from ..steps.core.core import DefaultModelEditCodeStep +from ..plugins.steps.core.core import DefaultModelEditCodeStep from ..models.main import Range from .context import ContextItem from .abstract_sdk import AbstractContinueSDK @@ -18,7 +17,7 @@ from ..libs.llm.ggml import GGML from .observation 
import Observation from ..server.ide_protocol import AbstractIdeProtocolServer from .main import Context, ContinueCustomException, History, HistoryNode, Step, ChatMessage -from ..steps.core.core import * +from ..plugins.steps.core.core import * from ..libs.llm.proxy_server import ProxyServer diff --git a/continuedev/src/continuedev/libs/llm/__init__.py b/continuedev/src/continuedev/libs/llm/__init__.py index 4c4de213..2766db4b 100644 --- a/continuedev/src/continuedev/libs/llm/__init__.py +++ b/continuedev/src/continuedev/libs/llm/__init__.py @@ -9,15 +9,15 @@ from pydantic import BaseModel class LLM(ABC): system_message: Union[str, None] = None - async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]: + async def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, str]: """Return the completion of the text with the given temperature.""" raise NotImplementedError - def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]: + def stream_complete(self, prompt, with_history: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]: """Stream the completion through generator.""" raise NotImplementedError - async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]: + async def stream_chat(self, messages: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]: """Stream the chat through generator.""" raise NotImplementedError diff --git a/continuedev/src/continuedev/libs/llm/anthropic.py b/continuedev/src/continuedev/libs/llm/anthropic.py index c82895c6..625d4e57 100644 --- a/continuedev/src/continuedev/libs/llm/anthropic.py +++ b/continuedev/src/continuedev/libs/llm/anthropic.py @@ -54,7 +54,7 @@ class AnthropicLLM(LLM): prompt += AI_PROMPT return prompt - async def 
stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]: + async def stream_complete(self, prompt, with_history: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]: args = self.default_args.copy() args.update(kwargs) args["stream"] = True @@ -66,7 +66,7 @@ class AnthropicLLM(LLM): ): yield chunk.completion - async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]: + async def stream_chat(self, messages: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]: args = self.default_args.copy() args.update(kwargs) args["stream"] = True @@ -83,7 +83,7 @@ class AnthropicLLM(LLM): "content": chunk.completion } - async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]: + async def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, str]: args = {**self.default_args, **kwargs} args = self._transform_args(args) diff --git a/continuedev/src/continuedev/libs/llm/ggml.py b/continuedev/src/continuedev/libs/llm/ggml.py index 6007fdb4..4889a556 100644 --- a/continuedev/src/continuedev/libs/llm/ggml.py +++ b/continuedev/src/continuedev/libs/llm/ggml.py @@ -26,7 +26,7 @@ class GGML(LLM): def count_tokens(self, text: str): return count_tokens(self.name, text) - async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]: + async def stream_complete(self, prompt, with_history: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]: args = self.default_args.copy() args.update(kwargs) args["stream"] = True @@ -47,7 +47,7 @@ class GGML(LLM): except: raise Exception(str(line)) - async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, 
Dict], None, None]: + async def stream_chat(self, messages: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]: args = {**self.default_args, **kwargs} messages = compile_chat_messages( self.name, messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message) @@ -72,7 +72,7 @@ class GGML(LLM): except: raise Exception(str(line[0])) - async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]: + async def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, str]: args = {**self.default_args, **kwargs} async with aiohttp.ClientSession() as session: diff --git a/continuedev/src/continuedev/libs/llm/hf_inference_api.py b/continuedev/src/continuedev/libs/llm/hf_inference_api.py index 7e11fbbe..36f03270 100644 --- a/continuedev/src/continuedev/libs/llm/hf_inference_api.py +++ b/continuedev/src/continuedev/libs/llm/hf_inference_api.py @@ -16,7 +16,7 @@ class HuggingFaceInferenceAPI(LLM): self.model = model self.system_message = system_message # TODO: Nothing being done with this - def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs): + def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs): """Return the completion of the text with the given temperature.""" API_URL = f"https://api-inference.huggingface.co/models/{self.model}" headers = { diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py index 64bb39a2..a0773c1d 100644 --- a/continuedev/src/continuedev/libs/llm/openai.py +++ b/continuedev/src/continuedev/libs/llm/openai.py @@ -42,7 +42,7 @@ class OpenAI(LLM): def count_tokens(self, text: str): return count_tokens(self.default_model, text) - async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]: + 
async def stream_complete(self, prompt, with_history: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]: args = self.default_args.copy() args.update(kwargs) args["stream"] = True @@ -72,7 +72,7 @@ class OpenAI(LLM): self.write_log(f"Completion:\n\n{completion}") - async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]: + async def stream_chat(self, messages: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]: args = self.default_args.copy() args.update(kwargs) args["stream"] = True @@ -81,7 +81,7 @@ class OpenAI(LLM): del args["functions"] messages = compile_chat_messages( - args["model"], messages, args["max_tokens"], functions=args.get("functions", None), system_message=self.system_message) + args["model"], messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message) self.write_log(f"Prompt: \n\n{format_chat_messages(messages)}") completion = "" async for chunk in await openai.ChatCompletion.acreate( @@ -93,7 +93,7 @@ class OpenAI(LLM): completion += chunk.choices[0].delta.content self.write_log(f"Completion: \n\n{completion}") - async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]: + async def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, str]: args = {**self.default_args, **kwargs} if args["model"] in CHAT_MODELS: diff --git a/continuedev/src/continuedev/libs/llm/proxy_server.py b/continuedev/src/continuedev/libs/llm/proxy_server.py index 46ab19ee..17694afe 100644 --- a/continuedev/src/continuedev/libs/llm/proxy_server.py +++ b/continuedev/src/continuedev/libs/llm/proxy_server.py @@ -36,8 +36,12 @@ class ProxyServer(LLM): def count_tokens(self, text: str): return count_tokens(self.default_model, text) + + def get_headers(self): + # headers with unique id + return 
{"unique_id": self.unique_id} - async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]: + async def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, str]: args = {**self.default_args, **kwargs} messages = compile_chat_messages( @@ -46,17 +50,16 @@ class ProxyServer(LLM): async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session: async with session.post(f"{SERVER_URL}/complete", json={ "messages": messages, - "unique_id": self.unique_id, **args - }) as resp: - try: - response_text = await resp.text() - self.write_log(f"Completion: \n\n{response_text}") - return response_text - except: + }, headers=self.get_headers()) as resp: + if resp.status != 200: raise Exception(await resp.text()) - async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, Generator[Union[Any, List, Dict], None, None]]: + response_text = await resp.text() + self.write_log(f"Completion: \n\n{response_text}") + return response_text + + async def stream_chat(self, messages: List[ChatMessage] = None, **kwargs) -> Coroutine[Any, Any, Generator[Union[Any, List, Dict], None, None]]: args = {**self.default_args, **kwargs} messages = compile_chat_messages( args["model"], messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message) @@ -65,11 +68,12 @@ class ProxyServer(LLM): async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session: async with session.post(f"{SERVER_URL}/stream_chat", json={ "messages": messages, - "unique_id": self.unique_id, **args - }) as resp: + }, headers=self.get_headers()) as resp: # This is streaming application/json instaed of text/event-stream completion = "" + if resp.status != 200: + raise Exception(await resp.text()) async for line in resp.content.iter_chunks(): if line[1]: try: @@ -85,10 +89,12 
@@ class ProxyServer(LLM): except Exception as e: capture_event(self.unique_id, "proxy_server_parse_error", { "error_title": "Proxy server stream_chat parsing failed", "error_message": '\n'.join(traceback.format_exception(e))}) + else: + break self.write_log(f"Completion: \n\n{completion}") - async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]: + async def stream_complete(self, prompt, with_history: List[ChatMessage] = None, **kwargs) -> Generator[Union[Any, List, Dict], None, None]: args = {**self.default_args, **kwargs} messages = compile_chat_messages( self.default_model, with_history, args["max_tokens"], prompt, functions=args.get("functions", None), system_message=self.system_message) @@ -97,10 +103,11 @@ class ProxyServer(LLM): async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session: async with session.post(f"{SERVER_URL}/stream_complete", json={ "messages": messages, - "unique_id": self.unique_id, **args - }) as resp: + }, headers=self.get_headers()) as resp: completion = "" + if resp.status != 200: + raise Exception(await resp.text()) async for line in resp.content.iter_any(): if line: try: diff --git a/continuedev/src/continuedev/libs/util/commonregex.py b/continuedev/src/continuedev/libs/util/commonregex.py new file mode 100644 index 00000000..55da7fc0 --- /dev/null +++ b/continuedev/src/continuedev/libs/util/commonregex.py @@ -0,0 +1,138 @@ +# coding: utf-8 +import json +import re +from typing import Any, Dict + +date = re.compile( + 
'(?:(?<!\:)(?<!\:\d)[0-3]?\d(?:st|nd|rd|th)?\s+(?:of\s+)?(?:jan\.?|january|feb\.?|february|mar\.?|march|apr\.?|april|may|jun\.?|june|jul\.?|july|aug\.?|august|sep\.?|september|oct\.?|october|nov\.?|november|dec\.?|december)|(?:jan\.?|january|feb\.?|february|mar\.?|march|apr\.?|april|may|jun\.?|june|jul\.?|july|aug\.?|august|sep\.?|september|oct\.?|october|nov\.?|november|dec\.?|december)\s+(?<!\:)(?<!\:\d)[0-3]?\d(?:st|nd|rd|th)?)(?:\,)?\s*(?:\d{4})?|[0-3]?\d[-\./][0-3]?\d[-\./]\d{2,4}', re.IGNORECASE) +time = re.compile( + '\d{1,2}:\d{2} ?(?:[ap]\.?m\.?)?|\d[ap]\.?m\.?', re.IGNORECASE) +phone = re.compile( + '''((?:(?<![\d-])(?:\+?\d{1,3}[-.\s*]?)?(?:\(?\d{3}\)?[-.\s*]?)?\d{3}[-.\s*]?\d{4}(?![\d-]))|(?:(?<![\d-])(?:(?:\(\+?\d{2}\))|(?:\+?\d{2}))\s*\d{2}\s*\d{3}\s*\d{4}(?![\d-])))''') +phones_with_exts = re.compile( + '((?:(?:\+?1\s*(?:[.-]\s*)?)?(?:\(\s*(?:[2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9])\s*\)|(?:[2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9]))\s*(?:[.-]\s*)?)?(?:[2-9]1[02-9]|[2-9][02-9]1|[2-9][02-9]{2})\s*(?:[.-]\s*)?(?:[0-9]{4})(?:\s*(?:#|x\.?|ext\.?|extension)\s*(?:\d+)?))', re.IGNORECASE) +link = 
re.compile('(?i)((?:https?://|www\d{0,3}[.])?[a-z0-9.\-]+[.](?:(?:international)|(?:construction)|(?:contractors)|(?:enterprises)|(?:photography)|(?:immobilien)|(?:management)|(?:technology)|(?:directory)|(?:education)|(?:equipment)|(?:institute)|(?:marketing)|(?:solutions)|(?:builders)|(?:clothing)|(?:computer)|(?:democrat)|(?:diamonds)|(?:graphics)|(?:holdings)|(?:lighting)|(?:plumbing)|(?:training)|(?:ventures)|(?:academy)|(?:careers)|(?:company)|(?:domains)|(?:florist)|(?:gallery)|(?:guitars)|(?:holiday)|(?:kitchen)|(?:recipes)|(?:shiksha)|(?:singles)|(?:support)|(?:systems)|(?:agency)|(?:berlin)|(?:camera)|(?:center)|(?:coffee)|(?:estate)|(?:kaufen)|(?:luxury)|(?:monash)|(?:museum)|(?:photos)|(?:repair)|(?:social)|(?:tattoo)|(?:travel)|(?:viajes)|(?:voyage)|(?:build)|(?:cheap)|(?:codes)|(?:dance)|(?:email)|(?:glass)|(?:house)|(?:ninja)|(?:photo)|(?:shoes)|(?:solar)|(?:today)|(?:aero)|(?:arpa)|(?:asia)|(?:bike)|(?:buzz)|(?:camp)|(?:club)|(?:coop)|(?:farm)|(?:gift)|(?:guru)|(?:info)|(?:jobs)|(?:kiwi)|(?:land)|(?:limo)|(?:link)|(?:menu)|(?:mobi)|(?:moda)|(?:name)|(?:pics)|(?:pink)|(?:post)|(?:rich)|(?:ruhr)|(?:sexy)|(?:tips)|(?:wang)|(?:wien)|(?:zone)|(?:biz)|(?:cab)|(?:cat)|(?:ceo)|(?:com)|(?:edu)|(?:gov)|(?:int)|(?:mil)|(?:net)|(?:onl)|(?:org)|(?:pro)|(?:red)|(?:tel)|(?:uno)|(?:xxx)|(?:ac)|(?:ad)|(?:ae)|(?:af)|(?:ag)|(?:ai)|(?:al)|(?:am)|(?:an)|(?:ao)|(?:aq)|(?:ar)|(?:as)|(?:at)|(?:au)|(?:aw)|(?:ax)|(?:az)|(?:ba)|(?:bb)|(?:bd)|(?:be)|(?:bf)|(?:bg)|(?:bh)|(?:bi)|(?:bj)|(?:bm)|(?:bn)|(?:bo)|(?:br)|(?:bs)|(?:bt)|(?:bv)|(?:bw)|(?:by)|(?:bz)|(?:ca)|(?:cc)|(?:cd)|(?:cf)|(?:cg)|(?:ch)|(?:ci)|(?:ck)|(?:cl)|(?:cm)|(?:cn)|(?:co)|(?:cr)|(?:cu)|(?:cv)|(?:cw)|(?:cx)|(?:cy)|(?:cz)|(?:de)|(?:dj)|(?:dk)|(?:dm)|(?:do)|(?:dz)|(?:ec)|(?:ee)|(?:eg)|(?:er)|(?:es)|(?:et)|(?:eu)|(?:fi)|(?:fj)|(?:fk)|(?:fm)|(?:fo)|(?:fr)|(?:ga)|(?:gb)|(?:gd)|(?:ge)|(?:gf)|(?:gg)|(?:gh)|(?:gi)|(?:gl)|(?:gm)|(?:gn)|(?:gp)|(?:gq)|(?:gr)|(?:gs)|(?:gt)|(?:gu)|(?:gw)|(?:gy)|(?:hk)|(?:hm)|(?:hn)|(?:hr)|(?:ht)
|(?:hu)|(?:id)|(?:ie)|(?:il)|(?:im)|(?:in)|(?:io)|(?:iq)|(?:ir)|(?:is)|(?:it)|(?:je)|(?:jm)|(?:jo)|(?:jp)|(?:ke)|(?:kg)|(?:kh)|(?:ki)|(?:km)|(?:kn)|(?:kp)|(?:kr)|(?:kw)|(?:ky)|(?:kz)|(?:la)|(?:lb)|(?:lc)|(?:li)|(?:lk)|(?:lr)|(?:ls)|(?:lt)|(?:lu)|(?:lv)|(?:ly)|(?:ma)|(?:mc)|(?:md)|(?:me)|(?:mg)|(?:mh)|(?:mk)|(?:ml)|(?:mm)|(?:mn)|(?:mo)|(?:mp)|(?:mq)|(?:mr)|(?:ms)|(?:mt)|(?:mu)|(?:mv)|(?:mw)|(?:mx)|(?:my)|(?:mz)|(?:na)|(?:nc)|(?:ne)|(?:nf)|(?:ng)|(?:ni)|(?:nl)|(?:no)|(?:np)|(?:nr)|(?:nu)|(?:nz)|(?:om)|(?:pa)|(?:pe)|(?:pf)|(?:pg)|(?:ph)|(?:pk)|(?:pl)|(?:pm)|(?:pn)|(?:pr)|(?:ps)|(?:pt)|(?:pw)|(?:py)|(?:qa)|(?:re)|(?:ro)|(?:rs)|(?:ru)|(?:rw)|(?:sa)|(?:sb)|(?:sc)|(?:sd)|(?:se)|(?:sg)|(?:sh)|(?:si)|(?:sj)|(?:sk)|(?:sl)|(?:sm)|(?:sn)|(?:so)|(?:sr)|(?:st)|(?:su)|(?:sv)|(?:sx)|(?:sy)|(?:sz)|(?:tc)|(?:td)|(?:tf)|(?:tg)|(?:th)|(?:tj)|(?:tk)|(?:tl)|(?:tm)|(?:tn)|(?:to)|(?:tp)|(?:tr)|(?:tt)|(?:tv)|(?:tw)|(?:tz)|(?:ua)|(?:ug)|(?:uk)|(?:us)|(?:uy)|(?:uz)|(?:va)|(?:vc)|(?:ve)|(?:vg)|(?:vi)|(?:vn)|(?:vu)|(?:wf)|(?:ws)|(?:ye)|(?:yt)|(?:za)|(?:zm)|(?:zw))(?:/[^\s()<>]+[^\s`!()\[\]{};:\'".,<>?\xab\xbb\u201c\u201d\u2018\u2019])?)', re.IGNORECASE) +email = re.compile( + "([a-z0-9!#$%&'*+\/=?^_`{|.}~-]+@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)", re.IGNORECASE) +ip = re.compile('(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)', re.IGNORECASE) +ipv6 = re.compile( + '\s*(?!.*::.*::)(?:(?!:)|:(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)){3})\s*', re.VERBOSE | re.IGNORECASE | re.DOTALL) +price = re.compile('[$]\s?[+-]?[0-9]{1,3}(?:(?:,?[0-9]{3}))*(?:\.[0-9]{1,2})?') +hex_color = re.compile('(#(?:[0-9a-fA-F]{8})|#(?:[0-9a-fA-F]{3}){1,2})\\b') +credit_card = re.compile('((?:(?:\\d{4}[- 
]?){3}\\d{4}|\\d{15,16}))(?![\\d])') +btc_address = re.compile( + '(?<![a-km-zA-HJ-NP-Z0-9])[13][a-km-zA-HJ-NP-Z0-9]{26,33}(?![a-km-zA-HJ-NP-Z0-9])') +street_address = re.compile( + '\d{1,4} [\w\s]{1,20}(?:street|st|avenue|ave|road|rd|highway|hwy|square|sq|trail|trl|drive|dr|court|ct|park|parkway|pkwy|circle|cir|boulevard|blvd)\W?(?=\s|$)', re.IGNORECASE) +zip_code = re.compile(r'\b\d{5}(?:[-\s]\d{4})?\b') +po_box = re.compile(r'P\.? ?O\.? Box \d+', re.IGNORECASE) +ssn = re.compile( + '(?!000|666|333)0*(?:[0-6][0-9][0-9]|[0-7][0-6][0-9]|[0-7][0-7][0-2])[- ](?!00)[0-9]{2}[- ](?!0000)[0-9]{4}') +win_absolute_filepath = re.compile( + r'^(?:[a-zA-Z]\:|\\\\[\w\.]+\\[\w.$]+)\\(?:[\w]+\\)*\w([\w.])+', re.IGNORECASE) +unix_absolute_filepath = re.compile( + r'^\/(?:[\/\w]+\/)*\w([\w.])+', re.IGNORECASE) + +regexes = { + "win_absolute_filepath": win_absolute_filepath, + "unix_absolute_filepath": unix_absolute_filepath, + "dates": date, + "times": time, + "phones": phone, + "phones_with_exts": phones_with_exts, + "links": link, + "emails": email, + "ips": ip, + "ipv6s": ipv6, + "prices": price, + "hex_colors": hex_color, + "credit_cards": credit_card, + "btc_addresses": btc_address, + "street_addresses": street_address, + "zip_codes": zip_code, + "po_boxes": po_box, + "ssn_number": ssn, +} + +placeholders = { + "win_absolute_filepath": "<FILEPATH>", + "unix_absolute_filepath": "<FILEPATH>", + "dates": "<DATE>", + "times": "<TIME>", + "phones": "<PHONE>", + "phones_with_exts": "<PHONE_WITH_EXT>", + "links": "<LINK>", + "emails": "<EMAIL>", + "ips": "<IP>", + "ipv6s": "<IPV6>", + "prices": "<PRICE>", + "hex_colors": "<HEX_COLOR>", + "credit_cards": "<CREDIT_CARD>", + "btc_addresses": "<BTC_ADDRESS>", + "street_addresses": "<STREET_ADDRESS>", + "zip_codes": "<ZIP_CODE>", + "po_boxes": "<PO_BOX>", + "ssn_number": "<SSN>", +} + + +class regex: + + def __init__(self, obj, regex): + self.obj = obj + self.regex = regex + + def __call__(self, *args): + def regex_method(text=None): + 
return [x.strip() for x in self.regex.findall(text or self.obj.text)] + return regex_method + + +class CommonRegex(object): + + def __init__(self, text=""): + self.text = text + + for k, v in list(regexes.items()): + setattr(self, k, regex(self, v)(self)) + + if text: + for key in list(regexes.keys()): + method = getattr(self, key) + setattr(self, key, method()) + + +pii_parser = CommonRegex() + + +def clean_pii_from_str(text: str): + """Replace personally identifiable information (PII) with placeholders.""" + for regex_name, regex in list(regexes.items()): + placeholder = placeholders[regex_name] + text = regex.sub(placeholder, text) + + return text + + +def clean_pii_from_any(v: Any) -> Any: + """Replace personally identifiable information (PII) with placeholders. Not guaranteed to return same type as input.""" + if isinstance(v, str): + return clean_pii_from_str(v) + elif isinstance(v, dict): + cleaned_dict = {} + for key, value in v.items(): + cleaned_dict[key] = clean_pii_from_any(value) + return cleaned_dict + elif isinstance(v, list): + return [clean_pii_from_any(x) for x in v] + else: + # Try to convert to string + try: + orig_text = str(v) + cleaned_text = clean_pii_from_str(orig_text) + if orig_text != cleaned_text: + return cleaned_text + else: + return v + except: + return v diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py index 987aa722..c58ae499 100644 --- a/continuedev/src/continuedev/libs/util/count_tokens.py +++ b/continuedev/src/continuedev/libs/util/count_tokens.py @@ -73,9 +73,9 @@ def prune_chat_history(model: str, chat_history: List[ChatMessage], max_tokens: message = chat_history.pop(0) total_tokens -= count_tokens(model, message.content) - # 3. Truncate message in the last 5 + # 3. 
Truncate message in the last 5, except last 1 i = 0 - while total_tokens > max_tokens and len(chat_history) > 0 and i < len(chat_history): + while total_tokens > max_tokens and len(chat_history) > 0 and i < len(chat_history) - 1: message = chat_history[i] total_tokens -= count_tokens(model, message.content) total_tokens += count_tokens(model, message.summary) @@ -101,13 +101,16 @@ def prune_chat_history(model: str, chat_history: List[ChatMessage], max_tokens: TOKEN_BUFFER_FOR_SAFETY = 100 -def compile_chat_messages(model: str, msgs: List[ChatMessage], max_tokens: int, prompt: Union[str, None] = None, functions: Union[List, None] = None, system_message: Union[str, None] = None) -> List[Dict]: +def compile_chat_messages(model: str, msgs: Union[List[ChatMessage], None], max_tokens: int, prompt: Union[str, None] = None, functions: Union[List, None] = None, system_message: Union[str, None] = None) -> List[Dict]: """ The total number of tokens is system_message + sum(msgs) + functions + prompt after it is converted to a message """ + msgs_copy = [msg.copy(deep=True) + for msg in msgs] if msgs is not None else [] + if prompt is not None: prompt_msg = ChatMessage(role="user", content=prompt, summary=prompt) - msgs += [prompt_msg] + msgs_copy += [prompt_msg] if system_message is not None: # NOTE: System message takes second precedence to user prompt, so it is placed just before @@ -116,7 +119,7 @@ def compile_chat_messages(model: str, msgs: List[ChatMessage], max_tokens: int, system_chat_msg = ChatMessage( role="system", content=rendered_system_message, summary=rendered_system_message) # insert at second-to-last position - msgs.insert(-1, system_chat_msg) + msgs_copy.insert(-1, system_chat_msg) # Add tokens from functions function_tokens = 0 @@ -124,11 +127,11 @@ def compile_chat_messages(model: str, msgs: List[ChatMessage], max_tokens: int, for function in functions: function_tokens += count_tokens(model, json.dumps(function)) - msgs = prune_chat_history( - model, msgs, 
MAX_TOKENS_FOR_MODEL[model], function_tokens + max_tokens + TOKEN_BUFFER_FOR_SAFETY) + msgs_copy = prune_chat_history( + model, msgs_copy, MAX_TOKENS_FOR_MODEL[model], function_tokens + max_tokens + TOKEN_BUFFER_FOR_SAFETY) history = [msg.to_dict(with_functions=functions is not None) - for msg in msgs] + for msg in msgs_copy] # Move system message back to start if system_message is not None and len(history) >= 2 and history[-2]["role"] == "system": diff --git a/continuedev/src/continuedev/libs/util/telemetry.py b/continuedev/src/continuedev/libs/util/telemetry.py index bd9fde9d..17735dce 100644 --- a/continuedev/src/continuedev/libs/util/telemetry.py +++ b/continuedev/src/continuedev/libs/util/telemetry.py @@ -3,6 +3,7 @@ from posthog import Posthog from ...core.config import load_config import os from dotenv import load_dotenv +from .commonregex import clean_pii_from_any load_dotenv() in_codespaces = os.getenv("CODESPACES") == "true" @@ -13,10 +14,14 @@ posthog = Posthog('phc_JS6XFROuNbhJtVCEdTSYk6gl5ArRrTNMpCcguAXlSPs', def capture_event(unique_id: str, event_name: str, event_properties: Any): + # Return early if telemetry is disabled config = load_config('.continue/config.json') if not config.allow_anonymous_telemetry: return if in_codespaces: event_properties['codespaces'] = True - posthog.capture(unique_id, event_name, event_properties) + + # Send event to PostHog + posthog.capture(unique_id, event_name, + clean_pii_from_any(event_properties)) diff --git a/continuedev/src/continuedev/recipes/AddTransformRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/README.md index d735e0cd..d735e0cd 100644 --- a/continuedev/src/continuedev/recipes/AddTransformRecipe/README.md +++ b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/README.md diff --git a/continuedev/src/continuedev/recipes/AddTransformRecipe/dlt_transform_docs.md b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/dlt_transform_docs.md index 
658b285f..658b285f 100644 --- a/continuedev/src/continuedev/recipes/AddTransformRecipe/dlt_transform_docs.md +++ b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/dlt_transform_docs.md diff --git a/continuedev/src/continuedev/recipes/AddTransformRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/main.py index fdd343f5..5d242f7c 100644 --- a/continuedev/src/continuedev/recipes/AddTransformRecipe/main.py +++ b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/main.py @@ -1,9 +1,9 @@ from textwrap import dedent -from ...core.main import Step -from ...core.sdk import ContinueSDK -from ...steps.core.core import WaitForUserInputStep -from ...steps.core.core import MessageStep +from ....core.main import Step +from ....core.sdk import ContinueSDK +from ....plugins.steps.core.core import WaitForUserInputStep +from ....plugins.steps.core.core import MessageStep from .steps import SetUpChessPipelineStep, AddTransformStep diff --git a/continuedev/src/continuedev/recipes/AddTransformRecipe/steps.py b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/steps.py index 9744146c..8c6446da 100644 --- a/continuedev/src/continuedev/recipes/AddTransformRecipe/steps.py +++ b/continuedev/src/continuedev/plugins/recipes/AddTransformRecipe/steps.py @@ -1,14 +1,10 @@ import os from textwrap import dedent -from ...models.main import Range -from ...models.filesystem import RangeInFile -from ...steps.core.core import MessageStep -from ...core.sdk import Models -from ...core.observation import DictObservation -from ...models.filesystem_edit import AddFile -from ...core.main import Step -from ...core.sdk import ContinueSDK +from ....plugins.steps.core.core import MessageStep +from ....core.sdk import Models +from ....core.main import Step +from ....core.sdk import ContinueSDK AI_ASSISTED_STRING = "(β¨ AI-Assisted β¨)" diff --git a/continuedev/src/continuedev/recipes/ContinueRecipeRecipe/README.md 
b/continuedev/src/continuedev/plugins/recipes/ContinueRecipeRecipe/README.md index df66104f..df66104f 100644 --- a/continuedev/src/continuedev/recipes/ContinueRecipeRecipe/README.md +++ b/continuedev/src/continuedev/plugins/recipes/ContinueRecipeRecipe/README.md diff --git a/continuedev/src/continuedev/recipes/ContinueRecipeRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/ContinueRecipeRecipe/main.py index 953fb0c2..c0f9e7e3 100644 --- a/continuedev/src/continuedev/recipes/ContinueRecipeRecipe/main.py +++ b/continuedev/src/continuedev/plugins/recipes/ContinueRecipeRecipe/main.py @@ -1,8 +1,7 @@ from textwrap import dedent -from ...models.filesystem import RangeInFile -from ...steps.main import EditHighlightedCodeStep -from ...core.main import Step -from ...core.sdk import ContinueSDK +from ....plugins.steps.main import EditHighlightedCodeStep +from ....core.main import Step +from ....core.sdk import ContinueSDK class ContinueStepStep(Step): diff --git a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/README.md index e69de29b..e69de29b 100644 --- a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/README.md +++ b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/README.md diff --git a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/main.py index 55ef107b..84363e02 100644 --- a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/main.py +++ b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/main.py @@ -1,9 +1,9 @@ from textwrap import dedent -from ...core.sdk import ContinueSDK -from ...core.main import Step -from ...steps.core.core import WaitForUserInputStep -from ...steps.core.core import MessageStep +from ....core.sdk import ContinueSDK +from ....core.main import Step +from ....plugins.steps.core.core import WaitForUserInputStep +from 
....plugins.steps.core.core import MessageStep from .steps import SetupPipelineStep, ValidatePipelineStep, RunQueryStep diff --git a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/steps.py b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/steps.py index 60218ef9..433e309e 100644 --- a/continuedev/src/continuedev/recipes/CreatePipelineRecipe/steps.py +++ b/continuedev/src/continuedev/plugins/recipes/CreatePipelineRecipe/steps.py @@ -1,15 +1,13 @@ import os -import subprocess from textwrap import dedent import time -from ...models.main import Range -from ...models.filesystem import RangeInFile -from ...steps.core.core import MessageStep -from ...core.observation import DictObservation, InternalErrorObservation -from ...models.filesystem_edit import AddFile, FileEdit -from ...core.main import Step -from ...core.sdk import ContinueSDK, Models +from ....models.main import Range +from ....models.filesystem import RangeInFile +from ....plugins.steps.core.core import MessageStep +from ....models.filesystem_edit import AddFile, FileEdit +from ....core.main import Step +from ....core.sdk import ContinueSDK, Models AI_ASSISTED_STRING = "(β¨ AI-Assisted β¨)" diff --git a/continuedev/src/continuedev/recipes/DDtoBQRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/README.md index c4981e56..c4981e56 100644 --- a/continuedev/src/continuedev/recipes/DDtoBQRecipe/README.md +++ b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/README.md diff --git a/continuedev/src/continuedev/recipes/DDtoBQRecipe/dlt_duckdb_to_bigquery_docs.md b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/dlt_duckdb_to_bigquery_docs.md index eb68e117..eb68e117 100644 --- a/continuedev/src/continuedev/recipes/DDtoBQRecipe/dlt_duckdb_to_bigquery_docs.md +++ b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/dlt_duckdb_to_bigquery_docs.md diff --git a/continuedev/src/continuedev/recipes/DDtoBQRecipe/main.py 
b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/main.py index 1ae84310..5b6aa8f0 100644 --- a/continuedev/src/continuedev/recipes/DDtoBQRecipe/main.py +++ b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/main.py @@ -1,9 +1,8 @@ from textwrap import dedent -from ...core.main import Step -from ...core.sdk import ContinueSDK -from ...steps.core.core import WaitForUserInputStep -from ...steps.core.core import MessageStep +from ....core.main import Step +from ....core.sdk import ContinueSDK +from ....plugins.steps.core.core import MessageStep from .steps import SetUpChessPipelineStep, SwitchDestinationStep, LoadDataStep # Based on the following guide: diff --git a/continuedev/src/continuedev/recipes/DDtoBQRecipe/steps.py b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/steps.py index df414e2e..767936b8 100644 --- a/continuedev/src/continuedev/recipes/DDtoBQRecipe/steps.py +++ b/continuedev/src/continuedev/plugins/recipes/DDtoBQRecipe/steps.py @@ -1,17 +1,11 @@ import os -import subprocess from textwrap import dedent -import time - -from ...steps.find_and_replace import FindAndReplaceStep -from ...models.main import Range -from ...models.filesystem import RangeInFile -from ...steps.core.core import MessageStep -from ...core.sdk import Models -from ...core.observation import DictObservation, InternalErrorObservation -from ...models.filesystem_edit import AddFile, FileEdit -from ...core.main import Step -from ...core.sdk import ContinueSDK + +from ....plugins.steps.find_and_replace import FindAndReplaceStep +from ....plugins.steps.core.core import MessageStep +from ....core.sdk import Models +from ....core.main import Step +from ....core.sdk import ContinueSDK AI_ASSISTED_STRING = "(β¨ AI-Assisted β¨)" diff --git a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/README.md index e69de29b..e69de29b 100644 --- 
a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/README.md +++ b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/README.md diff --git a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/main.py index 2a3e3566..54cba45f 100644 --- a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/main.py +++ b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/main.py @@ -1,10 +1,9 @@ from textwrap import dedent -from ...steps.input.nl_multiselect import NLMultiselectStep -from ...core.main import Step -from ...core.sdk import ContinueSDK -from ...steps.core.core import WaitForUserInputStep -from ...steps.core.core import MessageStep +from ....plugins.steps.input.nl_multiselect import NLMultiselectStep +from ....core.main import Step +from ....core.sdk import ContinueSDK +from ....plugins.steps.core.core import MessageStep from .steps import SetupPipelineStep, DeployAirflowStep, RunPipelineStep diff --git a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/steps.py b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/steps.py index d9bdbc0a..83067d52 100644 --- a/continuedev/src/continuedev/recipes/DeployPipelineAirflowRecipe/steps.py +++ b/continuedev/src/continuedev/plugins/recipes/DeployPipelineAirflowRecipe/steps.py @@ -1,18 +1,11 @@ import os -import subprocess from textwrap import dedent -import time - -from ...steps.core.core import WaitForUserInputStep -from ...models.main import Range -from ...models.filesystem import RangeInFile -from ...steps.core.core import MessageStep -from ...core.sdk import Models -from ...core.observation import DictObservation, InternalErrorObservation -from ...models.filesystem_edit import AddFile, FileEdit -from ...core.main import Step -from ...core.sdk import ContinueSDK -from ...steps.find_and_replace import FindAndReplaceStep + +from 
....plugins.steps.core.core import MessageStep +from ....core.sdk import Models +from ....core.main import Step +from ....core.sdk import ContinueSDK +from ....plugins.steps.find_and_replace import FindAndReplaceStep AI_ASSISTED_STRING = "(β¨ AI-Assisted β¨)" @@ -93,5 +86,3 @@ class DeployAirflowStep(Step): # Tell the user to check the schedule and fill in owner, email, other default_args await sdk.run_step(MessageStep(message="Fill in the owner, email, and other default_args in the DAG file with your own personal information. Then the DAG will be ready to run!", name="Fill in default_args")) - - # Run the DAG locally ?? diff --git a/continuedev/src/continuedev/recipes/README.md b/continuedev/src/continuedev/plugins/recipes/README.md index d5a006fb..9860b0e2 100644 --- a/continuedev/src/continuedev/recipes/README.md +++ b/continuedev/src/continuedev/plugins/recipes/README.md @@ -1,5 +1,7 @@ # This is a collaborative collection of Continue recipes +A recipe is technically just a [Step](../steps/README.md), but is intended to be more complex, composed of multiple sub-steps. + Recipes here will automatically be made available in the [Continue VS Code extension](https://marketplace.visualstudio.com/items?itemName=Continue.continue). The `recipes` folder contains all recipes, each with the same structure. 
**If you wish to create your own recipe, please do the following:** diff --git a/continuedev/src/continuedev/recipes/TemplateRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/TemplateRecipe/README.md index 91d1123b..91d1123b 100644 --- a/continuedev/src/continuedev/recipes/TemplateRecipe/README.md +++ b/continuedev/src/continuedev/plugins/recipes/TemplateRecipe/README.md diff --git a/continuedev/src/continuedev/recipes/TemplateRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/TemplateRecipe/main.py index 16132cfd..197abe85 100644 --- a/continuedev/src/continuedev/recipes/TemplateRecipe/main.py +++ b/continuedev/src/continuedev/plugins/recipes/TemplateRecipe/main.py @@ -1,5 +1,7 @@ from typing import Coroutine -from continuedev.core import Step, ContinueSDK, Observation, Models +from ....core.main import Step, Observation +from ....core.sdk import ContinueSDK +from ....core.sdk import Models class TemplateRecipe(Step): diff --git a/continuedev/src/continuedev/recipes/WritePytestsRecipe/README.md b/continuedev/src/continuedev/plugins/recipes/WritePytestsRecipe/README.md index 5ce33ecb..5ce33ecb 100644 --- a/continuedev/src/continuedev/recipes/WritePytestsRecipe/README.md +++ b/continuedev/src/continuedev/plugins/recipes/WritePytestsRecipe/README.md diff --git a/continuedev/src/continuedev/recipes/WritePytestsRecipe/main.py b/continuedev/src/continuedev/plugins/recipes/WritePytestsRecipe/main.py index c7a65fa6..6ef5ffd6 100644 --- a/continuedev/src/continuedev/recipes/WritePytestsRecipe/main.py +++ b/continuedev/src/continuedev/plugins/recipes/WritePytestsRecipe/main.py @@ -1,7 +1,8 @@ from textwrap import dedent from typing import Union -from ...models.filesystem_edit import AddDirectory, AddFile -from ...core.main import Step, ContinueSDK +from ....models.filesystem_edit import AddDirectory, AddFile +from ....core.main import Step +from ....core.sdk import ContinueSDK import os diff --git a/continuedev/src/continuedev/plugins/steps/README.md 
b/continuedev/src/continuedev/plugins/steps/README.md new file mode 100644 index 00000000..12073835 --- /dev/null +++ b/continuedev/src/continuedev/plugins/steps/README.md @@ -0,0 +1,50 @@ +# Steps + +Steps are the composable unit of action in Continue. They define a `run` method which has access to the entire `ContinueSDK`, allowing you to take actions inside the IDE, call language models, and more. In this folder you can find a number of good examples. + +## How to write a step + +a. Start by creating a subclass of `Step` + +You should first consider what will be the parameters of your recipe. These are defined as attributes in the Pydantic class. For example, if you wanted a "filepath" attribute that would look like this: + +```python +class HelloWorldStep(Step): + filepath: str + ... +``` + +b. Next, write the `run` method + +This method takes the ContinueSDK as a parameter, giving you all the tools you need to write your steps (if it's missing something, let us know, we'll add it!). You can write any code inside the run method; this is what will happen when your step is run, line for line. As an example, here's a step that will open a file and append "Hello World!": + +```python +class HelloWorldStep(Step): + filepath: str + + async def run(self, sdk: ContinueSDK): + await sdk.ide.setFileOpen(self.filepath) + await sdk.append_to_file(self.filepath, "Hello World!") +``` + +c. Finally, every Step is displayed with a description of what it has done + +If you'd like to override the default description of your step, which is just the class name, then implement the `describe` method. You can: + +- Return a static string +- Store state in a class attribute (prepend with a double underscore, which signifies (through Pydantic) that this is not a parameter for the Step, just internal state) during the run method, and then grab this in the describe method. 
+- Use state in conjunction with the `models` parameter of the describe method to autogenerate a description with a language model. For example, if you'd used an attribute called `__code_written` to store a string representing some code that was written, you could implement describe as `return models.gpt35.complete(f"{self.__code_written}\n\nSummarize the changes made in the above code.")`. + +Here's an example: + +```python +class HelloWorldStep(Step): + filepath: str + + async def run(self, sdk: ContinueSDK): + await sdk.ide.setFileOpen(self.filepath) + await sdk.append_to_file(self.filepath, "Hello World!") + + def describe(self, models: Models): + return f"Appended 'Hello World!' to {self.filepath}" +``` diff --git a/continuedev/src/continuedev/steps/__init__.py b/continuedev/src/continuedev/plugins/steps/__init__.py index 8b137891..8b137891 100644 --- a/continuedev/src/continuedev/steps/__init__.py +++ b/continuedev/src/continuedev/plugins/steps/__init__.py diff --git a/continuedev/src/continuedev/steps/chat.py b/continuedev/src/continuedev/plugins/steps/chat.py index aade1ea1..2c662459 100644 --- a/continuedev/src/continuedev/steps/chat.py +++ b/continuedev/src/continuedev/plugins/steps/chat.py @@ -3,12 +3,12 @@ from typing import Any, Coroutine, List from pydantic import Field -from ..libs.util.strings import remove_quotes_and_escapes +from ...libs.util.strings import remove_quotes_and_escapes from .main import EditHighlightedCodeStep from .core.core import MessageStep -from ..core.main import FunctionCall, Models -from ..core.main import ChatMessage, Step, step_to_json_schema -from ..core.sdk import ContinueSDK +from ...core.main import FunctionCall, Models +from ...core.main import ChatMessage, Step, step_to_json_schema +from ...core.sdk import ContinueSDK import openai import os from dotenv import load_dotenv diff --git a/continuedev/src/continuedev/steps/chroma.py b/continuedev/src/continuedev/plugins/steps/chroma.py index 9d085981..dbe8363e 100644 ---
a/continuedev/src/continuedev/steps/chroma.py +++ b/continuedev/src/continuedev/plugins/steps/chroma.py @@ -1,10 +1,10 @@ from textwrap import dedent from typing import Coroutine, Union -from ..core.observation import Observation, TextObservation -from ..core.main import Step -from ..core.sdk import ContinueSDK +from ...core.observation import Observation, TextObservation +from ...core.main import Step +from ...core.sdk import ContinueSDK from .core.core import EditFileStep -from ..libs.chroma.query import ChromaIndexManager +from ...libs.chroma.query import ChromaIndexManager from .core.core import EditFileStep diff --git a/continuedev/src/continuedev/steps/clear_history.py b/continuedev/src/continuedev/plugins/steps/clear_history.py index a875c6d3..8f21518b 100644 --- a/continuedev/src/continuedev/steps/clear_history.py +++ b/continuedev/src/continuedev/plugins/steps/clear_history.py @@ -1,5 +1,5 @@ -from ..core.main import Step -from ..core.sdk import ContinueSDK +from ...core.main import Step +from ...core.sdk import ContinueSDK class ClearHistoryStep(Step): diff --git a/continuedev/src/continuedev/steps/comment_code.py b/continuedev/src/continuedev/plugins/steps/comment_code.py index aa17e62c..3e34ab52 100644 --- a/continuedev/src/continuedev/steps/comment_code.py +++ b/continuedev/src/continuedev/plugins/steps/comment_code.py @@ -1,4 +1,4 @@ -from ..core.main import ContinueSDK, Models, Step +from ...core.main import ContinueSDK, Models, Step from .main import EditHighlightedCodeStep diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/plugins/steps/core/core.py index 4afc36e8..5a81e5ee 100644 --- a/continuedev/src/continuedev/steps/core/core.py +++ b/continuedev/src/continuedev/plugins/steps/core/core.py @@ -5,15 +5,15 @@ import difflib from textwrap import dedent from typing import Coroutine, List, Literal, Union -from ...libs.llm.ggml import GGML -from ...models.main import Range -from ...libs.llm.prompt_utils import 
MarkdownStyleEncoderDecoder -from ...models.filesystem_edit import EditDiff, FileEdit, FileEditWithFullContents, FileSystemEdit -from ...models.filesystem import FileSystem, RangeInFile, RangeInFileWithContents -from ...core.observation import Observation, TextObservation, TracebackObservation, UserInputObservation -from ...core.main import ChatMessage, ContinueCustomException, Step, SequentialStep -from ...libs.util.count_tokens import MAX_TOKENS_FOR_MODEL, DEFAULT_MAX_TOKENS -from ...libs.util.strings import dedent_and_get_common_whitespace, remove_quotes_and_escapes +from ....libs.llm.ggml import GGML +from ....models.main import Range +from ....libs.llm.prompt_utils import MarkdownStyleEncoderDecoder +from ....models.filesystem_edit import EditDiff, FileEdit, FileEditWithFullContents, FileSystemEdit +from ....models.filesystem import FileSystem, RangeInFile, RangeInFileWithContents +from ....core.observation import Observation, TextObservation, TracebackObservation, UserInputObservation +from ....core.main import ChatMessage, ContinueCustomException, Step, SequentialStep +from ....libs.util.count_tokens import MAX_TOKENS_FOR_MODEL, DEFAULT_MAX_TOKENS +from ....libs.util.strings import dedent_and_get_common_whitespace, remove_quotes_and_escapes import difflib @@ -220,13 +220,13 @@ class DefaultModelEditCodeStep(Step): if total_tokens < MAX_TOKENS_FOR_MODEL[model_to_use.name]: break - if total_tokens > MAX_TOKENS_FOR_MODEL[model_to_use.name]: - while cur_start_line < max_start_line: - cur_start_line += 1 - total_tokens -= model_to_use.count_tokens( - full_file_contents_lst[cur_end_line]) - if total_tokens < MAX_TOKENS_FOR_MODEL[model_to_use.name]: - break + if total_tokens > MAX_TOKENS_FOR_MODEL[model_to_use.name]: + while cur_start_line < max_start_line: + cur_start_line += 1 + total_tokens -= model_to_use.count_tokens( + full_file_contents_lst[cur_start_line]) + if total_tokens < MAX_TOKENS_FOR_MODEL[model_to_use.name]: + break # Now use the found start/end 
lines to get the prefix and suffix strings file_prefix = "\n".join( diff --git a/continuedev/src/continuedev/steps/custom_command.py b/continuedev/src/continuedev/plugins/steps/custom_command.py index 375900c1..1491a975 100644 --- a/continuedev/src/continuedev/steps/custom_command.py +++ b/continuedev/src/continuedev/plugins/steps/custom_command.py @@ -1,6 +1,6 @@ -from ..libs.util.templating import render_templated_string -from ..core.main import Models, Step -from ..core.sdk import ContinueSDK +from ...libs.util.templating import render_templated_string +from ...core.main import Step +from ...core.sdk import ContinueSDK from ..steps.chat import SimpleChatStep diff --git a/continuedev/src/continuedev/steps/draft/abstract_method.py b/continuedev/src/continuedev/plugins/steps/draft/abstract_method.py index f3131c4b..f3131c4b 100644 --- a/continuedev/src/continuedev/steps/draft/abstract_method.py +++ b/continuedev/src/continuedev/plugins/steps/draft/abstract_method.py diff --git a/continuedev/src/continuedev/steps/draft/migration.py b/continuedev/src/continuedev/plugins/steps/draft/migration.py index f3b36b5e..a76d491b 100644 --- a/continuedev/src/continuedev/steps/draft/migration.py +++ b/continuedev/src/continuedev/plugins/steps/draft/migration.py @@ -1,7 +1,7 @@ # When an edit is made to an existing class or a new sqlalchemy class is created, # this should be kicked off. 
-from ...core.main import Step +from ....core.main import Step class MigrationStep(Step): diff --git a/continuedev/src/continuedev/steps/draft/redux.py b/continuedev/src/continuedev/plugins/steps/draft/redux.py index 17506316..30c8fdbb 100644 --- a/continuedev/src/continuedev/steps/draft/redux.py +++ b/continuedev/src/continuedev/plugins/steps/draft/redux.py @@ -1,5 +1,5 @@ -from ...core.main import Step -from ...core.sdk import ContinueSDK +from ....core.main import Step +from ....core.sdk import ContinueSDK from ..core.core import EditFileStep @@ -25,14 +25,14 @@ class EditReduxStateStep(Step): sdk.run_step(EditFileStep( filepath=selector_filename, prompt=f"Edit the selector to add a new property for {self.description}. The store looks like this: {store_file_contents}" - ) + )) # Reducer reducer_filename = "" sdk.run_step(EditFileStep( filepath=reducer_filename, prompt=f"Edit the reducer to add a new property for {self.description}. The store looks like this: {store_file_contents}" - + )) """ Starts with implementing selector 1. 
RootStore diff --git a/continuedev/src/continuedev/steps/draft/typeorm.py b/continuedev/src/continuedev/plugins/steps/draft/typeorm.py index 153c855f..d06a6fb4 100644 --- a/continuedev/src/continuedev/steps/draft/typeorm.py +++ b/continuedev/src/continuedev/plugins/steps/draft/typeorm.py @@ -1,6 +1,6 @@ from textwrap import dedent -from ...core.main import Step -from ...core.sdk import ContinueSDK +from ....core.main import Step +from ....core.sdk import ContinueSDK class CreateTableStep(Step): diff --git a/continuedev/src/continuedev/steps/feedback.py b/continuedev/src/continuedev/plugins/steps/feedback.py index 6f6a9b15..119e3112 100644 --- a/continuedev/src/continuedev/steps/feedback.py +++ b/continuedev/src/continuedev/plugins/steps/feedback.py @@ -1,8 +1,8 @@ from typing import Coroutine -from ..core.main import Models -from ..core.main import Step -from ..core.sdk import ContinueSDK -from ..libs.util.telemetry import capture_event +from ...core.main import Models +from ...core.main import Step +from ...core.sdk import ContinueSDK +from ...libs.util.telemetry import capture_event class FeedbackStep(Step): diff --git a/continuedev/src/continuedev/steps/find_and_replace.py b/continuedev/src/continuedev/plugins/steps/find_and_replace.py index 690872c0..a2c9c44e 100644 --- a/continuedev/src/continuedev/steps/find_and_replace.py +++ b/continuedev/src/continuedev/plugins/steps/find_and_replace.py @@ -1,6 +1,6 @@ -from ..models.filesystem_edit import FileEdit, Range -from ..core.main import Models, Step -from ..core.sdk import ContinueSDK +from ...models.filesystem_edit import FileEdit, Range +from ...core.main import Models, Step +from ...core.sdk import ContinueSDK class FindAndReplaceStep(Step): diff --git a/continuedev/src/continuedev/steps/help.py b/continuedev/src/continuedev/plugins/steps/help.py index ba1e6087..5111c7cf 100644 --- a/continuedev/src/continuedev/steps/help.py +++ b/continuedev/src/continuedev/plugins/steps/help.py @@ -1,7 +1,7 @@ from textwrap 
import dedent -from ..core.main import ChatMessage, Step -from ..core.sdk import ContinueSDK -from ..libs.util.telemetry import capture_event +from ...core.main import ChatMessage, Step +from ...core.sdk import ContinueSDK +from ...libs.util.telemetry import capture_event help = dedent("""\ Continue is an open-source coding autopilot. It is a VS Code extension that brings the power of ChatGPT to your IDE. diff --git a/continuedev/src/continuedev/steps/input/nl_multiselect.py b/continuedev/src/continuedev/plugins/steps/input/nl_multiselect.py index aee22866..b54d394a 100644 --- a/continuedev/src/continuedev/steps/input/nl_multiselect.py +++ b/continuedev/src/continuedev/plugins/steps/input/nl_multiselect.py @@ -1,7 +1,7 @@ from typing import List, Union from ..core.core import WaitForUserInputStep -from ...core.main import Step -from ...core.sdk import ContinueSDK +from ....core.main import Step +from ....core.sdk import ContinueSDK class NLMultiselectStep(Step): diff --git a/continuedev/src/continuedev/steps/main.py b/continuedev/src/continuedev/plugins/steps/main.py index ce7cbc60..30117c55 100644 --- a/continuedev/src/continuedev/steps/main.py +++ b/continuedev/src/continuedev/plugins/steps/main.py @@ -1,21 +1,18 @@ import os from typing import Coroutine, List, Union - +from textwrap import dedent from pydantic import BaseModel, Field -from ..libs.llm import LLM -from ..models.main import Traceback, Range -from ..models.filesystem_edit import EditDiff, FileEdit -from ..models.filesystem import RangeInFile, RangeInFileWithContents -from ..core.observation import Observation, TextObservation, TracebackObservation -from ..libs.llm.prompt_utils import MarkdownStyleEncoderDecoder -from textwrap import dedent -from ..core.main import ContinueCustomException, Step -from ..core.sdk import ContinueSDK, Models -from ..core.observation import Observation -import subprocess +from ...models.main import Traceback, Range +from ...models.filesystem_edit import EditDiff, FileEdit 
+from ...models.filesystem import RangeInFile, RangeInFileWithContents +from ...core.observation import Observation +from ...libs.llm.prompt_utils import MarkdownStyleEncoderDecoder +from ...core.main import ContinueCustomException, Step +from ...core.sdk import ContinueSDK, Models +from ...core.observation import Observation from .core.core import DefaultModelEditCodeStep -from ..libs.util.calculate_diff import calculate_diff2 +from ...libs.util.calculate_diff import calculate_diff2 class SetupContinueWorkspaceStep(Step): @@ -303,8 +300,7 @@ class SolveTracebackStep(Step): range_in_files.append( RangeInFile.from_entire_file(frame.filepath, content)) - await sdk.run_step(EditCodeStep( - range_in_files=range_in_files, prompt=prompt)) + await sdk.run_step(DefaultModelEditCodeStep(range_in_files=range_in_files, user_input=prompt)) return None diff --git a/continuedev/src/continuedev/steps/on_traceback.py b/continuedev/src/continuedev/plugins/steps/on_traceback.py index efb4c703..e99f212d 100644 --- a/continuedev/src/continuedev/steps/on_traceback.py +++ b/continuedev/src/continuedev/plugins/steps/on_traceback.py @@ -1,8 +1,8 @@ import os from .core.core import UserInputStep -from ..core.main import ChatMessage, Step -from ..core.sdk import ContinueSDK +from ...core.main import ChatMessage, Step +from ...core.sdk import ContinueSDK from .chat import SimpleChatStep diff --git a/continuedev/src/continuedev/steps/open_config.py b/continuedev/src/continuedev/plugins/steps/open_config.py index af55a95a..d950c26f 100644 --- a/continuedev/src/continuedev/steps/open_config.py +++ b/continuedev/src/continuedev/plugins/steps/open_config.py @@ -1,6 +1,6 @@ from textwrap import dedent -from ..core.main import Step -from ..core.sdk import ContinueSDK +from ...core.main import Step +from ...core.sdk import ContinueSDK import os diff --git a/continuedev/src/continuedev/steps/react.py b/continuedev/src/continuedev/plugins/steps/react.py index cddb8b42..8b2e7c2e 100644 --- 
a/continuedev/src/continuedev/steps/react.py +++ b/continuedev/src/continuedev/plugins/steps/react.py @@ -1,8 +1,7 @@ from textwrap import dedent from typing import List, Union, Tuple -from ..core.main import Step -from ..core.sdk import ContinueSDK -from .core.core import MessageStep +from ...core.main import Step +from ...core.sdk import ContinueSDK class NLDecisionStep(Step): diff --git a/continuedev/src/continuedev/steps/search_directory.py b/continuedev/src/continuedev/plugins/steps/search_directory.py index bfb97630..7d02d6fa 100644 --- a/continuedev/src/continuedev/steps/search_directory.py +++ b/continuedev/src/continuedev/plugins/steps/search_directory.py @@ -2,11 +2,11 @@ import asyncio from textwrap import dedent from typing import List, Union -from ..models.filesystem import RangeInFile -from ..models.main import Range -from ..core.main import Step -from ..core.sdk import ContinueSDK -from ..libs.util.create_async_task import create_async_task +from ...models.filesystem import RangeInFile +from ...models.main import Range +from ...core.main import Step +from ...core.sdk import ContinueSDK +from ...libs.util.create_async_task import create_async_task import os import re diff --git a/continuedev/src/continuedev/plugins/steps/steps_on_startup.py b/continuedev/src/continuedev/plugins/steps/steps_on_startup.py new file mode 100644 index 00000000..19d62d30 --- /dev/null +++ b/continuedev/src/continuedev/plugins/steps/steps_on_startup.py @@ -0,0 +1,17 @@ +from ...core.main import Step +from ...core.sdk import Models, ContinueSDK +from ...libs.util.step_name_to_steps import get_step_from_name + + +class StepsOnStartupStep(Step): + hide: bool = True + + async def describe(self, models: Models): + return "Running steps on startup" + + async def run(self, sdk: ContinueSDK): + steps_on_startup = sdk.config.steps_on_startup + + for step_name, step_params in steps_on_startup.items(): + step = get_step_from_name(step_name, step_params) + await sdk.run_step(step) diff 
--git a/continuedev/src/continuedev/steps/welcome.py b/continuedev/src/continuedev/plugins/steps/welcome.py index 2dece649..df3e9a8a 100644 --- a/continuedev/src/continuedev/steps/welcome.py +++ b/continuedev/src/continuedev/plugins/steps/welcome.py @@ -1,9 +1,10 @@ from textwrap import dedent -from ..models.filesystem_edit import AddFile -from ..core.main import Step -from ..core.sdk import ContinueSDK, Models import os +from ...models.filesystem_edit import AddFile +from ...core.main import Step +from ...core.sdk import ContinueSDK, Models + class WelcomeStep(Step): name: str = "Welcome to Continue!" diff --git a/continuedev/src/continuedev/server/session_manager.py b/continuedev/src/continuedev/server/session_manager.py index 96daf92c..3136f1bf 100644 --- a/continuedev/src/continuedev/server/session_manager.py +++ b/continuedev/src/continuedev/server/session_manager.py @@ -7,7 +7,7 @@ import json from ..libs.util.paths import getSessionFilePath, getSessionsFolderPath from ..models.filesystem_edit import FileEditWithFullContents from ..libs.constants.main import CONTINUE_SESSIONS_FOLDER -from ..core.policy import DemoPolicy +from ..core.policy import DefaultPolicy from ..core.main import FullState from ..core.autopilot import Autopilot from .ide_protocol import AbstractIdeProtocolServer @@ -65,7 +65,7 @@ class SessionManager: full_state = FullState(**json.load(f)) autopilot = await DemoAutopilot.create( - policy=DemoPolicy(), ide=ide, full_state=full_state) + policy=DefaultPolicy(), ide=ide, full_state=full_state) session_id = session_id or str(uuid4()) ide.session_id = session_id session = Session(session_id=session_id, autopilot=autopilot) diff --git a/continuedev/src/continuedev/steps/steps_on_startup.py b/continuedev/src/continuedev/steps/steps_on_startup.py deleted file mode 100644 index 318c28df..00000000 --- a/continuedev/src/continuedev/steps/steps_on_startup.py +++ /dev/null @@ -1,22 +0,0 @@ -from ..core.main import Step -from ..core.sdk import Models, 
ContinueSDK -from .main import UserInputStep -from ..recipes.CreatePipelineRecipe.main import CreatePipelineRecipe -from ..recipes.DDtoBQRecipe.main import DDtoBQRecipe -from ..recipes.DeployPipelineAirflowRecipe.main import DeployPipelineAirflowRecipe -from ..recipes.DDtoBQRecipe.main import DDtoBQRecipe -from ..recipes.AddTransformRecipe.main import AddTransformRecipe - - -class StepsOnStartupStep(Step): - hide: bool = True - - async def describe(self, models: Models): - return "Running steps on startup" - - async def run(self, sdk: ContinueSDK): - steps_on_startup = sdk.config.steps_on_startup - - for step_type in steps_on_startup: - step = step_type() - await sdk.run_step(step) diff --git a/docs/docs/customization.md b/docs/docs/customization.md new file mode 100644 index 00000000..9e04280d --- /dev/null +++ b/docs/docs/customization.md @@ -0,0 +1,121 @@ +# Customization + +Continue can be deeply customized by editing the `ContinueConfig` object in `~/.continue/config.py` on your machine. This file is created the first time you run Continue. + +## Change the default LLM + +Change the `default_model` field to any of "gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "claude-2", or "ggml". + +### claude-2 and gpt-X + +If you have access, simply set `default_model` to the model you would like to use, then you will be prompted for a personal API key after reloading VS Code. If using an OpenAI model, you can press enter to try with our API key for free. + +### Local models with ggml + +See our [5 minute quickstart](https://github.com/continuedev/ggml-server-example) to run any model locally with ggml. While these models don't yet perform as well, they are free, entirely private, and run offline. + +### Azure OpenAI Service + +If you'd like to use OpenAI models but are concerned about privacy, you can use the Azure OpenAI service, which is GDPR and HIPAA compliant. 
After applying for access [here](https://azure.microsoft.com/en-us/products/ai-services/openai-service), you will typically hear back within only a few days. Once you have access, set `default_model` to "gpt-4", and then set the `azure_openai_info` property in the `ContinueConfig` like so: + +```python +config = ContinueConfig( + ... + azure_openai_info=AzureInfo( + endpoint="https://my-azure-openai-instance.openai.azure.com/", + engine="my-azure-openai-deployment", + api_version="2023-03-15-preview" + ) +) +``` + +The easiest way to find this information is from the chat playground in the Azure OpenAI portal. Under the "Chat Session" section, click "View Code" to see each of these parameters. Finally, find one of your Azure OpenAI keys and enter it in the VS Code settings under `continue.OPENAI_API_KEY`. + +## Customize System Message + +You can write your own system message, a set of instructions that will always be top-of-mind for the LLM, by setting the `system_message` property to any string. For example, you might request "Please make all responses as concise as possible and never repeat something you have already explained." + +System messages can also reference files. For example, if there is a markdown file (e.g. at `/Users/nate/Documents/docs/reference.md`) you'd like the LLM to know about, you can reference it with [Mustache](http://mustache.github.io/mustache.5.html) templating like this: "Please reference this documentation: {{ Users/nate/Documents/docs/reference.md }}". As of now, you must use an absolute path. + +## Custom Commands + +You can add custom slash commands by adding a `CustomCommand` object to the `custom_commands` property. 
Each `CustomCommand` has + +- `name`: the name of the command, which will be invoked with `/name` +- `description`: a short description of the command, which will appear in the dropdown +- `prompt`: a set of instructions to the LLM, which will be shown in the prompt + +Custom commands are great when you are frequently reusing a prompt. For example, if you've crafted a great prompt and frequently ask the LLM to check for mistakes in your code, you could add a command like this: + +```python +config = ContinueConfig( + ... + custom_commands=[ + CustomCommand( + name="check", + description="Check for mistakes in my code", + prompt=dedent("""\ + Please read the highlighted code and check for any mistakes. You should look for the following, and be extremely vigilant: + - Syntax errors + - Logic errors + - Security vulnerabilities + - Performance issues + - Anything else that looks wrong + + Once you find an error, please explain it as clearly as possible, but without using extra words. For example, instead of saying "I think there is a syntax error on line 5", you should say "Syntax error on line 5". Give your answer as one bullet point per mistake found.""") + ) + ] +) +``` + +## Temperature + +Set `temperature` to any value between 0 and 1. Higher values will make the LLM more creative, while lower values will make it more predictable. The default is 0.5. + +## Custom Context Providers + +When you type '@' in the Continue text box, it will display a dropdown of items that can be selected to include in your message as context. For example, you might want to reference a GitHub Issue, file, or Slack thread. All of these options are provided by a `ContextProvider` class, and we make it easy to write your own. 
As an example, here is the `GitHubIssuesContextProvider`, which lets you search all open GitHub Issues in a repo: + +```python +class GitHubIssuesContextProvider(ContextProvider): + """ + The GitHubIssuesContextProvider is a ContextProvider that allows you to search GitHub issues in a repo. + """ + + title = "issues" + repo_name: str + auth_token: str + + async def provide_context_items(self) -> List[ContextItem]: + auth = Auth.Token(self.auth_token) + gh = Github(auth=auth) + + repo = gh.get_repo(self.repo_name) + issues = repo.get_issues().get_page(0) + + return [ContextItem( + content=issue.body, + description=ContextItemDescription( + name=f"Issue #{issue.number}", + description=issue.title, + id=ContextItemId( + provider_title=self.title, + item_id=issue.id + ) + ) + ) for issue in issues] +``` + +It can then be set in the `ContinueConfig` like so: + +```python +config = ContinueConfig( + ... + context_providers=[ + GitHubIssuesContextProvider( + repo_name="my-github-username-or-org/my-github-repo", + auth_token="my-github-auth-token" + ) + ] +) +``` diff --git a/docs/docs/getting-started.md b/docs/docs/getting-started.md index 753c1479..fc19552e 100644 --- a/docs/docs/getting-started.md +++ b/docs/docs/getting-started.md @@ -2,6 +2,10 @@ ## Recommended: Install in VS Code
+:::note
+Continue requires that you have Python 3.8 or greater. If you do not, please [install](https://python.org) it.
+:::
+
1. Click `Install` on the **[Continue extension in the Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=Continue.continue)**
2. This will open the Continue extension page in VS Code, where you will need to click `Install` again
diff --git a/docs/sidebars.js b/docs/sidebars.js index 9baf1b94..83b34ee8 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -13,7 +13,15 @@ /** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ const sidebars = { - docsSidebar: ["intro", "getting-started", "how-to-use-continue", "how-continue-works", "telemetry", "collecting-data"], + docsSidebar: [ + "intro", + "getting-started", + "how-to-use-continue", + "how-continue-works", + "telemetry", + "collecting-data", + "customization", + ], }; module.exports = sidebars; diff --git a/extension/DEV_README.md b/extension/DEV_README.md index 87ed9334..72ea5c6a 100644 --- a/extension/DEV_README.md +++ b/extension/DEV_README.md @@ -6,8 +6,17 @@ This is the Continue VS Code Extension. Its primary jobs are 2. Open the Continue React app in a side panel. The React app's source code lives in the `react-app` directory. The panel is opened by the `continue.openContinueGUI` command, as defined in `src/commands.ts`. 3. Run a Continue server in the background, which connects to both the IDE protocol and the React app. The server is launched in `src/activation/environmentSetup.ts` by calling Python code that lives in `server/` (unless extension settings define a server URL other than localhost:65432, in which case the extension will just connect to that). -4. Open Continue +## Setting up for development -# Notes +1. Clone this repo +2. `cd extension` +3. `npm run full-package` + + > If NPM is not installed, you can use `brew install node` on Mac, or see the [installation page](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm) for other platforms, or more detailed instructions. + +4. Open a VS Code window with `/extension` as the workspace root (_this is important, development mode will not work otherwise_) +5. Open any `.ts` file in the workspace, then press F5 and select "VS Code Extension Development" to begin debugging. 
+ +## Notes - We require vscode engine `^1.67.0` and use `@types/vscode` version `1.67.0` because this is the earliest version that doesn't break any of the APIs we are using. If you go back to `1.66.0`, then it will break `vscode.window.tabGroups`. diff --git a/extension/README.md b/extension/README.md index 2d449b92..2944325b 100644 --- a/extension/README.md +++ b/extension/README.md @@ -25,6 +25,16 @@ Let Continue build the scaffolding of Python scripts, React components, and more - β/edit make an IAM policy that creates a user with read-only access to S3β - β/edit use this schema to write me a SQL query that gets recently churned usersβ +## Install + +Continue requires that you have Python 3.8 or greater. If you do not, please [install](https://python.org) it + +If your Continue server is not setting up, please check the console logs: +1. `cmd+shift+p` (MacOS) / `ctrl+shift+p` (Windows) +2. Search for and then select "Developer: Toggle Developer Tools" +3. Select `Console` +4. Read the console logs + ## OpenAI API Key New users can try out Continue with GPT-4 using a proxy server that securely makes calls to OpenAI using our API key. Continue should just work the first time you install the extension in VS Code. 
diff --git a/extension/package-lock.json b/extension/package-lock.json index 6818857b..2558c9c2 100644 --- a/extension/package-lock.json +++ b/extension/package-lock.json @@ -1,12 +1,12 @@ { "name": "continue", - "version": "0.0.181", + "version": "0.0.191", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "continue", - "version": "0.0.181", + "version": "0.0.191", "license": "Apache-2.0", "dependencies": { "@electron/rebuild": "^3.2.10", diff --git a/extension/package.json b/extension/package.json index b37bb1b6..6618ff45 100644 --- a/extension/package.json +++ b/extension/package.json @@ -14,7 +14,7 @@ "displayName": "Continue", "pricing": "Free", "description": "The open-source coding autopilot", - "version": "0.0.181", + "version": "0.0.191", "publisher": "Continue", "engines": { "vscode": "^1.67.0" @@ -35,8 +35,7 @@ "chat" ], "activationEvents": [ - "onStartupFinished", - "onView:continueGUIView" + "*" ], "main": "./out/extension.js", "browser": "./out/extension.js", @@ -227,7 +226,7 @@ "test": "node ./out/test/runTest.js", "jest": "jest --config ./jest.config.js", "package": "cp ./config/prod_config.json ./config/config.json && mkdir -p ./build && vsce package --out ./build && cp ./config/dev_config.json ./config/config.json", - "full-package": "cd ../continuedev && poetry build && cp ./dist/continuedev-0.1.2-py3-none-any.whl ../extension/server/continuedev-0.1.2-py3-none-any.whl && cd ../extension && npm install && npm run typegen && npm run clientgen && cd react-app && npm install && npm run build && cd .. && npm run package", + "full-package": "cd ../continuedev && poetry install && poetry build && cp ./dist/continuedev-0.1.2-py3-none-any.whl ../extension/server/continuedev-0.1.2-py3-none-any.whl && cd ../extension && npm install && npm run typegen && npm run clientgen && cd react-app && npm install && npm run build && cd .. 
&& npm run package", "install-extension": "code --install-extension ./build/continue-0.0.8.vsix", "uninstall": "code --uninstall-extension .continue", "reinstall": "rm -rf ./build && npm run package && npm run uninstall && npm run install-extension" diff --git a/extension/react-app/src/components/TextDialog.tsx b/extension/react-app/src/components/TextDialog.tsx index 7c6ba052..7d8e9920 100644 --- a/extension/react-app/src/components/TextDialog.tsx +++ b/extension/react-app/src/components/TextDialog.tsx @@ -3,6 +3,7 @@ import React, { useEffect, useState } from "react"; import styled from "styled-components"; import { Button, secondaryDark, vscBackground, vscForeground } from "."; import { isMetaEquivalentKeyPressed } from "../util"; +import { ReactMarkdown } from "react-markdown/lib/react-markdown"; const ScreenCover = styled.div` position: fixed; @@ -56,6 +57,7 @@ const TextDialog = (props: { onEnter: (text: string) => void; onClose: () => void; message?: string; + entryOn?: boolean; }) => { const [text, setText] = useState(""); const textAreaRef = React.createRef<HTMLTextAreaElement>(); @@ -79,33 +81,37 @@ const TextDialog = (props: { }} > <Dialog> - <P>{props.message || ""}</P> - <TextArea - rows={10} - ref={textAreaRef} - onKeyDown={(e) => { - if ( - e.key === "Enter" && - isMetaEquivalentKeyPressed(e) && - textAreaRef.current - ) { - props.onEnter(textAreaRef.current.value); - setText(""); - } else if (e.key === "Escape") { - props.onClose(); - } - }} - /> - <Button - onClick={() => { - if (textAreaRef.current) { - props.onEnter(textAreaRef.current.value); - setText(""); - } - }} - > - Enter - </Button> + <ReactMarkdown>{props.message || ""}</ReactMarkdown> + {props.entryOn && ( + <> + <TextArea + rows={10} + ref={textAreaRef} + onKeyDown={(e) => { + if ( + e.key === "Enter" && + isMetaEquivalentKeyPressed(e) && + textAreaRef.current + ) { + props.onEnter(textAreaRef.current.value); + setText(""); + } else if (e.key === "Escape") { + props.onClose(); + } + }} 
+ /> + <Button + onClick={() => { + if (textAreaRef.current) { + props.onEnter(textAreaRef.current.value); + setText(""); + } + }} + > + Enter + </Button> + </> + )} </Dialog> </DialogContainer> </ScreenCover> diff --git a/extension/react-app/src/pages/gui.tsx b/extension/react-app/src/pages/gui.tsx index a1ba1c33..9a00802b 100644 --- a/extension/react-app/src/pages/gui.tsx +++ b/extension/react-app/src/pages/gui.tsx @@ -79,7 +79,6 @@ function GUI(props: GUIProps) { } }, [dataSwitchOn]); - const [usingFastModel, setUsingFastModel] = useState(false); const [waitingForSteps, setWaitingForSteps] = useState(false); const [userInputQueue, setUserInputQueue] = useState<string[]>([]); const [addingHighlightedCode, setAddingHighlightedCode] = useState(false); @@ -119,6 +118,7 @@ function GUI(props: GUIProps) { const [showFeedbackDialog, setShowFeedbackDialog] = useState(false); const [feedbackDialogMessage, setFeedbackDialogMessage] = useState(""); + const [feedbackEntryOn, setFeedbackEntryOn] = useState(true); const dispatch = useDispatch(); const bottomMessage = useSelector( @@ -147,12 +147,9 @@ function GUI(props: GUIProps) { }, [topGuiDivRef.current, scrollTimeout]); useEffect(() => { + // Cmd + Backspace to delete current step const listener = (e: any) => { - // Cmd + i to toggle fast model - if (e.key === "i" && isMetaEquivalentKeyPressed(e) && e.shiftKey) { - setUsingFastModel((prev) => !prev); - // Cmd + backspace to stop currently running step - } else if ( + if ( e.key === "Backspace" && isMetaEquivalentKeyPressed(e) && typeof history?.current_index !== "undefined" && @@ -171,7 +168,6 @@ function GUI(props: GUIProps) { useEffect(() => { client?.onStateUpdate((state: FullState) => { // Scroll only if user is at very bottom of the window. 
- setUsingFastModel(state.default_model === "gpt-3.5-turbo"); const shouldScrollToBottom = topGuiDivRef.current && topGuiDivRef.current?.offsetHeight - window.scrollY < 100; @@ -282,6 +278,7 @@ function GUI(props: GUIProps) { setShowFeedbackDialog(false); }} message={feedbackDialogMessage} + entryOn={feedbackEntryOn} /> <TopGUIDiv @@ -444,24 +441,26 @@ function GUI(props: GUIProps) { </div> <HeaderButtonWithText onClick={() => { - // client?.changeDefaultModel( - // usingFastModel ? "gpt-4" : "gpt-3.5-turbo" - // ); - if (!usingFastModel) { - // Show the dialog - setFeedbackDialogMessage( - "We don't yet support local models, but we're working on it! If privacy is a concern of yours, please write a short note to let us know." - ); - setShowFeedbackDialog(true); - } - setUsingFastModel((prev) => !prev); + // Show the dialog + setFeedbackDialogMessage( + `Continue uses GPT-4 by default, but works with any model. If you'd like to keep your code completely private, there are few options: + +Run a local model with ggml: [5 minute quickstart](https://github.com/continuedev/ggml-server-example) + +Use Azure OpenAI service, which is GDPR and HIPAA compliant: [Tutorial](https://continue.dev/docs/customization#azure-openai-service) + +If you already have an LLM deployed on your own infrastructure, or would like to do so, please contact us at hi@continue.dev. + ` + ); + setFeedbackEntryOn(false); + setShowFeedbackDialog(true); }} - text={usingFastModel ? "local" : "gpt-4"} + text={"Use Private Model"} > <div style={{ fontSize: "18px", marginLeft: "2px", marginRight: "2px" }} > - {usingFastModel ? "π" : "π§ "} + π </div> </HeaderButtonWithText> <HeaderButtonWithText @@ -486,6 +485,7 @@ function GUI(props: GUIProps) { setFeedbackDialogMessage( "Having trouble using Continue? Want a new feature? Let us know! This box is anonymous, but we will promptly address your feedback." 
); + setFeedbackEntryOn(true); setShowFeedbackDialog(true); }} text="Feedback" diff --git a/extension/src/activation/activate.ts b/extension/src/activation/activate.ts index a7f6c55b..a1d88a31 100644 --- a/extension/src/activation/activate.ts +++ b/extension/src/activation/activate.ts @@ -1,7 +1,4 @@ import * as vscode from "vscode"; -import { registerAllCommands } from "../commands"; -import { registerAllCodeLensProviders } from "../lang-server/codeLens"; -import { sendTelemetryEvent, TelemetryEvent } from "../telemetry"; import IdeProtocolClient from "../continueIdeClient"; import { getContinueServerUrl } from "../bridge"; import { ContinueGUIWebviewViewProvider } from "../debugPanel"; @@ -10,8 +7,6 @@ import { startContinuePythonServer, } from "./environmentSetup"; import fetch from "node-fetch"; -import registerQuickFixProvider from "../lang-server/codeActions"; -// import { CapturedTerminal } from "../terminal/terminalEmulator"; const PACKAGE_JSON_RAW_GITHUB_URL = "https://raw.githubusercontent.com/continuedev/continue/HEAD/extension/package.json"; @@ -37,60 +32,60 @@ export async function activateExtension(context: vscode.ExtensionContext) { .catch((e) => console.log("Error checking for extension updates: ", e)); // Start the server and display loader if taking > 2 seconds - await new Promise((resolve) => { - let serverStarted = false; + const sessionIdPromise = (async () => { + await new Promise((resolve) => { + let serverStarted = false; - // Start the server and set serverStarted to true when done - startContinuePythonServer().then(() => { - serverStarted = true; - resolve(null); - }); + // Start the server and set serverStarted to true when done + startContinuePythonServer().then(() => { + serverStarted = true; + resolve(null); + }); - // Wait for 2 seconds - setTimeout(() => { - // If the server hasn't started after 2 seconds, show the notification - if (!serverStarted) { - vscode.window.withProgress( - { - location: vscode.ProgressLocation.Notification, 
- title: - "Starting Continue Server... (it may take a minute to download Python packages)", - cancellable: false, - }, - async (progress, token) => { - // Wait for the server to start - while (!serverStarted) { - await new Promise((innerResolve) => - setTimeout(innerResolve, 1000) - ); + // Wait for 2 seconds + setTimeout(() => { + // If the server hasn't started after 2 seconds, show the notification + if (!serverStarted) { + vscode.window.withProgress( + { + location: vscode.ProgressLocation.Notification, + title: + "Starting Continue Server... (it may take a minute to download Python packages)", + cancellable: false, + }, + async (progress, token) => { + // Wait for the server to start + while (!serverStarted) { + await new Promise((innerResolve) => + setTimeout(innerResolve, 1000) + ); + } + return Promise.resolve(); } - return Promise.resolve(); - } - ); - } - }, 2000); - }); + ); + } + }, 2000); + }); - // Initialize IDE Protocol Client - const serverUrl = getContinueServerUrl(); - ideProtocolClient = new IdeProtocolClient( - `${serverUrl.replace("http", "ws")}/ide/ws`, - context - ); + // Initialize IDE Protocol Client + const serverUrl = getContinueServerUrl(); + ideProtocolClient = new IdeProtocolClient( + `${serverUrl.replace("http", "ws")}/ide/ws`, + context + ); + return await ideProtocolClient.getSessionId(); + })(); // Register Continue GUI as sidebar webview, and beging a new session - { - const sessionIdPromise = await ideProtocolClient.getSessionId(); - const provider = new ContinueGUIWebviewViewProvider(sessionIdPromise); + const provider = new ContinueGUIWebviewViewProvider(sessionIdPromise); - context.subscriptions.push( - vscode.window.registerWebviewViewProvider( - "continue.continueGUIView", - provider, - { - webviewOptions: { retainContextWhenHidden: true }, - } - ) - ); - } + context.subscriptions.push( + vscode.window.registerWebviewViewProvider( + "continue.continueGUIView", + provider, + { + webviewOptions: { retainContextWhenHidden: 
true }, + } + ) + ); } diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts index c341db39..5a9345a6 100644 --- a/extension/src/activation/environmentSetup.ts +++ b/extension/src/activation/environmentSetup.ts @@ -9,7 +9,6 @@ import fetch from "node-fetch"; import * as vscode from "vscode"; import * as os from "os"; import fkill from "fkill"; -import { sendTelemetryEvent, TelemetryEvent } from "../telemetry"; const WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR = "A Python virtual enviroment cannot be activated because running scripts is disabled for this user. In order to use Continue, please enable signed scripts to run with this command in PowerShell: `Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser`, reload VS Code, and then try again."; @@ -57,9 +56,6 @@ async function retryThenFail( vscode.window.showErrorMessage(msg); } - sendTelemetryEvent(TelemetryEvent.ExtensionSetupError, { - error: e.message, - }); throw e; } } @@ -83,12 +79,6 @@ async function runCommand(cmd: string): Promise<[string, string | undefined]> { stdout = ""; } - if (stderr) { - sendTelemetryEvent(TelemetryEvent.ExtensionSetupError, { - error: stderr, - }); - } - return [stdout, stderr]; } @@ -139,7 +129,7 @@ export async function getPythonPipCommands() { if (!versionExists) { vscode.window.showErrorMessage( - "Continue requires Python3 version 3.8 or greater. Please update your Python3 installation, reload VS Code, and try again." + "Continue requires Python version 3.8 or greater. Please update your Python installation, reload VS Code, and try again." 
); throw new Error("Python3.8 or greater is not installed."); } @@ -480,16 +470,11 @@ export async function startContinuePythonServer() { console.log("Successfully started Continue python server"); resolve(null); } else if (data.includes("ERROR") || data.includes("Traceback")) { - sendTelemetryEvent(TelemetryEvent.ExtensionSetupError, { - error: data, - }); + console.log("Error starting Continue python server: ", data); } }); child.on("error", (error: any) => { console.log(`error: ${error.message}`); - sendTelemetryEvent(TelemetryEvent.ExtensionSetupError, { - error: error.message, - }); }); // Write the current version of vscode to a file called server_version.txt diff --git a/extension/src/commands.ts b/extension/src/commands.ts index 2b7f4c0c..1da2f04e 100644 --- a/extension/src/commands.ts +++ b/extension/src/commands.ts @@ -40,6 +40,9 @@ const commandsMap: { [command: string]: (...args: any) => any } = { edit ? "/edit " : "" }${code}\n\nHow do I fix this problem in the above code?: ${message}` ); + if (!edit) { + vscode.commands.executeCommand("continue.continueGUIView.focus"); + } }, "continue.focusContinueInput": async () => { if (focusedOnContinueInput) { diff --git a/extension/src/continueIdeClient.ts b/extension/src/continueIdeClient.ts index a1370a01..802afc1d 100644 --- a/extension/src/continueIdeClient.ts +++ b/extension/src/continueIdeClient.ts @@ -16,7 +16,6 @@ import fs = require("fs"); import { WebsocketMessenger } from "./util/messenger"; import { diffManager } from "./diffs"; import path = require("path"); -import { sendTelemetryEvent, TelemetryEvent } from "./telemetry"; import { registerAllCodeLensProviders } from "./lang-server/codeLens"; import { registerAllCommands } from "./commands"; import registerQuickFixProvider from "./lang-server/codeActions"; @@ -81,7 +80,6 @@ class IdeProtocolClient { this._newWebsocketMessenger(); // Register commands and providers - sendTelemetryEvent(TelemetryEvent.ExtensionActivated); 
registerAllCodeLensProviders(context); registerAllCommands(context); registerQuickFixProvider(); @@ -167,6 +165,22 @@ class IdeProtocolClient { documentContentProvider ) ); + + // Listen for changes to settings.json + vscode.workspace.onDidChangeConfiguration((event) => { + if (event.affectsConfiguration("continue")) { + vscode.window + .showInformationMessage( + "Please reload VS Code for changes to Continue settings to take effect.", + "Reload" + ) + .then((selection) => { + if (selection === "Reload") { + vscode.commands.executeCommand("workbench.action.reloadWindow"); + } + }); + } + }); } async handleMessage( diff --git a/extension/src/debugPanel.ts b/extension/src/debugPanel.ts index dd24a8d8..f97cf846 100644 --- a/extension/src/debugPanel.ts +++ b/extension/src/debugPanel.ts @@ -181,19 +181,6 @@ export function setupDebugPanel( .getConfiguration("continue") .get<boolean>("dataSwitch"), }); - - // // Listen for changes to server URL in settings - // vscode.workspace.onDidChangeConfiguration((event) => { - // if (event.affectsConfiguration("continue.serverUrl")) { - // debugPanelWebview?.postMessage({ - // type: "onLoad", - // vscMachineId: vscode.env.machineId, - // apiUrl: getContinueServerUrl(), - // sessionId, - // }); - // } - // }); - break; } case "toggleDataSwitch": { diff --git a/extension/src/lang-server/codeActions.ts b/extension/src/lang-server/codeActions.ts index f0d61ace..892c69be 100644 --- a/extension/src/lang-server/codeActions.ts +++ b/extension/src/lang-server/codeActions.ts @@ -39,7 +39,10 @@ class ContinueQuickFixProvider implements vscode.CodeActionProvider { }; return quickFix; }; - return [createQuickFix(true), createQuickFix(false)]; + return [ + // createQuickFix(true), + createQuickFix(false), + ]; } } diff --git a/extension/src/suggestions.ts b/extension/src/suggestions.ts index c2373223..5c2b8860 100644 --- a/extension/src/suggestions.ts +++ b/extension/src/suggestions.ts @@ -1,5 +1,4 @@ import * as vscode from "vscode"; -import { 
sendTelemetryEvent, TelemetryEvent } from "./telemetry"; import { openEditorAndRevealRange } from "./util/vscode"; import { translate } from "./util/vscode"; import { registerAllCodeLensProviders } from "./lang-server/codeLens"; @@ -244,7 +243,6 @@ function selectSuggestion( } export function acceptSuggestionCommand(key: SuggestionRanges | null = null) { - sendTelemetryEvent(TelemetryEvent.SuggestionAccepted); selectSuggestion("selected", key); } @@ -271,7 +269,6 @@ export function rejectAllSuggestionsCommand() { export async function rejectSuggestionCommand( key: SuggestionRanges | null = null ) { - sendTelemetryEvent(TelemetryEvent.SuggestionRejected); selectSuggestion("old", key); } diff --git a/extension/src/telemetry.ts b/extension/src/telemetry.ts deleted file mode 100644 index db5cb8ca..00000000 --- a/extension/src/telemetry.ts +++ /dev/null @@ -1,53 +0,0 @@ -import * as Segment from "@segment/analytics-node"; -import * as vscode from "vscode"; - -// Setup Segment -const SEGMENT_WRITE_KEY = "57yy2uYXH2bwMuy7djm9PorfFlYqbJL1"; -const analytics = new Segment.Analytics({ writeKey: SEGMENT_WRITE_KEY }); -analytics.identify({ - userId: vscode.env.machineId, - // traits: { - // name: "Michael Bolton", - // email: "mbolton@example.com", - // createdAt: new Date("2014-06-14T02:00:19.467Z"), - // }, -}); - -// Enum of telemetry events -export enum TelemetryEvent { - // Extension has been activated - ExtensionActivated = "ExtensionActivated", - // Suggestion has been accepted - SuggestionAccepted = "SuggestionAccepted", - // Suggestion has been rejected - SuggestionRejected = "SuggestionRejected", - // Queried universal prompt - UniversalPromptQuery = "UniversalPromptQuery", - // `Explain Code` button clicked - ExplainCode = "ExplainCode", - // `Generate Ideas` button clicked - GenerateIdeas = "GenerateIdeas", - // `Suggest Fix` button clicked - SuggestFix = "SuggestFix", - // `Create Test` button clicked - CreateTest = "CreateTest", - // `AutoDebug This Test` button 
clicked - AutoDebugThisTest = "AutoDebugThisTest", - // Command run to generate docstring - GenerateDocstring = "GenerateDocstring", - // Error setting up the extension - ExtensionSetupError = "ExtensionSetupError", -} - -export function sendTelemetryEvent( - event: TelemetryEvent, - properties?: Record<string, any> -) { - if (!vscode.env.isTelemetryEnabled) return; - - analytics.track({ - event, - userId: vscode.env.machineId, - properties, - }); -} |