From 661ce01334b82a74426f2c30df59318c3e847f4f Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Thu, 15 Jun 2023 23:32:55 -0700
Subject: patch, on_traceback step

---
 extension/package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/extension/package.json b/extension/package.json
index 4b199420..8cf50d2a 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
   "displayName": "Continue",
   "pricing": "Free",
   "description": "Refine code 10x faster",
-  "version": "0.0.40",
+  "version": "0.0.44",
   "publisher": "Continue",
   "engines": {
     "vscode": "^1.74.0"
--
cgit v1.2.3-70-g09d2

From ed47ad8f6fe463af178bc3ccfcb39c1198c9f6f0 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Fri, 16 Jun 2023 11:55:15 -0700
Subject: patch and get file content in DefaultOnTracebackStep

---
 continuedev/src/continuedev/core/abstract_sdk.py  | 2 +-
 continuedev/src/continuedev/core/sdk.py           | 2 +-
 continuedev/src/continuedev/steps/on_traceback.py | 9 +++++++++
 extension/package-lock.json                       | 4 ++--
 extension/package.json                            | 2 +-
 .../scripts/continuedev-0.1.1-py3-none-any.whl    | Bin 83961 -> 84291 bytes
 6 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/continuedev/src/continuedev/core/abstract_sdk.py b/continuedev/src/continuedev/core/abstract_sdk.py
index 0658f1b8..017e75ef 100644
--- a/continuedev/src/continuedev/core/abstract_sdk.py
+++ b/continuedev/src/continuedev/core/abstract_sdk.py
@@ -85,7 +85,7 @@ class AbstractContinueSDK(ABC):
         pass

     @abstractmethod
-    def add_chat_context(self, content: str, role: ChatMessageRole = "assistent"):
+    def add_chat_context(self, content: str, role: ChatMessageRole = "assistant"):
         pass

     @abstractmethod
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index 8aea6b7f..7639d010 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -165,7 +165,7 @@ class ContinueSDK(AbstractContinueSDK):
     def raise_exception(self, message: str, title: str, with_step: Union[Step, None] = None):
         raise ContinueCustomException(message, title, with_step)

-    def add_chat_context(self, content: str, summary: Union[str, None] = None, role: ChatMessageRole = "assistent"):
+    def add_chat_context(self, content: str, summary: Union[str, None] = None, role: ChatMessageRole = "assistant"):
         self.history.timeline[self.history.current_index].step.chat_context.append(
             ChatMessage(content=content, role=role, summary=summary))

diff --git a/continuedev/src/continuedev/steps/on_traceback.py b/continuedev/src/continuedev/steps/on_traceback.py
index a0c4d07b..053b4ef4 100644
--- a/continuedev/src/continuedev/steps/on_traceback.py
+++ b/continuedev/src/continuedev/steps/on_traceback.py
@@ -1,3 +1,4 @@
+import os
 from ..core.main import Step
 from ..core.sdk import ContinueSDK
 from .chat import SimpleChatStep
@@ -9,6 +10,14 @@ class DefaultOnTracebackStep(Step):
     hide: bool = True

     async def run(self, sdk: ContinueSDK):
+        # Add context for any files in the traceback that are in the workspace
+        for line in self.output.split("\n"):
+            segs = line.split(" ")
+            for seg in segs:
+                if seg.startswith(os.path.sep) and os.path.exists(seg) and os.path.commonprefix([seg, sdk.ide.workspace_directory]) == sdk.ide.workspace_directory:
+                    file_contents = await sdk.ide.readFile(seg)
+                    await sdk.add_chat_context(f"The contents of {seg}:\n```\n{file_contents}\n```", "", "user")
+
         await sdk.run_step(SimpleChatStep(
             name="Help With Traceback",
user_input=f"""I got the following error, can you please help explain how to fix it?\n\n{self.output}""")) diff --git a/extension/package-lock.json b/extension/package-lock.json index 7e8da126..eac8edf2 100644 --- a/extension/package-lock.json +++ b/extension/package-lock.json @@ -1,12 +1,12 @@ { "name": "continue", - "version": "0.0.44", + "version": "0.0.45", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "continue", - "version": "0.0.44", + "version": "0.0.45", "license": "Apache-2.0", "dependencies": { "@electron/rebuild": "^3.2.10", diff --git a/extension/package.json b/extension/package.json index 8cf50d2a..8f7da94d 100644 --- a/extension/package.json +++ b/extension/package.json @@ -14,7 +14,7 @@ "displayName": "Continue", "pricing": "Free", "description": "Refine code 10x faster", - "version": "0.0.44", + "version": "0.0.45", "publisher": "Continue", "engines": { "vscode": "^1.74.0" diff --git a/extension/scripts/continuedev-0.1.1-py3-none-any.whl b/extension/scripts/continuedev-0.1.1-py3-none-any.whl index 614190c7..b0b84230 100644 Binary files a/extension/scripts/continuedev-0.1.1-py3-none-any.whl and b/extension/scripts/continuedev-0.1.1-py3-none-any.whl differ -- cgit v1.2.3-70-g09d2 From 2c1e77563097e8c245f28c461c4aca68a8a35cd8 Mon Sep 17 00:00:00 2001 From: Nate Sesti Date: Fri, 16 Jun 2023 14:34:30 -0700 Subject: Capturing command line prompt --- extension/package-lock.json | 4 +- extension/package.json | 2 +- extension/src/terminal/terminalEmulator.ts | 97 ++++++++++++++++++++++++++---- extension/src/util/lcs.ts | 30 +++++++++ 4 files changed, 117 insertions(+), 16 deletions(-) create mode 100644 extension/src/util/lcs.ts (limited to 'extension/package.json') diff --git a/extension/package-lock.json b/extension/package-lock.json index eac8edf2..86c816e0 100644 --- a/extension/package-lock.json +++ b/extension/package-lock.json @@ -1,12 +1,12 @@ { "name": "continue", - "version": "0.0.45", + "version": "0.0.47", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "continue", - "version": "0.0.45", + "version": "0.0.47", "license": "Apache-2.0", "dependencies": { "@electron/rebuild": "^3.2.10", diff --git a/extension/package.json b/extension/package.json index 8f7da94d..3a3927f3 100644 --- a/extension/package.json +++ b/extension/package.json @@ -14,7 +14,7 @@ "displayName": "Continue", "pricing": "Free", "description": "Refine code 10x faster", - "version": "0.0.45", + "version": "0.0.47", "publisher": "Continue", "engines": { "vscode": "^1.74.0" diff --git a/extension/src/terminal/terminalEmulator.ts b/extension/src/terminal/terminalEmulator.ts index ff2060f8..8e49737e 100644 --- a/extension/src/terminal/terminalEmulator.ts +++ b/extension/src/terminal/terminalEmulator.ts @@ -3,6 +3,7 @@ import * as vscode from "vscode"; import os = require("os"); import stripAnsi from "strip-ansi"; +import { longestCommonSubsequence } from "../util/lcs"; function loadNativeModule(id: string): T | null { try { @@ -71,13 +72,20 @@ export class CapturedTerminal { private dataEndsInPrompt(strippedData: string): boolean { const lines = strippedData.split("\n"); - const last_line = lines[lines.length - 1]; + const lastLine = lines[lines.length - 1]; + return ( - (lines.length > 0 && - (last_line.includes("bash-") || last_line.includes(") $ ")) && - last_line.includes("$")) || - (last_line.includes("]> ") && last_line.includes(") [")) || - (last_line.includes(" (") && last_line.includes(")>")) + lines.length > 0 && + (((lastLine.includes("bash-") || 
lastLine.includes(") $ ")) && + lastLine.includes("$")) || + (lastLine.includes("]> ") && lastLine.includes(") [")) || + (lastLine.includes(" (") && lastLine.includes(")>")) || + (typeof this.commandPromptString !== "undefined" && + (lastLine.includes(this.commandPromptString) || + this.commandPromptString.length - + longestCommonSubsequence(lastLine, this.commandPromptString) + .length < + 3))) ); } @@ -96,12 +104,6 @@ export class CapturedTerminal { } async runCommand(command: string): Promise { - if (!this.hasRunCommand) { - this.hasRunCommand = true; - // Let the first bash- prompt appear and let python env be opened - // await this.waitForCommandToFinish(); - } - if (this.commandQueue.length === 0) { return new Promise(async (resolve, reject) => { this.commandQueue.push([command, resolve]); @@ -109,8 +111,12 @@ export class CapturedTerminal { while (this.commandQueue.length > 0) { const [command, resolve] = this.commandQueue.shift()!; + // Refresh the command prompt string every time in case it changes + await this.refreshCommandPromptString(); + this.terminal.sendText(command); - resolve(await this.waitForCommandToFinish()); + const output = await this.waitForCommandToFinish(); + resolve(output); } }); } else { @@ -138,6 +144,24 @@ export class CapturedTerminal { } } + private runningClearToGetPrompt: boolean = false; + private seenClear: boolean = false; + private commandPromptString: string | undefined = undefined; + private resolveMeWhenCommandPromptStringFound: + | ((_: unknown) => void) + | undefined = undefined; + + private async refreshCommandPromptString(): Promise { + // Sends a message that will be received by the terminal to get the command prompt string, see the onData method below in constructor. + this.runningClearToGetPrompt = true; + this.terminal.sendText("echo"); + const promise = new Promise((resolve, reject) => { + this.resolveMeWhenCommandPromptStringFound = resolve; + }); + await promise; + return this.commandPromptString; + } + constructor( options: { name: string } & Partial, onCommandOutput?: (output: string) => void @@ -165,7 +189,54 @@ export class CapturedTerminal { this.writeEmitter = new vscode.EventEmitter(); this.ptyProcess.onData((data: any) => { + if (this.runningClearToGetPrompt) { + if ( + stripAnsi(data) + .split("\n") + .flatMap((line) => line.split("\r")) + .find((line) => line.trim() === "echo") !== undefined + ) { + this.seenClear = true; + return; + } else if (this.seenClear) { + const strippedLines = stripAnsi(data) + .split("\r") + .filter( + (line) => + line.trim().length > 0 && + line.trim() !== "%" && + line.trim() !== "⏎" + ); + const lastLine = strippedLines[strippedLines.length - 1] || ""; + const lines = lastLine + .split("\n") + .filter( + (line) => + line.trim().length > 0 && + line.trim() !== "%" && + line.trim() !== "⏎" + ); + const commandPromptString = (lines[lines.length - 1] || "").trim(); + if ( + commandPromptString.length > 0 && + !commandPromptString.includes("echo") + ) { + this.runningClearToGetPrompt = false; + this.seenClear = false; + this.commandPromptString = commandPromptString; + console.log( + "Found command prompt string: " + this.commandPromptString + ); + if (this.resolveMeWhenCommandPromptStringFound) { + this.resolveMeWhenCommandPromptStringFound(undefined); + } + } + return; + } + } + // Pass data through to terminal + data = data.replace("⏎", ""); this.writeEmitter.fire(data); this.splitByCommandsListener(data); diff --git a/extension/src/util/lcs.ts b/extension/src/util/lcs.ts new file mode 100644 index 
diff --git a/extension/src/util/lcs.ts b/extension/src/util/lcs.ts
new file mode 100644
index 00000000..17ea63f9
--- /dev/null
+++ b/extension/src/util/lcs.ts
@@ -0,0 +1,30 @@
+export function longestCommonSubsequence(a: string, b: string) {
+  const lengths: number[][] = [];
+  for (let i = 0; i <= a.length; i++) {
+    lengths[i] = [];
+    for (let j = 0; j <= b.length; j++) {
+      if (i === 0 || j === 0) {
+        lengths[i][j] = 0;
+      } else if (a[i - 1] === b[j - 1]) {
+        lengths[i][j] = lengths[i - 1][j - 1] + 1;
+      } else {
+        lengths[i][j] = Math.max(lengths[i - 1][j], lengths[i][j - 1]);
+      }
+    }
+  }
+  let result = "";
+  let x = a.length;
+  let y = b.length;
+  while (x !== 0 && y !== 0) {
+    if (lengths[x][y] === lengths[x - 1][y]) {
+      x--;
+    } else if (lengths[x][y] === lengths[x][y - 1]) {
+      y--;
+    } else {
+      result = a[x - 1] + result;
+      x--;
+      y--;
+    }
+  }
+  return result;
+}
--
cgit v1.2.3-70-g09d2
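
Taken together, the new lcs.ts helper and the commandPromptString plumbing above implement a fuzzy prompt check: once a prompt string has been captured (via the "echo" round-trip in refreshCommandPromptString), a terminal line is treated as a prompt whenever all but at most two of the prompt's characters appear, in order, within it. A rough Python rendering of that heuristic, with invented prompt strings and function names; the extension's actual implementation is the TypeScript longestCommonSubsequence used in dataEndsInPrompt:

```python
def longest_common_subsequence(a: str, b: str) -> str:
    # Standard dynamic-programming LCS, same shape as the TypeScript helper above.
    lengths = [[0] * (len(b) + 1) for _ in range(len(a) + 1)]
    for i in range(1, len(a) + 1):
        for j in range(1, len(b) + 1):
            if a[i - 1] == b[j - 1]:
                lengths[i][j] = lengths[i - 1][j - 1] + 1
            else:
                lengths[i][j] = max(lengths[i - 1][j], lengths[i][j - 1])
    # Walk back through the table to recover one longest subsequence.
    result, x, y = "", len(a), len(b)
    while x and y:
        if lengths[x][y] == lengths[x - 1][y]:
            x -= 1
        elif lengths[x][y] == lengths[x][y - 1]:
            y -= 1
        else:
            result = a[x - 1] + result
            x -= 1
            y -= 1
    return result


def looks_like_prompt(last_line: str, prompt: str) -> bool:
    # The `< 3` tolerance mirrors the comparison added in dataEndsInPrompt.
    return len(prompt) - len(longest_common_subsequence(last_line, prompt)) < 3


print(looks_like_prompt("(env) user@host my-repo % ", "(env) user@host my-repo %"))           # True
print(looks_like_prompt("Traceback (most recent call last):", "(env) user@host my-repo %"))  # False
```
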
From 11618456785f238d1b006aa69b827569c7fb33ba Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Fri, 16 Jun 2023 14:43:52 -0700
Subject: changing ) to GUI

---
 docs/docs/intro.md     | 4 ++--
 extension/package.json | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/docs/intro.md b/docs/docs/intro.md
index 015d45af..6795797e 100644
--- a/docs/docs/intro.md
+++ b/docs/docs/intro.md
@@ -11,10 +11,10 @@

 **Continue is the open-source library for accelerating software development with language models**

-You determine when Large Language Models (LLMs) like GPT-4 should act as an autopilot, helping you complete software development tasks. You use recipes created by others to automate more steps in your workflows. If a recipe does not exist or work exactly like you want, you can use the Continue SDK to create custom steps and compose them into personalized recipes. Whether you are using a recipe created by yourself or someone else, you can review, reverse, and rerun steps with the Continue GUI, which helps you guide the work done by LLMs and learn when to use and trust them.
+You determine when Large Language Models (LLMs) like GPT-4 should act as an autopilot, helping you complete software development tasks. You open a file or highlight some code and then use slash commands like `/edit`, `/explain`, and `/comment` and naturual language instructions to tell the language model what to do. If an error or exception occurs when you run Python or JavaScript code, Continue will automatically tell you in plain English what to try to address it. You can also review, reverse, and rerun steps with the Continue GUI, which helps you guide the work done by LLMs and learn when to use and trust them.

 ## Why do developers use Continue?

-Many developers have begun to use [GPT-3.5](https://platform.openai.com/docs/models/gpt-3-5) and [GPT-4](https://openai.com/research/gpt-4) through [ChatGPT](https://openai.com/blog/chatgpt) while coding; however, the experience is painful because of how much manual copying, pasting, and editing is required to provide the necessary context and incorporate the generated solutions into your codebase. Continue eliminates this pain by deeply integrating LLMs into your IDE amd workflows.
+Many developers have begun to use [GPT-3.5](https://platform.openai.com/docs/models/gpt-3-5) and [GPT-4](https://openai.com/research/gpt-4) through [ChatGPT](https://openai.com/blog/chatgpt) while coding; however, the experience is painful because of how much manual copying, pasting, and editing is required to provide the necessary context and incorporate the generated solutions and advice into your codebase. Continue eliminates this pain by enabling LLMs to natively act in your IDE as you complete your workflows.

 Continue accelerates how developers build, ship, and maintain software, while giving them the control to define when LLMs should take actions and the confidence to trust LLMs. In short, it enables developers to do what they have always done: work together to create better and better abstractions that make it easier and easier to automate the repetitive work that people want computers to do.
\ No newline at end of file
diff --git a/extension/package.json b/extension/package.json
index 8cf50d2a..ec348aa5 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -96,7 +96,7 @@
         {
           "type": "webview",
           "id": "continue.continueGUIView",
-          "name": ")",
+          "name": "GUI",
          "visibility": "visible"
        }
      ]
--
cgit v1.2.3-70-g09d2