diff --git a/extension/src/bridge.ts b/extension/src/bridge.ts
index 55c4cc3b..7e6398be 100644
--- a/extension/src/bridge.ts
+++ b/extension/src/bridge.ts
@@ -50,7 +50,7 @@ export function getContinueServerUrl() {
extensionContext &&
extensionContext.extensionMode === vscode.ExtensionMode.Development
) {
- // return "http://localhost:8001";
+ return "http://localhost:8001";
}
return (
    vscode.workspace.getConfiguration("continue").get<string>("serverUrl") ||
diff --git a/extension/src/continueIdeClient.ts b/extension/src/continueIdeClient.ts
index 304c592b..b728833f 100644
--- a/extension/src/continueIdeClient.ts
+++ b/extension/src/continueIdeClient.ts
@@ -131,6 +131,11 @@ class IdeProtocolClient {
openFiles: this.getOpenFiles(),
});
break;
+ case "visibleFiles":
+ messenger.send("visibleFiles", {
+ visibleFiles: this.getVisibleFiles(),
+ });
+ break;
case "readFile":
messenger.send("readFile", {
contents: this.readFile(data.filepath),
@@ -330,6 +335,12 @@ class IdeProtocolClient {
});
}
+ getVisibleFiles(): string[] {
+ return vscode.window.visibleTextEditors.map((editor) => {
+ return editor.document.uri.fsPath;
+ });
+ }
+
saveFile(filepath: string) {
vscode.window.visibleTextEditors.forEach((editor) => {
if (editor.document.uri.fsPath === filepath) {
--
cgit v1.2.3-70-g09d2
From 391764f1371dab06af30a29e10a826a516b69bb3 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Wed, 12 Jul 2023 21:53:06 -0700
Subject: persist state and reconnect automatically
---
continuedev/src/continuedev/core/autopilot.py | 17 +++-
continuedev/src/continuedev/libs/constants/main.py | 6 ++
continuedev/src/continuedev/libs/util/paths.py | 17 ++++
continuedev/src/continuedev/server/gui.py | 10 +-
continuedev/src/continuedev/server/ide.py | 25 +++--
continuedev/src/continuedev/server/ide_protocol.py | 10 +-
continuedev/src/continuedev/server/main.py | 16 ++-
.../src/continuedev/server/session_manager.py | 41 ++++++--
docs/docs/concepts/ide.md | 4 +-
extension/react-app/src/components/ComboBox.tsx | 1 -
extension/react-app/src/hooks/messenger.ts | 10 --
extension/react-app/src/pages/gui.tsx | 1 -
extension/src/activation/activate.ts | 2 +-
extension/src/continueIdeClient.ts | 62 ++++++++---
extension/src/debugPanel.ts | 113 ++++-----------------
extension/src/util/messenger.ts | 10 ++
16 files changed, 192 insertions(+), 153 deletions(-)
create mode 100644 continuedev/src/continuedev/libs/constants/main.py
create mode 100644 continuedev/src/continuedev/libs/util/paths.py
diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py
index 1b074435..e1c8a076 100644
--- a/continuedev/src/continuedev/core/autopilot.py
+++ b/continuedev/src/continuedev/core/autopilot.py
@@ -1,13 +1,13 @@
from functools import cached_property
import traceback
import time
-from typing import Any, Callable, Coroutine, Dict, List
+from typing import Any, Callable, Coroutine, Dict, List, Union
import os
from aiohttp import ClientPayloadError
+from pydantic import root_validator
from ..models.filesystem import RangeInFileWithContents
from ..models.filesystem_edit import FileEditWithFullContents
-from ..libs.llm import LLM
from .observation import Observation, InternalErrorObservation
from ..server.ide_protocol import AbstractIdeProtocolServer
from ..libs.util.queue import AsyncSubscriptionQueue
@@ -16,7 +16,6 @@ from .main import Context, ContinueCustomException, HighlightedRangeContext, Pol
from ..steps.core.core import ReversibleStep, ManualEditStep, UserInputStep
from ..libs.util.telemetry import capture_event
from .sdk import ContinueSDK
-import asyncio
from ..libs.util.step_name_to_steps import get_step_from_name
from ..libs.util.traceback_parsers import get_python_traceback, get_javascript_traceback
from openai import error as openai_errors
@@ -46,6 +45,7 @@ class Autopilot(ContinueBaseModel):
ide: AbstractIdeProtocolServer
history: History = History.from_empty()
context: Context = Context()
+ full_state: Union[FullState, None] = None
_on_update_callbacks: List[Callable[[FullState], None]] = []
_active: bool = False
@@ -63,8 +63,15 @@ class Autopilot(ContinueBaseModel):
arbitrary_types_allowed = True
keep_untouched = (cached_property,)
+ @root_validator(pre=True)
+ def fill_in_values(cls, values):
+ full_state: FullState = values.get('full_state')
+ if full_state is not None:
+ values['history'] = full_state.history
+ return values
+
def get_full_state(self) -> FullState:
- return FullState(
+ full_state = FullState(
history=self.history,
active=self._active,
user_input_queue=self._main_user_input_queue,
@@ -73,6 +80,8 @@ class Autopilot(ContinueBaseModel):
slash_commands=self.get_available_slash_commands(),
adding_highlighted_code=self._adding_highlighted_code,
)
+ self.full_state = full_state
+ return full_state
def get_available_slash_commands(self) -> List[Dict]:
custom_commands = list(map(lambda x: {
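
The `fill_in_values` root validator above is what lets a restored session carry its history back into a fresh `Autopilot`. Below is a minimal, self-contained sketch of the same pydantic pattern, using simplified stand-in models (not the project's `FullState`/`Autopilot`) and assuming pydantic v1, which the `root_validator` import implies:

```python
# Sketch of the pre=True root_validator pattern used above to seed fields
# from a previously persisted state. SavedState/Agent are stand-ins only.
from typing import List, Optional
from pydantic import BaseModel, root_validator


class SavedState(BaseModel):
    history: List[str] = []


class Agent(BaseModel):
    history: List[str] = []
    saved_state: Optional[SavedState] = None

    @root_validator(pre=True)
    def fill_in_values(cls, values):
        saved = values.get("saved_state")
        if saved is not None:
            # Copy persisted fields in before normal validation runs.
            values["history"] = saved.history
        return values


agent = Agent(saved_state=SavedState(history=["step 1", "step 2"]))
assert agent.history == ["step 1", "step 2"]
```
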
diff --git a/continuedev/src/continuedev/libs/constants/main.py b/continuedev/src/continuedev/libs/constants/main.py
new file mode 100644
index 00000000..96eb6e69
--- /dev/null
+++ b/continuedev/src/continuedev/libs/constants/main.py
@@ -0,0 +1,6 @@
+## PATHS ##
+
+CONTINUE_GLOBAL_FOLDER = ".continue"
+CONTINUE_SESSIONS_FOLDER = "sessions"
+CONTINUE_SERVER_FOLDER = "server"
+
diff --git a/continuedev/src/continuedev/libs/util/paths.py b/continuedev/src/continuedev/libs/util/paths.py
new file mode 100644
index 00000000..fddef887
--- /dev/null
+++ b/continuedev/src/continuedev/libs/util/paths.py
@@ -0,0 +1,17 @@
+import os
+
+from ..constants.main import CONTINUE_SESSIONS_FOLDER, CONTINUE_GLOBAL_FOLDER, CONTINUE_SERVER_FOLDER
+
+def getGlobalFolderPath():
+ return os.path.join(os.path.expanduser("~"), CONTINUE_GLOBAL_FOLDER)
+
+
+
+def getSessionsFolderPath():
+ return os.path.join(getGlobalFolderPath(), CONTINUE_SESSIONS_FOLDER)
+
+def getServerFolderPath():
+ return os.path.join(getGlobalFolderPath(), CONTINUE_SERVER_FOLDER)
+
+def getSessionFilePath(session_id: str):
+ return os.path.join(getSessionsFolderPath(), f"{session_id}.json")
\ No newline at end of file
diff --git a/continuedev/src/continuedev/server/gui.py b/continuedev/src/continuedev/server/gui.py
index 21089f30..8f6f68f6 100644
--- a/continuedev/src/continuedev/server/gui.py
+++ b/continuedev/src/continuedev/server/gui.py
@@ -31,12 +31,12 @@ class AppStatus:
Server.handle_exit = AppStatus.handle_exit
-def session(x_continue_session_id: str = Header("anonymous")) -> Session:
- return session_manager.get_session(x_continue_session_id)
+async def session(x_continue_session_id: str = Header("anonymous")) -> Session:
+ return await session_manager.get_session(x_continue_session_id)
-def websocket_session(session_id: str) -> Session:
- return session_manager.get_session(session_id)
+async def websocket_session(session_id: str) -> Session:
+ return await session_manager.get_session(session_id)
T = TypeVar("T", bound=BaseModel)
@@ -199,4 +199,6 @@ async def websocket_endpoint(websocket: WebSocket, session: Session = Depends(we
print("Closing gui websocket")
if websocket.client_state != WebSocketState.DISCONNECTED:
await websocket.close()
+
+ session_manager.persist_session(session.session_id)
session_manager.remove_session(session.session_id)
diff --git a/continuedev/src/continuedev/server/ide.py b/continuedev/src/continuedev/server/ide.py
index 4645b49e..12a21f19 100644
--- a/continuedev/src/continuedev/server/ide.py
+++ b/continuedev/src/continuedev/server/ide.py
@@ -52,9 +52,11 @@ class FileEditsUpdate(BaseModel):
class OpenFilesResponse(BaseModel):
openFiles: List[str]
+
class VisibleFilesResponse(BaseModel):
visibleFiles: List[str]
+
class HighlightedCodeResponse(BaseModel):
highlightedCode: List[RangeInFile]
@@ -115,6 +117,7 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
websocket: WebSocket
session_manager: SessionManager
sub_queue: AsyncSubscriptionQueue = AsyncSubscriptionQueue()
+ session_id: Union[str, None] = None
def __init__(self, session_manager: SessionManager, websocket: WebSocket):
self.websocket = websocket
@@ -132,8 +135,6 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
continue
message_type = message["messageType"]
data = message["data"]
- # if message_type == "openGUI":
- # await self.openGUI()
if message_type == "workspaceDirectory":
self.workspace_directory = data["workspaceDirectory"]
break
@@ -158,8 +159,8 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
return resp_model.parse_obj(resp)
async def handle_json(self, message_type: str, data: Any):
- if message_type == "openGUI":
- await self.openGUI()
+ if message_type == "getSessionId":
+ await self.getSessionId()
elif message_type == "setFileOpen":
await self.setFileOpen(data["filepath"], data["open"])
elif message_type == "setSuggestionsLocked":
@@ -217,9 +218,10 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
"locked": locked
})
- async def openGUI(self):
- session_id = self.session_manager.new_session(self)
- await self._send_json("openGUI", {
+ async def getSessionId(self):
+ session_id = self.session_manager.new_session(
+ self, self.session_id).session_id
+ await self._send_json("getSessionId", {
"sessionId": session_id
})
@@ -304,7 +306,7 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
async def getOpenFiles(self) -> List[str]:
resp = await self._send_and_receive_json({}, OpenFilesResponse, "openFiles")
return resp.openFiles
-
+
async def getVisibleFiles(self) -> List[str]:
resp = await self._send_and_receive_json({}, VisibleFilesResponse, "visibleFiles")
return resp.visibleFiles
@@ -416,7 +418,7 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
@router.websocket("/ws")
-async def websocket_endpoint(websocket: WebSocket):
+async def websocket_endpoint(websocket: WebSocket, session_id: str = None):
try:
await websocket.accept()
print("Accepted websocket connection from, ", websocket.client)
@@ -434,6 +436,9 @@ async def websocket_endpoint(websocket: WebSocket):
ideProtocolServer.handle_json(message_type, data))
ideProtocolServer = IdeProtocolServer(session_manager, websocket)
+ ideProtocolServer.session_id = session_id
+ if session_id is not None:
+ session_manager.registered_ides[session_id] = ideProtocolServer
other_msgs = await ideProtocolServer.initialize()
for other_msg in other_msgs:
@@ -454,3 +459,5 @@ async def websocket_endpoint(websocket: WebSocket):
finally:
if websocket.client_state != WebSocketState.DISCONNECTED:
await websocket.close()
+
+ session_manager.registered_ides.pop(ideProtocolServer.session_id)
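
For the reconnect flow, the `/ws` endpoint now takes an optional `session_id` query parameter and keeps a `registered_ides` map so a returning IDE is re-attached to its previous session. A stripped-down sketch of that shape as a standalone FastAPI app (illustrative names, not the project's server):

```python
# Sketch: a WebSocket route that accepts an optional session_id query param
# so a reconnecting client (ws://.../ide/ws?session_id=<id>) can be re-registered.
from typing import Dict, Optional
from fastapi import FastAPI, WebSocket, WebSocketDisconnect

app = FastAPI()
registered_ides: Dict[str, WebSocket] = {}  # session_id -> live IDE socket


@app.websocket("/ide/ws")
async def websocket_endpoint(websocket: WebSocket, session_id: Optional[str] = None):
    await websocket.accept()
    if session_id is not None:
        registered_ides[session_id] = websocket
    try:
        while True:
            await websocket.receive_text()  # message handling would go here
    except WebSocketDisconnect:
        pass
    finally:
        if session_id is not None:
            registered_ides.pop(session_id, None)
```
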
diff --git a/continuedev/src/continuedev/server/ide_protocol.py b/continuedev/src/continuedev/server/ide_protocol.py
index 2783dc61..2f78cf0e 100644
--- a/continuedev/src/continuedev/server/ide_protocol.py
+++ b/continuedev/src/continuedev/server/ide_protocol.py
@@ -1,5 +1,6 @@
-from typing import Any, List
+from typing import Any, List, Union
from abc import ABC, abstractmethod, abstractproperty
+from fastapi import WebSocket
from ..models.main import Traceback
from ..models.filesystem_edit import FileEdit, FileSystemEdit, EditDiff
@@ -7,6 +8,9 @@ from ..models.filesystem import RangeInFile, RangeInFileWithContents
class AbstractIdeProtocolServer(ABC):
+ websocket: WebSocket
+ session_id: Union[str, None]
+
@abstractmethod
async def handle_json(self, data: Any):
"""Handle a json message"""
@@ -24,8 +28,8 @@ class AbstractIdeProtocolServer(ABC):
"""Set whether suggestions are locked"""
@abstractmethod
- async def openGUI(self):
- """Open a GUI"""
+ async def getSessionId(self):
+ """Get a new session ID"""
@abstractmethod
async def showSuggestionsAndWait(self, suggestions: List[FileEdit]) -> bool:
diff --git a/continuedev/src/continuedev/server/main.py b/continuedev/src/continuedev/server/main.py
index f4d82903..aa093853 100644
--- a/continuedev/src/continuedev/server/main.py
+++ b/continuedev/src/continuedev/server/main.py
@@ -4,7 +4,8 @@ from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from .ide import router as ide_router
from .gui import router as gui_router
-import logging
+from .session_manager import session_manager
+import atexit
import uvicorn
import argparse
@@ -44,5 +45,16 @@ def run_server():
uvicorn.run(app, host="0.0.0.0", port=args.port)
+def cleanup():
+ print("Cleaning up sessions")
+ for session_id in session_manager.sessions:
+ session_manager.persist_session(session_id)
+
+
+atexit.register(cleanup)
if __name__ == "__main__":
- run_server()
+ try:
+ run_server()
+ except Exception as e:
+ cleanup()
+ raise e
diff --git a/continuedev/src/continuedev/server/session_manager.py b/continuedev/src/continuedev/server/session_manager.py
index 7147dcfa..fb8ac386 100644
--- a/continuedev/src/continuedev/server/session_manager.py
+++ b/continuedev/src/continuedev/server/session_manager.py
@@ -1,9 +1,12 @@
-from asyncio import BaseEventLoop
+import os
from fastapi import WebSocket
from typing import Any, Dict, List, Union
from uuid import uuid4
+import json
+from ..libs.util.paths import getSessionFilePath, getSessionsFolderPath
from ..models.filesystem_edit import FileEditWithFullContents
+from ..libs.constants.main import CONTINUE_SESSIONS_FOLDER
from ..core.policy import DemoPolicy
from ..core.main import FullState
from ..core.autopilot import Autopilot
@@ -39,17 +42,35 @@ class DemoAutopilot(Autopilot):
class SessionManager:
sessions: Dict[str, Session] = {}
+ # Mapping of session_id to IDE, where the IDE is still alive
+ registered_ides: Dict[str, AbstractIdeProtocolServer] = {}
- def get_session(self, session_id: str) -> Session:
+ async def get_session(self, session_id: str) -> Session:
if session_id not in self.sessions:
+            # Then check whether it was persisted, by listing the files in the sessions folder,
+            # and only restore it if the IDE is still alive
+ sessions_folder = getSessionsFolderPath()
+ session_files = os.listdir(sessions_folder)
+ if f"{session_id}.json" in session_files and session_id in self.registered_ides:
+ if self.registered_ides[session_id].session_id is not None:
+ return self.new_session(self.registered_ides[session_id], session_id=session_id)
+
raise KeyError("Session ID not recognized", session_id)
return self.sessions[session_id]
- def new_session(self, ide: AbstractIdeProtocolServer) -> str:
- autopilot = DemoAutopilot(policy=DemoPolicy(), ide=ide)
- session_id = str(uuid4())
+ def new_session(self, ide: AbstractIdeProtocolServer, session_id: Union[str, None] = None) -> Session:
+ full_state = None
+ if session_id is not None and os.path.exists(getSessionFilePath(session_id)):
+ with open(getSessionFilePath(session_id), "r") as f:
+ full_state = FullState(**json.load(f))
+
+ autopilot = DemoAutopilot(
+ policy=DemoPolicy(), ide=ide, full_state=full_state)
+ session_id = session_id or str(uuid4())
+ ide.session_id = session_id
session = Session(session_id=session_id, autopilot=autopilot)
self.sessions[session_id] = session
+ self.registered_ides[session_id] = ide
async def on_update(state: FullState):
await session_manager.send_ws_data(session_id, "state_update", {
@@ -58,11 +79,19 @@ class SessionManager:
autopilot.on_update(on_update)
create_async_task(autopilot.run_policy())
- return session_id
+ return session
def remove_session(self, session_id: str):
del self.sessions[session_id]
+ def persist_session(self, session_id: str):
+ """Save the session's FullState as a json file"""
+ full_state = self.sessions[session_id].autopilot.get_full_state()
+ if not os.path.exists(getSessionsFolderPath()):
+ os.mkdir(getSessionsFolderPath())
+ with open(getSessionFilePath(session_id), "w") as f:
+ json.dump(full_state.dict(), f)
+
def register_websocket(self, session_id: str, ws: WebSocket):
self.sessions[session_id].ws = ws
print("Registered websocket for session", session_id)
diff --git a/docs/docs/concepts/ide.md b/docs/docs/concepts/ide.md
index dc7b9e23..bd31481b 100644
--- a/docs/docs/concepts/ide.md
+++ b/docs/docs/concepts/ide.md
@@ -41,9 +41,9 @@ Get the workspace directory
Set whether a file is open
-### openGUI
+### getSessionId
-Open a gui
+Get a new session ID
### showSuggestionsAndWait
diff --git a/extension/react-app/src/components/ComboBox.tsx b/extension/react-app/src/components/ComboBox.tsx
index ac994b0a..7d6541c7 100644
--- a/extension/react-app/src/components/ComboBox.tsx
+++ b/extension/react-app/src/components/ComboBox.tsx
@@ -331,7 +331,6 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
) {
(event.nativeEvent as any).preventDownshiftDefault = true;
} else if (event.key === "ArrowUp") {
- console.log("OWJFOIJO");
if (positionInHistory == 0) return;
else if (
positionInHistory == history.length &&
diff --git a/extension/react-app/src/hooks/messenger.ts b/extension/react-app/src/hooks/messenger.ts
index e2a0bab8..00ce1fbb 100644
--- a/extension/react-app/src/hooks/messenger.ts
+++ b/extension/react-app/src/hooks/messenger.ts
@@ -1,6 +1,3 @@
-// console.log("Websocket import");
-// const WebSocket = require("ws");
-
export abstract class Messenger {
abstract send(messageType: string, data: object): void;
@@ -28,13 +25,6 @@ export class WebsocketMessenger extends Messenger {
private serverUrl: string;
_newWebsocket(): WebSocket {
- // // Dynamic import, because WebSocket is builtin with browser, but not with node. And can't use require in browser.
- // if (typeof process === "object") {
- // console.log("Using node");
- // // process is only available in Node
- // var WebSocket = require("ws");
- // }
-
const newWebsocket = new WebSocket(this.serverUrl);
for (const listener of this.onOpenListeners) {
this.onOpen(listener);
diff --git a/extension/react-app/src/pages/gui.tsx b/extension/react-app/src/pages/gui.tsx
index b9382bd1..4ff260fa 100644
--- a/extension/react-app/src/pages/gui.tsx
+++ b/extension/react-app/src/pages/gui.tsx
@@ -262,7 +262,6 @@ function GUI(props: GUIProps) {
const onStepUserInput = (input: string, index: number) => {
if (!client) return;
- console.log("Sending step user input", input, index);
client.sendStepUserInput(input, index);
};
diff --git a/extension/src/activation/activate.ts b/extension/src/activation/activate.ts
index 559caf44..cd885b12 100644
--- a/extension/src/activation/activate.ts
+++ b/extension/src/activation/activate.ts
@@ -56,7 +56,7 @@ export async function activateExtension(context: vscode.ExtensionContext) {
registerAllCodeLensProviders(context);
registerAllCommands(context);
- // Initialize IDE Protocol Client, then call "openGUI"
+ // Initialize IDE Protocol Client
const serverUrl = getContinueServerUrl();
ideProtocolClient = new IdeProtocolClient(
`${serverUrl.replace("http", "ws")}/ide/ws`,
diff --git a/extension/src/continueIdeClient.ts b/extension/src/continueIdeClient.ts
index b728833f..4c1fdf1e 100644
--- a/extension/src/continueIdeClient.ts
+++ b/extension/src/continueIdeClient.ts
@@ -1,10 +1,9 @@
-// import { ShowSuggestionRequest } from "../schema/ShowSuggestionRequest";
import {
editorSuggestionsLocked,
showSuggestion as showSuggestionInEditor,
SuggestionRanges,
} from "./suggestions";
-import { openEditorAndRevealRange, getRightViewColumn } from "./util/vscode";
+import { openEditorAndRevealRange } from "./util/vscode";
import { FileEdit } from "../schema/FileEdit";
import { RangeInFile } from "../schema/RangeInFile";
import * as vscode from "vscode";
@@ -15,8 +14,6 @@ import {
import { FileEditWithFullContents } from "../schema/FileEditWithFullContents";
import fs = require("fs");
import { WebsocketMessenger } from "./util/messenger";
-import * as path from "path";
-import * as os from "os";
import { diffManager } from "./diffs";
class IdeProtocolClient {
@@ -27,17 +24,54 @@ class IdeProtocolClient {
private _highlightDebounce: NodeJS.Timeout | null = null;
- constructor(serverUrl: string, context: vscode.ExtensionContext) {
- this.context = context;
+ private _lastReloadTime: number = 16;
+ private _reconnectionTimeouts: NodeJS.Timeout[] = [];
+
+ private _sessionId: string | null = null;
+ private _serverUrl: string;
- let messenger = new WebsocketMessenger(serverUrl);
+ private _newWebsocketMessenger() {
+ const requestUrl =
+ this._serverUrl +
+ (this._sessionId ? `?session_id=${this._sessionId}` : "");
+ const messenger = new WebsocketMessenger(requestUrl);
this.messenger = messenger;
- messenger.onClose(() => {
+
+ const reconnect = () => {
+ console.log("Trying to reconnect IDE protocol websocket...");
this.messenger = null;
+
+ // Exponential backoff to reconnect
+ this._reconnectionTimeouts.forEach((to) => clearTimeout(to));
+
+ const timeout = setTimeout(() => {
+ if (this.messenger?.websocket?.readyState === 1) {
+ return;
+ }
+ this._newWebsocketMessenger();
+ }, this._lastReloadTime);
+
+ this._reconnectionTimeouts.push(timeout);
+ this._lastReloadTime = Math.min(2 * this._lastReloadTime, 5000);
+ };
+ messenger.onOpen(() => {
+ this._reconnectionTimeouts.forEach((to) => clearTimeout(to));
+ });
+ messenger.onClose(() => {
+ reconnect();
+ });
+ messenger.onError(() => {
+ reconnect();
});
messenger.onMessage((messageType, data, messenger) => {
this.handleMessage(messageType, data, messenger);
});
+ }
+
+ constructor(serverUrl: string, context: vscode.ExtensionContext) {
+ this.context = context;
+ this._serverUrl = serverUrl;
+ this._newWebsocketMessenger();
// Setup listeners for any file changes in open editors
// vscode.workspace.onDidChangeTextDocument((event) => {
@@ -171,7 +205,7 @@ class IdeProtocolClient {
case "showDiff":
this.showDiff(data.filepath, data.replacement, data.step_index);
break;
- case "openGUI":
+ case "getSessionId":
case "connected":
break;
default:
@@ -284,10 +318,6 @@ class IdeProtocolClient {
// ------------------------------------ //
// Initiate Request
- async openGUI(asRightWebviewPanel: boolean = false) {
- // Open the webview panel
- }
-
  async getSessionId(): Promise<string> {
await new Promise((resolve, reject) => {
// Repeatedly try to connect to the server
@@ -303,10 +333,10 @@ class IdeProtocolClient {
}
}, 1000);
});
- const resp = await this.messenger?.sendAndReceive("openGUI", {});
- const sessionId = resp.sessionId;
+ const resp = await this.messenger?.sendAndReceive("getSessionId", {});
// console.log("New Continue session with ID: ", sessionId);
- return sessionId;
+ this._sessionId = resp.sessionId;
+ return resp.sessionId;
}
acceptRejectSuggestion(accept: boolean, key: SuggestionRanges) {
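
The new `_newWebsocketMessenger` retries with an exponentially growing delay (doubling from roughly 16 ms up to a 5 s cap) and cancels pending retries once a socket opens. The same backoff idea as a tiny Python sketch, kept in Python for consistency with the other sketches here; `connect` is a placeholder callable, not part of the extension:

```python
# Sketch of the exponential-backoff reconnect pattern used above
# (_lastReloadTime doubles up to a 5 s cap). connect() is a placeholder.
import time


def connect_with_backoff(connect, initial_delay=0.016, max_delay=5.0):
    delay = initial_delay
    while True:
        try:
            return connect()  # success: hand the live connection back
        except OSError:
            # Wait, then double the delay up to the cap, mirroring
            # _lastReloadTime = Math.min(2 * _lastReloadTime, 5000).
            time.sleep(delay)
            delay = min(2 * delay, max_delay)
```
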
diff --git a/extension/src/debugPanel.ts b/extension/src/debugPanel.ts
index 487bbedf..5e1689d1 100644
--- a/extension/src/debugPanel.ts
+++ b/extension/src/debugPanel.ts
@@ -8,76 +8,6 @@ import {
import { RangeInFile } from "./client";
const WebSocket = require("ws");
-class StreamManager {
- private _fullText: string = "";
- private _insertionPoint: vscode.Position | undefined;
-
- private _addToEditor(update: string) {
- let editor =
- vscode.window.activeTextEditor || vscode.window.visibleTextEditors[0];
-
- if (typeof this._insertionPoint === "undefined") {
- if (editor?.selection.isEmpty) {
- this._insertionPoint = editor?.selection.active;
- } else {
- this._insertionPoint = editor?.selection.end;
- }
- }
- editor?.edit((editBuilder) => {
- if (this._insertionPoint) {
- editBuilder.insert(this._insertionPoint, update);
- this._insertionPoint = this._insertionPoint.translate(
- Array.from(update.matchAll(/\n/g)).length,
- update.length
- );
- }
- });
- }
-
- public closeStream() {
- this._fullText = "";
- this._insertionPoint = undefined;
- this._codeBlockStatus = "closed";
- this._pendingBackticks = 0;
- }
-
- private _codeBlockStatus: "open" | "closed" | "language-descriptor" =
- "closed";
- private _pendingBackticks: number = 0;
- public onStreamUpdate(update: string) {
- let textToInsert = "";
- for (let i = 0; i < update.length; i++) {
- switch (this._codeBlockStatus) {
- case "closed":
- if (update[i] === "`" && this._fullText.endsWith("``")) {
- this._codeBlockStatus = "language-descriptor";
- }
- break;
- case "language-descriptor":
- if (update[i] === " " || update[i] === "\n") {
- this._codeBlockStatus = "open";
- }
- break;
- case "open":
- if (update[i] === "`") {
- if (this._fullText.endsWith("``")) {
- this._codeBlockStatus = "closed";
- this._pendingBackticks = 0;
- } else {
- this._pendingBackticks += 1;
- }
- } else {
- textToInsert += "`".repeat(this._pendingBackticks) + update[i];
- this._pendingBackticks = 0;
- }
- break;
- }
- this._fullText += update[i];
- }
- this._addToEditor(textToInsert);
- }
-}
-
let websocketConnections: { [url: string]: WebsocketConnection | undefined } =
{};
@@ -127,8 +57,6 @@ class WebsocketConnection {
}
}
-let streamManager = new StreamManager();
-
export let debugPanelWebview: vscode.Webview | undefined;
export function setupDebugPanel(
panel: vscode.WebviewPanel | vscode.WebviewView,
@@ -147,10 +75,7 @@ export function setupDebugPanel(
.toString();
const isProduction = true; // context?.extensionMode === vscode.ExtensionMode.Development;
- if (!isProduction) {
- scriptUri = "http://localhost:5173/src/main.tsx";
- styleMainUri = "http://localhost:5173/src/main.css";
- } else {
+ if (isProduction) {
scriptUri = debugPanelWebview
.asWebviewUri(
vscode.Uri.joinPath(extensionUri, "react-app/dist/assets/index.js")
@@ -161,6 +86,9 @@ export function setupDebugPanel(
vscode.Uri.joinPath(extensionUri, "react-app/dist/assets/index.css")
)
.toString();
+ } else {
+ scriptUri = "http://localhost:5173/src/main.tsx";
+ styleMainUri = "http://localhost:5173/src/main.css";
}
panel.webview.options = {
@@ -175,11 +103,11 @@ export function setupDebugPanel(
return;
}
- let rangeInFile: RangeInFile = {
+ const rangeInFile: RangeInFile = {
range: e.selections[0],
filepath: e.textEditor.document.fileName,
};
- let filesystem = {
+ const filesystem = {
[rangeInFile.filepath]: e.textEditor.document.getText(),
};
panel.webview.postMessage({
@@ -217,13 +145,19 @@ export function setupDebugPanel(
url,
});
};
- const connection = new WebsocketConnection(
- url,
- onMessage,
- onOpen,
- onClose
- );
- websocketConnections[url] = connection;
+ try {
+ const connection = new WebsocketConnection(
+ url,
+ onMessage,
+ onOpen,
+ onClose
+ );
+ websocketConnections[url] = connection;
+ resolve(null);
+ } catch (e) {
+ console.log("Caught it!: ", e);
+ reject(e);
+ }
});
}
@@ -292,15 +226,6 @@ export function setupDebugPanel(
openEditorAndRevealRange(data.path, undefined, vscode.ViewColumn.One);
break;
}
- case "streamUpdate": {
- // Write code at the position of the cursor
- streamManager.onStreamUpdate(data.update);
- break;
- }
- case "closeStream": {
- streamManager.closeStream();
- break;
- }
case "withProgress": {
// This message allows withProgress to be used in the webview
if (data.done) {
diff --git a/extension/src/util/messenger.ts b/extension/src/util/messenger.ts
index b1df161b..7fd71ddd 100644
--- a/extension/src/util/messenger.ts
+++ b/extension/src/util/messenger.ts
@@ -15,6 +15,8 @@ export abstract class Messenger {
abstract onOpen(callback: () => void): void;
abstract onClose(callback: () => void): void;
+
+ abstract onError(callback: () => void): void;
  abstract sendAndReceive(messageType: string, data: any): Promise<any>;
}
@@ -26,6 +28,7 @@ export class WebsocketMessenger extends Messenger {
} = {};
private onOpenListeners: (() => void)[] = [];
private onCloseListeners: (() => void)[] = [];
+ private onErrorListeners: (() => void)[] = [];
private serverUrl: string;
_newWebsocket(): WebSocket {
@@ -43,6 +46,9 @@ export class WebsocketMessenger extends Messenger {
for (const listener of this.onCloseListeners) {
this.onClose(listener);
}
+ for (const listener of this.onErrorListeners) {
+ this.onError(listener);
+ }
for (const messageType in this.onMessageListeners) {
for (const listener of this.onMessageListeners[messageType]) {
this.onMessageType(messageType, listener);
@@ -151,4 +157,8 @@ export class WebsocketMessenger extends Messenger {
onClose(callback: () => void): void {
this.websocket.addEventListener("close", callback);
}
+
+ onError(callback: () => void): void {
+ this.websocket.addEventListener("error", callback);
+ }
}
--
cgit v1.2.3-70-g09d2
From 9558a3602c8678dacd3087bf6512b1bc4c96bc22 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Wed, 12 Jul 2023 22:01:46 -0700
Subject: patch, send state initially
---
continuedev/src/continuedev/server/gui.py | 2 +-
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
3 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/continuedev/src/continuedev/server/gui.py b/continuedev/src/continuedev/server/gui.py
index 8f6f68f6..238273b2 100644
--- a/continuedev/src/continuedev/server/gui.py
+++ b/continuedev/src/continuedev/server/gui.py
@@ -174,7 +174,7 @@ async def websocket_endpoint(websocket: WebSocket, session: Session = Depends(we
protocol.websocket = websocket
# Update any history that may have happened before connection
- # await protocol.send_state_update()
+ await protocol.send_state_update()
while AppStatus.should_exit is False:
message = await websocket.receive_text()
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 01e5bc1a..ea853fae 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.156",
+ "version": "0.0.157",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.156",
+ "version": "0.0.157",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index f3402abf..b3ff0b52 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.156",
+ "version": "0.0.157",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
--
cgit v1.2.3-70-g09d2
From f0ce0ad4537aadb8252cebd1b5ce25a91c60e392 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Wed, 12 Jul 2023 22:56:06 -0700
Subject: patch
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index ea853fae..a235aa81 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.157",
+ "version": "0.0.158",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.157",
+ "version": "0.0.158",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index b3ff0b52..6bb6b720 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.157",
+ "version": "0.0.158",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
--
cgit v1.2.3-70-g09d2
From 01419c661430f6d100f086513876201cf9e0f0f6 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Thu, 13 Jul 2023 14:50:10 -0700
Subject: diff editor infer filepath, codelens in middle
---
continuedev/src/continuedev/steps/core/core.py | 98 ++++++++++++++------------
extension/src/diffs.ts | 65 ++++++++++++++---
extension/src/lang-server/codeLens.ts | 8 ++-
3 files changed, 114 insertions(+), 57 deletions(-)
diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/steps/core/core.py
index 5ea95104..787da316 100644
--- a/continuedev/src/continuedev/steps/core/core.py
+++ b/continuedev/src/continuedev/steps/core/core.py
@@ -486,58 +486,64 @@ Please output the code to be inserted at the cursor in order to fulfill the user
completion_lines_covered = 0
repeating_file_suffix = False
line_below_highlighted_range = file_suffix.lstrip().split("\n")[0]
- async for chunk in model_to_use.stream_chat(messages, temperature=0, max_tokens=max_tokens):
- # Stop early if it is repeating the file_suffix or the step was deleted
- if repeating_file_suffix:
- break
- if sdk.current_step_was_deleted():
- return
- # Accumulate lines
- if "content" not in chunk:
- continue
- chunk = chunk["content"]
- chunk_lines = chunk.split("\n")
- chunk_lines[0] = unfinished_line + chunk_lines[0]
- if chunk.endswith("\n"):
- unfinished_line = ""
- chunk_lines.pop() # because this will be an empty string
- else:
- unfinished_line = chunk_lines.pop()
-
- # Deal with newly accumulated lines
- for i in range(len(chunk_lines)):
- # Trailing whitespace doesn't matter
- chunk_lines[i] = chunk_lines[i].rstrip()
- chunk_lines[i] = common_whitespace + chunk_lines[i]
-
- # Lines that should signify the end of generation
- if self.is_end_line(chunk_lines[i]):
- break
- # Lines that should be ignored, like the <> tags
- elif self.line_to_be_ignored(chunk_lines[i], completion_lines_covered == 0):
- continue
- # Check if we are currently just copying the prefix
- elif (lines_of_prefix_copied > 0 or completion_lines_covered == 0) and lines_of_prefix_copied < len(file_prefix.splitlines()) and chunk_lines[i] == full_file_contents_lines[lines_of_prefix_copied]:
- # This is a sketchy way of stopping it from repeating the file_prefix. Is a bug if output happens to have a matching line
- lines_of_prefix_copied += 1
- continue
- # Because really short lines might be expected to be repeated, this is only a !heuristic!
- # Stop when it starts copying the file_suffix
- elif chunk_lines[i].strip() == line_below_highlighted_range.strip() and len(chunk_lines[i].strip()) > 4 and not (len(original_lines_below_previous_blocks) > 0 and chunk_lines[i].strip() == original_lines_below_previous_blocks[0].strip()):
- repeating_file_suffix = True
+ generator = model_to_use.stream_chat(
+ messages, temperature=0, max_tokens=max_tokens)
+
+ try:
+ async for chunk in generator:
+ # Stop early if it is repeating the file_suffix or the step was deleted
+ if repeating_file_suffix:
+                    break
+ if sdk.current_step_was_deleted():
+ return
- # If none of the above, insert the line!
- if False:
- await handle_generated_line(chunk_lines[i])
+ # Accumulate lines
+ if "content" not in chunk:
+ continue
+ chunk = chunk["content"]
+ chunk_lines = chunk.split("\n")
+ chunk_lines[0] = unfinished_line + chunk_lines[0]
+ if chunk.endswith("\n"):
+ unfinished_line = ""
+ chunk_lines.pop() # because this will be an empty string
+ else:
+ unfinished_line = chunk_lines.pop()
+
+ # Deal with newly accumulated lines
+ for i in range(len(chunk_lines)):
+ # Trailing whitespace doesn't matter
+ chunk_lines[i] = chunk_lines[i].rstrip()
+ chunk_lines[i] = common_whitespace + chunk_lines[i]
+
+ # Lines that should signify the end of generation
+ if self.is_end_line(chunk_lines[i]):
+ break
+ # Lines that should be ignored, like the <> tags
+ elif self.line_to_be_ignored(chunk_lines[i], completion_lines_covered == 0):
+ continue
+ # Check if we are currently just copying the prefix
+ elif (lines_of_prefix_copied > 0 or completion_lines_covered == 0) and lines_of_prefix_copied < len(file_prefix.splitlines()) and chunk_lines[i] == full_file_contents_lines[lines_of_prefix_copied]:
+ # This is a sketchy way of stopping it from repeating the file_prefix. Is a bug if output happens to have a matching line
+ lines_of_prefix_copied += 1
+ continue
+ # Because really short lines might be expected to be repeated, this is only a !heuristic!
+ # Stop when it starts copying the file_suffix
+ elif chunk_lines[i].strip() == line_below_highlighted_range.strip() and len(chunk_lines[i].strip()) > 4 and not (len(original_lines_below_previous_blocks) > 0 and chunk_lines[i].strip() == original_lines_below_previous_blocks[0].strip()):
+ repeating_file_suffix = True
+ break
- lines.append(chunk_lines[i])
- completion_lines_covered += 1
- current_line_in_file += 1
+ # If none of the above, insert the line!
+ if False:
+ await handle_generated_line(chunk_lines[i])
- await sendDiffUpdate(lines + [common_whitespace if unfinished_line.startswith("<") else (common_whitespace + unfinished_line)], sdk)
+ lines.append(chunk_lines[i])
+ completion_lines_covered += 1
+ current_line_in_file += 1
+ await sendDiffUpdate(lines + [common_whitespace if unfinished_line.startswith("<") else (common_whitespace + unfinished_line)], sdk)
+ finally:
+ await generator.aclose()
# Add the unfinished line
if unfinished_line != "" and not self.line_to_be_ignored(unfinished_line, completion_lines_covered == 0) and not self.is_end_line(unfinished_line):
unfinished_line = common_whitespace + unfinished_line
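
The refactor above wraps the streaming loop in `try`/`finally` and calls `await generator.aclose()`, so breaking out early (repeated file suffix, deleted step) still shuts the async generator down and runs its cleanup deterministically. A standalone illustration of why that matters, with a toy generator standing in for `stream_chat`:

```python
# Toy illustration of closing an async generator when the consumer stops early,
# as the try/finally/aclose() above does.
import asyncio


async def fake_stream():
    try:
        for i in range(100):
            yield f"chunk {i}"
            await asyncio.sleep(0)
    finally:
        # Without aclose(), this cleanup would only run at garbage collection.
        print("stream closed")


async def consume():
    generator = fake_stream()
    try:
        async for chunk in generator:
            if chunk.endswith("3"):  # stop early, like the repeating-suffix check
                break
    finally:
        await generator.aclose()


asyncio.run(consume())
```
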
diff --git a/extension/src/diffs.ts b/extension/src/diffs.ts
index 28089fc6..910c30f2 100644
--- a/extension/src/diffs.ts
+++ b/extension/src/diffs.ts
@@ -9,6 +9,7 @@ interface DiffInfo {
newFilepath: string;
editor?: vscode.TextEditor;
step_index: number;
+ range: vscode.Range;
}
export const DIFF_DIRECTORY = path.join(os.homedir(), ".continue", "diffs");
@@ -18,6 +19,10 @@ class DiffManager {
// Doing this because virtual files are read-only
  private diffs: Map<string, DiffInfo> = new Map();
+ diffAtNewFilepath(newFilepath: string): DiffInfo | undefined {
+ return this.diffs.get(newFilepath);
+ }
+
private setupDirectory() {
// Make sure the diff directory exists
if (!fs.existsSync(DIFF_DIRECTORY)) {
@@ -35,6 +40,10 @@ class DiffManager {
return filepath.replace(/\\/g, "_").replace(/\//g, "_");
}
+ private getNewFilepath(originalFilepath: string): string {
+ return path.join(DIFF_DIRECTORY, this.escapeFilepath(originalFilepath));
+ }
+
private openDiffEditor(
originalFilepath: string,
newFilepath: string
@@ -103,18 +112,28 @@ class DiffManager {
this.setupDirectory();
// Create or update existing diff
- const newFilepath = path.join(
- DIFF_DIRECTORY,
- this.escapeFilepath(originalFilepath)
- );
+ const newFilepath = this.getNewFilepath(originalFilepath);
fs.writeFileSync(newFilepath, newContent);
// Open the diff editor if this is a new diff
if (!this.diffs.has(newFilepath)) {
+ // Figure out the first line that is different
+ const oldContent = fs.readFileSync(originalFilepath).toString("utf-8");
+ let line = 0;
+ const newLines = newContent.split("\n");
+ const oldLines = oldContent.split("\n");
+ for (let i = 0; i < newLines.length && i < oldLines.length; i++) {
+ if (newLines[i] !== oldLines[i]) {
+ line = i;
+ break;
+ }
+ }
+
const diffInfo: DiffInfo = {
originalFilepath,
newFilepath,
step_index,
+ range: new vscode.Range(line, 0, line + 1, 0),
};
this.diffs.set(newFilepath, diffInfo);
}
@@ -139,10 +158,38 @@ class DiffManager {
fs.unlinkSync(diffInfo.newFilepath);
}
+ private inferNewFilepath() {
+ const activeEditorPath =
+ vscode.window.activeTextEditor?.document.uri.fsPath;
+ if (activeEditorPath && path.dirname(activeEditorPath) === DIFF_DIRECTORY) {
+ return activeEditorPath;
+ }
+ const visibleEditors = vscode.window.visibleTextEditors.map(
+ (editor) => editor.document.uri.fsPath
+ );
+ for (const editorPath of visibleEditors) {
+ if (path.dirname(editorPath) === DIFF_DIRECTORY) {
+ for (const otherEditorPath of visibleEditors) {
+ if (
+ path.dirname(otherEditorPath) !== DIFF_DIRECTORY &&
+ this.getNewFilepath(otherEditorPath) === editorPath
+ ) {
+ return editorPath;
+ }
+ }
+ }
+ }
+
+ if (this.diffs.size === 1) {
+ return Array.from(this.diffs.keys())[0];
+ }
+ return undefined;
+ }
+
acceptDiff(newFilepath?: string) {
- // If no newFilepath is provided and there is only one in the dictionary, use that
- if (!newFilepath && this.diffs.size === 1) {
- newFilepath = Array.from(this.diffs.keys())[0];
+ // When coming from a keyboard shortcut, we have to infer the newFilepath from visible text editors
+ if (!newFilepath) {
+ newFilepath = this.inferNewFilepath();
}
if (!newFilepath) {
console.log("No newFilepath provided to accept the diff");
@@ -170,8 +217,8 @@ class DiffManager {
rejectDiff(newFilepath?: string) {
// If no newFilepath is provided and there is only one in the dictionary, use that
- if (!newFilepath && this.diffs.size === 1) {
- newFilepath = Array.from(this.diffs.keys())[0];
+ if (!newFilepath) {
+ newFilepath = this.inferNewFilepath();
}
if (!newFilepath) {
console.log(
diff --git a/extension/src/lang-server/codeLens.ts b/extension/src/lang-server/codeLens.ts
index 778b98dc..5800a00e 100644
--- a/extension/src/lang-server/codeLens.ts
+++ b/extension/src/lang-server/codeLens.ts
@@ -2,7 +2,7 @@ import * as vscode from "vscode";
import { editorToSuggestions, editorSuggestionsLocked } from "../suggestions";
import * as path from "path";
import * as os from "os";
-import { DIFF_DIRECTORY } from "../diffs";
+import { DIFF_DIRECTORY, diffManager } from "../diffs";
class SuggestionsCodeLensProvider implements vscode.CodeLensProvider {
public provideCodeLenses(
document: vscode.TextDocument,
@@ -53,7 +53,11 @@ class DiffViewerCodeLensProvider implements vscode.CodeLensProvider {
  ): vscode.CodeLens[] | Thenable<vscode.CodeLens[]> {
if (path.dirname(document.uri.fsPath) === DIFF_DIRECTORY) {
const codeLenses: vscode.CodeLens[] = [];
- const range = new vscode.Range(0, 0, 1, 0);
+ let range = new vscode.Range(0, 0, 1, 0);
+ const diffInfo = diffManager.diffAtNewFilepath(document.uri.fsPath);
+ if (diffInfo) {
+ range = diffInfo.range;
+ }
codeLenses.push(
new vscode.CodeLens(range, {
title: "Accept ✅ (⌘⇧↩)",
--
cgit v1.2.3-70-g09d2
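
This commit's `showDiff` change scans for the first line where the generated content differs from the original and stores it as the `range` used by the Accept/Reject CodeLens, while `inferNewFilepath` lets the keyboard shortcuts work without an explicit path. The line scan itself, as a tiny Python sketch with illustrative names:

```python
# Sketch of the "first differing line" scan used to position the CodeLens;
# the function name is illustrative, not from the repo.
def first_differing_line(old_content: str, new_content: str) -> int:
    old_lines = old_content.split("\n")
    new_lines = new_content.split("\n")
    for i in range(min(len(old_lines), len(new_lines))):
        if old_lines[i] != new_lines[i]:
            return i
    return 0  # fall back to the top of the file, matching the default range


assert first_differing_line("a\nb\nc", "a\nB\nc") == 1
```
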
From 8a4acec505b01404f0582320f255d56b533c9145 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Thu, 13 Jul 2023 14:54:59 -0700
Subject: patch
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index a235aa81..d84be971 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.158",
+ "version": "0.0.159",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.158",
+ "version": "0.0.159",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 6bb6b720..bdc67cc9 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.158",
+ "version": "0.0.159",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
--
cgit v1.2.3-70-g09d2
From 3430de2b46b5c65c953b24dab5c31678a7bacc20 Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Thu, 13 Jul 2023 15:36:09 -0700
Subject: adding /help command
---
continuedev/src/continuedev/core/config.py | 5 ++
continuedev/src/continuedev/core/policy.py | 5 +-
.../continuedev/libs/util/step_name_to_steps.py | 4 +-
continuedev/src/continuedev/steps/help.py | 57 ++++++++++++++++++++++
4 files changed, 66 insertions(+), 5 deletions(-)
create mode 100644 continuedev/src/continuedev/steps/help.py
diff --git a/continuedev/src/continuedev/core/config.py b/continuedev/src/continuedev/core/config.py
index f6167638..6e430c04 100644
--- a/continuedev/src/continuedev/core/config.py
+++ b/continuedev/src/continuedev/core/config.py
@@ -44,6 +44,11 @@ DEFAULT_SLASH_COMMANDS = [
description="Open the config file to create new and edit existing slash commands",
step_name="OpenConfigStep",
),
+ SlashCommand(
+ name="help",
+ description="Ask a question like '/help what is given to the llm as context?'",
+ step_name="HelpStep",
+ ),
SlashCommand(
name="comment",
description="Write comments for the current file or highlighted code",
diff --git a/continuedev/src/continuedev/core/policy.py b/continuedev/src/continuedev/core/policy.py
index b8363df2..59ea78b1 100644
--- a/continuedev/src/continuedev/core/policy.py
+++ b/continuedev/src/continuedev/core/policy.py
@@ -60,10 +60,7 @@ class DemoPolicy(Policy):
MessageStep(name="Welcome to Continue", message=dedent("""\
- Highlight code and ask a question or give instructions
- Use `cmd+k` (Mac) / `ctrl+k` (Windows) to open Continue
- - Use `cmd+shift+e` / `ctrl+shift+e` to open file Explorer
- - Add your own OpenAI API key to VS Code Settings with `cmd+,`
- - Use slash commands when you want fine-grained control
- - Past steps are included as part of the context by default""")) >>
+ - Use `/help` to ask questions about how to use Continue""")) >>
WelcomeStep() >>
# SetupContinueWorkspaceStep() >>
# CreateCodebaseIndexChroma() >>
diff --git a/continuedev/src/continuedev/libs/util/step_name_to_steps.py b/continuedev/src/continuedev/libs/util/step_name_to_steps.py
index d329e110..49056c81 100644
--- a/continuedev/src/continuedev/libs/util/step_name_to_steps.py
+++ b/continuedev/src/continuedev/libs/util/step_name_to_steps.py
@@ -13,6 +13,7 @@ from ...recipes.DeployPipelineAirflowRecipe.main import DeployPipelineAirflowRec
from ...steps.on_traceback import DefaultOnTracebackStep
from ...steps.clear_history import ClearHistoryStep
from ...steps.open_config import OpenConfigStep
+from ...steps.help import HelpStep
# This mapping is used to convert from string in ContinueConfig json to corresponding Step class.
# Used for example in slash_commands and steps_on_startup
@@ -28,7 +29,8 @@ step_name_to_step_class = {
"DeployPipelineAirflowRecipe": DeployPipelineAirflowRecipe,
"DefaultOnTracebackStep": DefaultOnTracebackStep,
"ClearHistoryStep": ClearHistoryStep,
- "OpenConfigStep": OpenConfigStep
+ "OpenConfigStep": OpenConfigStep,
+ "HelpStep": HelpStep,
}
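
The `/help` slash command plugs into the same string-to-class registry as the other commands: the config stores `step_name: "HelpStep"`, and `step_name_to_step_class` maps that string to the Step class to instantiate. A minimal sketch of that dispatch, with simplified stand-in classes rather than the real `Step` base:

```python
# Sketch of the string -> Step-class dispatch behind slash commands.
class Step:
    def __init__(self, **params):
        self.params = params


class HelpStep(Step):
    pass


step_name_to_step_class = {
    "HelpStep": HelpStep,
}


def get_step_from_name(step_name: str, params: dict) -> Step:
    # Slash commands store only the class name; look it up and instantiate it
    # with any parameters supplied by the user's config or input.
    return step_name_to_step_class[step_name](**params)


step = get_step_from_name("HelpStep", {"user_input": "what is given to the llm as context?"})
```
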
diff --git a/continuedev/src/continuedev/steps/help.py b/continuedev/src/continuedev/steps/help.py
new file mode 100644
index 00000000..fdfb986f
--- /dev/null
+++ b/continuedev/src/continuedev/steps/help.py
@@ -0,0 +1,57 @@
+from textwrap import dedent
+from ..core.main import ChatMessage, Step
+from ..core.sdk import ContinueSDK
+from ..libs.util.telemetry import capture_event
+
+help = dedent("""\
+ Continue is an open-source coding autopilot. It is a VS Code extension that brings the power of ChatGPT to your IDE.
+
+ It gathers context for you and stores your interactions automatically, so that you can avoid copy/paste now and benefit from a customized LLM later.
+
+ Continue can be used to...
+ 1. Edit chunks of code with specific instructions (e.g. "/edit migrate this digital ocean terraform file into one that works for GCP")
+    2. Get answers to questions without switching windows (e.g. "how do I find the process running on port 8000?")
+ 3. Generate files from scratch (e.g. "/edit Create a Python CLI tool that uses the posthog api to get events from DAUs")
+
+ You tell Continue to edit a specific section of code by highlighting it. If you highlight multiple code sections, then it will only edit the one with the purple glow around it. You can switch which one has the purple glow by clicking the paint brush.
+
+ If you don't highlight any code, then Continue will insert at the location of your cursor.
+
+    Continue passes all of the sections of code you highlight, the code above and below the to-be-edited highlighted section, and all previous steps above the input box as context to the LLM.
+
+ You can use cmd+k (Mac) / ctrl+k (Windows) to open Continue. You can use cmd+shift+e / ctrl+shift+e to open file Explorer. You can add your own OpenAI API key to VS Code Settings with `cmd+,`
+
+ If Continue is stuck loading, try using `cmd+shift+p` to open the command palette, search "Reload Window", and then select it. This will reload VS Code and Continue and often fixes issues.
+
+ If you have feedback, please use /feedback to let us know how you would like to use Continue. We are excited to hear from you!""")
+
+class HelpStep(Step):
+
+ name: str = "Help"
+ user_input: str
+ manage_own_chat_context: bool = True
+ description: str = ""
+
+ async def run(self, sdk: ContinueSDK):
+
+ question = self.user_input
+
+        prompt = dedent(f"""Please use the information below to provide a succinct answer to the following question: {question}
+
+ Information:
+
+ {help}""")
+
+ self.chat_context.append(ChatMessage(
+ role="user",
+ content=prompt,
+ summary="Help"
+ ))
+ messages = await sdk.get_chat_context()
+ generator = sdk.models.gpt4.stream_chat(messages)
+ async for chunk in generator:
+ if "content" in chunk:
+ self.description += chunk["content"]
+ await sdk.update_ui()
+
+ capture_event(sdk.ide.unique_id, "help", {"question": question, "answer": self.description})
\ No newline at end of file
--
cgit v1.2.3-70-g09d2
From 363ae8732aa74300cca5870944253a41181c3f83 Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Thu, 13 Jul 2023 21:44:52 -0700
Subject: highlight.gif
---
media/highlight.gif | Bin 0 -> 4103271 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
create mode 100644 media/highlight.gif
diff --git a/media/highlight.gif b/media/highlight.gif
new file mode 100644
index 00000000..2b77383f
Binary files /dev/null and b/media/highlight.gif differ
--
cgit v1.2.3-70-g09d2
From 64bd2dd033618f00a57bd17c10f139a3ce85311c Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Thu, 13 Jul 2023 21:58:46 -0700
Subject: question.gif
---
media/question.gif | Bin 0 -> 8673396 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
create mode 100644 media/question.gif
diff --git a/media/question.gif b/media/question.gif
new file mode 100644
index 00000000..3b062dd0
Binary files /dev/null and b/media/question.gif differ
--
cgit v1.2.3-70-g09d2
From a612f229d152133d2ec6c5d3a167d3b04f5955ba Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Thu, 13 Jul 2023 22:03:02 -0700
Subject: new question.gif
---
media/question.gif | Bin 8673396 -> 2293656 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
diff --git a/media/question.gif b/media/question.gif
index 3b062dd0..be3fac3c 100644
Binary files a/media/question.gif and b/media/question.gif differ
--
cgit v1.2.3-70-g09d2
From 01d73a334714ae341f30c6ad73d9c3ddf4eec3b2 Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Thu, 13 Jul 2023 22:04:16 -0700
Subject: delete question.gif
---
media/question.gif | Bin 2293656 -> 0 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
delete mode 100644 media/question.gif
diff --git a/media/question.gif b/media/question.gif
deleted file mode 100644
index be3fac3c..00000000
Binary files a/media/question.gif and /dev/null differ
--
cgit v1.2.3-70-g09d2
From 499a79d7d0fcadb8de55e162c51eee5638576b5a Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Thu, 13 Jul 2023 22:04:41 -0700
Subject: updated question.gif
---
media/question.gif | Bin 0 -> 2293656 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
create mode 100644 media/question.gif
diff --git a/media/question.gif b/media/question.gif
new file mode 100644
index 00000000..be3fac3c
Binary files /dev/null and b/media/question.gif differ
--
cgit v1.2.3-70-g09d2
From 5553b00b916e9e309175a71bd3f5455757f06e80 Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Thu, 13 Jul 2023 22:15:12 -0700
Subject: Add files via upload
---
media/help.gif | Bin 0 -> 1046474 bytes
1 file changed, 0 insertions(+), 0 deletions(-)
create mode 100644 media/help.gif
diff --git a/media/help.gif b/media/help.gif
new file mode 100644
index 00000000..96c24b47
Binary files /dev/null and b/media/help.gif differ
--
cgit v1.2.3-70-g09d2
From 7ba66d847d2c5b2d813ce93d343ab10bca4e83d5 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Thu, 13 Jul 2023 22:15:27 -0700
Subject: patch
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index d84be971..169d1eea 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.159",
+ "version": "0.0.160",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.159",
+ "version": "0.0.160",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index bdc67cc9..f985607d 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.159",
+ "version": "0.0.160",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
--
cgit v1.2.3-70-g09d2
From 5ad4b19e2b442d10700297dd9b8328f5c6288c48 Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Thu, 13 Jul 2023 22:20:40 -0700
Subject: new onboarding
---
extension/react-app/src/components/Onboarding.tsx | 17 +++++------------
1 file changed, 5 insertions(+), 12 deletions(-)
diff --git a/extension/react-app/src/components/Onboarding.tsx b/extension/react-app/src/components/Onboarding.tsx
index 7772a25e..e2dd6f57 100644
--- a/extension/react-app/src/components/Onboarding.tsx
+++ b/extension/react-app/src/components/Onboarding.tsx
@@ -26,18 +26,12 @@ const StyledSpan = styled.span`
const Onboarding = () => {
const [counter, setCounter] = useState(4);
- const gifs = ["intro", "explain", "edit", "generate", "intro"];
+ const gifs = ["intro", "highlight", "question", "help"];
const topMessages = [
- "Welcome to Continue!",
- "Answer coding questions",
- "Edit in natural language",
- "Generate files from scratch",
- ];
- const bottomMessages = [
- "",
- "Ask Continue about a part of your code to get another perspective",
- "Highlight a section of code and instruct Continue to refactor it",
- "Let Continue build the scaffolding of Python scripts, React components, and more",
+ "Welcome!",
+ "Highlight code",
+ "Ask a question",
+ "Use /help to learn more",
];
useEffect(() => {
@@ -107,7 +101,6 @@ const Onboarding = () => {
/>
)}
-          {bottomMessages[counter]}
--
cgit v1.2.3-70-g09d2
Date: Thu, 13 Jul 2023 22:37:08 -0700
Subject: patch
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 169d1eea..98e1d560 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.160",
+ "version": "0.0.161",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.160",
+ "version": "0.0.161",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index f985607d..3636686b 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.160",
+ "version": "0.0.161",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
--
cgit v1.2.3-70-g09d2
From b0a9c6bb6d22a19ac4b39fbe20a9481a4839ac46 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Fri, 14 Jul 2023 01:45:24 -0700
Subject: copy while streaming!
---
extension/react-app/package-lock.json | 1509 +++++++++++++++++++-
extension/react-app/package.json | 2 +-
.../react-app/src/components/StepContainer.tsx | 71 +-
3 files changed, 1523 insertions(+), 59 deletions(-)
diff --git a/extension/react-app/package-lock.json b/extension/react-app/package-lock.json
index 7316581d..13e02e86 100644
--- a/extension/react-app/package-lock.json
+++ b/extension/react-app/package-lock.json
@@ -11,12 +11,12 @@
"@styled-icons/heroicons-outline": "^10.47.0",
"@styled-icons/heroicons-solid": "^10.47.0",
"@types/vscode-webview": "^1.57.1",
+ "@uiw/react-markdown-preview": "^4.1.13",
"downshift": "^7.6.0",
"posthog-js": "^1.58.0",
"prismjs": "^1.29.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
- "react-markdown": "^8.0.5",
"react-redux": "^8.0.5",
"react-switch": "^7.0.0",
"react-syntax-highlighter": "^15.5.0",
@@ -963,6 +963,16 @@
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz",
"integrity": "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA=="
},
+ "node_modules/@types/parse5": {
+ "version": "6.0.3",
+ "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-6.0.3.tgz",
+ "integrity": "sha512-SuT16Q1K51EAVPz1K29DJ/sXjhSQ0zjvsypYJ6tlwVsRV9jwW5Adq2ch8Dq8kDBCkYnELS7N7VNCSB5nC56t/g=="
+ },
+ "node_modules/@types/prismjs": {
+ "version": "1.26.0",
+ "resolved": "https://registry.npmjs.org/@types/prismjs/-/prismjs-1.26.0.tgz",
+ "integrity": "sha512-ZTaqn/qSqUuAq1YwvOFQfVW1AR/oQJlLSZVustdjwI+GZ8kr0MSHBj0tsXPW1EqHubx50gtBEjbPGsdZwQwCjQ=="
+ },
"node_modules/@types/prop-types": {
"version": "15.7.5",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz",
@@ -1027,6 +1037,34 @@
"resolved": "https://registry.npmjs.org/@types/vscode-webview/-/vscode-webview-1.57.1.tgz",
"integrity": "sha512-ghW5SfuDmsGDS2A4xkvGsLwDRNc3Vj5rS6rPOyPm/IryZuf3wceZKxgYaUoW+k9f0f/CB7y2c1rRsdOWZWn0PQ=="
},
+ "node_modules/@uiw/copy-to-clipboard": {
+ "version": "1.0.15",
+ "resolved": "https://registry.npmjs.org/@uiw/copy-to-clipboard/-/copy-to-clipboard-1.0.15.tgz",
+ "integrity": "sha512-1bbGZ3T+SGmA07BoVPK4UCUDcowDN/moctviJGQexfOc9qL8TMLDQPr7mTPvDKhgJkgnlKkAQNFU8PiarIi9sQ=="
+ },
+ "node_modules/@uiw/react-markdown-preview": {
+ "version": "4.1.13",
+ "resolved": "https://registry.npmjs.org/@uiw/react-markdown-preview/-/react-markdown-preview-4.1.13.tgz",
+ "integrity": "sha512-fmIGvBpK6HJyDFf7EokjZSIS0713Bq5KwhOsZ8IkbCMYDcDThFlmMkTTqyzGjL3phrkP9ED5O63WSILzefqe6A==",
+ "dependencies": {
+ "@babel/runtime": "^7.17.2",
+ "@uiw/copy-to-clipboard": "~1.0.12",
+ "react-markdown": "~8.0.0",
+ "rehype-attr": "~2.1.0",
+ "rehype-autolink-headings": "~6.1.1",
+ "rehype-ignore": "^1.0.1",
+ "rehype-prism-plus": "~1.5.0",
+ "rehype-raw": "^6.1.1",
+ "rehype-rewrite": "~3.0.6",
+ "rehype-slug": "~5.1.0",
+ "remark-gfm": "~3.0.1",
+ "unist-util-visit": "^4.1.0"
+ },
+ "peerDependencies": {
+ "react": ">=16.8.0",
+ "react-dom": ">=16.8.0"
+ }
+ },
"node_modules/@vitejs/plugin-react-swc": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.2.0.tgz",
@@ -1163,6 +1201,15 @@
"url": "https://github.com/sponsors/wooorm"
}
},
+ "node_modules/bcp-47-match": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/bcp-47-match/-/bcp-47-match-2.0.3.tgz",
+ "integrity": "sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/binary-extensions": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
@@ -1172,6 +1219,11 @@
"node": ">=8"
}
},
+ "node_modules/boolbase": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
+ "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww=="
+ },
"node_modules/braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
@@ -1245,6 +1297,15 @@
}
]
},
+ "node_modules/ccount": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz",
+ "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
@@ -1370,6 +1431,11 @@
"node": ">=4"
}
},
+ "node_modules/css-selector-parser": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/css-selector-parser/-/css-selector-parser-1.4.1.tgz",
+ "integrity": "sha512-HYPSb7y/Z7BNDCOrakL4raGO2zltZkbeXyAd6Tg9obzix6QhzxCotdBl6VT0Dv4vZfJGVz3WL/xaEI9Ly3ul0g=="
+ },
"node_modules/css-to-react-native": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/css-to-react-native/-/css-to-react-native-3.2.0.tgz",
@@ -1473,6 +1539,18 @@
"node": ">=0.3.1"
}
},
+ "node_modules/direction": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/direction/-/direction-2.0.1.tgz",
+ "integrity": "sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA==",
+ "bin": {
+ "direction": "cli.js"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/dlv": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz",
@@ -1684,6 +1762,11 @@
"integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
"dev": true
},
+ "node_modules/github-slugger": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-2.0.0.tgz",
+ "integrity": "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="
+ },
"node_modules/glob-parent": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
@@ -1729,6 +1812,86 @@
"node": ">=4"
}
},
+ "node_modules/hast-util-from-parse5": {
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-7.1.2.tgz",
+ "integrity": "sha512-Nz7FfPBuljzsN3tCQ4kCBKqdNhQE2l0Tn+X1ubgKBPRoiDIu1mL08Cfw4k7q71+Duyaw7DXDN+VTAp4Vh3oCOw==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "@types/unist": "^2.0.0",
+ "hastscript": "^7.0.0",
+ "property-information": "^6.0.0",
+ "vfile": "^5.0.0",
+ "vfile-location": "^4.0.0",
+ "web-namespaces": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-from-parse5/node_modules/hast-util-parse-selector": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.1.tgz",
+ "integrity": "sha512-jdlwBjEexy1oGz0aJ2f4GKMaVKkA9jwjr4MjAAI22E5fM/TXVZHuS5OpONtdeIkRKqAaryQ2E9xNQxijoThSZA==",
+ "dependencies": {
+ "@types/hast": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-from-parse5/node_modules/hastscript": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.2.0.tgz",
+ "integrity": "sha512-TtYPq24IldU8iKoJQqvZOuhi5CyCQRAbvDOX0x1eW6rsHSxa/1i2CCiptNTotGHJ3VoHRGmqiv6/D3q113ikkw==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "comma-separated-tokens": "^2.0.0",
+ "hast-util-parse-selector": "^3.0.0",
+ "property-information": "^6.0.0",
+ "space-separated-tokens": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-has-property": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/hast-util-has-property/-/hast-util-has-property-2.0.1.tgz",
+ "integrity": "sha512-X2+RwZIMTMKpXUzlotatPzWj8bspCymtXH3cfG3iQKV+wPF53Vgaqxi/eLqGck0wKq1kS9nvoB1wchbCPEL8sg==",
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-heading-rank": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/hast-util-heading-rank/-/hast-util-heading-rank-2.1.1.tgz",
+ "integrity": "sha512-iAuRp+ESgJoRFJbSyaqsfvJDY6zzmFoEnL1gtz1+U8gKtGGj1p0CVlysuUAUjq95qlZESHINLThwJzNGmgGZxA==",
+ "dependencies": {
+ "@types/hast": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-is-element": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-2.1.3.tgz",
+ "integrity": "sha512-O1bKah6mhgEq2WtVMk+Ta5K7pPMqsBBlmzysLdcwKVrqzZQ0CHqUPiIVspNhAG1rvxpvJjtGee17XfauZYKqVA==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "@types/unist": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/hast-util-parse-selector": {
"version": "2.2.5",
"resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz",
@@ -1738,6 +1901,83 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/hast-util-raw": {
+ "version": "7.2.3",
+ "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-7.2.3.tgz",
+ "integrity": "sha512-RujVQfVsOrxzPOPSzZFiwofMArbQke6DJjnFfceiEbFh7S05CbPt0cYN+A5YeD3pso0JQk6O1aHBnx9+Pm2uqg==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "@types/parse5": "^6.0.0",
+ "hast-util-from-parse5": "^7.0.0",
+ "hast-util-to-parse5": "^7.0.0",
+ "html-void-elements": "^2.0.0",
+ "parse5": "^6.0.0",
+ "unist-util-position": "^4.0.0",
+ "unist-util-visit": "^4.0.0",
+ "vfile": "^5.0.0",
+ "web-namespaces": "^2.0.0",
+ "zwitch": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-select": {
+ "version": "5.0.5",
+ "resolved": "https://registry.npmjs.org/hast-util-select/-/hast-util-select-5.0.5.tgz",
+ "integrity": "sha512-QQhWMhgTFRhCaQdgTKzZ5g31GLQ9qRb1hZtDPMqQaOhpLBziWcshUS0uCR5IJ0U1jrK/mxg35fmcq+Dp/Cy2Aw==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "@types/unist": "^2.0.0",
+ "bcp-47-match": "^2.0.0",
+ "comma-separated-tokens": "^2.0.0",
+ "css-selector-parser": "^1.0.0",
+ "direction": "^2.0.0",
+ "hast-util-has-property": "^2.0.0",
+ "hast-util-to-string": "^2.0.0",
+ "hast-util-whitespace": "^2.0.0",
+ "not": "^0.1.0",
+ "nth-check": "^2.0.0",
+ "property-information": "^6.0.0",
+ "space-separated-tokens": "^2.0.0",
+ "unist-util-visit": "^4.0.0",
+ "zwitch": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-to-parse5": {
+ "version": "7.1.0",
+ "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-7.1.0.tgz",
+ "integrity": "sha512-YNRgAJkH2Jky5ySkIqFXTQiaqcAtJyVE+D5lkN6CdtOqrnkLfGYYrEcKuHOJZlp+MwjSwuD3fZuawI+sic/RBw==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "comma-separated-tokens": "^2.0.0",
+ "property-information": "^6.0.0",
+ "space-separated-tokens": "^2.0.0",
+ "web-namespaces": "^2.0.0",
+ "zwitch": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/hast-util-to-string": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/hast-util-to-string/-/hast-util-to-string-2.0.0.tgz",
+ "integrity": "sha512-02AQ3vLhuH3FisaMM+i/9sm4OXGSq1UhOOCpTLLQtHdL3tZt7qil69r8M8iDkZYyC0HCFylcYoP+8IO7ddta1A==",
+ "dependencies": {
+ "@types/hast": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/hast-util-whitespace": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.1.tgz",
@@ -1814,6 +2054,15 @@
"resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",
"integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="
},
+ "node_modules/html-void-elements": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-2.0.1.tgz",
+ "integrity": "sha512-0quDb7s97CfemeJAnW9wC0hw78MtW7NU3hqtCD75g2vFlDLt36llsYD7uB7SUzojLMP24N5IatXf7ylGXiGG9A==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/inline-style-parser": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz",
@@ -1995,6 +2244,15 @@
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
+ "node_modules/longest-streak": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz",
+ "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
@@ -2019,6 +2277,15 @@
"url": "https://github.com/sponsors/wooorm"
}
},
+ "node_modules/markdown-table": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.3.tgz",
+ "integrity": "sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/mdast-util-definitions": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.2.tgz",
@@ -2033,6 +2300,32 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/mdast-util-find-and-replace": {
+ "version": "2.2.2",
+ "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-2.2.2.tgz",
+ "integrity": "sha512-MTtdFRz/eMDHXzeK6W3dO7mXUlF82Gom4y0oOgvHhh/HXZAGvIQDUvQ0SuUx+j2tv44b8xTHOm8K/9OoRFnXKw==",
+ "dependencies": {
+ "@types/mdast": "^3.0.0",
+ "escape-string-regexp": "^5.0.0",
+ "unist-util-is": "^5.0.0",
+ "unist-util-visit-parents": "^5.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
+ "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
"node_modules/mdast-util-from-markdown": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.0.tgz",
@@ -2056,6 +2349,107 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/mdast-util-gfm": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-2.0.2.tgz",
+ "integrity": "sha512-qvZ608nBppZ4icQlhQQIAdc6S3Ffj9RGmzwUKUWuEICFnd1LVkN3EktF7ZHAgfcEdvZB5owU9tQgt99e2TlLjg==",
+ "dependencies": {
+ "mdast-util-from-markdown": "^1.0.0",
+ "mdast-util-gfm-autolink-literal": "^1.0.0",
+ "mdast-util-gfm-footnote": "^1.0.0",
+ "mdast-util-gfm-strikethrough": "^1.0.0",
+ "mdast-util-gfm-table": "^1.0.0",
+ "mdast-util-gfm-task-list-item": "^1.0.0",
+ "mdast-util-to-markdown": "^1.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-autolink-literal": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-1.0.3.tgz",
+ "integrity": "sha512-My8KJ57FYEy2W2LyNom4n3E7hKTuQk/0SES0u16tjA9Z3oFkF4RrC/hPAPgjlSpezsOvI8ObcXcElo92wn5IGA==",
+ "dependencies": {
+ "@types/mdast": "^3.0.0",
+ "ccount": "^2.0.0",
+ "mdast-util-find-and-replace": "^2.0.0",
+ "micromark-util-character": "^1.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-footnote": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-1.0.2.tgz",
+ "integrity": "sha512-56D19KOGbE00uKVj3sgIykpwKL179QsVFwx/DCW0u/0+URsryacI4MAdNJl0dh+u2PSsD9FtxPFbHCzJ78qJFQ==",
+ "dependencies": {
+ "@types/mdast": "^3.0.0",
+ "mdast-util-to-markdown": "^1.3.0",
+ "micromark-util-normalize-identifier": "^1.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-strikethrough": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-1.0.3.tgz",
+ "integrity": "sha512-DAPhYzTYrRcXdMjUtUjKvW9z/FNAMTdU0ORyMcbmkwYNbKocDpdk+PX1L1dQgOID/+vVs1uBQ7ElrBQfZ0cuiQ==",
+ "dependencies": {
+ "@types/mdast": "^3.0.0",
+ "mdast-util-to-markdown": "^1.3.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-table": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.7.tgz",
+ "integrity": "sha512-jjcpmNnQvrmN5Vx7y7lEc2iIOEytYv7rTvu+MeyAsSHTASGCCRA79Igg2uKssgOs1i1po8s3plW0sTu1wkkLGg==",
+ "dependencies": {
+ "@types/mdast": "^3.0.0",
+ "markdown-table": "^3.0.0",
+ "mdast-util-from-markdown": "^1.0.0",
+ "mdast-util-to-markdown": "^1.3.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-gfm-task-list-item": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-1.0.2.tgz",
+ "integrity": "sha512-PFTA1gzfp1B1UaiJVyhJZA1rm0+Tzn690frc/L8vNX1Jop4STZgOE6bxUhnzdVSB+vm2GU1tIsuQcA9bxTQpMQ==",
+ "dependencies": {
+ "@types/mdast": "^3.0.0",
+ "mdast-util-to-markdown": "^1.3.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/mdast-util-phrasing": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz",
+ "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==",
+ "dependencies": {
+ "@types/mdast": "^3.0.0",
+ "unist-util-is": "^5.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/mdast-util-to-hast": {
"version": "12.3.0",
"resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-12.3.0.tgz",
@@ -2075,6 +2469,25 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/mdast-util-to-markdown": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz",
+ "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==",
+ "dependencies": {
+ "@types/mdast": "^3.0.0",
+ "@types/unist": "^2.0.0",
+ "longest-streak": "^3.0.0",
+ "mdast-util-phrasing": "^3.0.0",
+ "mdast-util-to-string": "^3.0.0",
+ "micromark-util-decode-string": "^1.0.0",
+ "unist-util-visit": "^4.0.0",
+ "zwitch": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/mdast-util-to-string": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.1.1.tgz",
@@ -2163,23 +2576,137 @@
"uvu": "^0.5.0"
}
},
- "node_modules/micromark-factory-destination": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.0.0.tgz",
- "integrity": "sha512-eUBA7Rs1/xtTVun9TmV3gjfPz2wEwgK5R5xcbIM5ZYAtvGF6JkyaDsj0agx8urXnO31tEO6Ug83iVH3tdedLnw==",
- "funding": [
- {
- "type": "GitHub Sponsors",
- "url": "https://github.com/sponsors/unifiedjs"
- },
- {
- "type": "OpenCollective",
- "url": "https://opencollective.com/unified"
- }
- ],
- "dependencies": {
- "micromark-util-character": "^1.0.0",
- "micromark-util-symbol": "^1.0.0",
+ "node_modules/micromark-extension-gfm": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-2.0.3.tgz",
+ "integrity": "sha512-vb9OoHqrhCmbRidQv/2+Bc6pkP0FrtlhurxZofvOEy5o8RtuuvTq+RQ1Vw5ZDNrVraQZu3HixESqbG+0iKk/MQ==",
+ "dependencies": {
+ "micromark-extension-gfm-autolink-literal": "^1.0.0",
+ "micromark-extension-gfm-footnote": "^1.0.0",
+ "micromark-extension-gfm-strikethrough": "^1.0.0",
+ "micromark-extension-gfm-table": "^1.0.0",
+ "micromark-extension-gfm-tagfilter": "^1.0.0",
+ "micromark-extension-gfm-task-list-item": "^1.0.0",
+ "micromark-util-combine-extensions": "^1.0.0",
+ "micromark-util-types": "^1.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-autolink-literal": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-1.0.5.tgz",
+ "integrity": "sha512-z3wJSLrDf8kRDOh2qBtoTRD53vJ+CWIyo7uyZuxf/JAbNJjiHsOpG1y5wxk8drtv3ETAHutCu6N3thkOOgueWg==",
+ "dependencies": {
+ "micromark-util-character": "^1.0.0",
+ "micromark-util-sanitize-uri": "^1.0.0",
+ "micromark-util-symbol": "^1.0.0",
+ "micromark-util-types": "^1.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-footnote": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-1.1.2.tgz",
+ "integrity": "sha512-Yxn7z7SxgyGWRNa4wzf8AhYYWNrwl5q1Z8ii+CSTTIqVkmGZF1CElX2JI8g5yGoM3GAman9/PVCUFUSJ0kB/8Q==",
+ "dependencies": {
+ "micromark-core-commonmark": "^1.0.0",
+ "micromark-factory-space": "^1.0.0",
+ "micromark-util-character": "^1.0.0",
+ "micromark-util-normalize-identifier": "^1.0.0",
+ "micromark-util-sanitize-uri": "^1.0.0",
+ "micromark-util-symbol": "^1.0.0",
+ "micromark-util-types": "^1.0.0",
+ "uvu": "^0.5.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-strikethrough": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-1.0.7.tgz",
+ "integrity": "sha512-sX0FawVE1o3abGk3vRjOH50L5TTLr3b5XMqnP9YDRb34M0v5OoZhG+OHFz1OffZ9dlwgpTBKaT4XW/AsUVnSDw==",
+ "dependencies": {
+ "micromark-util-chunked": "^1.0.0",
+ "micromark-util-classify-character": "^1.0.0",
+ "micromark-util-resolve-all": "^1.0.0",
+ "micromark-util-symbol": "^1.0.0",
+ "micromark-util-types": "^1.0.0",
+ "uvu": "^0.5.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-table": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-1.0.7.tgz",
+ "integrity": "sha512-3ZORTHtcSnMQEKtAOsBQ9/oHp9096pI/UvdPtN7ehKvrmZZ2+bbWhi0ln+I9drmwXMt5boocn6OlwQzNXeVeqw==",
+ "dependencies": {
+ "micromark-factory-space": "^1.0.0",
+ "micromark-util-character": "^1.0.0",
+ "micromark-util-symbol": "^1.0.0",
+ "micromark-util-types": "^1.0.0",
+ "uvu": "^0.5.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-tagfilter": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-1.0.2.tgz",
+ "integrity": "sha512-5XWB9GbAUSHTn8VPU8/1DBXMuKYT5uOgEjJb8gN3mW0PNW5OPHpSdojoqf+iq1xo7vWzw/P8bAHY0n6ijpXF7g==",
+ "dependencies": {
+ "micromark-util-types": "^1.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-extension-gfm-task-list-item": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-1.0.5.tgz",
+ "integrity": "sha512-RMFXl2uQ0pNQy6Lun2YBYT9g9INXtWJULgbt01D/x8/6yJ2qpKyzdZD3pi6UIkzF++Da49xAelVKUeUMqd5eIQ==",
+ "dependencies": {
+ "micromark-factory-space": "^1.0.0",
+ "micromark-util-character": "^1.0.0",
+ "micromark-util-symbol": "^1.0.0",
+ "micromark-util-types": "^1.0.0",
+ "uvu": "^0.5.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/micromark-factory-destination": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.0.0.tgz",
+ "integrity": "sha512-eUBA7Rs1/xtTVun9TmV3gjfPz2wEwgK5R5xcbIM5ZYAtvGF6JkyaDsj0agx8urXnO31tEO6Ug83iVH3tdedLnw==",
+ "funding": [
+ {
+ "type": "GitHub Sponsors",
+ "url": "https://github.com/sponsors/unifiedjs"
+ },
+ {
+ "type": "OpenCollective",
+ "url": "https://opencollective.com/unified"
+ }
+ ],
+ "dependencies": {
+ "micromark-util-character": "^1.0.0",
+ "micromark-util-symbol": "^1.0.0",
"micromark-util-types": "^1.0.0"
}
},
@@ -2589,6 +3116,22 @@
"node": ">=0.10.0"
}
},
+ "node_modules/not": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/not/-/not-0.1.0.tgz",
+ "integrity": "sha512-5PDmaAsVfnWUgTUbJ3ERwn7u79Z0dYxN9ErxCpVJJqe2RK0PJ3z+iFUxuqjwtlDDegXvtWoxD/3Fzxox7tFGWA=="
+ },
+ "node_modules/nth-check": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
+ "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
+ "dependencies": {
+ "boolbase": "^1.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/fb55/nth-check?sponsor=1"
+ }
+ },
"node_modules/object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
@@ -2632,6 +3175,16 @@
"url": "https://github.com/sponsors/wooorm"
}
},
+ "node_modules/parse-numeric-range": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz",
+ "integrity": "sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ=="
+ },
+ "node_modules/parse5": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz",
+ "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw=="
+ },
"node_modules/path-parse": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
@@ -3047,6 +3600,257 @@
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz",
"integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg=="
},
+ "node_modules/rehype-attr": {
+ "version": "2.1.4",
+ "resolved": "https://registry.npmjs.org/rehype-attr/-/rehype-attr-2.1.4.tgz",
+ "integrity": "sha512-iAeaL5JyF4XxkcvWzpi/0SAF7iV7qOTaHS56tJuEsXziQc3+PEmMn65kV8OFgbO9mRVY7J1fRC/aLvot1PsNkg==",
+ "dependencies": {
+ "unified": "~10.1.1",
+ "unist-util-visit": "~4.1.0"
+ },
+ "engines": {
+ "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
+ }
+ },
+ "node_modules/rehype-autolink-headings": {
+ "version": "6.1.1",
+ "resolved": "https://registry.npmjs.org/rehype-autolink-headings/-/rehype-autolink-headings-6.1.1.tgz",
+ "integrity": "sha512-NMYzZIsHM3sA14nC5rAFuUPIOfg+DFmf9EY1YMhaNlB7+3kK/ZlE6kqPfuxr1tsJ1XWkTrMtMoyHosU70d35mA==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "extend": "^3.0.0",
+ "hast-util-has-property": "^2.0.0",
+ "hast-util-heading-rank": "^2.0.0",
+ "hast-util-is-element": "^2.0.0",
+ "unified": "^10.0.0",
+ "unist-util-visit": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/rehype-ignore": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/rehype-ignore/-/rehype-ignore-1.0.5.tgz",
+ "integrity": "sha512-JQXS5eDwXaYKwB8JEYFJJA/YvGi0sSNUOYuiURMtuPTg8tuWHFB91JMYLbImH1FyvyGQM4fIBqNMAPB50WR2Bw==",
+ "dependencies": {
+ "hast-util-select": "^5.0.5",
+ "unified": "^10.1.2",
+ "unist-util-visit": "^4.1.2"
+ },
+ "engines": {
+ "node": "^14.13.1 || >=16.0.0"
+ }
+ },
+ "node_modules/rehype-parse": {
+ "version": "8.0.4",
+ "resolved": "https://registry.npmjs.org/rehype-parse/-/rehype-parse-8.0.4.tgz",
+ "integrity": "sha512-MJJKONunHjoTh4kc3dsM1v3C9kGrrxvA3U8PxZlP2SjH8RNUSrb+lF7Y0KVaUDnGH2QZ5vAn7ulkiajM9ifuqg==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "hast-util-from-parse5": "^7.0.0",
+ "parse5": "^6.0.0",
+ "unified": "^10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/rehype-prism-plus": {
+ "version": "1.5.1",
+ "resolved": "https://registry.npmjs.org/rehype-prism-plus/-/rehype-prism-plus-1.5.1.tgz",
+ "integrity": "sha512-mowYefSfrIkMMxkb0fwuEXlvc5nA9b1vQ6mzujM81Qx28RI0mo7jCHsBZ2tJ4eIJKXdFn+EdPkZZBGB10K02vg==",
+ "dependencies": {
+ "hast-util-to-string": "^2.0.0",
+ "parse-numeric-range": "^1.3.0",
+ "refractor": "^4.7.0",
+ "rehype-parse": "^8.0.2",
+ "unist-util-filter": "^4.0.0",
+ "unist-util-visit": "^4.0.0"
+ }
+ },
+ "node_modules/rehype-prism-plus/node_modules/character-entities-legacy": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz",
+ "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/rehype-prism-plus/node_modules/character-reference-invalid": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz",
+ "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/rehype-prism-plus/node_modules/hast-util-parse-selector": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.1.tgz",
+ "integrity": "sha512-jdlwBjEexy1oGz0aJ2f4GKMaVKkA9jwjr4MjAAI22E5fM/TXVZHuS5OpONtdeIkRKqAaryQ2E9xNQxijoThSZA==",
+ "dependencies": {
+ "@types/hast": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/rehype-prism-plus/node_modules/hastscript": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.2.0.tgz",
+ "integrity": "sha512-TtYPq24IldU8iKoJQqvZOuhi5CyCQRAbvDOX0x1eW6rsHSxa/1i2CCiptNTotGHJ3VoHRGmqiv6/D3q113ikkw==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "comma-separated-tokens": "^2.0.0",
+ "hast-util-parse-selector": "^3.0.0",
+ "property-information": "^6.0.0",
+ "space-separated-tokens": "^2.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/rehype-prism-plus/node_modules/is-alphabetical": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz",
+ "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/rehype-prism-plus/node_modules/is-alphanumerical": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz",
+ "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==",
+ "dependencies": {
+ "is-alphabetical": "^2.0.0",
+ "is-decimal": "^2.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/rehype-prism-plus/node_modules/is-decimal": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz",
+ "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/rehype-prism-plus/node_modules/is-hexadecimal": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz",
+ "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/rehype-prism-plus/node_modules/parse-entities": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.1.tgz",
+ "integrity": "sha512-SWzvYcSJh4d/SGLIOQfZ/CoNv6BTlI6YEQ7Nj82oDVnRpwe/Z/F1EMx42x3JAOwGBlCjeCH0BRJQbQ/opHL17w==",
+ "dependencies": {
+ "@types/unist": "^2.0.0",
+ "character-entities": "^2.0.0",
+ "character-entities-legacy": "^3.0.0",
+ "character-reference-invalid": "^2.0.0",
+ "decode-named-character-reference": "^1.0.0",
+ "is-alphanumerical": "^2.0.0",
+ "is-decimal": "^2.0.0",
+ "is-hexadecimal": "^2.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/rehype-prism-plus/node_modules/refractor": {
+ "version": "4.8.1",
+ "resolved": "https://registry.npmjs.org/refractor/-/refractor-4.8.1.tgz",
+ "integrity": "sha512-/fk5sI0iTgFYlmVGYVew90AoYnNMP6pooClx/XKqyeeCQXrL0Kvgn8V0VEht5ccdljbzzF1i3Q213gcntkRExg==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "@types/prismjs": "^1.0.0",
+ "hastscript": "^7.0.0",
+ "parse-entities": "^4.0.0"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
+ "node_modules/rehype-raw": {
+ "version": "6.1.1",
+ "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-6.1.1.tgz",
+ "integrity": "sha512-d6AKtisSRtDRX4aSPsJGTfnzrX2ZkHQLE5kiUuGOeEoLpbEulFF4hj0mLPbsa+7vmguDKOVVEQdHKDSwoaIDsQ==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "hast-util-raw": "^7.2.0",
+ "unified": "^10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/rehype-rewrite": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/rehype-rewrite/-/rehype-rewrite-3.0.6.tgz",
+ "integrity": "sha512-REDTNCvsKcAazy8IQWzKp66AhSUDSOIKssSCqNqCcT9sN7JCwAAm3mWGTUdUzq80ABuy8d0D6RBwbnewu1aY1g==",
+ "dependencies": {
+ "hast-util-select": "~5.0.1",
+ "unified": "~10.1.1",
+ "unist-util-visit": "~4.1.0"
+ },
+ "engines": {
+ "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
+ }
+ },
+ "node_modules/rehype-slug": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/rehype-slug/-/rehype-slug-5.1.0.tgz",
+ "integrity": "sha512-Gf91dJoXneiorNEnn+Phx97CO7oRMrpi+6r155tTxzGuLtm+QrI4cTwCa9e1rtePdL4i9tSO58PeSS6HWfgsiw==",
+ "dependencies": {
+ "@types/hast": "^2.0.0",
+ "github-slugger": "^2.0.0",
+ "hast-util-has-property": "^2.0.0",
+ "hast-util-heading-rank": "^2.0.0",
+ "hast-util-to-string": "^2.0.0",
+ "unified": "^10.0.0",
+ "unist-util-visit": "^4.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
+ "node_modules/remark-gfm": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-3.0.1.tgz",
+ "integrity": "sha512-lEFDoi2PICJyNrACFOfDD3JlLkuSbOa5Wd8EPt06HUdptv8Gn0bxYTdbU/XXQ3swAPkEaGxxPN9cbnMHvVu1Ig==",
+ "dependencies": {
+ "@types/mdast": "^3.0.0",
+ "mdast-util-gfm": "^2.0.0",
+ "micromark-extension-gfm": "^2.0.0",
+ "unified": "^10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/remark-parse": {
"version": "10.0.1",
"resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-10.0.1.tgz",
@@ -3365,7 +4169,17 @@
"url": "https://opencollective.com/unified"
}
},
- "node_modules/unist-util-generated": {
+ "node_modules/unist-util-filter": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/unist-util-filter/-/unist-util-filter-4.0.1.tgz",
+ "integrity": "sha512-RynicUM/vbOSTSiUK+BnaK9XMfmQUh6gyi7L6taNgc7FIf84GukXVV3ucGzEN/PhUUkdP5hb1MmXc+3cvPUm5Q==",
+ "dependencies": {
+ "@types/unist": "^2.0.0",
+ "unist-util-is": "^5.0.0",
+ "unist-util-visit-parents": "^5.0.0"
+ }
+ },
+ "node_modules/unist-util-generated": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.1.tgz",
"integrity": "sha512-qF72kLmPxAw0oN2fwpWIqbXAVyEqUzDHMsbtPvOudIlUzXYFIeQIuxXQCRCFh22B7cixvU0MG7m3MW8FTq/S+A==",
@@ -3517,6 +4331,19 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/vfile-location": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-4.1.0.tgz",
+ "integrity": "sha512-YF23YMyASIIJXpktBa4vIGLJ5Gs88UB/XePgqPmTa7cDA+JeO3yclbpheQYCHjVHBn/yePzrXuygIL+xbvRYHw==",
+ "dependencies": {
+ "@types/unist": "^2.0.0",
+ "vfile": "^5.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/vfile-message": {
"version": "3.1.4",
"resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz",
@@ -3587,6 +4414,15 @@
"fs-extra": "^10.0.0"
}
},
+ "node_modules/web-namespaces": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz",
+ "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
+ },
"node_modules/xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
@@ -3603,6 +4439,15 @@
"engines": {
"node": ">= 6"
}
+ },
+ "node_modules/zwitch": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz",
+ "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==",
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/wooorm"
+ }
}
},
"dependencies": {
@@ -4143,6 +4988,16 @@
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz",
"integrity": "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA=="
},
+ "@types/parse5": {
+ "version": "6.0.3",
+ "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-6.0.3.tgz",
+ "integrity": "sha512-SuT16Q1K51EAVPz1K29DJ/sXjhSQ0zjvsypYJ6tlwVsRV9jwW5Adq2ch8Dq8kDBCkYnELS7N7VNCSB5nC56t/g=="
+ },
+ "@types/prismjs": {
+ "version": "1.26.0",
+ "resolved": "https://registry.npmjs.org/@types/prismjs/-/prismjs-1.26.0.tgz",
+ "integrity": "sha512-ZTaqn/qSqUuAq1YwvOFQfVW1AR/oQJlLSZVustdjwI+GZ8kr0MSHBj0tsXPW1EqHubx50gtBEjbPGsdZwQwCjQ=="
+ },
"@types/prop-types": {
"version": "15.7.5",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz",
@@ -4207,6 +5062,30 @@
"resolved": "https://registry.npmjs.org/@types/vscode-webview/-/vscode-webview-1.57.1.tgz",
"integrity": "sha512-ghW5SfuDmsGDS2A4xkvGsLwDRNc3Vj5rS6rPOyPm/IryZuf3wceZKxgYaUoW+k9f0f/CB7y2c1rRsdOWZWn0PQ=="
},
+ "@uiw/copy-to-clipboard": {
+ "version": "1.0.15",
+ "resolved": "https://registry.npmjs.org/@uiw/copy-to-clipboard/-/copy-to-clipboard-1.0.15.tgz",
+ "integrity": "sha512-1bbGZ3T+SGmA07BoVPK4UCUDcowDN/moctviJGQexfOc9qL8TMLDQPr7mTPvDKhgJkgnlKkAQNFU8PiarIi9sQ=="
+ },
+ "@uiw/react-markdown-preview": {
+ "version": "4.1.13",
+ "resolved": "https://registry.npmjs.org/@uiw/react-markdown-preview/-/react-markdown-preview-4.1.13.tgz",
+ "integrity": "sha512-fmIGvBpK6HJyDFf7EokjZSIS0713Bq5KwhOsZ8IkbCMYDcDThFlmMkTTqyzGjL3phrkP9ED5O63WSILzefqe6A==",
+ "requires": {
+ "@babel/runtime": "^7.17.2",
+ "@uiw/copy-to-clipboard": "~1.0.12",
+ "react-markdown": "~8.0.0",
+ "rehype-attr": "~2.1.0",
+ "rehype-autolink-headings": "~6.1.1",
+ "rehype-ignore": "^1.0.1",
+ "rehype-prism-plus": "~1.5.0",
+ "rehype-raw": "^6.1.1",
+ "rehype-rewrite": "~3.0.6",
+ "rehype-slug": "~5.1.0",
+ "remark-gfm": "~3.0.1",
+ "unist-util-visit": "^4.1.0"
+ }
+ },
"@vitejs/plugin-react-swc": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.2.0.tgz",
@@ -4299,12 +5178,22 @@
"resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz",
"integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="
},
+ "bcp-47-match": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/bcp-47-match/-/bcp-47-match-2.0.3.tgz",
+ "integrity": "sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ=="
+ },
"binary-extensions": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
"integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
"dev": true
},
+ "boolbase": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
+ "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww=="
+ },
"braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
@@ -4343,6 +5232,11 @@
"integrity": "sha512-SDIV6bgE1aVbK6XyxdURbUE89zY7+k1BBBaOwYwkNCglXlel/E7mELiHC64HQ+W0xSKlqWhV9Wh7iHxUjMs4fA==",
"dev": true
},
+ "ccount": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz",
+ "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg=="
+ },
"chalk": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
@@ -4436,6 +5330,11 @@
"resolved": "https://registry.npmjs.org/css-color-keywords/-/css-color-keywords-1.0.0.tgz",
"integrity": "sha512-FyyrDHZKEjXDpNJYvVsV960FiqQyXc/LlYmsxl2BcdMb2WPx0OGRVgTg55rPSyLSNMqP52R9r8geSp7apN3Ofg=="
},
+ "css-selector-parser": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/css-selector-parser/-/css-selector-parser-1.4.1.tgz",
+ "integrity": "sha512-HYPSb7y/Z7BNDCOrakL4raGO2zltZkbeXyAd6Tg9obzix6QhzxCotdBl6VT0Dv4vZfJGVz3WL/xaEI9Ly3ul0g=="
+ },
"css-to-react-native": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/css-to-react-native/-/css-to-react-native-3.2.0.tgz",
@@ -4506,6 +5405,11 @@
"resolved": "https://registry.npmjs.org/diff/-/diff-5.1.0.tgz",
"integrity": "sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw=="
},
+ "direction": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/direction/-/direction-2.0.1.tgz",
+ "integrity": "sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA=="
+ },
"dlv": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz",
@@ -4672,6 +5576,11 @@
"integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
"dev": true
},
+ "github-slugger": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-2.0.0.tgz",
+ "integrity": "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="
+ },
"glob-parent": {
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
@@ -4705,11 +5614,130 @@
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw=="
},
+ "hast-util-from-parse5": {
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-7.1.2.tgz",
+ "integrity": "sha512-Nz7FfPBuljzsN3tCQ4kCBKqdNhQE2l0Tn+X1ubgKBPRoiDIu1mL08Cfw4k7q71+Duyaw7DXDN+VTAp4Vh3oCOw==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "@types/unist": "^2.0.0",
+ "hastscript": "^7.0.0",
+ "property-information": "^6.0.0",
+ "vfile": "^5.0.0",
+ "vfile-location": "^4.0.0",
+ "web-namespaces": "^2.0.0"
+ },
+ "dependencies": {
+ "hast-util-parse-selector": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.1.tgz",
+ "integrity": "sha512-jdlwBjEexy1oGz0aJ2f4GKMaVKkA9jwjr4MjAAI22E5fM/TXVZHuS5OpONtdeIkRKqAaryQ2E9xNQxijoThSZA==",
+ "requires": {
+ "@types/hast": "^2.0.0"
+ }
+ },
+ "hastscript": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.2.0.tgz",
+ "integrity": "sha512-TtYPq24IldU8iKoJQqvZOuhi5CyCQRAbvDOX0x1eW6rsHSxa/1i2CCiptNTotGHJ3VoHRGmqiv6/D3q113ikkw==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "comma-separated-tokens": "^2.0.0",
+ "hast-util-parse-selector": "^3.0.0",
+ "property-information": "^6.0.0",
+ "space-separated-tokens": "^2.0.0"
+ }
+ }
+ }
+ },
+ "hast-util-has-property": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/hast-util-has-property/-/hast-util-has-property-2.0.1.tgz",
+ "integrity": "sha512-X2+RwZIMTMKpXUzlotatPzWj8bspCymtXH3cfG3iQKV+wPF53Vgaqxi/eLqGck0wKq1kS9nvoB1wchbCPEL8sg=="
+ },
+ "hast-util-heading-rank": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/hast-util-heading-rank/-/hast-util-heading-rank-2.1.1.tgz",
+ "integrity": "sha512-iAuRp+ESgJoRFJbSyaqsfvJDY6zzmFoEnL1gtz1+U8gKtGGj1p0CVlysuUAUjq95qlZESHINLThwJzNGmgGZxA==",
+ "requires": {
+ "@types/hast": "^2.0.0"
+ }
+ },
+ "hast-util-is-element": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-2.1.3.tgz",
+ "integrity": "sha512-O1bKah6mhgEq2WtVMk+Ta5K7pPMqsBBlmzysLdcwKVrqzZQ0CHqUPiIVspNhAG1rvxpvJjtGee17XfauZYKqVA==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "@types/unist": "^2.0.0"
+ }
+ },
"hast-util-parse-selector": {
"version": "2.2.5",
"resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz",
"integrity": "sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ=="
},
+ "hast-util-raw": {
+ "version": "7.2.3",
+ "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-7.2.3.tgz",
+ "integrity": "sha512-RujVQfVsOrxzPOPSzZFiwofMArbQke6DJjnFfceiEbFh7S05CbPt0cYN+A5YeD3pso0JQk6O1aHBnx9+Pm2uqg==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "@types/parse5": "^6.0.0",
+ "hast-util-from-parse5": "^7.0.0",
+ "hast-util-to-parse5": "^7.0.0",
+ "html-void-elements": "^2.0.0",
+ "parse5": "^6.0.0",
+ "unist-util-position": "^4.0.0",
+ "unist-util-visit": "^4.0.0",
+ "vfile": "^5.0.0",
+ "web-namespaces": "^2.0.0",
+ "zwitch": "^2.0.0"
+ }
+ },
+ "hast-util-select": {
+ "version": "5.0.5",
+ "resolved": "https://registry.npmjs.org/hast-util-select/-/hast-util-select-5.0.5.tgz",
+ "integrity": "sha512-QQhWMhgTFRhCaQdgTKzZ5g31GLQ9qRb1hZtDPMqQaOhpLBziWcshUS0uCR5IJ0U1jrK/mxg35fmcq+Dp/Cy2Aw==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "@types/unist": "^2.0.0",
+ "bcp-47-match": "^2.0.0",
+ "comma-separated-tokens": "^2.0.0",
+ "css-selector-parser": "^1.0.0",
+ "direction": "^2.0.0",
+ "hast-util-has-property": "^2.0.0",
+ "hast-util-to-string": "^2.0.0",
+ "hast-util-whitespace": "^2.0.0",
+ "not": "^0.1.0",
+ "nth-check": "^2.0.0",
+ "property-information": "^6.0.0",
+ "space-separated-tokens": "^2.0.0",
+ "unist-util-visit": "^4.0.0",
+ "zwitch": "^2.0.0"
+ }
+ },
+ "hast-util-to-parse5": {
+ "version": "7.1.0",
+ "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-7.1.0.tgz",
+ "integrity": "sha512-YNRgAJkH2Jky5ySkIqFXTQiaqcAtJyVE+D5lkN6CdtOqrnkLfGYYrEcKuHOJZlp+MwjSwuD3fZuawI+sic/RBw==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "comma-separated-tokens": "^2.0.0",
+ "property-information": "^6.0.0",
+ "space-separated-tokens": "^2.0.0",
+ "web-namespaces": "^2.0.0",
+ "zwitch": "^2.0.0"
+ }
+ },
+ "hast-util-to-string": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/hast-util-to-string/-/hast-util-to-string-2.0.0.tgz",
+ "integrity": "sha512-02AQ3vLhuH3FisaMM+i/9sm4OXGSq1UhOOCpTLLQtHdL3tZt7qil69r8M8iDkZYyC0HCFylcYoP+8IO7ddta1A==",
+ "requires": {
+ "@types/hast": "^2.0.0"
+ }
+ },
"hast-util-whitespace": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.1.tgz",
@@ -4767,6 +5795,11 @@
}
}
},
+ "html-void-elements": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-2.0.1.tgz",
+ "integrity": "sha512-0quDb7s97CfemeJAnW9wC0hw78MtW7NU3hqtCD75g2vFlDLt36llsYD7uB7SUzojLMP24N5IatXf7ylGXiGG9A=="
+ },
"inline-style-parser": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz",
@@ -4880,6 +5913,11 @@
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
+ "longest-streak": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz",
+ "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g=="
+ },
"loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
@@ -4897,6 +5935,11 @@
"highlight.js": "~10.7.0"
}
},
+ "markdown-table": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.3.tgz",
+ "integrity": "sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw=="
+ },
"mdast-util-definitions": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.2.tgz",
@@ -4907,6 +5950,24 @@
"unist-util-visit": "^4.0.0"
}
},
+ "mdast-util-find-and-replace": {
+ "version": "2.2.2",
+ "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-2.2.2.tgz",
+ "integrity": "sha512-MTtdFRz/eMDHXzeK6W3dO7mXUlF82Gom4y0oOgvHhh/HXZAGvIQDUvQ0SuUx+j2tv44b8xTHOm8K/9OoRFnXKw==",
+ "requires": {
+ "@types/mdast": "^3.0.0",
+ "escape-string-regexp": "^5.0.0",
+ "unist-util-is": "^5.0.0",
+ "unist-util-visit-parents": "^5.0.0"
+ },
+ "dependencies": {
+ "escape-string-regexp": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
+ "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="
+ }
+ }
+ },
"mdast-util-from-markdown": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.0.tgz",
@@ -4926,6 +5987,79 @@
"uvu": "^0.5.0"
}
},
+ "mdast-util-gfm": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-2.0.2.tgz",
+ "integrity": "sha512-qvZ608nBppZ4icQlhQQIAdc6S3Ffj9RGmzwUKUWuEICFnd1LVkN3EktF7ZHAgfcEdvZB5owU9tQgt99e2TlLjg==",
+ "requires": {
+ "mdast-util-from-markdown": "^1.0.0",
+ "mdast-util-gfm-autolink-literal": "^1.0.0",
+ "mdast-util-gfm-footnote": "^1.0.0",
+ "mdast-util-gfm-strikethrough": "^1.0.0",
+ "mdast-util-gfm-table": "^1.0.0",
+ "mdast-util-gfm-task-list-item": "^1.0.0",
+ "mdast-util-to-markdown": "^1.0.0"
+ }
+ },
+ "mdast-util-gfm-autolink-literal": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-1.0.3.tgz",
+ "integrity": "sha512-My8KJ57FYEy2W2LyNom4n3E7hKTuQk/0SES0u16tjA9Z3oFkF4RrC/hPAPgjlSpezsOvI8ObcXcElo92wn5IGA==",
+ "requires": {
+ "@types/mdast": "^3.0.0",
+ "ccount": "^2.0.0",
+ "mdast-util-find-and-replace": "^2.0.0",
+ "micromark-util-character": "^1.0.0"
+ }
+ },
+ "mdast-util-gfm-footnote": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-1.0.2.tgz",
+ "integrity": "sha512-56D19KOGbE00uKVj3sgIykpwKL179QsVFwx/DCW0u/0+URsryacI4MAdNJl0dh+u2PSsD9FtxPFbHCzJ78qJFQ==",
+ "requires": {
+ "@types/mdast": "^3.0.0",
+ "mdast-util-to-markdown": "^1.3.0",
+ "micromark-util-normalize-identifier": "^1.0.0"
+ }
+ },
+ "mdast-util-gfm-strikethrough": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-1.0.3.tgz",
+ "integrity": "sha512-DAPhYzTYrRcXdMjUtUjKvW9z/FNAMTdU0ORyMcbmkwYNbKocDpdk+PX1L1dQgOID/+vVs1uBQ7ElrBQfZ0cuiQ==",
+ "requires": {
+ "@types/mdast": "^3.0.0",
+ "mdast-util-to-markdown": "^1.3.0"
+ }
+ },
+ "mdast-util-gfm-table": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.7.tgz",
+ "integrity": "sha512-jjcpmNnQvrmN5Vx7y7lEc2iIOEytYv7rTvu+MeyAsSHTASGCCRA79Igg2uKssgOs1i1po8s3plW0sTu1wkkLGg==",
+ "requires": {
+ "@types/mdast": "^3.0.0",
+ "markdown-table": "^3.0.0",
+ "mdast-util-from-markdown": "^1.0.0",
+ "mdast-util-to-markdown": "^1.3.0"
+ }
+ },
+ "mdast-util-gfm-task-list-item": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-1.0.2.tgz",
+ "integrity": "sha512-PFTA1gzfp1B1UaiJVyhJZA1rm0+Tzn690frc/L8vNX1Jop4STZgOE6bxUhnzdVSB+vm2GU1tIsuQcA9bxTQpMQ==",
+ "requires": {
+ "@types/mdast": "^3.0.0",
+ "mdast-util-to-markdown": "^1.3.0"
+ }
+ },
+ "mdast-util-phrasing": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz",
+ "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==",
+ "requires": {
+ "@types/mdast": "^3.0.0",
+ "unist-util-is": "^5.0.0"
+ }
+ },
"mdast-util-to-hast": {
"version": "12.3.0",
"resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-12.3.0.tgz",
@@ -4941,6 +6075,21 @@
"unist-util-visit": "^4.0.0"
}
},
+ "mdast-util-to-markdown": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz",
+ "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==",
+ "requires": {
+ "@types/mdast": "^3.0.0",
+ "@types/unist": "^2.0.0",
+ "longest-streak": "^3.0.0",
+ "mdast-util-phrasing": "^3.0.0",
+ "mdast-util-to-string": "^3.0.0",
+ "micromark-util-decode-string": "^1.0.0",
+ "unist-util-visit": "^4.0.0",
+ "zwitch": "^2.0.0"
+ }
+ },
"mdast-util-to-string": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.1.1.tgz",
@@ -5002,6 +6151,92 @@
"uvu": "^0.5.0"
}
},
+ "micromark-extension-gfm": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-2.0.3.tgz",
+ "integrity": "sha512-vb9OoHqrhCmbRidQv/2+Bc6pkP0FrtlhurxZofvOEy5o8RtuuvTq+RQ1Vw5ZDNrVraQZu3HixESqbG+0iKk/MQ==",
+ "requires": {
+ "micromark-extension-gfm-autolink-literal": "^1.0.0",
+ "micromark-extension-gfm-footnote": "^1.0.0",
+ "micromark-extension-gfm-strikethrough": "^1.0.0",
+ "micromark-extension-gfm-table": "^1.0.0",
+ "micromark-extension-gfm-tagfilter": "^1.0.0",
+ "micromark-extension-gfm-task-list-item": "^1.0.0",
+ "micromark-util-combine-extensions": "^1.0.0",
+ "micromark-util-types": "^1.0.0"
+ }
+ },
+ "micromark-extension-gfm-autolink-literal": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-1.0.5.tgz",
+ "integrity": "sha512-z3wJSLrDf8kRDOh2qBtoTRD53vJ+CWIyo7uyZuxf/JAbNJjiHsOpG1y5wxk8drtv3ETAHutCu6N3thkOOgueWg==",
+ "requires": {
+ "micromark-util-character": "^1.0.0",
+ "micromark-util-sanitize-uri": "^1.0.0",
+ "micromark-util-symbol": "^1.0.0",
+ "micromark-util-types": "^1.0.0"
+ }
+ },
+ "micromark-extension-gfm-footnote": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-1.1.2.tgz",
+ "integrity": "sha512-Yxn7z7SxgyGWRNa4wzf8AhYYWNrwl5q1Z8ii+CSTTIqVkmGZF1CElX2JI8g5yGoM3GAman9/PVCUFUSJ0kB/8Q==",
+ "requires": {
+ "micromark-core-commonmark": "^1.0.0",
+ "micromark-factory-space": "^1.0.0",
+ "micromark-util-character": "^1.0.0",
+ "micromark-util-normalize-identifier": "^1.0.0",
+ "micromark-util-sanitize-uri": "^1.0.0",
+ "micromark-util-symbol": "^1.0.0",
+ "micromark-util-types": "^1.0.0",
+ "uvu": "^0.5.0"
+ }
+ },
+ "micromark-extension-gfm-strikethrough": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-1.0.7.tgz",
+ "integrity": "sha512-sX0FawVE1o3abGk3vRjOH50L5TTLr3b5XMqnP9YDRb34M0v5OoZhG+OHFz1OffZ9dlwgpTBKaT4XW/AsUVnSDw==",
+ "requires": {
+ "micromark-util-chunked": "^1.0.0",
+ "micromark-util-classify-character": "^1.0.0",
+ "micromark-util-resolve-all": "^1.0.0",
+ "micromark-util-symbol": "^1.0.0",
+ "micromark-util-types": "^1.0.0",
+ "uvu": "^0.5.0"
+ }
+ },
+ "micromark-extension-gfm-table": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-1.0.7.tgz",
+ "integrity": "sha512-3ZORTHtcSnMQEKtAOsBQ9/oHp9096pI/UvdPtN7ehKvrmZZ2+bbWhi0ln+I9drmwXMt5boocn6OlwQzNXeVeqw==",
+ "requires": {
+ "micromark-factory-space": "^1.0.0",
+ "micromark-util-character": "^1.0.0",
+ "micromark-util-symbol": "^1.0.0",
+ "micromark-util-types": "^1.0.0",
+ "uvu": "^0.5.0"
+ }
+ },
+ "micromark-extension-gfm-tagfilter": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-1.0.2.tgz",
+ "integrity": "sha512-5XWB9GbAUSHTn8VPU8/1DBXMuKYT5uOgEjJb8gN3mW0PNW5OPHpSdojoqf+iq1xo7vWzw/P8bAHY0n6ijpXF7g==",
+ "requires": {
+ "micromark-util-types": "^1.0.0"
+ }
+ },
+ "micromark-extension-gfm-task-list-item": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-1.0.5.tgz",
+ "integrity": "sha512-RMFXl2uQ0pNQy6Lun2YBYT9g9INXtWJULgbt01D/x8/6yJ2qpKyzdZD3pi6UIkzF++Da49xAelVKUeUMqd5eIQ==",
+ "requires": {
+ "micromark-factory-space": "^1.0.0",
+ "micromark-util-character": "^1.0.0",
+ "micromark-util-symbol": "^1.0.0",
+ "micromark-util-types": "^1.0.0",
+ "uvu": "^0.5.0"
+ }
+ },
"micromark-factory-destination": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.0.0.tgz",
@@ -5217,6 +6452,19 @@
"integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==",
"dev": true
},
+ "not": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/not/-/not-0.1.0.tgz",
+ "integrity": "sha512-5PDmaAsVfnWUgTUbJ3ERwn7u79Z0dYxN9ErxCpVJJqe2RK0PJ3z+iFUxuqjwtlDDegXvtWoxD/3Fzxox7tFGWA=="
+ },
+ "nth-check": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
+ "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
+ "requires": {
+ "boolbase": "^1.0.0"
+ }
+ },
"object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
@@ -5248,6 +6496,16 @@
}
}
},
+ "parse-numeric-range": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz",
+ "integrity": "sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ=="
+ },
+ "parse5": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz",
+ "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw=="
+ },
"path-parse": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
@@ -5510,6 +6768,190 @@
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz",
"integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg=="
},
+ "rehype-attr": {
+ "version": "2.1.4",
+ "resolved": "https://registry.npmjs.org/rehype-attr/-/rehype-attr-2.1.4.tgz",
+ "integrity": "sha512-iAeaL5JyF4XxkcvWzpi/0SAF7iV7qOTaHS56tJuEsXziQc3+PEmMn65kV8OFgbO9mRVY7J1fRC/aLvot1PsNkg==",
+ "requires": {
+ "unified": "~10.1.1",
+ "unist-util-visit": "~4.1.0"
+ }
+ },
+ "rehype-autolink-headings": {
+ "version": "6.1.1",
+ "resolved": "https://registry.npmjs.org/rehype-autolink-headings/-/rehype-autolink-headings-6.1.1.tgz",
+ "integrity": "sha512-NMYzZIsHM3sA14nC5rAFuUPIOfg+DFmf9EY1YMhaNlB7+3kK/ZlE6kqPfuxr1tsJ1XWkTrMtMoyHosU70d35mA==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "extend": "^3.0.0",
+ "hast-util-has-property": "^2.0.0",
+ "hast-util-heading-rank": "^2.0.0",
+ "hast-util-is-element": "^2.0.0",
+ "unified": "^10.0.0",
+ "unist-util-visit": "^4.0.0"
+ }
+ },
+ "rehype-ignore": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/rehype-ignore/-/rehype-ignore-1.0.5.tgz",
+ "integrity": "sha512-JQXS5eDwXaYKwB8JEYFJJA/YvGi0sSNUOYuiURMtuPTg8tuWHFB91JMYLbImH1FyvyGQM4fIBqNMAPB50WR2Bw==",
+ "requires": {
+ "hast-util-select": "^5.0.5",
+ "unified": "^10.1.2",
+ "unist-util-visit": "^4.1.2"
+ }
+ },
+ "rehype-parse": {
+ "version": "8.0.4",
+ "resolved": "https://registry.npmjs.org/rehype-parse/-/rehype-parse-8.0.4.tgz",
+ "integrity": "sha512-MJJKONunHjoTh4kc3dsM1v3C9kGrrxvA3U8PxZlP2SjH8RNUSrb+lF7Y0KVaUDnGH2QZ5vAn7ulkiajM9ifuqg==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "hast-util-from-parse5": "^7.0.0",
+ "parse5": "^6.0.0",
+ "unified": "^10.0.0"
+ }
+ },
+ "rehype-prism-plus": {
+ "version": "1.5.1",
+ "resolved": "https://registry.npmjs.org/rehype-prism-plus/-/rehype-prism-plus-1.5.1.tgz",
+ "integrity": "sha512-mowYefSfrIkMMxkb0fwuEXlvc5nA9b1vQ6mzujM81Qx28RI0mo7jCHsBZ2tJ4eIJKXdFn+EdPkZZBGB10K02vg==",
+ "requires": {
+ "hast-util-to-string": "^2.0.0",
+ "parse-numeric-range": "^1.3.0",
+ "refractor": "^4.7.0",
+ "rehype-parse": "^8.0.2",
+ "unist-util-filter": "^4.0.0",
+ "unist-util-visit": "^4.0.0"
+ },
+ "dependencies": {
+ "character-entities-legacy": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz",
+ "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ=="
+ },
+ "character-reference-invalid": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz",
+ "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw=="
+ },
+ "hast-util-parse-selector": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.1.tgz",
+ "integrity": "sha512-jdlwBjEexy1oGz0aJ2f4GKMaVKkA9jwjr4MjAAI22E5fM/TXVZHuS5OpONtdeIkRKqAaryQ2E9xNQxijoThSZA==",
+ "requires": {
+ "@types/hast": "^2.0.0"
+ }
+ },
+ "hastscript": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.2.0.tgz",
+ "integrity": "sha512-TtYPq24IldU8iKoJQqvZOuhi5CyCQRAbvDOX0x1eW6rsHSxa/1i2CCiptNTotGHJ3VoHRGmqiv6/D3q113ikkw==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "comma-separated-tokens": "^2.0.0",
+ "hast-util-parse-selector": "^3.0.0",
+ "property-information": "^6.0.0",
+ "space-separated-tokens": "^2.0.0"
+ }
+ },
+ "is-alphabetical": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz",
+ "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ=="
+ },
+ "is-alphanumerical": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz",
+ "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==",
+ "requires": {
+ "is-alphabetical": "^2.0.0",
+ "is-decimal": "^2.0.0"
+ }
+ },
+ "is-decimal": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz",
+ "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A=="
+ },
+ "is-hexadecimal": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz",
+ "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg=="
+ },
+ "parse-entities": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.1.tgz",
+ "integrity": "sha512-SWzvYcSJh4d/SGLIOQfZ/CoNv6BTlI6YEQ7Nj82oDVnRpwe/Z/F1EMx42x3JAOwGBlCjeCH0BRJQbQ/opHL17w==",
+ "requires": {
+ "@types/unist": "^2.0.0",
+ "character-entities": "^2.0.0",
+ "character-entities-legacy": "^3.0.0",
+ "character-reference-invalid": "^2.0.0",
+ "decode-named-character-reference": "^1.0.0",
+ "is-alphanumerical": "^2.0.0",
+ "is-decimal": "^2.0.0",
+ "is-hexadecimal": "^2.0.0"
+ }
+ },
+ "refractor": {
+ "version": "4.8.1",
+ "resolved": "https://registry.npmjs.org/refractor/-/refractor-4.8.1.tgz",
+ "integrity": "sha512-/fk5sI0iTgFYlmVGYVew90AoYnNMP6pooClx/XKqyeeCQXrL0Kvgn8V0VEht5ccdljbzzF1i3Q213gcntkRExg==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "@types/prismjs": "^1.0.0",
+ "hastscript": "^7.0.0",
+ "parse-entities": "^4.0.0"
+ }
+ }
+ }
+ },
+ "rehype-raw": {
+ "version": "6.1.1",
+ "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-6.1.1.tgz",
+ "integrity": "sha512-d6AKtisSRtDRX4aSPsJGTfnzrX2ZkHQLE5kiUuGOeEoLpbEulFF4hj0mLPbsa+7vmguDKOVVEQdHKDSwoaIDsQ==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "hast-util-raw": "^7.2.0",
+ "unified": "^10.0.0"
+ }
+ },
+ "rehype-rewrite": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/rehype-rewrite/-/rehype-rewrite-3.0.6.tgz",
+ "integrity": "sha512-REDTNCvsKcAazy8IQWzKp66AhSUDSOIKssSCqNqCcT9sN7JCwAAm3mWGTUdUzq80ABuy8d0D6RBwbnewu1aY1g==",
+ "requires": {
+ "hast-util-select": "~5.0.1",
+ "unified": "~10.1.1",
+ "unist-util-visit": "~4.1.0"
+ }
+ },
+ "rehype-slug": {
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/rehype-slug/-/rehype-slug-5.1.0.tgz",
+ "integrity": "sha512-Gf91dJoXneiorNEnn+Phx97CO7oRMrpi+6r155tTxzGuLtm+QrI4cTwCa9e1rtePdL4i9tSO58PeSS6HWfgsiw==",
+ "requires": {
+ "@types/hast": "^2.0.0",
+ "github-slugger": "^2.0.0",
+ "hast-util-has-property": "^2.0.0",
+ "hast-util-heading-rank": "^2.0.0",
+ "hast-util-to-string": "^2.0.0",
+ "unified": "^10.0.0",
+ "unist-util-visit": "^4.0.0"
+ }
+ },
+ "remark-gfm": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-3.0.1.tgz",
+ "integrity": "sha512-lEFDoi2PICJyNrACFOfDD3JlLkuSbOa5Wd8EPt06HUdptv8Gn0bxYTdbU/XXQ3swAPkEaGxxPN9cbnMHvVu1Ig==",
+ "requires": {
+ "@types/mdast": "^3.0.0",
+ "mdast-util-gfm": "^2.0.0",
+ "micromark-extension-gfm": "^2.0.0",
+ "unified": "^10.0.0"
+ }
+ },
"remark-parse": {
"version": "10.0.1",
"resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-10.0.1.tgz",
@@ -5722,6 +7164,16 @@
"vfile": "^5.0.0"
}
},
+ "unist-util-filter": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/unist-util-filter/-/unist-util-filter-4.0.1.tgz",
+ "integrity": "sha512-RynicUM/vbOSTSiUK+BnaK9XMfmQUh6gyi7L6taNgc7FIf84GukXVV3ucGzEN/PhUUkdP5hb1MmXc+3cvPUm5Q==",
+ "requires": {
+ "@types/unist": "^2.0.0",
+ "unist-util-is": "^5.0.0",
+ "unist-util-visit-parents": "^5.0.0"
+ }
+ },
"unist-util-generated": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.1.tgz",
@@ -5819,6 +7271,15 @@
"vfile-message": "^3.0.0"
}
},
+ "vfile-location": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-4.1.0.tgz",
+ "integrity": "sha512-YF23YMyASIIJXpktBa4vIGLJ5Gs88UB/XePgqPmTa7cDA+JeO3yclbpheQYCHjVHBn/yePzrXuygIL+xbvRYHw==",
+ "requires": {
+ "@types/unist": "^2.0.0",
+ "vfile": "^5.0.0"
+ }
+ },
"vfile-message": {
"version": "3.1.4",
"resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz",
@@ -5849,6 +7310,11 @@
"fs-extra": "^10.0.0"
}
},
+ "web-namespaces": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz",
+ "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ=="
+ },
"xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
@@ -5859,6 +7325,11 @@
"resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
"integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
"dev": true
+ },
+ "zwitch": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz",
+ "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="
}
}
}
diff --git a/extension/react-app/package.json b/extension/react-app/package.json
index 4bedb813..704f520a 100644
--- a/extension/react-app/package.json
+++ b/extension/react-app/package.json
@@ -12,12 +12,12 @@
"@styled-icons/heroicons-outline": "^10.47.0",
"@styled-icons/heroicons-solid": "^10.47.0",
"@types/vscode-webview": "^1.57.1",
+ "@uiw/react-markdown-preview": "^4.1.13",
"downshift": "^7.6.0",
"posthog-js": "^1.58.0",
"prismjs": "^1.29.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
- "react-markdown": "^8.0.5",
"react-redux": "^8.0.5",
"react-switch": "^7.0.0",
"react-syntax-highlighter": "^15.5.0",
diff --git a/extension/react-app/src/components/StepContainer.tsx b/extension/react-app/src/components/StepContainer.tsx
index d1a8a46a..6fa4ba13 100644
--- a/extension/react-app/src/components/StepContainer.tsx
+++ b/extension/react-app/src/components/StepContainer.tsx
@@ -1,4 +1,4 @@
-import { useCallback, useEffect, useRef, useState } from "react";
+import { useEffect, useRef, useState } from "react";
import styled, { keyframes } from "styled-components";
import {
appear,
@@ -15,9 +15,8 @@ import {
} from "@styled-icons/heroicons-outline";
import { StopCircle } from "@styled-icons/heroicons-solid";
import { HistoryNode } from "../../../schema/HistoryNode";
-import ReactMarkdown from "react-markdown";
import HeaderButtonWithText from "./HeaderButtonWithText";
-import CodeBlock from "./CodeBlock";
+import MarkdownPreview from "@uiw/react-markdown-preview";
interface StepContainerProps {
historyNode: HistoryNode;
@@ -72,19 +71,6 @@ const ContentDiv = styled.div<{ isUserInput: boolean }>`
font-size: 13px;
`;
-const MarkdownPre = styled.pre`
- background-color: ${secondaryDark};
- padding: 10px;
- border-radius: ${defaultBorderRadius};
- border: 0.5px solid white;
-`;
-
-const StyledCode = styled.code`
- word-wrap: break-word;
- color: #f69292;
- background: transparent;
-`;
-
const gradient = keyframes`
0% {
background-position: 0px 0;
@@ -124,6 +110,31 @@ const GradientBorder = styled.div<{
background-size: 200% 200%;
`;
+const StyledMarkdownPreview = styled(MarkdownPreview)`
+ pre {
+ background-color: ${secondaryDark};
+ padding: 1px;
+ border-radius: ${defaultBorderRadius};
+ border: 0.5px solid white;
+ }
+
+ code {
+ color: #f69292;
+ word-wrap: break-word;
+ }
+
+ pre > code {
+ background-color: ${secondaryDark};
+ color: white;
+ }
+
+ background-color: ${vscBackground};
+ font-family: "Lexend", sans-serif;
+ font-size: 13px;
+ padding: 8px;
+ color: white;
+`;
+
// #endregion
function StepContainer(props: StepContainerProps) {
@@ -158,7 +169,7 @@ function StepContainer(props: StepContainerProps) {
>
@@ -246,27 +257,9 @@ function StepContainer(props: StepContainerProps) {
{props.historyNode.observation.error as string}
) : (
-
{
- return (
-
- );
- },
- code: ({ node, ...props }) => {
- return ;
- },
- ul: ({ node, ...props }) => {
- return ;
- },
- }}
- >
- {props.historyNode.step.description as any}
-
+
)}
--
cgit v1.2.3-70-g09d2
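Note on the commit above: the per-element ReactMarkdown overrides (pre, code, ul) are replaced by a single styled wrapper around @uiw/react-markdown-preview, so the markdown styling lives in one styled-components block. A minimal sketch of that pattern, assuming MarkdownPreview's source prop and using placeholder values for the theme tokens (secondaryDark, vscBackground, defaultBorderRadius come from the components index in the real code):

  import * as React from "react";
  import styled from "styled-components";
  import MarkdownPreview from "@uiw/react-markdown-preview";

  // Placeholder theme tokens for this sketch only.
  const secondaryDark = "#363636";
  const vscBackground = "#1e1e1e";
  const defaultBorderRadius = "8px";

  // One styled wrapper instead of per-render component overrides.
  const StyledMarkdownPreview = styled(MarkdownPreview)`
    pre {
      background-color: ${secondaryDark};
      border-radius: ${defaultBorderRadius};
      border: 0.5px solid white;
    }
    code {
      color: #f69292;
      word-wrap: break-word;
    }
    background-color: ${vscBackground};
    font-size: 13px;
    padding: 8px;
  `;

  // Usage: render a step description as markdown.
  export function StepDescription(props: { description: string }) {
    return <StyledMarkdownPreview source={props.description} />;
  }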
From f4d546c6ccddf8b6dca7c360f799e08f152bdf96 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Fri, 14 Jul 2023 02:02:20 -0700
Subject: patch
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 98e1d560..65fdab12 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.161",
+ "version": "0.0.162",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.161",
+ "version": "0.0.162",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 3636686b..ef39582b 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.161",
+ "version": "0.0.162",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
--
cgit v1.2.3-70-g09d2
From 5c6d2f1be8474d26124506e0c2a640fa68efe52d Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Fri, 14 Jul 2023 02:30:45 -0700
Subject: fixed unique_id being asyncio.run property
---
continuedev/src/continuedev/server/ide.py | 22 +++++++++++-----------
continuedev/src/continuedev/server/ide_protocol.py | 5 +----
continuedev/src/continuedev/steps/help.py | 8 +++++---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
extension/react-app/src/components/ComboBox.tsx | 17 ++---------------
extension/react-app/src/components/Onboarding.tsx | 1 +
7 files changed, 23 insertions(+), 36 deletions(-)
diff --git a/continuedev/src/continuedev/server/ide.py b/continuedev/src/continuedev/server/ide.py
index 12a21f19..73cce201 100644
--- a/continuedev/src/continuedev/server/ide.py
+++ b/continuedev/src/continuedev/server/ide.py
@@ -123,10 +123,12 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
self.websocket = websocket
self.session_manager = session_manager
- workspace_directory: str
+ workspace_directory: str = None
+ unique_id: str = None
async def initialize(self) -> List[str]:
await self._send_json("workspaceDirectory", {})
+ await self._send_json("uniqueId", {})
other_msgs = []
while True:
msg_string = await self.websocket.receive_text()
@@ -137,9 +139,13 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
data = message["data"]
if message_type == "workspaceDirectory":
self.workspace_directory = data["workspaceDirectory"]
- break
+ elif message_type == "uniqueId":
+ self.unique_id = data["uniqueId"]
else:
other_msgs.append(msg_string)
+
+ if self.workspace_directory is not None and self.unique_id is not None:
+ break
return other_msgs
async def _send_json(self, message_type: str, data: Any):
@@ -183,10 +189,12 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
self.onMainUserInput(data["input"])
elif message_type == "deleteAtIndex":
self.onDeleteAtIndex(data["index"])
- elif message_type in ["highlightedCode", "openFiles", "visibleFiles", "readFile", "editFile", "getUserSecret", "runCommand", "uniqueId"]:
+ elif message_type in ["highlightedCode", "openFiles", "visibleFiles", "readFile", "editFile", "getUserSecret", "runCommand"]:
self.sub_queue.post(message_type, data)
elif message_type == "workspaceDirectory":
self.workspace_directory = data["workspaceDirectory"]
+ elif message_type == "uniqueId":
+ self.unique_id = data["uniqueId"]
else:
raise ValueError("Unknown message type", message_type)
@@ -311,14 +319,6 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
resp = await self._send_and_receive_json({}, VisibleFilesResponse, "visibleFiles")
return resp.visibleFiles
- async def get_unique_id(self) -> str:
- resp = await self._send_and_receive_json({}, UniqueIdResponse, "uniqueId")
- return resp.uniqueId
-
- @cached_property_no_none
- def unique_id(self) -> str:
- return asyncio.run(self.get_unique_id())
-
async def getHighlightedCode(self) -> List[RangeInFile]:
resp = await self._send_and_receive_json({}, HighlightedCodeResponse, "highlightedCode")
return resp.highlightedCode
diff --git a/continuedev/src/continuedev/server/ide_protocol.py b/continuedev/src/continuedev/server/ide_protocol.py
index 2f78cf0e..d0fb0bf8 100644
--- a/continuedev/src/continuedev/server/ide_protocol.py
+++ b/continuedev/src/continuedev/server/ide_protocol.py
@@ -108,7 +108,4 @@ class AbstractIdeProtocolServer(ABC):
"""Show a diff"""
workspace_directory: str
-
- @abstractproperty
- def unique_id(self) -> str:
- """Get a unique ID for this IDE"""
+ unique_id: str
diff --git a/continuedev/src/continuedev/steps/help.py b/continuedev/src/continuedev/steps/help.py
index fdfb986f..2dc3647c 100644
--- a/continuedev/src/continuedev/steps/help.py
+++ b/continuedev/src/continuedev/steps/help.py
@@ -6,7 +6,7 @@ from ..libs.util.telemetry import capture_event
help = dedent("""\
Continue is an open-source coding autopilot. It is a VS Code extension that brings the power of ChatGPT to your IDE.
- It gathers context for you and stores your interactions automatically, so that you can avoid copy/paste now and benefit from a customized LLM later.
+ It gathers context for you and stores your interactions automatically, so that you can avoid copy/paste now and benefit from a customized Large Language Model (LLM) later.
Continue can be used to...
1. Edit chunks of code with specific instructions (e.g. "/edit migrate this digital ocean terraform file into one that works for GCP")
@@ -25,6 +25,7 @@ help = dedent("""\
If you have feedback, please use /feedback to let us know how you would like to use Continue. We are excited to hear from you!""")
+
class HelpStep(Step):
name: str = "Help"
@@ -41,7 +42,7 @@ class HelpStep(Step):
Information:
{help}""")
-
+
self.chat_context.append(ChatMessage(
role="user",
content=prompt,
@@ -54,4 +55,5 @@ class HelpStep(Step):
self.description += chunk["content"]
await sdk.update_ui()
- capture_event(sdk.ide.unique_id, "help", {"question": question, "answer": self.description})
\ No newline at end of file
+ capture_event(sdk.ide.unique_id, "help", {
+ "question": question, "answer": self.description})
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 65fdab12..9d5c73e1 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.162",
+ "version": "0.0.163",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.162",
+ "version": "0.0.163",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index ef39582b..2b0f6b94 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.162",
+ "version": "0.0.163",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
diff --git a/extension/react-app/src/components/ComboBox.tsx b/extension/react-app/src/components/ComboBox.tsx
index 7d6541c7..73db33ca 100644
--- a/extension/react-app/src/components/ComboBox.tsx
+++ b/extension/react-app/src/components/ComboBox.tsx
@@ -1,29 +1,16 @@
-import React, {
- useCallback,
- useEffect,
- useImperativeHandle,
- useState,
-} from "react";
+import React, { useEffect, useImperativeHandle, useState } from "react";
import { useCombobox } from "downshift";
import styled from "styled-components";
import {
- buttonColor,
defaultBorderRadius,
lightGray,
secondaryDark,
vscBackground,
} from ".";
import CodeBlock from "./CodeBlock";
-import { RangeInFile } from "../../../src/client";
import PillButton from "./PillButton";
import HeaderButtonWithText from "./HeaderButtonWithText";
-import {
- Trash,
- LockClosed,
- LockOpen,
- Plus,
- DocumentPlus,
-} from "@styled-icons/heroicons-outline";
+import { DocumentPlus } from "@styled-icons/heroicons-outline";
import { HighlightedRangeContext } from "../../../schema/FullState";
// #region styled components
diff --git a/extension/react-app/src/components/Onboarding.tsx b/extension/react-app/src/components/Onboarding.tsx
index e2dd6f57..6bfb0ccd 100644
--- a/extension/react-app/src/components/Onboarding.tsx
+++ b/extension/react-app/src/components/Onboarding.tsx
@@ -22,6 +22,7 @@ const StyledSpan = styled.span`
&:hover {
background-color: #ffffff33;
}
+ white-space: nowrap;
`;
const Onboarding = () => {
--
cgit v1.2.3-70-g09d2
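Note on the handshake change above: initialize() now requests both the workspace directory and a unique id up front and keeps reading messages until both have arrived, instead of deriving unique_id lazily via asyncio.run. A minimal sketch of the IDE-side replies this expects, assuming a messenger with a send(messageType, data) method and vscode.env.machineId as the identifier (both are assumptions, not taken from this patch):

  import * as vscode from "vscode";

  // Hypothetical minimal messenger interface for this sketch.
  interface Messenger {
    send(messageType: string, data: Record<string, unknown>): void;
  }

  // Reply to the two handshake requests the server sends on connect.
  function handleHandshakeMessage(messenger: Messenger, messageType: string) {
    switch (messageType) {
      case "workspaceDirectory":
        messenger.send("workspaceDirectory", {
          workspaceDirectory:
            vscode.workspace.workspaceFolders?.[0]?.uri.fsPath ?? "",
        });
        break;
      case "uniqueId":
        // machineId is a stable, anonymized per-installation id.
        messenger.send("uniqueId", { uniqueId: vscode.env.machineId });
        break;
    }
  }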
From 39cd2ef27d6ed439b00a9edec4a487343ff1c2c9 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Fri, 14 Jul 2023 03:24:46 -0700
Subject: warn of large highlighted ranges, cmd+k->m
---
continuedev/src/continuedev/core/policy.py | 2 +-
continuedev/src/continuedev/steps/help.py | 2 +-
extension/package-lock.json | 4 +-
extension/package.json | 6 +-
extension/react-app/src/components/ComboBox.tsx | 5 +
extension/react-app/src/components/Onboarding.tsx | 1 +
extension/react-app/src/components/PillButton.tsx | 167 +++++++++++++---------
extension/react-app/src/pages/gui.tsx | 27 ++--
extension/src/commands.ts | 17 ++-
extension/src/lang-server/codeLens.ts | 4 +-
10 files changed, 141 insertions(+), 94 deletions(-)
diff --git a/continuedev/src/continuedev/core/policy.py b/continuedev/src/continuedev/core/policy.py
index 59ea78b1..bc897357 100644
--- a/continuedev/src/continuedev/core/policy.py
+++ b/continuedev/src/continuedev/core/policy.py
@@ -59,7 +59,7 @@ class DemoPolicy(Policy):
return (
MessageStep(name="Welcome to Continue", message=dedent("""\
- Highlight code and ask a question or give instructions
- - Use `cmd+k` (Mac) / `ctrl+k` (Windows) to open Continue
+ - Use `cmd+m` (Mac) / `ctrl+m` (Windows) to open Continue
- Use `/help` to ask questions about how to use Continue""")) >>
WelcomeStep() >>
# SetupContinueWorkspaceStep() >>
diff --git a/continuedev/src/continuedev/steps/help.py b/continuedev/src/continuedev/steps/help.py
index 2dc3647c..ba1e6087 100644
--- a/continuedev/src/continuedev/steps/help.py
+++ b/continuedev/src/continuedev/steps/help.py
@@ -19,7 +19,7 @@ help = dedent("""\
Continue passes all of the sections of code you highlight, the code above and below the to-be edited highlighted code section, and all previous steps above input box as context to the LLM.
- You can use cmd+k (Mac) / ctrl+k (Windows) to open Continue. You can use cmd+shift+e / ctrl+shift+e to open file Explorer. You can add your own OpenAI API key to VS Code Settings with `cmd+,`
+ You can use cmd+m (Mac) / ctrl+m (Windows) to open Continue. You can use cmd+shift+e / ctrl+shift+e to open file Explorer. You can add your own OpenAI API key to VS Code Settings with `cmd+,`
If Continue is stuck loading, try using `cmd+shift+p` to open the command palette, search "Reload Window", and then select it. This will reload VS Code and Continue and often fixes issues.
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 9d5c73e1..a79dd6b4 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.163",
+ "version": "0.0.164",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.163",
+ "version": "0.0.164",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 2b0f6b94..de1f395d 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.163",
+ "version": "0.0.164",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
@@ -111,8 +111,8 @@
"keybindings": [
{
"command": "continue.focusContinueInput",
- "mac": "cmd+k",
- "key": "ctrl+k"
+ "mac": "cmd+m",
+ "key": "ctrl+m"
},
{
"command": "continue.suggestionDown",
diff --git a/extension/react-app/src/components/ComboBox.tsx b/extension/react-app/src/components/ComboBox.tsx
index 73db33ca..bd0d59b5 100644
--- a/extension/react-app/src/components/ComboBox.tsx
+++ b/extension/react-app/src/components/ComboBox.tsx
@@ -228,6 +228,11 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
)} */}
{highlightedCodeSections.map((section, idx) => (
4000 && section.editing
+ ? "Editing such a large range may be slow"
+ : undefined
+ }
editing={section.editing}
pinned={section.pinned}
index={idx}
diff --git a/extension/react-app/src/components/Onboarding.tsx b/extension/react-app/src/components/Onboarding.tsx
index 6bfb0ccd..231c1e93 100644
--- a/extension/react-app/src/components/Onboarding.tsx
+++ b/extension/react-app/src/components/Onboarding.tsx
@@ -109,6 +109,7 @@ const Onboarding = () => {
paddingBottom: "50px",
textAlign: "center",
cursor: "pointer",
+ whiteSpace: "nowrap",
}}
>
`
}
`;
+const CircleDiv = styled.div`
+ position: absolute;
+ top: -10px;
+ right: -10px;
+ width: 20px;
+ height: 20px;
+ border-radius: 50%;
+ background-color: red;
+ color: white;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ padding: 2px;
+`;
+
interface PillButtonProps {
onHover?: (arg0: boolean) => void;
onDelete?: () => void;
@@ -55,6 +68,7 @@ interface PillButtonProps {
index: number;
editing: boolean;
pinned: boolean;
+ warning?: string;
}
const PillButton = (props: PillButtonProps) => {
@@ -63,75 +77,96 @@ const PillButton = (props: PillButtonProps) => {
return (
<>
- {
- setIsHovered(true);
- if (props.onHover) {
- props.onHover(true);
- }
- }}
- onMouseLeave={() => {
- setIsHovered(false);
- if (props.onHover) {
- props.onHover(false);
- }
- }}
- >
- {isHovered && (
-
- {
- client?.setEditingAtIndices([props.index]);
- }}
- >
-
-
+
+
{
+ setIsHovered(true);
+ if (props.onHover) {
+ props.onHover(true);
+ }
+ }}
+ onMouseLeave={() => {
+ setIsHovered(false);
+ if (props.onHover) {
+ props.onHover(false);
+ }
+ }}
+ >
+ {isHovered && (
+
+ {
+ client?.setEditingAtIndices([props.index]);
+ }}
+ >
+
+
- {/* {
client?.setPinnedAtIndices([props.index]);
}}
- >
+ >
*/}
-
- Edit this range
+
+ Edit this range
+
+ {
+ if (props.onDelete) {
+ props.onDelete();
+ }
+ }}
+ >
+
+
+
+ )}
+ {props.title}
+
+
+ {props.editing
+ ? "Editing this range (with rest of file as context)"
+ : "Edit this range"}
+
+
Delete
+ {props.warning && (
+ <>
+
+
+
+
+ {props.warning}
-
{
- if (props.onDelete) {
- props.onDelete();
- }
- }}
- >
-
-
-
+ >
)}
- {props.title}
-
-
- {props.editing
- ? "Editing this range (with rest of file as context)"
- : "Edit this range"}
-
-
Delete
+
>
);
};
diff --git a/extension/react-app/src/pages/gui.tsx b/extension/react-app/src/pages/gui.tsx
index 4ff260fa..57cebac3 100644
--- a/extension/react-app/src/pages/gui.tsx
+++ b/extension/react-app/src/pages/gui.tsx
@@ -95,11 +95,8 @@ function GUI(props: GUIProps) {
name: "Welcome to Continue",
hide: false,
description: `- Highlight code and ask a question or give instructions
-- Use \`cmd+k\` (Mac) / \`ctrl+k\` (Windows) to open Continue
-- Use \`cmd+shift+e\` / \`ctrl+shift+e\` to open file Explorer
-- Add your own OpenAI API key to VS Code Settings with \`cmd+,\`
-- Use slash commands when you want fine-grained control
-- Past steps are included as part of the context by default`,
+ - Use \`cmd+m\` (Mac) / \`ctrl+m\` (Windows) to open Continue
+ - Use \`/help\` to ask questions about how to use Continue`,
system_message: null,
chat_context: [],
manage_own_chat_context: false,
@@ -269,15 +266,17 @@ function GUI(props: GUIProps) {
return (
<>
- {
- client?.sendMainInput(`/feedback ${text}`);
- setShowFeedbackDialog(false);
- }}
- onClose={() => {
- setShowFeedbackDialog(false);
- }}
- message={feedbackDialogMessage} />
+ {
+ client?.sendMainInput(`/feedback ${text}`);
+ setShowFeedbackDialog(false);
+ }}
+ onClose={() => {
+ setShowFeedbackDialog(false);
+ }}
+ message={feedbackDialogMessage}
+ />
any } = {
"continue.suggestionDown": suggestionDownCommand,
@@ -30,10 +32,15 @@ const commandsMap: { [command: string]: (...args: any) => any } = {
"continue.acceptAllSuggestions": acceptAllSuggestionsCommand,
"continue.rejectAllSuggestions": rejectAllSuggestionsCommand,
"continue.focusContinueInput": async () => {
- vscode.commands.executeCommand("continue.continueGUIView.focus");
- debugPanelWebview?.postMessage({
- type: "focusContinueInput",
- });
+ if (focusedOnContinueInput) {
+ vscode.commands.executeCommand("workbench.action.focusActiveEditorGroup");
+ } else {
+ vscode.commands.executeCommand("continue.continueGUIView.focus");
+ debugPanelWebview?.postMessage({
+ type: "focusContinueInput",
+ });
+ }
+ focusedOnContinueInput = !focusedOnContinueInput;
},
"continue.quickTextEntry": async () => {
const text = await vscode.window.showInputBox({
@@ -53,4 +60,4 @@ export function registerAllCommands(context: vscode.ExtensionContext) {
vscode.commands.registerCommand(command, callback)
);
}
-}
\ No newline at end of file
+}
diff --git a/extension/src/lang-server/codeLens.ts b/extension/src/lang-server/codeLens.ts
index 5800a00e..1cfef5d5 100644
--- a/extension/src/lang-server/codeLens.ts
+++ b/extension/src/lang-server/codeLens.ts
@@ -60,12 +60,12 @@ class DiffViewerCodeLensProvider implements vscode.CodeLensProvider {
}
codeLenses.push(
new vscode.CodeLens(range, {
- title: "Accept ✅ (⌘⇧↩)",
+ title: "Accept All ✅ (⌘⇧↩)",
command: "continue.acceptDiff",
arguments: [document.uri.fsPath],
}),
new vscode.CodeLens(range, {
- title: "Reject ❌ (⌘⇧⌫)",
+ title: "Reject All ❌ (⌘⇧⌫)",
command: "continue.rejectDiff",
arguments: [document.uri.fsPath],
})
--
cgit v1.2.3-70-g09d2
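Note on the ComboBox/PillButton changes above: each highlighted range now carries an optional warning, shown when a range being edited is large. A minimal sketch of the wiring, with hypothetical field names where the hunk does not show them (the field holding the highlighted text is an assumption):

  import * as React from "react";

  // Props mirror the PillButtonProps interface added above; the component
  // itself is only declared here, not implemented.
  interface PillButtonProps {
    title: string;
    index: number;
    editing: boolean;
    pinned: boolean;
    warning?: string;
  }
  declare function PillButton(props: PillButtonProps): JSX.Element;

  // Hypothetical shape of a highlighted section for this sketch.
  interface HighlightedSection {
    display_name: string;
    editing: boolean;
    pinned: boolean;
    contents: string;
  }

  export function renderSections(sections: HighlightedSection[]) {
    return sections.map((section, idx) => (
      <PillButton
        key={idx}
        title={section.display_name}
        index={idx}
        editing={section.editing}
        pinned={section.pinned}
        // Surface a warning instead of silently starting a slow edit.
        warning={
          section.contents.length > 4000 && section.editing
            ? "Editing such a large range may be slow"
            : undefined
        }
      />
    ));
  }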
From b19076ddb6d11acb5ffd54046d9e5cad549c00c1 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Fri, 14 Jul 2023 11:01:06 -0700
Subject: command m reliable toggle
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
extension/react-app/src/components/ComboBox.tsx | 4 ++++
extension/src/commands.ts | 5 ++++-
extension/src/debugPanel.ts | 5 +++++
extension/src/diffs.ts | 21 ++++++++++++---------
6 files changed, 28 insertions(+), 13 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index a79dd6b4..12aa27c9 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.164",
+ "version": "0.0.165",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.164",
+ "version": "0.0.165",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index de1f395d..05bd4d84 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.164",
+ "version": "0.0.165",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
diff --git a/extension/react-app/src/components/ComboBox.tsx b/extension/react-app/src/components/ComboBox.tsx
index bd0d59b5..5d9b5109 100644
--- a/extension/react-app/src/components/ComboBox.tsx
+++ b/extension/react-app/src/components/ComboBox.tsx
@@ -12,6 +12,7 @@ import PillButton from "./PillButton";
import HeaderButtonWithText from "./HeaderButtonWithText";
import { DocumentPlus } from "@styled-icons/heroicons-outline";
import { HighlightedRangeContext } from "../../../schema/FullState";
+import { postVscMessage } from "../vscode";
// #region styled components
const mainInputFontSize = 13;
@@ -297,6 +298,9 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
// setShowContextDropdown(target.value.endsWith("@"));
},
+ onBlur: (e) => {
+ postVscMessage("blurContinueInput", {});
+ },
onKeyDown: (event) => {
if (event.key === "Enter" && event.shiftKey) {
// Prevent Downshift's default 'Enter' behavior.
diff --git a/extension/src/commands.ts b/extension/src/commands.ts
index 0025340a..888f01ed 100644
--- a/extension/src/commands.ts
+++ b/extension/src/commands.ts
@@ -16,11 +16,14 @@ import {
import { acceptDiffCommand, rejectDiffCommand } from "./diffs";
import * as bridge from "./bridge";
import { debugPanelWebview } from "./debugPanel";
-import { sendTelemetryEvent, TelemetryEvent } from "./telemetry";
import { ideProtocolClient } from "./activation/activate";
let focusedOnContinueInput = false;
+export const setFocusedOnContinueInput = (value: boolean) => {
+ focusedOnContinueInput = value;
+};
+
// Copy everything over from extension.ts
const commandsMap: { [command: string]: (...args: any) => any } = {
"continue.suggestionDown": suggestionDownCommand,
diff --git a/extension/src/debugPanel.ts b/extension/src/debugPanel.ts
index 5e1689d1..dd24a8d8 100644
--- a/extension/src/debugPanel.ts
+++ b/extension/src/debugPanel.ts
@@ -6,6 +6,7 @@ import {
openEditorAndRevealRange,
} from "./util/vscode";
import { RangeInFile } from "./client";
+import { setFocusedOnContinueInput } from "./commands";
const WebSocket = require("ws");
let websocketConnections: { [url: string]: WebsocketConnection | undefined } =
@@ -226,6 +227,10 @@ export function setupDebugPanel(
openEditorAndRevealRange(data.path, undefined, vscode.ViewColumn.One);
break;
}
+ case "blurContinueInput": {
+ setFocusedOnContinueInput(false);
+ break;
+ }
case "withProgress": {
// This message allows withProgress to be used in the webview
if (data.done) {
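Note on the focus handling above: cmd+m toggles between the editor and the Continue input, and the webview now reports blurs so the toggle flag cannot go stale when the user clicks back into the editor with the mouse. A combined sketch of both sides, assuming the command is registered elsewhere under continue.focusContinueInput:

  import * as vscode from "vscode";

  let focusedOnContinueInput = false;
  export const setFocusedOnContinueInput = (value: boolean) => {
    focusedOnContinueInput = value;
  };

  // cmd+m / ctrl+m: jump between the editor and the Continue input.
  export function focusContinueInput(webview: vscode.Webview | undefined) {
    if (focusedOnContinueInput) {
      vscode.commands.executeCommand("workbench.action.focusActiveEditorGroup");
    } else {
      vscode.commands.executeCommand("continue.continueGUIView.focus");
      webview?.postMessage({ type: "focusContinueInput" });
    }
    focusedOnContinueInput = !focusedOnContinueInput;
  }

  // When the webview reports a blur (the user clicked into the editor),
  // reset the flag so the next cmd+m focuses the input again.
  export function onWebviewMessage(data: { type: string }) {
    if (data.type === "blurContinueInput") {
      setFocusedOnContinueInput(false);
    }
  }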
diff --git a/extension/src/diffs.ts b/extension/src/diffs.ts
index 910c30f2..37943de4 100644
--- a/extension/src/diffs.ts
+++ b/extension/src/diffs.ts
@@ -104,6 +104,17 @@ class DiffManager {
return editor;
}
+ private _findFirstDifferentLine(contentA: string, contentB: string): number {
+ const linesA = contentA.split("\n");
+ const linesB = contentB.split("\n");
+ for (let i = 0; i < linesA.length && i < linesB.length; i++) {
+ if (linesA[i] !== linesB[i]) {
+ return i;
+ }
+ }
+ return 0;
+ }
+
writeDiff(
originalFilepath: string,
newContent: string,
@@ -119,15 +130,7 @@ class DiffManager {
if (!this.diffs.has(newFilepath)) {
// Figure out the first line that is different
const oldContent = fs.readFileSync(originalFilepath).toString("utf-8");
- let line = 0;
- const newLines = newContent.split("\n");
- const oldLines = oldContent.split("\n");
- for (let i = 0; i < newLines.length && i < oldLines.length; i++) {
- if (newLines[i] !== oldLines[i]) {
- line = i;
- break;
- }
- }
+ const line = this._findFirstDifferentLine(oldContent, newContent);
const diffInfo: DiffInfo = {
originalFilepath,
--
cgit v1.2.3-70-g09d2
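A short usage note on the helper extracted above: _findFirstDifferentLine walks the shared prefix of the two files and returns the index of the first line that differs; it returns 0 both when the very first lines already differ and when no difference is found within the shared prefix. A standalone sketch:

  // Standalone copy of the helper, for illustration only.
  function findFirstDifferentLine(contentA: string, contentB: string): number {
    const linesA = contentA.split("\n");
    const linesB = contentB.split("\n");
    for (let i = 0; i < linesA.length && i < linesB.length; i++) {
      if (linesA[i] !== linesB[i]) {
        return i;
      }
    }
    return 0;
  }

  // Used to decide which line to reveal when the diff editor opens.
  console.log(findFirstDifferentLine("a\nb\nc", "a\nx\nc")); // 1
  console.log(findFirstDifferentLine("a\nb", "a\nb\nc")); // 0 (no differing shared line)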
From c5102b0997baa81ce544514d6b5b4d5a2eae804f Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Fri, 14 Jul 2023 13:45:10 -0700
Subject: insidious client_state vs application_state err
---
continuedev/src/continuedev/core/autopilot.py | 2 ++
continuedev/src/continuedev/server/gui.py | 2 +-
continuedev/src/continuedev/server/ide.py | 2 +-
continuedev/src/continuedev/steps/core/core.py | 9 ++++++++-
extension/react-app/src/components/ComboBox.tsx | 9 ++++++++-
extension/react-app/src/components/StepContainer.tsx | 9 ++++++---
extension/src/continueIdeClient.ts | 8 ++++++--
extension/src/diffs.ts | 2 +-
extension/src/util/messenger.ts | 2 +-
9 files changed, 34 insertions(+), 11 deletions(-)
diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py
index e1c8a076..82439f49 100644
--- a/continuedev/src/continuedev/core/autopilot.py
+++ b/continuedev/src/continuedev/core/autopilot.py
@@ -37,6 +37,8 @@ def get_error_title(e: Exception) -> str:
return "The request failed. Please check your internet connection and try again. If this issue persists, you can use our API key for free by going to VS Code settings and changing the value of continue.OPENAI_API_KEY to \"\""
elif isinstance(e, openai_errors.InvalidRequestError):
return 'Your API key does not have access to GPT-4. You can use ours for free by going to VS Code settings and changing the value of continue.OPENAI_API_KEY to ""'
+ elif e.__str__().startswith("Cannot connect to host"):
+ return "The request failed. Please check your internet connection and try again."
return e.__str__() or e.__repr__()
diff --git a/continuedev/src/continuedev/server/gui.py b/continuedev/src/continuedev/server/gui.py
index 238273b2..9a411fbe 100644
--- a/continuedev/src/continuedev/server/gui.py
+++ b/continuedev/src/continuedev/server/gui.py
@@ -53,7 +53,7 @@ class GUIProtocolServer(AbstractGUIProtocolServer):
self.session = session
async def _send_json(self, message_type: str, data: Any):
- if self.websocket.client_state == WebSocketState.DISCONNECTED:
+ if self.websocket.application_state == WebSocketState.DISCONNECTED:
return
await self.websocket.send_json({
"messageType": message_type,
diff --git a/continuedev/src/continuedev/server/ide.py b/continuedev/src/continuedev/server/ide.py
index 73cce201..7875c94d 100644
--- a/continuedev/src/continuedev/server/ide.py
+++ b/continuedev/src/continuedev/server/ide.py
@@ -149,7 +149,7 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
return other_msgs
async def _send_json(self, message_type: str, data: Any):
- if self.websocket.client_state == WebSocketState.DISCONNECTED:
+ if self.websocket.application_state == WebSocketState.DISCONNECTED:
return
await self.websocket.send_json({
"messageType": message_type,
diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/steps/core/core.py
index 787da316..75f8e460 100644
--- a/continuedev/src/continuedev/steps/core/core.py
+++ b/continuedev/src/continuedev/steps/core/core.py
@@ -9,7 +9,7 @@ from ...libs.llm.prompt_utils import MarkdownStyleEncoderDecoder
from ...models.filesystem_edit import EditDiff, FileEdit, FileEditWithFullContents, FileSystemEdit
from ...models.filesystem import FileSystem, RangeInFile, RangeInFileWithContents
from ...core.observation import Observation, TextObservation, TracebackObservation, UserInputObservation
-from ...core.main import ChatMessage, Step, SequentialStep
+from ...core.main import ChatMessage, ContinueCustomException, Step, SequentialStep
from ...libs.util.count_tokens import MAX_TOKENS_FOR_MODEL, DEFAULT_MAX_TOKENS
from ...libs.util.dedent import dedent_and_get_common_whitespace
import difflib
@@ -608,6 +608,13 @@ Please output the code to be inserted at the cursor in order to fulfill the user
rif_dict[rif.filepath] = rif.contents
for rif in rif_with_contents:
+ # If the file doesn't exist, ask them to save it first
+ if not os.path.exists(rif.filepath):
+ message = f"The file {rif.filepath} does not exist. Please save it first."
+ raise ContinueCustomException(
+ title=message, message=message
+ )
+
await sdk.ide.setFileOpen(rif.filepath)
await sdk.ide.setSuggestionsLocked(rif.filepath, True)
await self.stream_rif(rif, sdk)
diff --git a/extension/react-app/src/components/ComboBox.tsx b/extension/react-app/src/components/ComboBox.tsx
index 5d9b5109..754c9445 100644
--- a/extension/react-app/src/components/ComboBox.tsx
+++ b/extension/react-app/src/components/ComboBox.tsx
@@ -169,6 +169,7 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
useImperativeHandle(ref, () => downshiftProps, [downshiftProps]);
const [metaKeyPressed, setMetaKeyPressed] = useState(false);
+ const [focused, setFocused] = useState(false);
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === "Meta") {
@@ -298,7 +299,11 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
// setShowContextDropdown(target.value.endsWith("@"));
},
+ onFocus: (e) => {
+ setFocused(true);
+ },
onBlur: (e) => {
+ setFocused(false);
postVscMessage("blurContinueInput", {});
},
onKeyDown: (event) => {
@@ -374,7 +379,9 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
{highlightedCodeSections.length === 0 &&
(downshiftProps.inputValue?.startsWith("/edit") ||
- (metaKeyPressed && downshiftProps.inputValue?.length > 0)) && (
+ (focused &&
+ metaKeyPressed &&
+ downshiftProps.inputValue?.length > 0)) && (
Inserting at cursor
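Note on the hunk above: the "Inserting at cursor" hint is now shown only while the input is focused, so holding the meta key elsewhere in the IDE no longer flashes it. A minimal sketch of the condition, with the wrapper element assumed:

  import * as React from "react";

  interface HintProps {
    highlightedCount: number;
    inputValue: string;
    focused: boolean;
    metaKeyPressed: boolean;
  }

  // Only show the hint while the Continue input itself is focused.
  export function InsertingAtCursorHint(props: HintProps) {
    const show =
      props.highlightedCount === 0 &&
      (props.inputValue.startsWith("/edit") ||
        (props.focused && props.metaKeyPressed && props.inputValue.length > 0));
    return show ? <span>Inserting at cursor</span> : null;
  }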
diff --git a/extension/react-app/src/components/StepContainer.tsx b/extension/react-app/src/components/StepContainer.tsx
index 6fa4ba13..14e9b854 100644
--- a/extension/react-app/src/components/StepContainer.tsx
+++ b/extension/react-app/src/components/StepContainer.tsx
@@ -253,9 +253,12 @@ function StepContainer(props: StepContainerProps) {
)}
{props.historyNode.observation?.error ? (
-
- {props.historyNode.observation.error as string}
-
+
+ View Traceback
+
+ {props.historyNode.observation.error as string}
+
+
) : (
void): void;
abstract onClose(callback: () => void): void;
-
+
abstract onError(callback: () => void): void;
abstract sendAndReceive(messageType: string, data: any): Promise<any>;
--
cgit v1.2.3-70-g09d2
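Note on the StepContainer change above: the error output is wrapped in a collapsible block labeled "View Traceback" instead of always being expanded. A minimal sketch, assuming a native details/summary structure and a preformatted body:

  import * as React from "react";

  export function ErrorTraceback(props: { error: string }) {
    return (
      <details>
        <summary>View Traceback</summary>
        {/* Keep long tracebacks scrollable instead of wrapping. */}
        <pre style={{ overflowX: "scroll" }}>{props.error}</pre>
      </details>
    );
  }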
From 6baa3fe2f83971b92f6617c3caa2c6c0351a8e8c Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Fri, 14 Jul 2023 13:47:38 -0700
Subject: patch
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 12aa27c9..0edd4885 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.165",
+ "version": "0.0.166",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.165",
+ "version": "0.0.166",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 05bd4d84..7cd7b793 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.165",
+ "version": "0.0.166",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
--
cgit v1.2.3-70-g09d2
From 98085ebc2eb39569db89a34c8d820a5d0bfe8f84 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Fri, 14 Jul 2023 17:40:16 -0700
Subject: fixed config explanation, don't read terminals
---
continuedev/src/continuedev/steps/open_config.py | 4 ++--
extension/src/continueIdeClient.ts | 26 ++++++++++++++----------
2 files changed, 17 insertions(+), 13 deletions(-)
diff --git a/continuedev/src/continuedev/steps/open_config.py b/continuedev/src/continuedev/steps/open_config.py
index 87f03e9f..af55a95a 100644
--- a/continuedev/src/continuedev/steps/open_config.py
+++ b/continuedev/src/continuedev/steps/open_config.py
@@ -14,10 +14,10 @@ class OpenConfigStep(Step):
"custom_commands": [
{
"name": "test",
- "description": "Write unit tests like I do for the highlighted code"
+ "description": "Write unit tests like I do for the highlighted code",
"prompt": "Write a comprehensive set of unit tests for the selected code. It should setup, run tests that check for correctness including important edge cases, and teardown. Ensure that the tests are complete and sophisticated."
}
- ],
+ ]
```
`"name"` is the command you will type.
`"description"` is the description displayed in the slash command menu.
diff --git a/extension/src/continueIdeClient.ts b/extension/src/continueIdeClient.ts
index 6dd117d3..2c96763d 100644
--- a/extension/src/continueIdeClient.ts
+++ b/extension/src/continueIdeClient.ts
@@ -15,6 +15,7 @@ import { FileEditWithFullContents } from "../schema/FileEditWithFullContents";
import fs = require("fs");
import { WebsocketMessenger } from "./util/messenger";
import { diffManager } from "./diffs";
+import path = require("path");
class IdeProtocolClient {
private messenger: WebsocketMessenger | null = null;
@@ -350,25 +351,28 @@ class IdeProtocolClient {
// ------------------------------------ //
// Respond to request
+ private editorIsTerminal(editor: vscode.TextEditor) {
+ return (
+ !!path.basename(editor.document.uri.fsPath).match(/\d/) ||
+ (editor.document.languageId === "plaintext" &&
+ editor.document.getText() === "accessible-buffer-accessible-buffer-")
+ );
+ }
+
getOpenFiles(): string[] {
return vscode.window.visibleTextEditors
- .filter((editor) => {
- return !(
- editor.document.uri.fsPath.endsWith("/1") ||
- (editor.document.languageId === "plaintext" &&
- editor.document.getText() ===
- "accessible-buffer-accessible-buffer-")
- );
- })
+ .filter((editor) => !this.editorIsTerminal(editor))
.map((editor) => {
return editor.document.uri.fsPath;
});
}
getVisibleFiles(): string[] {
- return vscode.window.visibleTextEditors.map((editor) => {
- return editor.document.uri.fsPath;
- });
+ return vscode.window.visibleTextEditors
+ .filter((editor) => !this.editorIsTerminal(editor))
+ .map((editor) => {
+ return editor.document.uri.fsPath;
+ });
}
saveFile(filepath: string) {
--
cgit v1.2.3-70-g09d2
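Note on the filter above: terminal-backed pseudo-documents are now excluded from both getOpenFiles and getVisibleFiles via a shared predicate. A consolidated sketch of the two heuristics from the patch; note that the digit check on the basename also matches ordinary files whose names contain digits:

  import * as vscode from "vscode";
  import path = require("path");

  function editorIsTerminal(editor: vscode.TextEditor): boolean {
    return (
      // Terminal buffers show up with numeric names...
      !!path.basename(editor.document.uri.fsPath).match(/\d/) ||
      // ...or as plaintext accessibility buffers.
      (editor.document.languageId === "plaintext" &&
        editor.document.getText() === "accessible-buffer-accessible-buffer-")
    );
  }

  // Both helpers share the same filter, so terminal buffers are never
  // reported to the server as open or visible files.
  function getVisibleFiles(): string[] {
    return vscode.window.visibleTextEditors
      .filter((editor) => !editorIsTerminal(editor))
      .map((editor) => editor.document.uri.fsPath);
  }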
From 48e5c8001e897eb37493357087410ee8f98217fa Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sat, 15 Jul 2023 14:30:11 -0700
Subject: ctrl shortcuts on windows, load models immediately
---
continuedev/src/continuedev/core/autopilot.py | 10 ++--
continuedev/src/continuedev/core/sdk.py | 59 +++++++++++++++-------
.../src/continuedev/libs/llm/hf_inference_api.py | 6 ++-
continuedev/src/continuedev/server/ide.py | 4 +-
.../src/continuedev/server/session_manager.py | 6 +--
.../react-app/src/components/StepContainer.tsx | 2 +-
extension/react-app/src/components/TextDialog.tsx | 6 ++-
extension/react-app/src/pages/gui.tsx | 6 +--
extension/react-app/src/util/index.ts | 30 +++++++++++
9 files changed, 98 insertions(+), 31 deletions(-)
create mode 100644 extension/react-app/src/util/index.ts
diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py
index 82439f49..0696c360 100644
--- a/continuedev/src/continuedev/core/autopilot.py
+++ b/continuedev/src/continuedev/core/autopilot.py
@@ -50,6 +50,8 @@ class Autopilot(ContinueBaseModel):
full_state: Union[FullState, None] = None
_on_update_callbacks: List[Callable[[FullState], None]] = []
+ continue_sdk: ContinueSDK = None
+
_active: bool = False
_should_halt: bool = False
_main_user_input_queue: List[str] = []
@@ -57,9 +59,11 @@ class Autopilot(ContinueBaseModel):
_user_input_queue = AsyncSubscriptionQueue()
_retry_queue = AsyncSubscriptionQueue()
- @cached_property
- def continue_sdk(self) -> ContinueSDK:
- return ContinueSDK(self)
+ @classmethod
+ async def create(cls, policy: Policy, ide: AbstractIdeProtocolServer, full_state: FullState) -> "Autopilot":
+ autopilot = cls(ide=ide, policy=policy)
+ autopilot.continue_sdk = await ContinueSDK.create(autopilot)
+ return autopilot
class Config:
arbitrary_types_allowed = True
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index aa2d8892..d73561d2 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -1,6 +1,6 @@
import asyncio
from functools import cached_property
-from typing import Coroutine, Union
+from typing import Coroutine, Dict, Union
import os
from ..steps.core.core import DefaultModelEditCodeStep
@@ -13,7 +13,7 @@ from ..libs.llm.hf_inference_api import HuggingFaceInferenceAPI
from ..libs.llm.openai import OpenAI
from .observation import Observation
from ..server.ide_protocol import AbstractIdeProtocolServer
-from .main import Context, ContinueCustomException, HighlightedRangeContext, History, Step, ChatMessage, ChatMessageRole
+from .main import Context, ContinueCustomException, History, Step, ChatMessage
from ..steps.core.core import *
from ..libs.llm.proxy_server import ProxyServer
@@ -22,26 +22,46 @@ class Autopilot:
pass
+ModelProvider = Literal["openai", "hf_inference_api", "ggml", "anthropic"]
+MODEL_PROVIDER_TO_ENV_VAR = {
+ "openai": "OPENAI_API_KEY",
+ "hf_inference_api": "HUGGING_FACE_TOKEN",
+ "anthropic": "ANTHROPIC_API_KEY"
+}
+
+
class Models:
- def __init__(self, sdk: "ContinueSDK"):
+ provider_keys: Dict[ModelProvider, str] = {}
+ model_providers: List[ModelProvider]
+
+ def __init__(self, sdk: "ContinueSDK", model_providers: List[ModelProvider]):
self.sdk = sdk
+ self.model_providers = model_providers
+
+ @classmethod
+ async def create(cls, sdk: "ContinueSDK", with_providers: List[ModelProvider] = ["openai"]) -> "Models":
+ models = Models(sdk, with_providers)
+ for provider in with_providers:
+ if provider in MODEL_PROVIDER_TO_ENV_VAR:
+ env_var = MODEL_PROVIDER_TO_ENV_VAR[provider]
+ models.provider_keys[provider] = await sdk.get_user_secret(
+ env_var, f'Please add your {env_var} to the .env file')
+
+ return models
def __load_openai_model(self, model: str) -> OpenAI:
- async def load_openai_model():
- api_key = await self.sdk.get_user_secret(
- 'OPENAI_API_KEY', 'Enter your OpenAI API key or press enter to try for free')
- if api_key == "":
- return ProxyServer(self.sdk.ide.unique_id, model)
- return OpenAI(api_key=api_key, default_model=model)
- return asyncio.get_event_loop().run_until_complete(load_openai_model())
+ api_key = self.provider_keys["openai"]
+ if api_key == "":
+ return ProxyServer(self.sdk.ide.unique_id, model)
+ return OpenAI(api_key=api_key, default_model=model)
+
+ def __load_hf_inference_api_model(self, model: str) -> HuggingFaceInferenceAPI:
+ api_key = self.provider_keys["hf_inference_api"]
+ return HuggingFaceInferenceAPI(api_key=api_key, model=model)
@cached_property
def starcoder(self):
- async def load_starcoder():
- api_key = await self.sdk.get_user_secret(
- 'HUGGING_FACE_TOKEN', 'Please add your Hugging Face token to the .env file')
- return HuggingFaceInferenceAPI(api_key=api_key)
- return asyncio.get_event_loop().run_until_complete(load_starcoder())
+ return self.__load_hf_inference_api_model("bigcode/starcoder")
@cached_property
def gpt35(self):
@@ -74,7 +94,7 @@ class Models:
@property
def default(self):
default_model = self.sdk.config.default_model
- return self.__model_from_name(default_model) if default_model is not None else self.gpt35
+ return self.__model_from_name(default_model) if default_model is not None else self.gpt4
class ContinueSDK(AbstractContinueSDK):
@@ -87,10 +107,15 @@ class ContinueSDK(AbstractContinueSDK):
def __init__(self, autopilot: Autopilot):
self.ide = autopilot.ide
self.__autopilot = autopilot
- self.models = Models(self)
self.context = autopilot.context
self.config = self._load_config()
+ @classmethod
+ async def create(cls, autopilot: Autopilot) -> "ContinueSDK":
+ sdk = ContinueSDK(autopilot)
+ sdk.models = await Models.create(sdk)
+ return sdk
+
config: ContinueConfig
def _load_config(self) -> ContinueConfig:
diff --git a/continuedev/src/continuedev/libs/llm/hf_inference_api.py b/continuedev/src/continuedev/libs/llm/hf_inference_api.py
index 1586c620..803ba122 100644
--- a/continuedev/src/continuedev/libs/llm/hf_inference_api.py
+++ b/continuedev/src/continuedev/libs/llm/hf_inference_api.py
@@ -9,7 +9,11 @@ DEFAULT_MAX_TIME = 120.
class HuggingFaceInferenceAPI(LLM):
api_key: str
- model: str = "bigcode/starcoder"
+ model: str
+
+ def __init__(self, api_key: str, model: str):
+ self.api_key = api_key
+ self.model = model
def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs):
"""Return the completion of the text with the given temperature."""
diff --git a/continuedev/src/continuedev/server/ide.py b/continuedev/src/continuedev/server/ide.py
index 7875c94d..77b13483 100644
--- a/continuedev/src/continuedev/server/ide.py
+++ b/continuedev/src/continuedev/server/ide.py
@@ -227,8 +227,8 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
})
async def getSessionId(self):
- session_id = self.session_manager.new_session(
- self, self.session_id).session_id
+ session_id = (await self.session_manager.new_session(
+ self, self.session_id)).session_id
await self._send_json("getSessionId", {
"sessionId": session_id
})
diff --git a/continuedev/src/continuedev/server/session_manager.py b/continuedev/src/continuedev/server/session_manager.py
index fb8ac386..6d109ca6 100644
--- a/continuedev/src/continuedev/server/session_manager.py
+++ b/continuedev/src/continuedev/server/session_manager.py
@@ -53,18 +53,18 @@ class SessionManager:
session_files = os.listdir(sessions_folder)
if f"{session_id}.json" in session_files and session_id in self.registered_ides:
if self.registered_ides[session_id].session_id is not None:
- return self.new_session(self.registered_ides[session_id], session_id=session_id)
+ return await self.new_session(self.registered_ides[session_id], session_id=session_id)
raise KeyError("Session ID not recognized", session_id)
return self.sessions[session_id]
- def new_session(self, ide: AbstractIdeProtocolServer, session_id: Union[str, None] = None) -> Session:
+ async def new_session(self, ide: AbstractIdeProtocolServer, session_id: Union[str, None] = None) -> Session:
full_state = None
if session_id is not None and os.path.exists(getSessionFilePath(session_id)):
with open(getSessionFilePath(session_id), "r") as f:
full_state = FullState(**json.load(f))
- autopilot = DemoAutopilot(
+ autopilot = await DemoAutopilot.create(
policy=DemoPolicy(), ide=ide, full_state=full_state)
session_id = session_id or str(uuid4())
ide.session_id = session_id
diff --git a/extension/react-app/src/components/StepContainer.tsx b/extension/react-app/src/components/StepContainer.tsx
index 14e9b854..7f23e333 100644
--- a/extension/react-app/src/components/StepContainer.tsx
+++ b/extension/react-app/src/components/StepContainer.tsx
@@ -181,7 +181,7 @@ function StepContainer(props: StepContainerProps) {
}
className="overflow-hidden cursor-pointer"
onClick={(e) => {
- if (e.metaKey) {
+ if (isMetaEquivalentKeyPressed(e)) {
props.onToggleAll();
} else {
props.onToggle();
diff --git a/extension/react-app/src/components/TextDialog.tsx b/extension/react-app/src/components/TextDialog.tsx
index ea5727f0..c724697d 100644
--- a/extension/react-app/src/components/TextDialog.tsx
+++ b/extension/react-app/src/components/TextDialog.tsx
@@ -81,7 +81,11 @@ const TextDialog = (props: {
rows={10}
ref={textAreaRef}
onKeyDown={(e) => {
- if (e.key === "Enter" && e.metaKey && textAreaRef.current) {
+ if (
+ e.key === "Enter" &&
+ isMetaEquivalentKeyPressed(e) &&
+ textAreaRef.current
+ ) {
props.onEnter(textAreaRef.current.value);
setText("");
} else if (e.key === "Escape") {
diff --git a/extension/react-app/src/pages/gui.tsx b/extension/react-app/src/pages/gui.tsx
index 57cebac3..cb0404ab 100644
--- a/extension/react-app/src/pages/gui.tsx
+++ b/extension/react-app/src/pages/gui.tsx
@@ -137,12 +137,12 @@ function GUI(props: GUIProps) {
useEffect(() => {
const listener = (e: any) => {
// Cmd + i to toggle fast model
- if (e.key === "i" && e.metaKey && e.shiftKey) {
+ if (e.key === "i" && isMetaEquivalentKeyPressed(e) && e.shiftKey) {
setUsingFastModel((prev) => !prev);
// Cmd + backspace to stop currently running step
} else if (
e.key === "Backspace" &&
- e.metaKey &&
+ isMetaEquivalentKeyPressed(e) &&
typeof history?.current_index !== "undefined" &&
history.timeline[history.current_index]?.active
) {
@@ -220,7 +220,7 @@ function GUI(props: GUIProps) {
if (mainTextInputRef.current) {
let input = (mainTextInputRef.current as any).inputValue;
// cmd+enter to /edit
- if (event?.metaKey) {
+ if (isMetaEquivalentKeyPressed(event)) {
input = `/edit ${input}`;
}
(mainTextInputRef.current as any).setInputValue("");
diff --git a/extension/react-app/src/util/index.ts b/extension/react-app/src/util/index.ts
new file mode 100644
index 00000000..ad711321
--- /dev/null
+++ b/extension/react-app/src/util/index.ts
@@ -0,0 +1,30 @@
+type Platform = "mac" | "linux" | "windows" | "unknown";
+
+function getPlatform(): Platform {
+ const platform = window.navigator.platform.toUpperCase();
+ if (platform.indexOf("MAC") >= 0) {
+ return "mac";
+ } else if (platform.indexOf("LINUX") >= 0) {
+ return "linux";
+ } else if (platform.indexOf("WIN") >= 0) {
+ return "windows";
+ } else {
+ return "unknown";
+ }
+}
+
+function isMetaEquivalentKeyPressed(event: {
+ metaKey: boolean;
+ ctrlKey: boolean;
+}): boolean {
+ const platform = getPlatform();
+ switch (platform) {
+ case "mac":
+ return event.metaKey;
+ case "linux":
+ case "windows":
+ return event.ctrlKey;
+ default:
+ return event.metaKey;
+ }
+}
--
cgit v1.2.3-70-g09d2
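The new util/index.ts helper above lets every shortcut treat Cmd on macOS and Ctrl on Windows/Linux as the same modifier, instead of checking e.metaKey directly. A small usage sketch (handleKeyDown and submit are illustrative names; the import path matches the one the components above use):

import { isMetaEquivalentKeyPressed } from "../util";

// Cmd+Enter on macOS or Ctrl+Enter elsewhere triggers the action, because
// isMetaEquivalentKeyPressed checks metaKey or ctrlKey based on the platform.
function handleKeyDown(e: KeyboardEvent, submit: () => void) {
  if (e.key === "Enter" && isMetaEquivalentKeyPressed(e)) {
    submit();
  }
}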
From abe77c56abd7aea66fa85bd1257f76dc2d435a15 Mon Sep 17 00:00:00 2001
From: sestinj
Date: Sat, 15 Jul 2023 14:35:52 -0700
Subject: fix vscode uri parsing for diff
---
extension/src/diffs.ts | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/extension/src/diffs.ts b/extension/src/diffs.ts
index d04f9bdb..db6a6490 100644
--- a/extension/src/diffs.ts
+++ b/extension/src/diffs.ts
@@ -56,7 +56,7 @@ class DiffManager {
return undefined;
}
- const rightUri = vscode.Uri.parse(newFilepath);
+ const rightUri = vscode.Uri.file(newFilepath);
const leftUri = vscode.Uri.file(originalFilepath);
const title = "Continue Diff";
console.log(
--
cgit v1.2.3-70-g09d2
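The one-line change above matters mostly on Windows: vscode.Uri.parse interprets its argument as a full URI, so a path beginning with a drive letter such as "c:" has that drive letter taken as the URI scheme, while vscode.Uri.file always yields a file-scheme URI whose fsPath is the given path. A minimal illustration (the path literal is made up):

import * as vscode from "vscode";

const p = "c:\\Users\\example\\project\\file.ts"; // illustrative Windows path

const parsed = vscode.Uri.parse(p); // drive letter is mistaken for the scheme ("c")
const asFile = vscode.Uri.file(p);  // scheme is "file", fsPath is the path itself

console.log(parsed.scheme, asFile.scheme, asFile.fsPath);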
From 45ee33f7fd84c0bc49d35d9d1a7a3a8e9f6addd7 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sat, 15 Jul 2023 15:06:32 -0700
Subject: use correct label for meta key
---
extension/react-app/src/components/ComboBox.tsx | 3 ++-
.../react-app/src/components/StepContainer.tsx | 7 +++++-
extension/react-app/src/util/index.ts | 17 +++++++++++--
extension/src/diffs.ts | 3 ++-
extension/src/lang-server/codeLens.ts | 7 +++---
extension/src/util/util.ts | 29 ++++++++++++++++++++++
6 files changed, 58 insertions(+), 8 deletions(-)
diff --git a/extension/react-app/src/components/ComboBox.tsx b/extension/react-app/src/components/ComboBox.tsx
index 754c9445..f11e07af 100644
--- a/extension/react-app/src/components/ComboBox.tsx
+++ b/extension/react-app/src/components/ComboBox.tsx
@@ -13,6 +13,7 @@ import HeaderButtonWithText from "./HeaderButtonWithText";
import { DocumentPlus } from "@styled-icons/heroicons-outline";
import { HighlightedRangeContext } from "../../../schema/FullState";
import { postVscMessage } from "../vscode";
+import { getMetaKeyLabel } from "../util";
// #region styled components
const mainInputFontSize = 13;
@@ -286,7 +287,7 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
{
const target = e.target as HTMLTextAreaElement;
diff --git a/extension/react-app/src/components/StepContainer.tsx b/extension/react-app/src/components/StepContainer.tsx
index 7f23e333..93bdbc89 100644
--- a/extension/react-app/src/components/StepContainer.tsx
+++ b/extension/react-app/src/components/StepContainer.tsx
@@ -17,6 +17,7 @@ import { StopCircle } from "@styled-icons/heroicons-solid";
import { HistoryNode } from "../../../schema/HistoryNode";
import HeaderButtonWithText from "./HeaderButtonWithText";
import MarkdownPreview from "@uiw/react-markdown-preview";
+import { getMetaKeyLabel, isMetaEquivalentKeyPressed } from "../util";
interface StepContainerProps {
historyNode: HistoryNode;
@@ -217,7 +218,11 @@ function StepContainer(props: StepContainerProps) {
e.stopPropagation();
props.onDelete();
}}
- text={props.historyNode.active ? "Stop (⌘⌫)" : "Delete"}
+ text={
+ props.historyNode.active
+ ? `Stop (${getMetaKeyLabel()}⌫)`
+ : "Delete"
+ }
>
{props.historyNode.active ? (
diff --git a/extension/react-app/src/util/index.ts b/extension/react-app/src/util/index.ts
index ad711321..c4168e13 100644
--- a/extension/react-app/src/util/index.ts
+++ b/extension/react-app/src/util/index.ts
@@ -1,6 +1,6 @@
type Platform = "mac" | "linux" | "windows" | "unknown";
-function getPlatform(): Platform {
+export function getPlatform(): Platform {
const platform = window.navigator.platform.toUpperCase();
if (platform.indexOf("MAC") >= 0) {
return "mac";
@@ -13,7 +13,7 @@ function getPlatform(): Platform {
}
}
-function isMetaEquivalentKeyPressed(event: {
+export function isMetaEquivalentKeyPressed(event: {
metaKey: boolean;
ctrlKey: boolean;
}): boolean {
@@ -28,3 +28,16 @@ function isMetaEquivalentKeyPressed(event: {
return event.metaKey;
}
}
+
+export function getMetaKeyLabel(): string {
+ const platform = getPlatform();
+ switch (platform) {
+ case "mac":
+ return "⌘";
+ case "linux":
+ case "windows":
+ return "^";
+ default:
+ return "⌘";
+ }
+}
diff --git a/extension/src/diffs.ts b/extension/src/diffs.ts
index d04f9bdb..9d0c9fe7 100644
--- a/extension/src/diffs.ts
+++ b/extension/src/diffs.ts
@@ -3,6 +3,7 @@ import * as path from "path";
import * as fs from "fs";
import * as vscode from "vscode";
import { extensionContext, ideProtocolClient } from "./activation/activate";
+import { getMetaKeyLabel } from "./util/util";
interface DiffInfo {
originalFilepath: string;
@@ -86,7 +87,7 @@ class DiffManager {
) {
vscode.window
.showInformationMessage(
- "Accept (⌘⇧↩) or reject (⌘⇧⌫) at the top of the file.",
+ `Accept (${getMetaKeyLabel()}⇧↩) or reject (${getMetaKeyLabel()}⇧⌫) at the top of the file.`,
"Got it",
"Don't show again"
)
diff --git a/extension/src/lang-server/codeLens.ts b/extension/src/lang-server/codeLens.ts
index 1cfef5d5..ba80e557 100644
--- a/extension/src/lang-server/codeLens.ts
+++ b/extension/src/lang-server/codeLens.ts
@@ -3,6 +3,7 @@ import { editorToSuggestions, editorSuggestionsLocked } from "../suggestions";
import * as path from "path";
import * as os from "os";
import { DIFF_DIRECTORY, diffManager } from "../diffs";
+import { getMetaKeyLabel } from "../util/util";
class SuggestionsCodeLensProvider implements vscode.CodeLensProvider {
public provideCodeLenses(
document: vscode.TextDocument,
@@ -35,7 +36,7 @@ class SuggestionsCodeLensProvider implements vscode.CodeLensProvider {
if (codeLenses.length === 2) {
codeLenses.push(
new vscode.CodeLens(range, {
- title: "(⌘⇧↩/⌘⇧⌫ to accept/reject all)",
+ title: `(${getMetaKeyLabel()}⇧↩/${getMetaKeyLabel()}⇧⌫ to accept/reject all)`,
command: "",
})
);
@@ -60,12 +61,12 @@ class DiffViewerCodeLensProvider implements vscode.CodeLensProvider {
}
codeLenses.push(
new vscode.CodeLens(range, {
- title: "Accept All ✅ (⌘⇧↩)",
+ title: `Accept All ✅ (${getMetaKeyLabel()}⇧↩)`,
command: "continue.acceptDiff",
arguments: [document.uri.fsPath],
}),
new vscode.CodeLens(range, {
- title: "Reject All ❌ (⌘⇧⌫)",
+ title: `Reject All ❌ (${getMetaKeyLabel()}⇧⌫)`,
command: "continue.rejectDiff",
arguments: [document.uri.fsPath],
})
diff --git a/extension/src/util/util.ts b/extension/src/util/util.ts
index d33593e1..dfc10c90 100644
--- a/extension/src/util/util.ts
+++ b/extension/src/util/util.ts
@@ -1,5 +1,6 @@
import { RangeInFile, SerializedDebugContext } from "../client";
import * as fs from "fs";
+const os = require("os");
function charIsEscapedAtIndex(index: number, str: string): boolean {
if (index === 0) return false;
@@ -113,3 +114,31 @@ export function debounced(delay: number, fn: Function) {
}, delay);
};
}
+
+type Platform = "mac" | "linux" | "windows" | "unknown";
+
+function getPlatform(): Platform {
+ const platform = os.platform();
+ if (platform === "darwin") {
+ return "mac";
+ } else if (platform === "linux") {
+ return "linux";
+ } else if (platform === "win32") {
+ return "windows";
+ } else {
+ return "unknown";
+ }
+}
+
+export function getMetaKeyLabel() {
+ const platform = getPlatform();
+ switch (platform) {
+ case "mac":
+ return "⌘";
+ case "linux":
+ case "windows":
+ return "^";
+ default:
+ return "⌘";
+ }
+}
--
cgit v1.2.3-70-g09d2
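With getMetaKeyLabel defined both for the webview (via navigator.platform) and for the extension host (via os.platform()), user-facing shortcut hints can be composed per platform instead of hard-coding ⌘. A short usage sketch (acceptAllTitle is an illustrative name; the import path is the one codeLens.ts uses above):

import { getMetaKeyLabel } from "../util/util";

// Reads "Accept All ✅ (⌘⇧↩)" on macOS and "Accept All ✅ (^⇧↩)" on Windows/Linux.
const acceptAllTitle = `Accept All ✅ (${getMetaKeyLabel()}⇧↩)`;

console.log(acceptAllTitle);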
From 5921c70228ea74fe2ed687c6bfda8a876d5455c5 Mon Sep 17 00:00:00 2001
From: sestinj
Date: Sat, 15 Jul 2023 15:11:37 -0700
Subject: lowercase C: in win path
---
extension/src/diffs.ts | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/extension/src/diffs.ts b/extension/src/diffs.ts
index db6a6490..5a82deb6 100644
--- a/extension/src/diffs.ts
+++ b/extension/src/diffs.ts
@@ -12,7 +12,7 @@ interface DiffInfo {
range: vscode.Range;
}
-export const DIFF_DIRECTORY = path.join(os.homedir(), ".continue", "diffs");
+export const DIFF_DIRECTORY = path.join(os.homedir(), ".continue", "diffs").replace(/^C:/, "c:");
class DiffManager {
// Create a temporary file in the global .continue directory which displays the updated version
--
cgit v1.2.3-70-g09d2
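Lower-casing the drive letter keeps DIFF_DIRECTORY string-comparable with editor paths: os.homedir() on Windows typically returns an uppercase "C:", while the fsPath values VS Code reports tend to use a lowercase drive letter, so prefix checks against DIFF_DIRECTORY would otherwise miss. The patch handles the common "C:" case inline; a sketch of the same normalization as a reusable helper (normalizeDriveLetter is an illustrative name that generalizes to any drive letter):

import * as os from "os";
import * as path from "path";

// Lower-case a leading Windows drive letter so comparisons against
// VS Code's lowercase-drive fsPath values line up.
function normalizeDriveLetter(p: string): string {
  return p.replace(/^([A-Z]):/, (_match, drive) => `${drive.toLowerCase()}:`);
}

const diffDirectory = normalizeDriveLetter(
  path.join(os.homedir(), ".continue", "diffs")
);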
From f6229ad2ab4f5587b2f48f2ec098aa109be9ebf3 Mon Sep 17 00:00:00 2001
From: sestinj
Date: Sat, 15 Jul 2023 16:25:51 -0700
Subject: Reliably show error messages when environmentSetup fails
---
extension/src/activation/environmentSetup.ts | 59 +++++++++++++++++++---------
extension/src/diffs.ts | 5 +++
2 files changed, 46 insertions(+), 18 deletions(-)
diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts
index 7bd08929..374c38c0 100644
--- a/extension/src/activation/environmentSetup.ts
+++ b/extension/src/activation/environmentSetup.ts
@@ -11,6 +11,8 @@ import * as os from "os";
import fkill from "fkill";
import { sendTelemetryEvent, TelemetryEvent } from "../telemetry";
+const WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR = "A Python virtual enviroment cannot be activated because running scripts is disabled for this user. Please enable signed scripts to run with this command in PowerShell: `Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser`, reload VS Code, and then try again.";
+
const MAX_RETRIES = 3;
async function retryThenFail(
fn: () => Promise<any>,
@@ -22,9 +24,25 @@ async function retryThenFail(
if (retries > 0) {
return await retryThenFail(fn, retries - 1);
}
- vscode.window.showInformationMessage(
- "Failed to set up Continue extension. Please email nate@continue.dev and we'll get this fixed ASAP!"
- );
+
+ // Show corresponding error message depending on the platform
+ let msg = "Failed to set up Continue extension. Please email nate@continue.dev and we'll get this fixed ASAP!";
+ try {
+ switch (process.platform) {
+ case "win32":
+ msg = WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR;
+ break;
+ case "darwin":
+ break;
+ case "linux":
+ const [pythonCmd,] = await getPythonPipCommands();
+ msg = await getLinuxAptInstallError(pythonCmd);
+ break;
+ }
+ } finally {
+ vscode.window.showErrorMessage(msg);
+ }
+
sendTelemetryEvent(TelemetryEvent.ExtensionSetupError, {
error: e.message,
});
@@ -186,6 +204,24 @@ async function checkRequirementsInstalled() {
return fs.existsSync(continuePath);
}
+async function getLinuxAptInstallError(pythonCmd: string) {
+ // First, try to run the command to install python3-venv
+ let [stdout, stderr] = await runCommand(`${pythonCmd} --version`);
+ if (stderr) {
+ await vscode.window.showErrorMessage(
+ "Python3 is not installed. Please install from https://www.python.org/downloads, reload VS Code, and try again."
+ );
+ throw new Error(stderr);
+ }
+ const version = stdout.split(" ")[1].split(".")[1];
+ const installVenvCommand = `apt-get install python3.${version}-venv`;
+ await runCommand("apt-get update");
+ // Ask the user to run the command to install python3-venv (requires sudo, so we can't)
+ // First, get the python version
+ const msg = `[Important] Continue needs to create a Python virtual environment, but python3.${version}-venv is not installed. Please run this command in your terminal: \`${installVenvCommand}\`, reload VS Code, and then try again.`;
+ return msg;
+}
+
async function setupPythonEnv() {
console.log("Setting up python env for Continue extension...");
@@ -211,27 +247,14 @@ async function setupPythonEnv() {
stderr.includes("running scripts is disabled on this system")
) {
await vscode.window.showErrorMessage(
- "A Python virtual enviroment cannot be activated because running scripts is disabled for this user. Please enable signed scripts to run with this command in PowerShell: `Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser`, reload VS Code, and then try again."
+ WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR
);
throw new Error(stderr);
} else if (
stderr?.includes("On Debian/Ubuntu systems") ||
stdout?.includes("On Debian/Ubuntu systems")
) {
- // First, try to run the command to install python3-venv
- let [stdout, stderr] = await runCommand(`${pythonCmd} --version`);
- if (stderr) {
- await vscode.window.showErrorMessage(
- "Python3 is not installed. Please install from https://www.python.org/downloads, reload VS Code, and try again."
- );
- throw new Error(stderr);
- }
- const version = stdout.split(" ")[1].split(".")[1];
- const installVenvCommand = `apt-get install python3.${version}-venv`;
- await runCommand("apt-get update");
- // Ask the user to run the command to install python3-venv (requires sudo, so we can't)
- // First, get the python version
- const msg = `[Important] Continue needs to create a Python virtual environment, but python3.${version}-venv is not installed. Please run this command in your terminal: \`${installVenvCommand}\`, reload VS Code, and then try again.`;
+ const msg = await getLinuxAptInstallError(pythonCmd);
console.log(msg);
await vscode.window.showErrorMessage(msg);
} else if (checkEnvExists()) {
diff --git a/extension/src/diffs.ts b/extension/src/diffs.ts
index 19004dfb..2860258d 100644
--- a/extension/src/diffs.ts
+++ b/extension/src/diffs.ts
@@ -149,6 +149,11 @@ class DiffManager {
this.diffs.set(newFilepath, diffInfo);
}
+ vscode.commands.executeCommand(
+ "workbench.action.files.revert",
+ vscode.Uri.file(newFilepath)
+ );
+
return newFilepath;
}
--
cgit v1.2.3-70-g09d2
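The important structural piece above is the try/finally around the platform switch: even if composing the platform-specific message itself throws (for example while probing the Python install on Linux), the finally block still calls showErrorMessage with whatever message is available. Reduced to its shape (reportSetupFailure and buildPlatformMessage are illustrative stand-ins, not names from the patch):

import * as vscode from "vscode";

async function reportSetupFailure(buildPlatformMessage: () => Promise<string>) {
  // Start from a generic fallback so there is always something to show.
  let msg =
    "Failed to set up Continue extension. Please email nate@continue.dev and we'll get this fixed ASAP!";
  try {
    // May throw while specializing the message; that is acceptable.
    msg = await buildPlatformMessage();
  } finally {
    // Runs whether or not the specialization succeeded.
    vscode.window.showErrorMessage(msg);
  }
}

If buildPlatformMessage throws, the error still propagates to the caller after the message has been shown, which is exactly the behavior the patch relies on.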
From 3a39b979c55b005d9bb18b88b43ca7293ee5410d Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sat, 15 Jul 2023 16:32:56 -0700
Subject: patch
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
extension/react-app/src/components/TextDialog.tsx | 1 +
extension/react-app/src/pages/gui.tsx | 1 +
extension/src/activation/environmentSetup.ts | 19 ++++++++-----------
5 files changed, 13 insertions(+), 14 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 0edd4885..e77bfac2 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.166",
+ "version": "0.0.167",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.166",
+ "version": "0.0.167",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 7cd7b793..bbd18b12 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.166",
+ "version": "0.0.167",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
diff --git a/extension/react-app/src/components/TextDialog.tsx b/extension/react-app/src/components/TextDialog.tsx
index c724697d..646d6846 100644
--- a/extension/react-app/src/components/TextDialog.tsx
+++ b/extension/react-app/src/components/TextDialog.tsx
@@ -2,6 +2,7 @@
import React, { useEffect, useState } from "react";
import styled from "styled-components";
import { Button, buttonColor, secondaryDark, vscBackground } from ".";
+import { isMetaEquivalentKeyPressed } from "../util";
const ScreenCover = styled.div`
position: absolute;
diff --git a/extension/react-app/src/pages/gui.tsx b/extension/react-app/src/pages/gui.tsx
index cb0404ab..64207487 100644
--- a/extension/react-app/src/pages/gui.tsx
+++ b/extension/react-app/src/pages/gui.tsx
@@ -23,6 +23,7 @@ import { RootStore } from "../redux/store";
import { postVscMessage } from "../vscode";
import UserInputContainer from "../components/UserInputContainer";
import Onboarding from "../components/Onboarding";
+import { isMetaEquivalentKeyPressed } from "../util";
const TopGUIDiv = styled.div`
overflow: hidden;
diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts
index 374c38c0..6a66532e 100644
--- a/extension/src/activation/environmentSetup.ts
+++ b/extension/src/activation/environmentSetup.ts
@@ -11,7 +11,8 @@ import * as os from "os";
import fkill from "fkill";
import { sendTelemetryEvent, TelemetryEvent } from "../telemetry";
-const WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR = "A Python virtual enviroment cannot be activated because running scripts is disabled for this user. Please enable signed scripts to run with this command in PowerShell: `Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser`, reload VS Code, and then try again.";
+const WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR =
+ "A Python virtual enviroment cannot be activated because running scripts is disabled for this user. In order to use Continue, please enable signed scripts to run with this command in PowerShell: `Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser`, reload VS Code, and then try again.";
const MAX_RETRIES = 3;
async function retryThenFail(
@@ -26,7 +27,8 @@ async function retryThenFail(
}
// Show corresponding error message depending on the platform
- let msg = "Failed to set up Continue extension. Please email nate@continue.dev and we'll get this fixed ASAP!";
+ let msg =
+ "Failed to set up Continue extension. Please email nate@continue.dev and we'll get this fixed ASAP!";
try {
switch (process.platform) {
case "win32":
@@ -35,14 +37,14 @@ async function retryThenFail(
case "darwin":
break;
case "linux":
- const [pythonCmd,] = await getPythonPipCommands();
+ const [pythonCmd] = await getPythonPipCommands();
msg = await getLinuxAptInstallError(pythonCmd);
break;
}
} finally {
vscode.window.showErrorMessage(msg);
}
-
+
sendTelemetryEvent(TelemetryEvent.ExtensionSetupError, {
error: e.message,
});
@@ -216,10 +218,7 @@ async function getLinuxAptInstallError(pythonCmd: string) {
const version = stdout.split(" ")[1].split(".")[1];
const installVenvCommand = `apt-get install python3.${version}-venv`;
await runCommand("apt-get update");
- // Ask the user to run the command to install python3-venv (requires sudo, so we can't)
- // First, get the python version
- const msg = `[Important] Continue needs to create a Python virtual environment, but python3.${version}-venv is not installed. Please run this command in your terminal: \`${installVenvCommand}\`, reload VS Code, and then try again.`;
- return msg;
+ return `[Important] Continue needs to create a Python virtual environment, but python3.${version}-venv is not installed. Please run this command in your terminal: \`${installVenvCommand}\`, reload VS Code, and then try again.`;
}
async function setupPythonEnv() {
@@ -246,9 +245,7 @@ async function setupPythonEnv() {
stderr &&
stderr.includes("running scripts is disabled on this system")
) {
- await vscode.window.showErrorMessage(
- WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR
- );
+ await vscode.window.showErrorMessage(WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR);
throw new Error(stderr);
} else if (
stderr?.includes("On Debian/Ubuntu systems") ||
--
cgit v1.2.3-70-g09d2
From 2e52eb4cc50f7c6fffcf7687ecd15d80073b07c6 Mon Sep 17 00:00:00 2001
From: sestinj
Date: Sat, 15 Jul 2023 16:42:58 -0700
Subject: Change to remote signed in order to setup python venv
---
extension/src/activation/environmentSetup.ts | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts
index 6a66532e..43e7832c 100644
--- a/extension/src/activation/environmentSetup.ts
+++ b/extension/src/activation/environmentSetup.ts
@@ -20,6 +20,10 @@ async function retryThenFail(
retries: number = MAX_RETRIES
): Promise<any> {
try {
+ if (retries < MAX_RETRIES && process.platform === "win32") {
+ const [stdout, stderr] = await runCommand("Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser");
+ }
+
return await fn();
} catch (e: any) {
if (retries > 0) {
--
cgit v1.2.3-70-g09d2
From 26e8d5df03d516cb2a6d6ee53144def595c47ff5 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sat, 15 Jul 2023 16:49:59 -0700
Subject: patch
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index e77bfac2..ba455540 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.167",
+ "version": "0.0.168",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.167",
+ "version": "0.0.168",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index bbd18b12..0c8e2037 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.167",
+ "version": "0.0.168",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
--
cgit v1.2.3-70-g09d2
From 21dc20871347165a8526bbbea1d351e2f10f4d93 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sat, 15 Jul 2023 17:46:03 -0700
Subject: patch
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
extension/src/activation/environmentSetup.ts | 6 +++++-
3 files changed, 8 insertions(+), 4 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index ba455540..edd0f0d0 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.168",
+ "version": "0.0.169",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.168",
+ "version": "0.0.169",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 0c8e2037..91a863ff 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.168",
+ "version": "0.0.169",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts
index 43e7832c..b4ada632 100644
--- a/extension/src/activation/environmentSetup.ts
+++ b/extension/src/activation/environmentSetup.ts
@@ -21,7 +21,11 @@ async function retryThenFail(
): Promise<any> {
try {
if (retries < MAX_RETRIES && process.platform === "win32") {
- const [stdout, stderr] = await runCommand("Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser");
+ const [stdout, stderr] = await runCommand(
+ "Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser"
+ );
+ console.log("Execution policy stdout: ", stdout);
+ console.log("Execution policy stderr: ", stderr);
}
return await fn();
--
cgit v1.2.3-70-g09d2
From 1c41fb0a803c886841dbea9fa5e4129059a32e5d Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sat, 15 Jul 2023 17:57:51 -0700
Subject: patch
---
extension/package-lock.json | 4 +-
extension/package.json | 2 +-
extension/src/activation/environmentSetup.ts | 92 +++++++++++++++-------------
3 files changed, 54 insertions(+), 44 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index edd0f0d0..f793abae 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.169",
+ "version": "0.0.171",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.169",
+ "version": "0.0.171",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 91a863ff..38dc4542 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.169",
+ "version": "0.0.171",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts
index b4ada632..be1c220c 100644
--- a/extension/src/activation/environmentSetup.ts
+++ b/extension/src/activation/environmentSetup.ts
@@ -21,11 +21,16 @@ async function retryThenFail(
): Promise<any> {
try {
if (retries < MAX_RETRIES && process.platform === "win32") {
- const [stdout, stderr] = await runCommand(
- "Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser"
- );
- console.log("Execution policy stdout: ", stdout);
- console.log("Execution policy stderr: ", stderr);
+ let [stdout, stderr] = await runCommand("Get-ExecutionPolicy");
+ if (!stdout.includes("RemoteSigned")) {
+ [stdout, stderr] = await runCommand(
+ "Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser"
+ );
+ console.log("Execution policy stdout: ", stdout);
+ console.log("Execution policy stderr: ", stderr);
+ // Then reload the window for this to take effect
+ await vscode.commands.executeCommand("workbench.action.reloadWindow");
+ }
}
return await fn();
@@ -238,45 +243,50 @@ async function setupPythonEnv() {
pipCmd
);
- // First, create the virtual environment
- if (checkEnvExists()) {
- console.log("Python env already exists, skipping...");
- } else {
- // Assemble the command to create the env
- const createEnvCommand = [
- `cd "${serverPath()}"`,
- `${pythonCmd} -m venv env`,
- ].join(" ; ");
-
- const [stdout, stderr] = await runCommand(createEnvCommand);
- if (
- stderr &&
- stderr.includes("running scripts is disabled on this system")
- ) {
- await vscode.window.showErrorMessage(WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR);
- throw new Error(stderr);
- } else if (
- stderr?.includes("On Debian/Ubuntu systems") ||
- stdout?.includes("On Debian/Ubuntu systems")
- ) {
- const msg = await getLinuxAptInstallError(pythonCmd);
- console.log(msg);
- await vscode.window.showErrorMessage(msg);
- } else if (checkEnvExists()) {
- console.log("Successfully set up python env at ", `${serverPath()}/env`);
+ await retryThenFail(async () => {
+ // First, create the virtual environment
+ if (checkEnvExists()) {
+ console.log("Python env already exists, skipping...");
} else {
- const msg = [
- "Python environment not successfully created. Trying again. Here was the stdout + stderr: ",
- `stdout: ${stdout}`,
- `stderr: ${stderr}`,
- ].join("\n\n");
- console.log(msg);
- throw new Error(msg);
+ // Assemble the command to create the env
+ const createEnvCommand = [
+ `cd "${serverPath()}"`,
+ `${pythonCmd} -m venv env`,
+ ].join(" ; ");
+
+ const [stdout, stderr] = await runCommand(createEnvCommand);
+ if (
+ stderr &&
+ stderr.includes("running scripts is disabled on this system")
+ ) {
+ await vscode.window.showErrorMessage(
+ WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR
+ );
+ throw new Error(stderr);
+ } else if (
+ stderr?.includes("On Debian/Ubuntu systems") ||
+ stdout?.includes("On Debian/Ubuntu systems")
+ ) {
+ const msg = await getLinuxAptInstallError(pythonCmd);
+ console.log(msg);
+ await vscode.window.showErrorMessage(msg);
+ } else if (checkEnvExists()) {
+ console.log(
+ "Successfully set up python env at ",
+ `${serverPath()}/env`
+ );
+ } else {
+ const msg = [
+ "Python environment not successfully created. Trying again. Here was the stdout + stderr: ",
+ `stdout: ${stdout}`,
+ `stderr: ${stderr}`,
+ ].join("\n\n");
+ console.log(msg);
+ throw new Error(msg);
+ }
}
- }
- // Install the requirements
- await retryThenFail(async () => {
+ // Install the requirements
if (await checkRequirementsInstalled()) {
console.log("Python requirements already installed, skipping...");
} else {
--
cgit v1.2.3-70-g09d2
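Across the last few patches the Windows branch settles on check-then-set: read the current policy with Get-ExecutionPolicy and only run Set-ExecutionPolicy when it is not already RemoteSigned, so repeated retries do not keep re-issuing the command. A rough sketch of that guard on top of Node's child_process (runPowershell and ensureRemoteSignedPolicy are illustrative stand-ins for the extension's runCommand-based code):

import { exec } from "child_process";
import { promisify } from "util";

const execAsync = promisify(exec);

// Illustrative stand-in for runCommand(): resolve to [stdout, stderr].
async function runPowershell(cmd: string): Promise<[string, string]> {
  const { stdout, stderr } = await execAsync(cmd, { shell: "powershell.exe" });
  return [stdout, stderr];
}

async function ensureRemoteSignedPolicy(): Promise<void> {
  if (process.platform !== "win32") {
    return;
  }
  const [stdout] = await runPowershell("Get-ExecutionPolicy");
  if (!stdout.includes("RemoteSigned")) {
    // -Scope CurrentUser avoids requiring administrator rights.
    await runPowershell(
      "Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser"
    );
  }
}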
From 07809c6717856e854d9005f77d51d274dec0fcc4 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sat, 15 Jul 2023 18:47:28 -0700
Subject: 5s timeout on websocket connections
---
continuedev/src/continuedev/server/gui.py | 9 +++++++--
continuedev/src/continuedev/server/ide.py | 8 ++++++--
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
extension/src/activation/environmentSetup.ts | 5 ++---
5 files changed, 18 insertions(+), 10 deletions(-)
diff --git a/continuedev/src/continuedev/server/gui.py b/continuedev/src/continuedev/server/gui.py
index 9a411fbe..4201353e 100644
--- a/continuedev/src/continuedev/server/gui.py
+++ b/continuedev/src/continuedev/server/gui.py
@@ -1,3 +1,4 @@
+import asyncio
import json
from fastapi import Depends, Header, WebSocket, APIRouter
from starlette.websockets import WebSocketState, WebSocketDisconnect
@@ -60,8 +61,12 @@ class GUIProtocolServer(AbstractGUIProtocolServer):
"data": data
})
- async def _receive_json(self, message_type: str) -> Any:
- return await self.sub_queue.get(message_type)
+ async def _receive_json(self, message_type: str, timeout: int = 5) -> Any:
+ try:
+ return await asyncio.wait_for(self.sub_queue.get(message_type), timeout=timeout)
+ except asyncio.TimeoutError:
+ raise Exception(
+ "GUI Protocol _receive_json timed out after 5 seconds")
async def _send_and_receive_json(self, data: Any, resp_model: Type[T], message_type: str) -> T:
await self._send_json(message_type, data)
diff --git a/continuedev/src/continuedev/server/ide.py b/continuedev/src/continuedev/server/ide.py
index 77b13483..e5e8de02 100644
--- a/continuedev/src/continuedev/server/ide.py
+++ b/continuedev/src/continuedev/server/ide.py
@@ -156,8 +156,12 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
"data": data
})
- async def _receive_json(self, message_type: str) -> Any:
- return await self.sub_queue.get(message_type)
+ async def _receive_json(self, message_type: str, timeout: int = 5) -> Any:
+ try:
+ return await asyncio.wait_for(self.sub_queue.get(message_type), timeout=timeout)
+ except asyncio.TimeoutError:
+ raise Exception(
+ "IDE Protocol _receive_json timed out after 5 seconds")
async def _send_and_receive_json(self, data: Any, resp_model: Type[T], message_type: str) -> T:
await self._send_json(message_type, data)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index f793abae..b86cb10e 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.171",
+ "version": "0.0.172",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.171",
+ "version": "0.0.172",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 38dc4542..6b719723 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.171",
+ "version": "0.0.172",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts
index be1c220c..7a0d24d4 100644
--- a/extension/src/activation/environmentSetup.ts
+++ b/extension/src/activation/environmentSetup.ts
@@ -28,8 +28,6 @@ async function retryThenFail(
);
console.log("Execution policy stdout: ", stdout);
console.log("Execution policy stderr: ", stderr);
- // Then reload the window for this to take effect
- await vscode.commands.executeCommand("workbench.action.reloadWindow");
}
}
@@ -447,7 +445,8 @@ export async function startContinuePythonServer() {
console.log(`stdout: ${data}`);
if (
data.includes("Uvicorn running on") || // Successfully started the server
- data.includes("address already in use") // The server is already running (probably a simultaneously opened VS Code window)
+ data.includes("only one usage of each socket address") || // [windows] The server is already running (probably a simultaneously opened VS Code window)
+ data.includes("address already in use") // [mac/linux] The server is already running (probably a simultaneously opened VS Code window)
) {
console.log("Successfully started Continue python server");
resolve(null);
--
cgit v1.2.3-70-g09d2
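On the Python side the 5-second budget comes from asyncio.wait_for; the same bounded-wait pattern in TypeScript can be written as a race between the real promise and a timer. A generic sketch (withTimeout is not code from the extension):

// Reject if `promise` has not settled within `ms` milliseconds, mirroring
// asyncio.wait_for(..., timeout=5) in gui.py and ide.py above.
function withTimeout<T>(promise: Promise<T>, ms: number, label: string): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined;
  const timeout = new Promise<never>((_resolve, reject) => {
    timer = setTimeout(
      () => reject(new Error(`${label} timed out after ${ms / 1000} seconds`)),
      ms
    );
  });
  return Promise.race([promise, timeout]).finally(() => {
    if (timer !== undefined) {
      clearTimeout(timer);
    }
  });
}

// e.g. const reply = await withTimeout(waitForMessage("workspaceDirectory"), 5000, "IDE protocol receive");
// where waitForMessage stands for whatever async receive the caller already has.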
From 1cc788de843169400d70f26e533417f3a0a95b29 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sat, 15 Jul 2023 19:14:01 -0700
Subject: one window wait for other to create venv
---
extension/package-lock.json | 4 +-
extension/package.json | 2 +-
extension/src/activation/environmentSetup.ts | 99 +++++++++++++++++-----------
3 files changed, 63 insertions(+), 42 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index b86cb10e..f1423041 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.172",
+ "version": "0.0.173",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.172",
+ "version": "0.0.173",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 6b719723..0638e768 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.172",
+ "version": "0.0.173",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts
index 7a0d24d4..928fe04b 100644
--- a/extension/src/activation/environmentSetup.ts
+++ b/extension/src/activation/environmentSetup.ts
@@ -53,6 +53,7 @@ async function retryThenFail(
break;
}
} finally {
+ console.log("After retries, failed to set up Continue extension", msg);
vscode.window.showErrorMessage(msg);
}
@@ -232,57 +233,77 @@ async function getLinuxAptInstallError(pythonCmd: string) {
return `[Important] Continue needs to create a Python virtual environment, but python3.${version}-venv is not installed. Please run this command in your terminal: \`${installVenvCommand}\`, reload VS Code, and then try again.`;
}
-async function setupPythonEnv() {
- console.log("Setting up python env for Continue extension...");
-
- const [pythonCmd, pipCmd] = await getPythonPipCommands();
- const [activateCmd, pipUpgradeCmd] = getActivateUpgradeCommands(
- pythonCmd,
- pipCmd
- );
+async function createPythonVenv(pythonCmd: string) {
+ if (checkEnvExists()) {
+ console.log("Python env already exists, skipping...");
+ } else {
+ // Assemble the command to create the env
+ const createEnvCommand = [
+ `cd "${serverPath()}"`,
+ `${pythonCmd} -m venv env`,
+ ].join(" ; ");
- await retryThenFail(async () => {
- // First, create the virtual environment
- if (checkEnvExists()) {
- console.log("Python env already exists, skipping...");
+ const [stdout, stderr] = await runCommand(createEnvCommand);
+ if (
+ stderr &&
+ stderr.includes("running scripts is disabled on this system")
+ ) {
+ console.log("Scripts disabled error when trying to create env");
+ await vscode.window.showErrorMessage(WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR);
+ throw new Error(stderr);
+ } else if (
+ stderr?.includes("On Debian/Ubuntu systems") ||
+ stdout?.includes("On Debian/Ubuntu systems")
+ ) {
+ const msg = await getLinuxAptInstallError(pythonCmd);
+ console.log(msg);
+ await vscode.window.showErrorMessage(msg);
+ } else if (checkEnvExists()) {
+ console.log("Successfully set up python env at ", `${serverPath()}/env`);
} else {
- // Assemble the command to create the env
- const createEnvCommand = [
- `cd "${serverPath()}"`,
- `${pythonCmd} -m venv env`,
- ].join(" ; ");
-
- const [stdout, stderr] = await runCommand(createEnvCommand);
- if (
- stderr &&
- stderr.includes("running scripts is disabled on this system")
- ) {
- await vscode.window.showErrorMessage(
- WINDOWS_REMOTE_SIGNED_SCRIPTS_ERROR
- );
- throw new Error(stderr);
- } else if (
- stderr?.includes("On Debian/Ubuntu systems") ||
- stdout?.includes("On Debian/Ubuntu systems")
- ) {
- const msg = await getLinuxAptInstallError(pythonCmd);
- console.log(msg);
- await vscode.window.showErrorMessage(msg);
- } else if (checkEnvExists()) {
- console.log(
- "Successfully set up python env at ",
- `${serverPath()}/env`
+ try {
+ // This might mean that another window is currently using the python.exe file to install requirements
+ // So we want to wait and try again
+ let i = 0;
+ await new Promise((resolve, reject) =>
+ setInterval(() => {
+ if (i > 5) {
+ reject();
+ }
+ if (checkEnvExists()) {
+ resolve(null);
+ } else {
+ console.log("Waiting for other window to create env...");
+ }
+ i++;
+ }, 5000)
);
- } else {
+ } catch (e) {
const msg = [
"Python environment not successfully created. Trying again. Here was the stdout + stderr: ",
`stdout: ${stdout}`,
`stderr: ${stderr}`,
+ `e: ${e}`,
].join("\n\n");
console.log(msg);
throw new Error(msg);
}
}
+ }
+}
+
+async function setupPythonEnv() {
+ console.log("Setting up python env for Continue extension...");
+
+ const [pythonCmd, pipCmd] = await getPythonPipCommands();
+ const [activateCmd, pipUpgradeCmd] = getActivateUpgradeCommands(
+ pythonCmd,
+ pipCmd
+ );
+
+ await retryThenFail(async () => {
+ // First, create the virtual environment
+ await createPythonVenv(pythonCmd);
// Install the requirements
if (await checkRequirementsInstalled()) {
--
cgit v1.2.3-70-g09d2
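The new branch assumes that when env creation appears to fail, another VS Code window may be partway through creating the same environment, so it polls checkEnvExists() every five seconds and gives up after roughly half a minute. A compact sketch of that wait loop (waitForOtherWindowToCreateEnv, envExists, pollIntervalMs and maxAttempts are illustrative; unlike the inline version above, this one clears its interval once it settles):

function waitForOtherWindowToCreateEnv(
  envExists: () => boolean,
  pollIntervalMs = 5000,
  maxAttempts = 6
): Promise<void> {
  return new Promise((resolve, reject) => {
    let attempts = 0;
    const timer = setInterval(() => {
      if (envExists()) {
        clearInterval(timer);
        resolve();
      } else if (++attempts >= maxAttempts) {
        clearInterval(timer);
        reject(new Error("Timed out waiting for other window to create env"));
      } else {
        console.log("Waiting for other window to create env...");
      }
    }, pollIntervalMs);
  });
}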
From 53536dce999f2858102b7bf969824141140c5fc7 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sat, 15 Jul 2023 21:55:47 -0700
Subject: fixed reading of terminal and other vscode windows
---
continuedev/src/continuedev/server/ide.py | 34 ++++++---------
extension/package-lock.json | 4 +-
extension/package.json | 2 +-
extension/src/activation/environmentSetup.ts | 54 ++++++++++++------------
extension/src/continueIdeClient.ts | 63 ++++++++++++++++------------
5 files changed, 79 insertions(+), 78 deletions(-)
diff --git a/continuedev/src/continuedev/server/ide.py b/continuedev/src/continuedev/server/ide.py
index e5e8de02..a8868a9a 100644
--- a/continuedev/src/continuedev/server/ide.py
+++ b/continuedev/src/continuedev/server/ide.py
@@ -126,7 +126,8 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
workspace_directory: str = None
unique_id: str = None
- async def initialize(self) -> List[str]:
+ async def initialize(self, session_id: str) -> List[str]:
+ self.session_id = session_id
await self._send_json("workspaceDirectory", {})
await self._send_json("uniqueId", {})
other_msgs = []
@@ -287,32 +288,24 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
pass
def onFileEdits(self, edits: List[FileEditWithFullContents]):
- # Send the file edits to ALL autopilots.
- # Maybe not ideal behavior
- for _, session in self.session_manager.sessions.items():
- session.autopilot.handle_manual_edits(edits)
+ session_manager.sessions[self.session_id].autopilot.handle_manual_edits(
+ edits)
def onDeleteAtIndex(self, index: int):
- for _, session in self.session_manager.sessions.items():
- create_async_task(
- session.autopilot.delete_at_index(index), self.unique_id)
+ create_async_task(
+ session_manager.sessions[self.session_id].autopilot.delete_at_index(index), self.unique_id)
def onCommandOutput(self, output: str):
- # Send the output to ALL autopilots.
- # Maybe not ideal behavior
- for _, session in self.session_manager.sessions.items():
- create_async_task(
- session.autopilot.handle_command_output(output), self.unique_id)
+ create_async_task(
+ self.session_manager.sessions[self.session_id].autopilot.handle_command_output(output), self.unique_id)
def onHighlightedCodeUpdate(self, range_in_files: List[RangeInFileWithContents]):
- for _, session in self.session_manager.sessions.items():
- create_async_task(
- session.autopilot.handle_highlighted_code(range_in_files), self.unique_id)
+ create_async_task(
+ self.session_manager.sessions[self.session_id].autopilot.handle_highlighted_code(range_in_files), self.unique_id)
def onMainUserInput(self, input: str):
- for _, session in self.session_manager.sessions.items():
- create_async_task(
- session.autopilot.accept_user_input(input), self.unique_id)
+ create_async_task(
+ self.session_manager.sessions[self.session_id].autopilot.accept_user_input(input), self.unique_id)
# Request information. Session doesn't matter.
async def getOpenFiles(self) -> List[str]:
@@ -440,10 +433,9 @@ async def websocket_endpoint(websocket: WebSocket, session_id: str = None):
ideProtocolServer.handle_json(message_type, data))
ideProtocolServer = IdeProtocolServer(session_manager, websocket)
- ideProtocolServer.session_id = session_id
if session_id is not None:
session_manager.registered_ides[session_id] = ideProtocolServer
- other_msgs = await ideProtocolServer.initialize()
+ other_msgs = await ideProtocolServer.initialize(session_id)
for other_msg in other_msgs:
handle_msg(other_msg)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index f1423041..6f777c72 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.173",
+ "version": "0.0.174",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.173",
+ "version": "0.0.174",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 0638e768..9fe38f7f 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.173",
+ "version": "0.0.174",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts
index 928fe04b..df609a34 100644
--- a/extension/src/activation/environmentSetup.ts
+++ b/extension/src/activation/environmentSetup.ts
@@ -260,34 +260,34 @@ async function createPythonVenv(pythonCmd: string) {
await vscode.window.showErrorMessage(msg);
} else if (checkEnvExists()) {
console.log("Successfully set up python env at ", `${serverPath()}/env`);
+ } else if (
+ stderr?.includes("Permission denied") &&
+ stderr?.includes("python.exe")
+ ) {
+ // This might mean that another window is currently using the python.exe file to install requirements
+ // So we want to wait and try again
+ let i = 0;
+ await new Promise((resolve, reject) =>
+ setInterval(() => {
+ if (i > 5) {
+ reject("Timed out waiting for other window to create env...");
+ }
+ if (checkEnvExists()) {
+ resolve(null);
+ } else {
+ console.log("Waiting for other window to create env...");
+ }
+ i++;
+ }, 5000)
+ );
} else {
- try {
- // This might mean that another window is currently using the python.exe file to install requirements
- // So we want to wait and try again
- let i = 0;
- await new Promise((resolve, reject) =>
- setInterval(() => {
- if (i > 5) {
- reject();
- }
- if (checkEnvExists()) {
- resolve(null);
- } else {
- console.log("Waiting for other window to create env...");
- }
- i++;
- }, 5000)
- );
- } catch (e) {
- const msg = [
- "Python environment not successfully created. Trying again. Here was the stdout + stderr: ",
- `stdout: ${stdout}`,
- `stderr: ${stderr}`,
- `e: ${e}`,
- ].join("\n\n");
- console.log(msg);
- throw new Error(msg);
- }
+ const msg = [
+ "Python environment not successfully created. Trying again. Here was the stdout + stderr: ",
+ `stdout: ${stdout}`,
+ `stderr: ${stderr}`,
+ ].join("\n\n");
+ console.log(msg);
+ throw new Error(msg);
}
}
}
diff --git a/extension/src/continueIdeClient.ts b/extension/src/continueIdeClient.ts
index 2c96763d..fac0a227 100644
--- a/extension/src/continueIdeClient.ts
+++ b/extension/src/continueIdeClient.ts
@@ -104,8 +104,11 @@ class IdeProtocolClient {
// }
// });
- // Setup listeners for any file changes in open editors
+ // Setup listeners for any selection changes in open editors
vscode.window.onDidChangeTextEditorSelection((event) => {
+ if (this.editorIsTerminal(event.textEditor)) {
+ return;
+ }
if (this._highlightDebounce) {
clearTimeout(this._highlightDebounce);
}
@@ -376,20 +379,24 @@ class IdeProtocolClient {
}
saveFile(filepath: string) {
- vscode.window.visibleTextEditors.forEach((editor) => {
- if (editor.document.uri.fsPath === filepath) {
- editor.document.save();
- }
- });
+ vscode.window.visibleTextEditors
+ .filter((editor) => !this.editorIsTerminal(editor))
+ .forEach((editor) => {
+ if (editor.document.uri.fsPath === filepath) {
+ editor.document.save();
+ }
+ });
}
readFile(filepath: string): string {
let contents: string | undefined;
- vscode.window.visibleTextEditors.forEach((editor) => {
- if (editor.document.uri.fsPath === filepath) {
- contents = editor.document.getText();
- }
- });
+ vscode.window.visibleTextEditors
+ .filter((editor) => !this.editorIsTerminal(editor))
+ .forEach((editor) => {
+ if (editor.document.uri.fsPath === filepath) {
+ contents = editor.document.getText();
+ }
+ });
if (typeof contents === "undefined") {
if (fs.existsSync(filepath)) {
contents = fs.readFileSync(filepath, "utf-8");
@@ -429,25 +436,27 @@ class IdeProtocolClient {
getHighlightedCode(): RangeInFile[] {
// TODO
let rangeInFiles: RangeInFile[] = [];
- vscode.window.visibleTextEditors.forEach((editor) => {
- editor.selections.forEach((selection) => {
- // if (!selection.isEmpty) {
- rangeInFiles.push({
- filepath: editor.document.uri.fsPath,
- range: {
- start: {
- line: selection.start.line,
- character: selection.start.character,
- },
- end: {
- line: selection.end.line,
- character: selection.end.character,
+ vscode.window.visibleTextEditors
+ .filter((editor) => !this.editorIsTerminal(editor))
+ .forEach((editor) => {
+ editor.selections.forEach((selection) => {
+ // if (!selection.isEmpty) {
+ rangeInFiles.push({
+ filepath: editor.document.uri.fsPath,
+ range: {
+ start: {
+ line: selection.start.line,
+ character: selection.start.character,
+ },
+ end: {
+ line: selection.end.line,
+ character: selection.end.character,
+ },
},
- },
+ });
+ // }
});
- // }
});
- });
return rangeInFiles;
}
--
cgit v1.2.3-70-g09d2
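The recurring change in continueIdeClient.ts above is to filter vscode.window.visibleTextEditors through editorIsTerminal before reading, saving, or collecting selections, so terminal and other non-file editors that VS Code also reports are ignored. editorIsTerminal itself is not shown in this patch; a plausible sketch, under the assumption that ordinary files can be distinguished by their URI scheme, would be:

import * as vscode from "vscode";

// Assumption: regular files use the "file" scheme, while terminal/output
// style buffers surface with other schemes. The real editorIsTerminal in
// the extension may use a different check.
function editorIsTerminal(editor: vscode.TextEditor): boolean {
  return editor.document.uri.scheme !== "file";
}

function visibleFileEditors(): vscode.TextEditor[] {
  return vscode.window.visibleTextEditors.filter(
    (editor) => !editorIsTerminal(editor)
  );
}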
From 71a869bda2018d8fcfff56f7eccfff2943c30ee0 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 16 Jul 2023 00:21:22 -0700
Subject: fixes
---
continuedev/src/continuedev/server/ide.py | 26 ++++++++++++++++----------
continuedev/src/continuedev/steps/core/core.py | 8 ++++++++
2 files changed, 24 insertions(+), 10 deletions(-)
diff --git a/continuedev/src/continuedev/server/ide.py b/continuedev/src/continuedev/server/ide.py
index a8868a9a..a91708ec 100644
--- a/continuedev/src/continuedev/server/ide.py
+++ b/continuedev/src/continuedev/server/ide.py
@@ -287,25 +287,31 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
def onOpenGUIRequest(self):
pass
+ def __get_autopilot(self):
+ return self.session_manager.sessions[self.session_id].autopilot
+
def onFileEdits(self, edits: List[FileEditWithFullContents]):
- session_manager.sessions[self.session_id].autopilot.handle_manual_edits(
- edits)
+ if autopilot := self.__get_autopilot():
+ autopilot.handle_manual_edits(edits)
def onDeleteAtIndex(self, index: int):
- create_async_task(
- session_manager.sessions[self.session_id].autopilot.delete_at_index(index), self.unique_id)
+ if autopilot := self.__get_autopilot():
+ create_async_task(autopilot.delete_at_index(index), self.unique_id)
def onCommandOutput(self, output: str):
- create_async_task(
- self.session_manager.sessions[self.session_id].autopilot.handle_command_output(output), self.unique_id)
+ if autopilot := self.__get_autopilot():
+ create_async_task(
+ autopilot.handle_command_output(output), self.unique_id)
def onHighlightedCodeUpdate(self, range_in_files: List[RangeInFileWithContents]):
- create_async_task(
- self.session_manager.sessions[self.session_id].autopilot.handle_highlighted_code(range_in_files), self.unique_id)
+ if autopilot := self.__get_autopilot():
+ create_async_task(autopilot.handle_highlighted_code(
+ range_in_files), self.unique_id)
def onMainUserInput(self, input: str):
- create_async_task(
- self.session_manager.sessions[self.session_id].autopilot.accept_user_input(input), self.unique_id)
+ if autopilot := self.__get_autopilot():
+ create_async_task(
+ autopilot.accept_user_input(input), self.unique_id)
# Request information. Session doesn't matter.
async def getOpenFiles(self) -> List[str]:
diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/steps/core/core.py
index 75f8e460..90d64287 100644
--- a/continuedev/src/continuedev/steps/core/core.py
+++ b/continuedev/src/continuedev/steps/core/core.py
@@ -474,6 +474,14 @@ Please output the code to be inserted at the cursor in order to fulfill the user
current_block_lines.append(line)
messages = await sdk.get_chat_context()
+ # Delete the last user and assistant messages
+ i = len(messages) - 1
+ deleted = 0
+ while i >= 0 and deleted < 2:
+ if messages[i].role == "user" or messages[i].role == "assistant":
+ messages.pop(i)
+ deleted += 1
+ i -= 1
messages.append(ChatMessage(
role="user",
content=prompt,
--
cgit v1.2.3-70-g09d2
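The core.py hunk walks the chat history from the end and removes the last two user/assistant entries before appending the fresh edit prompt, presumably so the prompt being added replaces rather than duplicates the tail of the conversation. The same loop in TypeScript, with a minimal assumed ChatMessage shape (dropLastUserAndAssistant is an illustrative name):

interface ChatMessage {
  role: "user" | "assistant" | "system";
  content: string;
}

// Delete up to two messages from the end whose role is user or assistant,
// mirroring the Python while-loop above.
function dropLastUserAndAssistant(messages: ChatMessage[]): void {
  let i = messages.length - 1;
  let deleted = 0;
  while (i >= 0 && deleted < 2) {
    if (messages[i].role === "user" || messages[i].role === "assistant") {
      messages.splice(i, 1);
      deleted += 1;
    }
    i -= 1;
  }
}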
From 868e0b7ef5357b89186119c3c2fa8bd427b8db30 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 16 Jul 2023 00:21:56 -0700
Subject: Anthropic support
---
continuedev/poetry.lock | 186 +++++++++++++++++++++-
continuedev/pyproject.toml | 3 +-
continuedev/src/continuedev/core/config.py | 2 +-
continuedev/src/continuedev/core/sdk.py | 16 +-
continuedev/src/continuedev/libs/llm/anthropic.py | 81 ++++++++++
continuedev/src/continuedev/steps/chat.py | 2 +-
6 files changed, 284 insertions(+), 6 deletions(-)
create mode 100644 continuedev/src/continuedev/libs/llm/anthropic.py
diff --git a/continuedev/poetry.lock b/continuedev/poetry.lock
index a49a570f..e688e076 100644
--- a/continuedev/poetry.lock
+++ b/continuedev/poetry.lock
@@ -124,6 +124,26 @@ files = [
[package.dependencies]
frozenlist = ">=1.1.0"
+[[package]]
+name = "anthropic"
+version = "0.3.4"
+description = "Client library for the anthropic API"
+category = "main"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "anthropic-0.3.4-py3-none-any.whl", hash = "sha256:7b0396f663b0e4eaaf485ae59a0be014cddfc0f0b8f4dad79bb35d8f28439097"},
+ {file = "anthropic-0.3.4.tar.gz", hash = "sha256:36184840bd33184697666d4f1ec951d78ef5da22e87d936cd3c04b611d84e93c"},
+]
+
+[package.dependencies]
+anyio = ">=3.5.0,<4"
+distro = ">=1.7.0,<2"
+httpx = ">=0.23.0,<1"
+pydantic = ">=1.9.0,<2.0.0"
+tokenizers = ">=0.13.0"
+typing-extensions = ">=4.1.1,<5"
+
[[package]]
name = "anyio"
version = "3.6.2"
@@ -374,6 +394,18 @@ files = [
[package.extras]
dev = ["pytest (>=3.7)"]
+[[package]]
+name = "distro"
+version = "1.8.0"
+description = "Distro - an OS platform information API"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "distro-1.8.0-py3-none-any.whl", hash = "sha256:99522ca3e365cac527b44bde033f64c6945d90eb9f769703caaec52b09bbd3ff"},
+ {file = "distro-1.8.0.tar.gz", hash = "sha256:02e111d1dc6a50abb8eed6bf31c3e48ed8b0830d1ea2a1b78c61765c2513fdd8"},
+]
+
[[package]]
name = "fastapi"
version = "0.95.1"
@@ -588,6 +620,52 @@ files = [
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]
+[[package]]
+name = "httpcore"
+version = "0.17.3"
+description = "A minimal low-level HTTP client."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"},
+ {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"},
+]
+
+[package.dependencies]
+anyio = ">=3.0,<5.0"
+certifi = "*"
+h11 = ">=0.13,<0.15"
+sniffio = ">=1.0.0,<2.0.0"
+
+[package.extras]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.24.1"
+description = "The next generation HTTP client."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"},
+ {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"},
+]
+
+[package.dependencies]
+certifi = "*"
+httpcore = ">=0.15.0,<0.18.0"
+idna = "*"
+sniffio = "*"
+
+[package.extras]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
+
[[package]]
name = "idna"
version = "3.4"
@@ -600,6 +678,25 @@ files = [
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
+[[package]]
+name = "importlib-resources"
+version = "6.0.0"
+description = "Read resources from Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "importlib_resources-6.0.0-py3-none-any.whl", hash = "sha256:d952faee11004c045f785bb5636e8f885bed30dc3c940d5d42798a2a4541c185"},
+ {file = "importlib_resources-6.0.0.tar.gz", hash = "sha256:4cf94875a8368bd89531a756df9a9ebe1f150e0f885030b461237bc7f2d905f2"},
+]
+
+[package.dependencies]
+zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+
[[package]]
name = "jsonref"
version = "1.1.0"
@@ -626,6 +723,8 @@ files = [
[package.dependencies]
attrs = ">=17.4.0"
+importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
+pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""}
pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2"
[package.extras]
@@ -1024,6 +1123,18 @@ sql-other = ["SQLAlchemy (>=1.4.16)"]
test = ["hypothesis (>=6.34.2)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"]
xml = ["lxml (>=4.6.3)"]
+[[package]]
+name = "pkgutil-resolve-name"
+version = "1.3.10"
+description = "Resolve a name to an object."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"},
+ {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"},
+]
+
[[package]]
name = "posthog"
version = "3.0.1"
@@ -1532,6 +1643,61 @@ requests = ">=2.26.0"
[package.extras]
blobfile = ["blobfile (>=2)"]
+[[package]]
+name = "tokenizers"
+version = "0.13.3"
+description = "Fast and Customizable Tokenizers"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "tokenizers-0.13.3-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:f3835c5be51de8c0a092058a4d4380cb9244fb34681fd0a295fbf0a52a5fdf33"},
+ {file = "tokenizers-0.13.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4ef4c3e821730f2692489e926b184321e887f34fb8a6b80b8096b966ba663d07"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5fd1a6a25353e9aa762e2aae5a1e63883cad9f4e997c447ec39d071020459bc"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee0b1b311d65beab83d7a41c56a1e46ab732a9eed4460648e8eb0bd69fc2d059"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ef4215284df1277dadbcc5e17d4882bda19f770d02348e73523f7e7d8b8d396"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4d53976079cff8a033f778fb9adca2d9d69d009c02fa2d71a878b5f3963ed30"},
+ {file = "tokenizers-0.13.3-cp310-cp310-win32.whl", hash = "sha256:1f0e3b4c2ea2cd13238ce43548959c118069db7579e5d40ec270ad77da5833ce"},
+ {file = "tokenizers-0.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:89649c00d0d7211e8186f7a75dfa1db6996f65edce4b84821817eadcc2d3c79e"},
+ {file = "tokenizers-0.13.3-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:56b726e0d2bbc9243872b0144515ba684af5b8d8cd112fb83ee1365e26ec74c8"},
+ {file = "tokenizers-0.13.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc5c022ce692e1f499d745af293ab9ee6f5d92538ed2faf73f9708c89ee59ce6"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f55c981ac44ba87c93e847c333e58c12abcbb377a0c2f2ef96e1a266e4184ff2"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f247eae99800ef821a91f47c5280e9e9afaeed9980fc444208d5aa6ba69ff148"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b3e3215d048e94f40f1c95802e45dcc37c5b05eb46280fc2ccc8cd351bff839"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ba2b0bf01777c9b9bc94b53764d6684554ce98551fec496f71bc5be3a03e98b"},
+ {file = "tokenizers-0.13.3-cp311-cp311-win32.whl", hash = "sha256:cc78d77f597d1c458bf0ea7c2a64b6aa06941c7a99cb135b5969b0278824d808"},
+ {file = "tokenizers-0.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:ecf182bf59bd541a8876deccf0360f5ae60496fd50b58510048020751cf1724c"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:0527dc5436a1f6bf2c0327da3145687d3bcfbeab91fed8458920093de3901b44"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cbb2c307627dc99b44b22ef05ff4473aa7c7cc1fec8f0a8b37d8a64b1a16d2"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4560dbdeaae5b7ee0d4e493027e3de6d53c991b5002d7ff95083c99e11dd5ac0"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64064bd0322405c9374305ab9b4c07152a1474370327499911937fd4a76d004b"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8c6e2ab0f2e3d939ca66aa1d596602105fe33b505cd2854a4c1717f704c51de"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-win32.whl", hash = "sha256:6cc29d410768f960db8677221e497226e545eaaea01aa3613fa0fdf2cc96cff4"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fc2a7fdf864554a0dacf09d32e17c0caa9afe72baf9dd7ddedc61973bae352d8"},
+ {file = "tokenizers-0.13.3-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:8791dedba834c1fc55e5f1521be325ea3dafb381964be20684b92fdac95d79b7"},
+ {file = "tokenizers-0.13.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:d607a6a13718aeb20507bdf2b96162ead5145bbbfa26788d6b833f98b31b26e1"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3791338f809cd1bf8e4fee6b540b36822434d0c6c6bc47162448deee3f77d425"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2f35f30e39e6aab8716f07790f646bdc6e4a853816cc49a95ef2a9016bf9ce6"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310204dfed5aa797128b65d63538a9837cbdd15da2a29a77d67eefa489edda26"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0f9b92ea052305166559f38498b3b0cae159caea712646648aaa272f7160963"},
+ {file = "tokenizers-0.13.3-cp38-cp38-win32.whl", hash = "sha256:9a3fa134896c3c1f0da6e762d15141fbff30d094067c8f1157b9fdca593b5806"},
+ {file = "tokenizers-0.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:8e7b0cdeace87fa9e760e6a605e0ae8fc14b7d72e9fc19c578116f7287bb873d"},
+ {file = "tokenizers-0.13.3-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:00cee1e0859d55507e693a48fa4aef07060c4bb6bd93d80120e18fea9371c66d"},
+ {file = "tokenizers-0.13.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a23ff602d0797cea1d0506ce69b27523b07e70f6dda982ab8cf82402de839088"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ce07445050b537d2696022dafb115307abdffd2a5c106f029490f84501ef97"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:280ffe95f50eaaf655b3a1dc7ff1d9cf4777029dbbc3e63a74e65a056594abc3"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97acfcec592f7e9de8cadcdcda50a7134423ac8455c0166b28c9ff04d227b371"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd7730c98a3010cd4f523465867ff95cd9d6430db46676ce79358f65ae39797b"},
+ {file = "tokenizers-0.13.3-cp39-cp39-win32.whl", hash = "sha256:48625a108029cb1ddf42e17a81b5a3230ba6888a70c9dc14e81bc319e812652d"},
+ {file = "tokenizers-0.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:bc0a6f1ba036e482db6453571c9e3e60ecd5489980ffd95d11dc9f960483d783"},
+ {file = "tokenizers-0.13.3.tar.gz", hash = "sha256:2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e"},
+]
+
+[package.extras]
+dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"]
+docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"]
+testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"]
+
[[package]]
name = "tqdm"
version = "4.65.0"
@@ -1818,7 +1984,23 @@ files = [
idna = ">=2.0"
multidict = ">=4.0"
+[[package]]
+name = "zipp"
+version = "3.16.2"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"},
+ {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+
[metadata]
lock-version = "2.0"
-python-versions = "^3.9"
-content-hash = "3ba2a7278fda36a059d76e227be94b0cb5e2efc9396b47a9642b916680214d9f"
+python-versions = "^3.8.1"
+content-hash = "87dbf6d1e56ce6ba81a01a59c0de2d3717925bac9639710bf3ff3ce30f5f5e2c"
diff --git a/continuedev/pyproject.toml b/continuedev/pyproject.toml
index 6727e29a..08c3fd04 100644
--- a/continuedev/pyproject.toml
+++ b/continuedev/pyproject.toml
@@ -6,7 +6,7 @@ authors = ["Nate Sesti "]
readme = "README.md"
[tool.poetry.dependencies]
-python = "^3.8"
+python = "^3.8.1"
diff-match-patch = "^20230430"
fastapi = "^0.95.1"
typer = "^0.7.0"
@@ -24,6 +24,7 @@ tiktoken = "^0.4.0"
jsonref = "^1.1.0"
jsonschema = "^4.17.3"
directory-tree = "^0.0.3.1"
+anthropic = "^0.3.4"
[tool.poetry.scripts]
typegen = "src.continuedev.models.generate_json_schema:main"
diff --git a/continuedev/src/continuedev/core/config.py b/continuedev/src/continuedev/core/config.py
index 6e430c04..05ba48c6 100644
--- a/continuedev/src/continuedev/core/config.py
+++ b/continuedev/src/continuedev/core/config.py
@@ -76,7 +76,7 @@ class ContinueConfig(BaseModel):
server_url: Optional[str] = None
allow_anonymous_telemetry: Optional[bool] = True
default_model: Literal["gpt-3.5-turbo", "gpt-3.5-turbo-16k",
- "gpt-4"] = 'gpt-4'
+ "gpt-4", "claude-2"] = 'gpt-4'
custom_commands: Optional[List[CustomCommand]] = [CustomCommand(
name="test",
description="This is an example custom command. Use /config to edit it and create more",
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index d73561d2..28487600 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -11,6 +11,7 @@ from ..models.filesystem_edit import FileEdit, FileSystemEdit, AddFile, DeleteFi
from ..models.filesystem import RangeInFile
from ..libs.llm.hf_inference_api import HuggingFaceInferenceAPI
from ..libs.llm.openai import OpenAI
+from ..libs.llm.anthropic import AnthropicLLM
from .observation import Observation
from ..server.ide_protocol import AbstractIdeProtocolServer
from .main import Context, ContinueCustomException, History, Step, ChatMessage
@@ -26,7 +27,7 @@ ModelProvider = Literal["openai", "hf_inference_api", "ggml", "anthropic"]
MODEL_PROVIDER_TO_ENV_VAR = {
"openai": "OPENAI_API_KEY",
"hf_inference_api": "HUGGING_FACE_TOKEN",
- "anthropic": "ANTHROPIC_API_KEY"
+ "anthropic": "ANTHROPIC_API_KEY",
}
@@ -40,6 +41,9 @@ class Models:
@classmethod
async def create(cls, sdk: "ContinueSDK", with_providers: List[ModelProvider] = ["openai"]) -> "Models":
+ if sdk.config.default_model == "claude-2":
+ # Copy rather than append, so the mutable default argument is never modified in place
+ with_providers = with_providers + ["anthropic"]
+
models = Models(sdk, with_providers)
for provider in with_providers:
if provider in MODEL_PROVIDER_TO_ENV_VAR:
@@ -59,6 +63,14 @@ class Models:
api_key = self.provider_keys["hf_inference_api"]
return HuggingFaceInferenceAPI(api_key=api_key, model=model)
+ def __load_anthropic_model(self, model: str) -> AnthropicLLM:
+ api_key = self.provider_keys["anthropic"]
+ return AnthropicLLM(api_key=api_key, default_model=model)
+
+ @cached_property
+ def claude2(self):
+ return self.__load_anthropic_model("claude-2")
+
@cached_property
def starcoder(self):
return self.__load_hf_inference_api_model("bigcode/starcoder")
@@ -88,6 +100,8 @@ class Models:
return self.gpt3516k
elif model_name == "gpt-4":
return self.gpt4
+ elif model_name == "claude-2":
+ return self.claude2
else:
raise Exception(f"Unknown model {model_name}")
diff --git a/continuedev/src/continuedev/libs/llm/anthropic.py b/continuedev/src/continuedev/libs/llm/anthropic.py
new file mode 100644
index 00000000..2b8831f0
--- /dev/null
+++ b/continuedev/src/continuedev/libs/llm/anthropic.py
@@ -0,0 +1,81 @@
+
+from functools import cached_property
+import time
+from typing import Any, Coroutine, Dict, Generator, List, Union
+from ...core.main import ChatMessage
+from anthropic import AsyncAnthropic, HUMAN_PROMPT, AI_PROMPT
+from ..llm import LLM
+from ..util.count_tokens import DEFAULT_ARGS, compile_chat_messages, count_tokens
+
+
+class AnthropicLLM(LLM):
+ api_key: str
+ default_model: str
+ anthropic: AsyncAnthropic
+
+ def __init__(self, api_key: str, default_model: str, system_message: str = None):
+ self.api_key = api_key
+ self.default_model = default_model
+ self.system_message = system_message
+
+ # Use the async client, with the key passed by keyword, so completions can be streamed below
+ self.anthropic = AsyncAnthropic(api_key=api_key)
+
+ @cached_property
+ def name(self):
+ return self.default_model
+
+ @property
+ def default_args(self):
+ return {**DEFAULT_ARGS, "model": self.default_model}
+
+ def count_tokens(self, text: str):
+ return count_tokens(self.default_model, text)
+
+ def __messages_to_prompt(self, messages: List[Dict[str, str]]) -> str:
+ prompt = ""
+ # compile_chat_messages returns plain role/content dicts, so index by key
+ for msg in messages:
+ prompt += f"{HUMAN_PROMPT if msg['role'] == 'user' else AI_PROMPT} {msg['content']}"
+
+ # The Completions API expects the prompt to end with the assistant marker
+ if len(messages) == 0 or messages[-1]["role"] != "assistant":
+ prompt += AI_PROMPT
+
+ return prompt
+
+ async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ args = self.default_args.copy()
+ args.update(kwargs)
+
+ # Pass only arguments the Anthropic SDK accepts; "max_tokens" maps to "max_tokens_to_sample"
+ async for chunk in await self.anthropic.completions.create(
+ model=args["model"],
+ max_tokens_to_sample=args["max_tokens"],
+ prompt=f"{HUMAN_PROMPT} {prompt}{AI_PROMPT}",
+ stream=True,
+ ):
+ yield chunk.completion
+
+ async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ args = self.default_args.copy()
+ args.update(kwargs)
+
+ messages = compile_chat_messages(
+ args["model"], messages, args["max_tokens"], functions=args.get("functions", None))
+ async for chunk in await self.anthropic.completions.create(
+ model=args["model"],
+ max_tokens_to_sample=args["max_tokens"],
+ prompt=self.__messages_to_prompt(messages),
+ stream=True,
+ ):
+ # Yield OpenAI-style delta dicts so downstream steps can treat every provider the same way
+ yield {"role": "assistant", "content": chunk.completion}
+
+ async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
+ args = {**self.default_args, **kwargs}
+
+ messages = compile_chat_messages(
+ args["model"], with_history, args["max_tokens"], prompt, functions=None)
+ resp = (await self.anthropic.completions.create(
+ model=args["model"],
+ max_tokens_to_sample=args["max_tokens"],
+ prompt=self.__messages_to_prompt(messages),
+ )).completion
+
+ return resp
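To make the Claude prompt format concrete, here is a standalone sketch of the flattening that __messages_to_prompt performs, using the HUMAN_PROMPT/AI_PROMPT constants from the anthropic package; the sample message is made up.

    from anthropic import HUMAN_PROMPT, AI_PROMPT  # "\n\nHuman:" and "\n\nAssistant:"

    def messages_to_prompt(messages):
        # Flatten role/content dicts into a single Claude-style prompt string
        prompt = ""
        for msg in messages:
            marker = HUMAN_PROMPT if msg["role"] == "user" else AI_PROMPT
            prompt += f"{marker} {msg['content']}"
        # The Completions API requires the prompt to end with the assistant marker
        return prompt + AI_PROMPT

    print(messages_to_prompt([{"role": "user", "content": "Write a haiku about diffs."}]))
    # -> "\n\nHuman: Write a haiku about diffs.\n\nAssistant:"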
diff --git a/continuedev/src/continuedev/steps/chat.py b/continuedev/src/continuedev/steps/chat.py
index 14a1cd41..3751dec2 100644
--- a/continuedev/src/continuedev/steps/chat.py
+++ b/continuedev/src/continuedev/steps/chat.py
@@ -28,7 +28,7 @@ class SimpleChatStep(Step):
completion = ""
messages = self.messages or await sdk.get_chat_context()
- generator = sdk.models.gpt4.stream_chat(messages, temperature=0.5)
+ generator = sdk.models.default.stream_chat(messages, temperature=0.5)
try:
async for chunk in generator:
if sdk.current_step_was_deleted():
--
cgit v1.2.3-70-g09d2
From 52cd93ad73f7df6a5140b7d629e4f6e473dc0380 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 16 Jul 2023 12:49:47 -0700
Subject: feat: :sparkles: Continue Quick Fix
---
extension/package.json | 5 +++
extension/src/activation/activate.ts | 2 ++
extension/src/commands.ts | 7 +++++
extension/src/lang-server/codeActions.ts | 53 ++++++++++++++++++++++++++++++++
4 files changed, 67 insertions(+)
create mode 100644 extension/src/lang-server/codeActions.ts
diff --git a/extension/package.json b/extension/package.json
index 9fe38f7f..ccc3a679 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -106,6 +106,11 @@
"command": "continue.quickTextEntry",
"category": "Continue",
"title": "Quick Text Entry"
+ },
+ {
+ "command": "continue.quickFix",
+ "category": "Continue",
+ "title": "Quick Fix"
}
],
"keybindings": [
diff --git a/extension/src/activation/activate.ts b/extension/src/activation/activate.ts
index cd885b12..5c6ffa02 100644
--- a/extension/src/activation/activate.ts
+++ b/extension/src/activation/activate.ts
@@ -10,6 +10,7 @@ import {
startContinuePythonServer,
} from "./environmentSetup";
import fetch from "node-fetch";
+import registerQuickFixProvider from "../lang-server/codeActions";
// import { CapturedTerminal } from "../terminal/terminalEmulator";
const PACKAGE_JSON_RAW_GITHUB_URL =
@@ -55,6 +56,7 @@ export async function activateExtension(context: vscode.ExtensionContext) {
sendTelemetryEvent(TelemetryEvent.ExtensionActivated);
registerAllCodeLensProviders(context);
registerAllCommands(context);
+ registerQuickFixProvider();
// Initialize IDE Protocol Client
const serverUrl = getContinueServerUrl();
diff --git a/extension/src/commands.ts b/extension/src/commands.ts
index 888f01ed..2b7f4c0c 100644
--- a/extension/src/commands.ts
+++ b/extension/src/commands.ts
@@ -34,6 +34,13 @@ const commandsMap: { [command: string]: (...args: any) => any } = {
"continue.rejectDiff": rejectDiffCommand,
"continue.acceptAllSuggestions": acceptAllSuggestionsCommand,
"continue.rejectAllSuggestions": rejectAllSuggestionsCommand,
+ "continue.quickFix": async (message: string, code: string, edit: boolean) => {
+ ideProtocolClient.sendMainUserInput(
+ `${
+ edit ? "/edit " : ""
+ }${code}\n\nHow do I fix this problem in the above code?: ${message}`
+ );
+ },
"continue.focusContinueInput": async () => {
if (focusedOnContinueInput) {
vscode.commands.executeCommand("workbench.action.focusActiveEditorGroup");
diff --git a/extension/src/lang-server/codeActions.ts b/extension/src/lang-server/codeActions.ts
new file mode 100644
index 00000000..07cf5f4e
--- /dev/null
+++ b/extension/src/lang-server/codeActions.ts
@@ -0,0 +1,53 @@
+import * as vscode from "vscode";
+
+class ContinueQuickFixProvider implements vscode.CodeActionProvider {
+ public static readonly providedCodeActionKinds = [
+ vscode.CodeActionKind.QuickFix,
+ ];
+
+ provideCodeActions(
+ document: vscode.TextDocument,
+ range: vscode.Range | vscode.Selection,
+ context: vscode.CodeActionContext,
+ token: vscode.CancellationToken
+ ): vscode.ProviderResult<(vscode.Command | vscode.CodeAction)[]> {
+ if (context.diagnostics.length === 0) {
+ return [];
+ }
+
+ const createQuickFix = (edit: boolean) => {
+ const diagnostic = context.diagnostics[0];
+ const quickFix = new vscode.CodeAction(
+ edit ? "Fix with Continue" : "Ask Continue",
+ vscode.CodeActionKind.QuickFix
+ );
+ quickFix.isPreferred = false;
+ const surroundingRange = new vscode.Range(
+ range.start.translate(-3, 0),
+ range.end.translate(3, 0)
+ );
+ quickFix.command = {
+ command: "continue.quickFix",
+ title: "Continue Quick Fix",
+ arguments: [
+ diagnostic.message,
+ document.getText(surroundingRange),
+ edit,
+ ],
+ };
+ return quickFix;
+ };
+ return [createQuickFix(true), createQuickFix(false)];
+ }
+}
+
+export default function registerQuickFixProvider() {
+ // Called from activateExtension so Continue quick fixes are offered for diagnostics in any language
+ vscode.languages.registerCodeActionsProvider(
+ { language: "*" },
+ new ContinueQuickFixProvider(),
+ {
+ providedCodeActionKinds: ContinueQuickFixProvider.providedCodeActionKinds,
+ }
+ );
+}
--
cgit v1.2.3-70-g09d2
From fcfbbf562ac4b576bc80259fa9665b6067ccdcaa Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 16 Jul 2023 12:51:38 -0700
Subject: patch
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 6f777c72..fbd3d92d 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.174",
+ "version": "0.0.175",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.174",
+ "version": "0.0.175",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index ccc3a679..02a2ec1a 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.174",
+ "version": "0.0.175",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
--
cgit v1.2.3-70-g09d2
From 6e2634f70662ce04207e4a1fad2e27761179b123 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 16 Jul 2023 16:10:50 -0700
Subject: record accept/reject
---
extension/src/activation/environmentSetup.ts | 8 ++++
extension/src/diffs.ts | 46 ++++++++++++++++++++-
extension/src/suggestions.ts | 60 +---------------------------
3 files changed, 54 insertions(+), 60 deletions(-)
diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts
index df609a34..69a3b75a 100644
--- a/extension/src/activation/environmentSetup.ts
+++ b/extension/src/activation/environmentSetup.ts
@@ -400,6 +400,14 @@ function serverPath(): string {
return sPath;
}
+export function devDataPath(): string {
+ const sPath = path.join(getContinueGlobalPath(), "dev_data");
+ if (!fs.existsSync(sPath)) {
+ fs.mkdirSync(sPath);
+ }
+ return sPath;
+}
+
function serverVersionPath(): string {
return path.join(serverPath(), "server_version.txt");
}
diff --git a/extension/src/diffs.ts b/extension/src/diffs.ts
index 2860258d..0bab326a 100644
--- a/extension/src/diffs.ts
+++ b/extension/src/diffs.ts
@@ -4,6 +4,7 @@ import * as fs from "fs";
import * as vscode from "vscode";
import { extensionContext, ideProtocolClient } from "./activation/activate";
import { getMetaKeyLabel } from "./util/util";
+import { devDataPath } from "./activation/environmentSetup";
interface DiffInfo {
originalFilepath: string;
@@ -13,7 +14,9 @@ interface DiffInfo {
range: vscode.Range;
}
-export const DIFF_DIRECTORY = path.join(os.homedir(), ".continue", "diffs").replace(/^C:/, "c:");
+export const DIFF_DIRECTORY = path
+ .join(os.homedir(), ".continue", "diffs")
+ .replace(/^C:/, "c:");
class DiffManager {
// Create a temporary file in the global .continue directory which displays the updated version
@@ -222,6 +225,8 @@ class DiffManager {
);
this.cleanUpDiff(diffInfo);
});
+
+ recordAcceptReject(true, diffInfo);
}
rejectDiff(newFilepath?: string) {
@@ -251,11 +256,50 @@ class DiffManager {
.then(() => {
this.cleanUpDiff(diffInfo);
});
+
+ recordAcceptReject(false, diffInfo);
}
}
export const diffManager = new DiffManager();
+function recordAcceptReject(accepted: boolean, diffInfo: DiffInfo) {
+ const collectOn = vscode.workspace
+ .getConfiguration("continue")
+ .get("dataSwitch");
+
+ if (collectOn) {
+ const devDataDir = devDataPath();
+ const suggestionsPath = path.join(devDataDir, "suggestions.json");
+
+ // Initialize suggestions list
+ let suggestions = [];
+
+ // Check if suggestions.json exists
+ if (fs.existsSync(suggestionsPath)) {
+ const rawData = fs.readFileSync(suggestionsPath, "utf-8");
+ suggestions = JSON.parse(rawData);
+ }
+
+ // Add the new suggestion to the list
+ suggestions.push({
+ accepted,
+ timestamp: Date.now(),
+ suggestion: diffInfo.originalFilepath,
+ });
+
+ // Send the suggestion to the server
+ ideProtocolClient.sendAcceptRejectSuggestion(accepted);
+
+ // Write the updated suggestions back to the file
+ fs.writeFileSync(
+ suggestionsPath,
+ JSON.stringify(suggestions, null, 4),
+ "utf-8"
+ );
+ }
+}
+
export async function acceptDiffCommand(newFilepath?: string) {
diffManager.acceptDiff(newFilepath);
ideProtocolClient.sendAcceptRejectDiff(true);
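The records appended to suggestions.json are flat JSON objects; a quick way to inspect them later is sketched below, assuming the global .continue directory lives in the home folder as devDataPath() implies.

    import json
    import os

    suggestions_path = os.path.expanduser("~/.continue/dev_data/suggestions.json")
    with open(suggestions_path) as f:
        for record in json.load(f):
            # Each record holds: accepted (bool), timestamp (ms since epoch), suggestion (file path)
            print(record["accepted"], record["suggestion"])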
diff --git a/extension/src/suggestions.ts b/extension/src/suggestions.ts
index 6e5a444f..c2373223 100644
--- a/extension/src/suggestions.ts
+++ b/extension/src/suggestions.ts
@@ -1,9 +1,7 @@
import * as vscode from "vscode";
import { sendTelemetryEvent, TelemetryEvent } from "./telemetry";
import { openEditorAndRevealRange } from "./util/vscode";
-import { translate, readFileAtRange } from "./util/vscode";
-import * as fs from "fs";
-import * as path from "path";
+import { translate } from "./util/vscode";
import { registerAllCodeLensProviders } from "./lang-server/codeLens";
import { extensionContext, ideProtocolClient } from "./activation/activate";
@@ -214,62 +212,6 @@ function selectSuggestion(
: suggestion.newRange;
}
- let workspaceDir = vscode.workspace.workspaceFolders
- ? vscode.workspace.workspaceFolders[0]?.uri.fsPath
- : undefined;
-
- let collectOn = vscode.workspace
- .getConfiguration("continue")
- .get("dataSwitch");
-
- if (workspaceDir && collectOn) {
- let continueDir = path.join(workspaceDir, ".continue");
-
- // Check if .continue directory doesn't exists
- if (!fs.existsSync(continueDir)) {
- fs.mkdirSync(continueDir);
- }
-
- let suggestionsPath = path.join(continueDir, "suggestions.json");
-
- // Initialize suggestions list
- let suggestions = [];
-
- // Check if suggestions.json exists
- if (fs.existsSync(suggestionsPath)) {
- let rawData = fs.readFileSync(suggestionsPath, "utf-8");
- suggestions = JSON.parse(rawData);
- }
-
- const accepted =
- accept === "new" || (accept === "selected" && suggestion.newSelected);
- suggestions.push({
- accepted,
- timestamp: Date.now(),
- suggestion: suggestion.newContent,
- });
- ideProtocolClient.sendAcceptRejectSuggestion(accepted);
-
- // Write the updated suggestions back to the file
- fs.writeFileSync(
- suggestionsPath,
- JSON.stringify(suggestions, null, 4),
- "utf-8"
- );
-
- // If it's not already there, add .continue to .gitignore
- const gitignorePath = path.join(workspaceDir, ".gitignore");
- if (fs.existsSync(gitignorePath)) {
- const gitignoreData = fs.readFileSync(gitignorePath, "utf-8");
- const gitIgnoreLines = gitignoreData.split("\n");
- if (!gitIgnoreLines.includes(".continue")) {
- fs.appendFileSync(gitignorePath, "\n.continue\n");
- }
- } else {
- fs.writeFileSync(gitignorePath, ".continue\n");
- }
- }
-
rangeToDelete = new vscode.Range(
rangeToDelete.start,
new vscode.Position(rangeToDelete.end.line, 0)
--
cgit v1.2.3-70-g09d2
From d80119982e9b60ca0022533a0086eb526dc7d957 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 16 Jul 2023 16:16:41 -0700
Subject: ggml
---
continuedev/src/continuedev/core/sdk.py | 6 ++
continuedev/src/continuedev/libs/llm/ggml.py | 99 ++++++++++++++++++++++
.../src/continuedev/libs/util/count_tokens.py | 7 +-
continuedev/src/continuedev/steps/chat.py | 2 +-
continuedev/src/continuedev/steps/core/core.py | 8 +-
5 files changed, 118 insertions(+), 4 deletions(-)
create mode 100644 continuedev/src/continuedev/libs/llm/ggml.py
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index 8649cd58..22393746 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -11,6 +11,7 @@ from ..models.filesystem_edit import FileEdit, FileSystemEdit, AddFile, DeleteFi
from ..models.filesystem import RangeInFile
from ..libs.llm.hf_inference_api import HuggingFaceInferenceAPI
from ..libs.llm.openai import OpenAI
+from ..libs.llm.ggml import GGML
from .observation import Observation
from ..server.ide_protocol import AbstractIdeProtocolServer
from .main import Context, ContinueCustomException, HighlightedRangeContext, History, Step, ChatMessage, ChatMessageRole
@@ -59,6 +60,10 @@ class Models:
def gpt4(self):
return self.__load_openai_model("gpt-4")
+ @cached_property
+ def ggml(self):
+ return GGML("", "ggml")
+
def __model_from_name(self, model_name: str):
if model_name == "starcoder":
return self.starcoder
@@ -73,6 +78,7 @@ class Models:
@property
def default(self):
+ return self.ggml
default_model = self.sdk.config.default_model
return self.__model_from_name(default_model) if default_model is not None else self.gpt35
diff --git a/continuedev/src/continuedev/libs/llm/ggml.py b/continuedev/src/continuedev/libs/llm/ggml.py
new file mode 100644
index 00000000..bef0d993
--- /dev/null
+++ b/continuedev/src/continuedev/libs/llm/ggml.py
@@ -0,0 +1,99 @@
+from functools import cached_property
+import json
+from typing import Any, Coroutine, Dict, Generator, List, Union
+
+import aiohttp
+from ...core.main import ChatMessage
+import openai
+from ..llm import LLM
+from ..util.count_tokens import DEFAULT_MAX_TOKENS, compile_chat_messages, CHAT_MODELS, DEFAULT_ARGS, count_tokens, prune_raw_prompt_from_top
+import certifi
+import ssl
+
+ca_bundle_path = certifi.where()
+ssl_context = ssl.create_default_context(cafile=ca_bundle_path)
+
+SERVER_URL = "http://localhost:8000"
+
+
+class GGML(LLM):
+ api_key: str
+ default_model: str
+
+ def __init__(self, api_key: str, default_model: str, system_message: str = None):
+ self.api_key = api_key
+ self.default_model = default_model
+ self.system_message = system_message
+
+ openai.api_key = api_key
+
+ @cached_property
+ def name(self):
+ return self.default_model
+
+ @property
+ def default_args(self):
+ return {**DEFAULT_ARGS, "model": self.default_model}
+
+ def count_tokens(self, text: str):
+ return count_tokens(self.default_model, text)
+
+ async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ args = self.default_args.copy()
+ args.update(kwargs)
+ args["stream"] = True
+
+ args = {**self.default_args, **kwargs}
+ messages = compile_chat_messages(
+ self.default_model, with_history, args["max_tokens"], prompt, functions=args.get("functions", None))
+
+ async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
+ async with session.post(f"{SERVER_URL}/v1/completions", json={
+ "messages": messages,
+ **args
+ }) as resp:
+ async for line in resp.content.iter_any():
+ if line:
+ try:
+ yield line.decode("utf-8")
+ except:
+ raise Exception(str(line))
+
+ async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+ args = {**self.default_args, **kwargs}
+ messages = compile_chat_messages(
+ self.default_model, messages, args["max_tokens"], None, functions=args.get("functions", None))
+ args["stream"] = True
+
+ async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
+ async with session.post(f"{SERVER_URL}/v1/chat/completions", json={
+ "messages": messages,
+ **args
+ }) as resp:
+ # This is streaming application/json instead of text/event-stream
+ async for line in resp.content.iter_chunks():
+ if line[1]:
+ try:
+ json_chunk = line[0].decode("utf-8")
+ if json_chunk.startswith(": ping - ") or json_chunk.startswith("data: [DONE]"):
+ continue
+ json_chunk = "{}" if json_chunk == "" else json_chunk
+ chunks = json_chunk.split("\n")
+ for chunk in chunks:
+ if chunk.strip() != "":
+ yield json.loads(chunk[6:])["choices"][0]["delta"]
+ except:
+ raise Exception(str(line[0]))
+
+ async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
+ args = {**self.default_args, **kwargs}
+
+ async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
+ async with session.post(f"{SERVER_URL}/v1/completions", json={
+ "messages": compile_chat_messages(args["model"], with_history, args["max_tokens"], prompt, functions=None),
+ **args
+ }) as resp:
+ try:
+ return await resp.text()
+ except:
+ raise Exception(await resp.text())
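As a rough illustration of the stream that stream_chat parses above: an OpenAI-compatible server on localhost:8000 (for example llama-cpp-python) emits "data: "-prefixed JSON chunks, and the 6-character slice in the code corresponds to the length of that prefix. A minimal consumer sketch:

    import asyncio
    import json

    import aiohttp

    async def stream_demo():
        async with aiohttp.ClientSession() as session:
            async with session.post("http://localhost:8000/v1/chat/completions", json={
                "messages": [{"role": "user", "content": "Say hi"}],
                "stream": True,
            }) as resp:
                async for raw, _ in resp.content.iter_chunks():
                    for line in raw.decode("utf-8").split("\n"):
                        if not line.startswith("data: ") or line == "data: [DONE]":
                            continue
                        delta = json.loads(line[6:])["choices"][0]["delta"]
                        print(delta.get("content", ""), end="")

    # asyncio.run(stream_demo())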
diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py
index 73be0717..e1baeca1 100644
--- a/continuedev/src/continuedev/libs/util/count_tokens.py
+++ b/continuedev/src/continuedev/libs/util/count_tokens.py
@@ -3,13 +3,16 @@ from typing import Dict, List, Union
from ...core.main import ChatMessage
import tiktoken
-aliases = {}
+aliases = {
+ "ggml": "gpt-3.5-turbo",
+}
DEFAULT_MAX_TOKENS = 2048
MAX_TOKENS_FOR_MODEL = {
"gpt-3.5-turbo": 4096,
"gpt-3.5-turbo-0613": 4096,
"gpt-3.5-turbo-16k": 16384,
- "gpt-4": 8192
+ "gpt-4": 8192,
+ "ggml": 2048
}
CHAT_MODELS = {
"gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "gpt-3.5-turbo-0613"
diff --git a/continuedev/src/continuedev/steps/chat.py b/continuedev/src/continuedev/steps/chat.py
index a10319d8..1df1e0bf 100644
--- a/continuedev/src/continuedev/steps/chat.py
+++ b/continuedev/src/continuedev/steps/chat.py
@@ -27,7 +27,7 @@ class SimpleChatStep(Step):
async def run(self, sdk: ContinueSDK):
completion = ""
messages = self.messages or await sdk.get_chat_context()
- async for chunk in sdk.models.gpt4.stream_chat(messages, temperature=0.5):
+ async for chunk in sdk.models.default.stream_chat(messages, temperature=0.5):
if sdk.current_step_was_deleted():
return
diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/steps/core/core.py
index 4b35a758..0b067d7d 100644
--- a/continuedev/src/continuedev/steps/core/core.py
+++ b/continuedev/src/continuedev/steps/core/core.py
@@ -4,6 +4,7 @@ import subprocess
from textwrap import dedent
from typing import Coroutine, List, Literal, Union
+from ...libs.llm.ggml import GGML
from ...models.main import Range
from ...libs.llm.prompt_utils import MarkdownStyleEncoderDecoder
from ...models.filesystem_edit import EditDiff, FileEdit, FileEditWithFullContents, FileSystemEdit
@@ -180,7 +181,7 @@ class DefaultModelEditCodeStep(Step):
# We don't know here all of the functions being passed in.
# We care because if this prompt itself goes over the limit, then the entire message will have to be cut from the completion.
# Overflow won't happen, but prune_chat_messages in count_tokens.py will cut out this whole thing, instead of us cutting out only as many lines as we need.
- model_to_use = sdk.models.gpt4
+ model_to_use = sdk.models.default
max_tokens = DEFAULT_MAX_TOKENS
TOKENS_TO_BE_CONSIDERED_LARGE_RANGE = 1200
@@ -442,6 +443,11 @@ class DefaultModelEditCodeStep(Step):
completion_lines_covered = 0
repeating_file_suffix = False
line_below_highlighted_range = file_suffix.lstrip().split("\n")[0]
+
+ if isinstance(model_to_use, GGML):
+ messages = [ChatMessage(
+ role="user", content=f"```\n{rif.contents}\n```\n{self.user_input}\n```\n", summary=self.user_input)]
+
async for chunk in model_to_use.stream_chat(messages, temperature=0, max_tokens=max_tokens):
# Stop early if it is repeating the file_suffix or the step was deleted
if repeating_file_suffix:
--
cgit v1.2.3-70-g09d2
From 8f70c4fb6cc0b86d24b2f5dc813b17d0f423d423 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 16 Jul 2023 16:55:24 -0700
Subject: better prompt for editing
---
continuedev/src/continuedev/core/config.py | 2 +-
continuedev/src/continuedev/core/sdk.py | 5 ++--
continuedev/src/continuedev/libs/llm/ggml.py | 33 ++++++++------------------
continuedev/src/continuedev/steps/core/core.py | 5 ++--
4 files changed, 17 insertions(+), 28 deletions(-)
diff --git a/continuedev/src/continuedev/core/config.py b/continuedev/src/continuedev/core/config.py
index 6e430c04..957609c5 100644
--- a/continuedev/src/continuedev/core/config.py
+++ b/continuedev/src/continuedev/core/config.py
@@ -76,7 +76,7 @@ class ContinueConfig(BaseModel):
server_url: Optional[str] = None
allow_anonymous_telemetry: Optional[bool] = True
default_model: Literal["gpt-3.5-turbo", "gpt-3.5-turbo-16k",
- "gpt-4"] = 'gpt-4'
+ "gpt-4", "ggml"] = 'gpt-4'
custom_commands: Optional[List[CustomCommand]] = [CustomCommand(
name="test",
description="This is an example custom command. Use /config to edit it and create more",
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index 9389e1e9..eb60109c 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -82,7 +82,7 @@ class Models:
@cached_property
def ggml(self):
- return GGML("", "ggml")
+ return GGML()
def __model_from_name(self, model_name: str):
if model_name == "starcoder":
@@ -93,12 +93,13 @@ class Models:
return self.gpt3516k
elif model_name == "gpt-4":
return self.gpt4
+ elif model_name == "ggml":
+ return self.ggml
else:
raise Exception(f"Unknown model {model_name}")
@property
def default(self):
- return self.ggml
default_model = self.sdk.config.default_model
return self.__model_from_name(default_model) if default_model is not None else self.gpt4
diff --git a/continuedev/src/continuedev/libs/llm/ggml.py b/continuedev/src/continuedev/libs/llm/ggml.py
index bef0d993..d3589b70 100644
--- a/continuedev/src/continuedev/libs/llm/ggml.py
+++ b/continuedev/src/continuedev/libs/llm/ggml.py
@@ -4,39 +4,27 @@ from typing import Any, Coroutine, Dict, Generator, List, Union
import aiohttp
from ...core.main import ChatMessage
-import openai
from ..llm import LLM
-from ..util.count_tokens import DEFAULT_MAX_TOKENS, compile_chat_messages, CHAT_MODELS, DEFAULT_ARGS, count_tokens, prune_raw_prompt_from_top
-import certifi
-import ssl
-
-ca_bundle_path = certifi.where()
-ssl_context = ssl.create_default_context(cafile=ca_bundle_path)
+from ..util.count_tokens import compile_chat_messages, DEFAULT_ARGS, count_tokens
SERVER_URL = "http://localhost:8000"
class GGML(LLM):
- api_key: str
- default_model: str
- def __init__(self, api_key: str, default_model: str, system_message: str = None):
- self.api_key = api_key
- self.default_model = default_model
+ def __init__(self, system_message: str = None):
self.system_message = system_message
- openai.api_key = api_key
-
@cached_property
def name(self):
- return self.default_model
+ return "ggml"
@property
def default_args(self):
- return {**DEFAULT_ARGS, "model": self.default_model}
+ return {**DEFAULT_ARGS, "model": self.name, "max_tokens": 1024}
def count_tokens(self, text: str):
- return count_tokens(self.default_model, text)
+ return count_tokens(self.name, text)
async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = self.default_args.copy()
@@ -45,9 +33,9 @@ class GGML(LLM):
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
- self.default_model, with_history, args["max_tokens"], prompt, functions=args.get("functions", None))
+ self.name, with_history, args["max_tokens"], prompt, functions=args.get("functions", None))
- async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
+ async with aiohttp.ClientSession() as session:
async with session.post(f"{SERVER_URL}/v1/completions", json={
"messages": messages,
**args
@@ -62,10 +50,10 @@ class GGML(LLM):
async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
- self.default_model, messages, args["max_tokens"], None, functions=args.get("functions", None))
+ self.name, messages, args["max_tokens"], None, functions=args.get("functions", None))
args["stream"] = True
- async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
+ async with aiohttp.ClientSession() as session:
async with session.post(f"{SERVER_URL}/v1/chat/completions", json={
"messages": messages,
**args
@@ -77,7 +65,6 @@ class GGML(LLM):
json_chunk = line[0].decode("utf-8")
if json_chunk.startswith(": ping - ") or json_chunk.startswith("data: [DONE]"):
continue
- json_chunk = "{}" if json_chunk == "" else json_chunk
chunks = json_chunk.split("\n")
for chunk in chunks:
if chunk.strip() != "":
@@ -88,7 +75,7 @@ class GGML(LLM):
async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
args = {**self.default_args, **kwargs}
- async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
+ async with aiohttp.ClientSession() as session:
async with session.post(f"{SERVER_URL}/v1/completions", json={
"messages": compile_chat_messages(args["model"], with_history, args["max_tokens"], prompt, functions=None),
**args
diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/steps/core/core.py
index 2c9d8c01..d5a7cd9a 100644
--- a/continuedev/src/continuedev/steps/core/core.py
+++ b/continuedev/src/continuedev/steps/core/core.py
@@ -192,7 +192,8 @@ class DefaultModelEditCodeStep(Step):
# We care because if this prompt itself goes over the limit, then the entire message will have to be cut from the completion.
# Overflow won't happen, but prune_chat_messages in count_tokens.py will cut out this whole thing, instead of us cutting out only as many lines as we need.
model_to_use = sdk.models.default
- max_tokens = DEFAULT_MAX_TOKENS
+ max_tokens = int(MAX_TOKENS_FOR_MODEL.get(
+ model_to_use.name, DEFAULT_MAX_TOKENS) / 2)
TOKENS_TO_BE_CONSIDERED_LARGE_RANGE = 1200
if model_to_use.count_tokens(rif.contents) > TOKENS_TO_BE_CONSIDERED_LARGE_RANGE:
@@ -498,7 +499,7 @@ Please output the code to be inserted at the cursor in order to fulfill the user
if isinstance(model_to_use, GGML):
messages = [ChatMessage(
- role="user", content=f"```\n{rif.contents}\n```\n{self.user_input}\n```\n", summary=self.user_input)]
+ role="user", content=f"```\n{rif.contents}\n```\n\nUser request: \"{self.user_input}\"\n\nThis is the code after changing to perfectly comply with the user request. It does not include any placeholder code, only real implementations:\n\n```\n", summary=self.user_input)]
generator = model_to_use.stream_chat(
messages, temperature=0, max_tokens=max_tokens)
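To show what the new edit prompt looks like when rendered, a small sketch with made-up values; it mirrors the f-string in the ChatMessage above.

    rif_contents = "def add(a, b):\n    return a - b"
    user_input = "fix the bug in add"

    # Shape of the single user message sent to a GGML model for an edit
    content = (
        f"```\n{rif_contents}\n```\n\n"
        f"User request: \"{user_input}\"\n\n"
        "This is the code after changing to perfectly comply with the user request. "
        "It does not include any placeholder code, only real implementations:\n\n```\n"
    )
    print(content)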
--
cgit v1.2.3-70-g09d2
From fd1977eb96aaa3f9ac0de1df954625d8ab6d59d4 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 16 Jul 2023 21:08:13 -0700
Subject: quick fix for quick fix
---
extension/src/bridge.ts | 6 +-----
extension/src/lang-server/codeActions.ts | 6 ++++--
2 files changed, 5 insertions(+), 7 deletions(-)
diff --git a/extension/src/bridge.ts b/extension/src/bridge.ts
index 7e6398be..d614ace4 100644
--- a/extension/src/bridge.ts
+++ b/extension/src/bridge.ts
@@ -1,11 +1,7 @@
import fetch from "node-fetch";
import * as path from "path";
import * as vscode from "vscode";
-import {
- Configuration,
- DebugApi,
- UnittestApi,
-} from "./client";
+import { Configuration, DebugApi, UnittestApi } from "./client";
import { convertSingleToDoubleQuoteJSON } from "./util/util";
import { getExtensionUri } from "./util/vscode";
import { extensionContext } from "./activation/activate";
diff --git a/extension/src/lang-server/codeActions.ts b/extension/src/lang-server/codeActions.ts
index 07cf5f4e..f0d61ace 100644
--- a/extension/src/lang-server/codeActions.ts
+++ b/extension/src/lang-server/codeActions.ts
@@ -23,8 +23,10 @@ class ContinueQuickFixProvider implements vscode.CodeActionProvider {
);
quickFix.isPreferred = false;
const surroundingRange = new vscode.Range(
- range.start.translate(-3, 0),
- range.end.translate(3, 0)
+ Math.max(0, range.start.line - 3),
+ 0,
+ Math.min(document.lineCount, range.end.line + 3),
+ 0
);
quickFix.command = {
command: "continue.quickFix",
--
cgit v1.2.3-70-g09d2
From 73e1cfbefbf450ab6564aba653e0132843223c7a Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 16 Jul 2023 22:12:44 -0700
Subject: templated system messages
---
continuedev/poetry.lock | 65 +++++++++++++++++++++-
continuedev/pyproject.toml | 3 +-
continuedev/src/continuedev/core/config.py | 1 +
continuedev/src/continuedev/core/sdk.py | 10 ++--
continuedev/src/continuedev/libs/llm/ggml.py | 6 +-
.../src/continuedev/libs/llm/hf_inference_api.py | 3 +-
continuedev/src/continuedev/libs/llm/openai.py | 6 +-
.../src/continuedev/libs/llm/proxy_server.py | 6 +-
.../src/continuedev/libs/util/count_tokens.py | 7 ++-
.../src/continuedev/libs/util/templating.py | 39 +++++++++++++
10 files changed, 127 insertions(+), 19 deletions(-)
create mode 100644 continuedev/src/continuedev/libs/util/templating.py
diff --git a/continuedev/poetry.lock b/continuedev/poetry.lock
index a49a570f..625aabc9 100644
--- a/continuedev/poetry.lock
+++ b/continuedev/poetry.lock
@@ -297,6 +297,18 @@ files = [
{file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
]
+[[package]]
+name = "chevron"
+version = "0.14.0"
+description = "Mustache templating language renderer"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "chevron-0.14.0-py3-none-any.whl", hash = "sha256:fbf996a709f8da2e745ef763f482ce2d311aa817d287593a5b990d6d6e4f0443"},
+ {file = "chevron-0.14.0.tar.gz", hash = "sha256:87613aafdf6d77b6a90ff073165a61ae5086e21ad49057aa0e53681601800ebf"},
+]
+
[[package]]
name = "click"
version = "8.1.3"
@@ -600,6 +612,25 @@ files = [
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
+[[package]]
+name = "importlib-resources"
+version = "6.0.0"
+description = "Read resources from Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "importlib_resources-6.0.0-py3-none-any.whl", hash = "sha256:d952faee11004c045f785bb5636e8f885bed30dc3c940d5d42798a2a4541c185"},
+ {file = "importlib_resources-6.0.0.tar.gz", hash = "sha256:4cf94875a8368bd89531a756df9a9ebe1f150e0f885030b461237bc7f2d905f2"},
+]
+
+[package.dependencies]
+zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+
[[package]]
name = "jsonref"
version = "1.1.0"
@@ -626,6 +657,8 @@ files = [
[package.dependencies]
attrs = ">=17.4.0"
+importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
+pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""}
pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2"
[package.extras]
@@ -1024,6 +1057,18 @@ sql-other = ["SQLAlchemy (>=1.4.16)"]
test = ["hypothesis (>=6.34.2)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"]
xml = ["lxml (>=4.6.3)"]
+[[package]]
+name = "pkgutil-resolve-name"
+version = "1.3.10"
+description = "Resolve a name to an object."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"},
+ {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"},
+]
+
[[package]]
name = "posthog"
version = "3.0.1"
@@ -1818,7 +1863,23 @@ files = [
idna = ">=2.0"
multidict = ">=4.0"
+[[package]]
+name = "zipp"
+version = "3.16.2"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"},
+ {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+
[metadata]
lock-version = "2.0"
-python-versions = "^3.9"
-content-hash = "3ba2a7278fda36a059d76e227be94b0cb5e2efc9396b47a9642b916680214d9f"
+python-versions = "^3.8.1"
+content-hash = "82510deb9f4afb5bc38db0dfd88ad88005fa0b6221c24e8c1700c006360f3f88"
diff --git a/continuedev/pyproject.toml b/continuedev/pyproject.toml
index 6727e29a..3077de1c 100644
--- a/continuedev/pyproject.toml
+++ b/continuedev/pyproject.toml
@@ -6,7 +6,7 @@ authors = ["Nate Sesti "]
readme = "README.md"
[tool.poetry.dependencies]
-python = "^3.8"
+python = "^3.8.1"
diff-match-patch = "^20230430"
fastapi = "^0.95.1"
typer = "^0.7.0"
@@ -24,6 +24,7 @@ tiktoken = "^0.4.0"
jsonref = "^1.1.0"
jsonschema = "^4.17.3"
directory-tree = "^0.0.3.1"
+chevron = "^0.14.0"
[tool.poetry.scripts]
typegen = "src.continuedev.models.generate_json_schema:main"
diff --git a/continuedev/src/continuedev/core/config.py b/continuedev/src/continuedev/core/config.py
index 957609c5..91a47c8e 100644
--- a/continuedev/src/continuedev/core/config.py
+++ b/continuedev/src/continuedev/core/config.py
@@ -85,6 +85,7 @@ class ContinueConfig(BaseModel):
slash_commands: Optional[List[SlashCommand]] = DEFAULT_SLASH_COMMANDS
on_traceback: Optional[List[OnTracebackSteps]] = [
OnTracebackSteps(step_name="DefaultOnTracebackStep")]
+ system_message: Optional[str] = None
# Want to force these to be the slash commands for now
@validator('slash_commands', pre=True)
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index eb60109c..ac57c122 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -34,10 +34,12 @@ MODEL_PROVIDER_TO_ENV_VAR = {
class Models:
provider_keys: Dict[ModelProvider, str] = {}
model_providers: List[ModelProvider]
+ system_message: str
def __init__(self, sdk: "ContinueSDK", model_providers: List[ModelProvider]):
self.sdk = sdk
self.model_providers = model_providers
+ self.system_message = sdk.config.system_message
@classmethod
async def create(cls, sdk: "ContinueSDK", with_providers: List[ModelProvider] = ["openai"]) -> "Models":
@@ -53,12 +55,12 @@ class Models:
def __load_openai_model(self, model: str) -> OpenAI:
api_key = self.provider_keys["openai"]
if api_key == "":
- return ProxyServer(self.sdk.ide.unique_id, model)
- return OpenAI(api_key=api_key, default_model=model)
+ return ProxyServer(self.sdk.ide.unique_id, model, system_message=self.system_message)
+ return OpenAI(api_key=api_key, default_model=model, system_message=self.system_message)
def __load_hf_inference_api_model(self, model: str) -> HuggingFaceInferenceAPI:
api_key = self.provider_keys["hf_inference_api"]
- return HuggingFaceInferenceAPI(api_key=api_key, model=model)
+ return HuggingFaceInferenceAPI(api_key=api_key, model=model, system_message=self.system_message)
@cached_property
def starcoder(self):
@@ -82,7 +84,7 @@ class Models:
@cached_property
def ggml(self):
- return GGML()
+ return GGML(system_message=self.system_message)
def __model_from_name(self, model_name: str):
if model_name == "starcoder":
diff --git a/continuedev/src/continuedev/libs/llm/ggml.py b/continuedev/src/continuedev/libs/llm/ggml.py
index d3589b70..6007fdb4 100644
--- a/continuedev/src/continuedev/libs/llm/ggml.py
+++ b/continuedev/src/continuedev/libs/llm/ggml.py
@@ -33,7 +33,7 @@ class GGML(LLM):
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
- self.name, with_history, args["max_tokens"], prompt, functions=args.get("functions", None))
+ self.name, with_history, args["max_tokens"], prompt, functions=args.get("functions", None), system_message=self.system_message)
async with aiohttp.ClientSession() as session:
async with session.post(f"{SERVER_URL}/v1/completions", json={
@@ -50,7 +50,7 @@ class GGML(LLM):
async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
- self.name, messages, args["max_tokens"], None, functions=args.get("functions", None))
+ self.name, messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message)
args["stream"] = True
async with aiohttp.ClientSession() as session:
@@ -77,7 +77,7 @@ class GGML(LLM):
async with aiohttp.ClientSession() as session:
async with session.post(f"{SERVER_URL}/v1/completions", json={
- "messages": compile_chat_messages(args["model"], with_history, args["max_tokens"], prompt, functions=None),
+ "messages": compile_chat_messages(args["model"], with_history, args["max_tokens"], prompt, functions=None, system_message=self.system_message),
**args
}) as resp:
try:
diff --git a/continuedev/src/continuedev/libs/llm/hf_inference_api.py b/continuedev/src/continuedev/libs/llm/hf_inference_api.py
index 803ba122..7e11fbbe 100644
--- a/continuedev/src/continuedev/libs/llm/hf_inference_api.py
+++ b/continuedev/src/continuedev/libs/llm/hf_inference_api.py
@@ -11,9 +11,10 @@ class HuggingFaceInferenceAPI(LLM):
api_key: str
model: str
- def __init__(self, api_key: str, model: str):
+ def __init__(self, api_key: str, model: str, system_message: str = None):
self.api_key = api_key
self.model = model
+ self.system_message = system_message # TODO: Nothing being done with this
def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs):
"""Return the completion of the text with the given temperature."""
diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py
index f0877d90..d973f19e 100644
--- a/continuedev/src/continuedev/libs/llm/openai.py
+++ b/continuedev/src/continuedev/libs/llm/openai.py
@@ -37,7 +37,7 @@ class OpenAI(LLM):
if args["model"] in CHAT_MODELS:
async for chunk in await openai.ChatCompletion.acreate(
messages=compile_chat_messages(
- args["model"], with_history, args["max_tokens"], prompt, functions=None),
+ args["model"], with_history, args["max_tokens"], prompt, functions=None, system_message=self.system_message),
**args,
):
if "content" in chunk.choices[0].delta:
@@ -58,7 +58,7 @@ class OpenAI(LLM):
async for chunk in await openai.ChatCompletion.acreate(
messages=compile_chat_messages(
- args["model"], messages, args["max_tokens"], functions=args.get("functions", None)),
+ args["model"], messages, args["max_tokens"], functions=args.get("functions", None), system_message=self.system_message),
**args,
):
yield chunk.choices[0].delta
@@ -69,7 +69,7 @@ class OpenAI(LLM):
if args["model"] in CHAT_MODELS:
resp = (await openai.ChatCompletion.acreate(
messages=compile_chat_messages(
- args["model"], with_history, args["max_tokens"], prompt, functions=None),
+ args["model"], with_history, args["max_tokens"], prompt, functions=None, system_message=self.system_message),
**args,
)).choices[0].message.content
else:
diff --git a/continuedev/src/continuedev/libs/llm/proxy_server.py b/continuedev/src/continuedev/libs/llm/proxy_server.py
index eab6e441..3ec492f3 100644
--- a/continuedev/src/continuedev/libs/llm/proxy_server.py
+++ b/continuedev/src/continuedev/libs/llm/proxy_server.py
@@ -38,7 +38,7 @@ class ProxyServer(LLM):
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/complete", json={
- "messages": compile_chat_messages(args["model"], with_history, args["max_tokens"], prompt, functions=None),
+ "messages": compile_chat_messages(args["model"], with_history, args["max_tokens"], prompt, functions=None, system_message=self.system_message),
"unique_id": self.unique_id,
**args
}) as resp:
@@ -50,7 +50,7 @@ class ProxyServer(LLM):
async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, Generator[Union[Any, List, Dict], None, None]]:
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
- self.default_model, messages, args["max_tokens"], None, functions=args.get("functions", None))
+ self.default_model, messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message)
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/stream_chat", json={
@@ -74,7 +74,7 @@ class ProxyServer(LLM):
async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
- self.default_model, with_history, args["max_tokens"], prompt, functions=args.get("functions", None))
+ self.default_model, with_history, args["max_tokens"], prompt, functions=args.get("functions", None), system_message=self.system_message)
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/stream_complete", json={
diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py
index e1baeca1..1ca98fe6 100644
--- a/continuedev/src/continuedev/libs/util/count_tokens.py
+++ b/continuedev/src/continuedev/libs/util/count_tokens.py
@@ -1,6 +1,7 @@
import json
from typing import Dict, List, Union
from ...core.main import ChatMessage
+from .templating import render_system_message
import tiktoken
aliases = {
@@ -85,13 +86,15 @@ def compile_chat_messages(model: str, msgs: List[ChatMessage], max_tokens: int,
for function in functions:
prompt_tokens += count_tokens(model, json.dumps(function))
+ rendered_system_message = render_system_message(system_message)
+
msgs = prune_chat_history(model,
- msgs, MAX_TOKENS_FOR_MODEL[model], prompt_tokens + max_tokens + count_tokens(model, system_message))
+ msgs, MAX_TOKENS_FOR_MODEL[model], prompt_tokens + max_tokens + count_tokens(model, rendered_system_message))
history = []
if system_message:
history.append({
"role": "system",
- "content": system_message
+ "content": rendered_system_message
})
history += [msg.to_dict(with_functions=functions is not None)
for msg in msgs]
diff --git a/continuedev/src/continuedev/libs/util/templating.py b/continuedev/src/continuedev/libs/util/templating.py
new file mode 100644
index 00000000..ebfc2e31
--- /dev/null
+++ b/continuedev/src/continuedev/libs/util/templating.py
@@ -0,0 +1,39 @@
+import os
+import chevron
+
+
+def get_vars_in_template(template):
+ """
+ Get the variables in a template
+ """
+ return [token[1] for token in chevron.tokenizer.tokenize(template) if token[0] == 'variable']
+
+
+def escape_var(var: str) -> str:
+ """
+ Escape a variable so it can be used in a template
+ """
+ return var.replace(os.path.sep, '').replace('.', '')
+
+
+def render_system_message(system_message: str) -> str:
+ """
+ Render a system message with mustache syntax.
+ Right now it only supports rendering absolute file paths as their contents.
+ """
+ if not system_message:
+ return ""
+
+ vars = get_vars_in_template(system_message)
+
+ args = {}
+ for var in vars:
+ if not var.startswith(os.path.sep):
+ # Only absolute file paths are supported as variables for now
+ continue
+
+ # Escape vars which are filenames, because mustache doesn't allow / in variable names
+ escaped_var = escape_var(var)
+ system_message = system_message.replace(var, escaped_var)
+
+ if os.path.exists(var):
+ with open(var, 'r') as f:
+ args[escaped_var] = f.read()
+ else:
+ args[escaped_var] = ''
+
+ return chevron.render(system_message, args)
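A small usage sketch of the new helper; the absolute path below is hypothetical, and anything that does not resolve to an existing file renders as an empty string.

    from continuedev.src.continuedev.libs.util.templating import render_system_message

    # Hypothetical template: an absolute path inside {{ }} is replaced by that file's contents
    template = "Always follow the conventions in {{ /home/user/project/CONTRIBUTING.md }}"
    print(render_system_message(template))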
--
cgit v1.2.3-70-g09d2
From fcbfe6aed505306c3109092f3da28d48a35c3e4e Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 16 Jul 2023 22:16:19 -0700
Subject: patch
---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index fbd3d92d..33f81dec 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.175",
+ "version": "0.0.176",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.175",
+ "version": "0.0.176",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 02a2ec1a..e515ed36 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.175",
+ "version": "0.0.176",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
--
cgit v1.2.3-70-g09d2
From c3a5010c7e2b49022c08f8fec113d4fdf21e0438 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Sun, 16 Jul 2023 23:48:57 -0700
Subject: fix alignment of dropdown
---
extension/react-app/src/components/ComboBox.tsx | 49 +------------------------
1 file changed, 2 insertions(+), 47 deletions(-)
diff --git a/extension/react-app/src/components/ComboBox.tsx b/extension/react-app/src/components/ComboBox.tsx
index f11e07af..773fcf72 100644
--- a/extension/react-app/src/components/ComboBox.tsx
+++ b/extension/react-app/src/components/ComboBox.tsx
@@ -37,21 +37,6 @@ const EmptyPillDiv = styled.div`
}
`;
-const ContextDropdown = styled.div`
- position: absolute;
- padding: 4px;
- width: calc(100% - 16px - 8px);
- background-color: ${secondaryDark};
- color: white;
- border-bottom-right-radius: ${defaultBorderRadius};
- border-bottom-left-radius: ${defaultBorderRadius};
- /* border: 1px solid white; */
- border-top: none;
- margin: 8px;
- outline: 1px solid orange;
- z-index: 5;
-`;
-
const MainTextInput = styled.textarea`
resize: none;
@@ -88,12 +73,13 @@ const Ul = styled.ul<{
background-color: ${secondaryDark};
color: white;
max-height: ${UlMaxHeight}px;
+ width: calc(100% - 16px);
overflow-y: scroll;
overflow-x: hidden;
padding: 0;
${({ hidden }) => hidden && "display: none;"}
border-radius: ${defaultBorderRadius};
- border: 0.5px solid gray;
+ outline: 0.5px solid gray;
z-index: 2;
// Get rid of scrollbar and its padding
scrollbar-width: none;
@@ -138,10 +124,6 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
// The position of the current command you are typing now, so the one that will be appended to history once you press enter
const [positionInHistory, setPositionInHistory] = React.useState(0);
const [items, setItems] = React.useState(props.items);
- const [hoveringButton, setHoveringButton] = React.useState(false);
- const [hoveringContextDropdown, setHoveringContextDropdown] =
- React.useState(false);
- const [pinned, setPinned] = useState(false);
const [highlightedCodeSections, setHighlightedCodeSections] = React.useState(
props.highlightedCodeSections || []
);
@@ -253,15 +235,6 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
return newSections;
});
}}
- onHover={(val: boolean) => {
- if (val) {
- setHoveringButton(val);
- } else {
- setTimeout(() => {
- setHoveringButton(val);
- }, 100);
- }
- }}
/>
))}
{props.highlightedCodeSections.length > 0 &&
@@ -387,24 +360,6 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
Inserting at cursor
)}
- {
- setHoveringContextDropdown(true);
- }}
- onMouseLeave={() => {
- setHoveringContextDropdown(false);
- }}
- hidden={true || (!hoveringContextDropdown && !hoveringButton)}
- >
- {highlightedCodeSections.map((section, idx) => (
- <>
- {section.display_name}
-
- {section.range.contents}
-
- >
- ))}
-
>
);
});
--
cgit v1.2.3-70-g09d2
From 88c1f16c597e0a55271e622a5283562ccb7a80a1 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Mon, 17 Jul 2023 00:00:31 -0700
Subject: only show delete when only one range selected
---
extension/react-app/src/components/ComboBox.tsx | 2 ++
extension/react-app/src/components/PillButton.tsx | 34 +++++++++++++----------
2 files changed, 22 insertions(+), 14 deletions(-)
diff --git a/extension/react-app/src/components/ComboBox.tsx b/extension/react-app/src/components/ComboBox.tsx
index 773fcf72..dbebd534 100644
--- a/extension/react-app/src/components/ComboBox.tsx
+++ b/extension/react-app/src/components/ComboBox.tsx
@@ -218,6 +218,7 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
? "Editing such a large range may be slow"
: undefined
}
+ onlyShowDelete={highlightedCodeSections.length <= 1}
editing={section.editing}
pinned={section.pinned}
index={idx}
@@ -334,6 +335,7 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
})}
showAbove={showAbove()}
ulHeightPixels={ulRef.current?.getBoundingClientRect().height || 0}
+ hidden={!downshiftProps.isOpen || items.length === 0}
>
{downshiftProps.isOpen &&
items.map((item, index) => (
diff --git a/extension/react-app/src/components/PillButton.tsx b/extension/react-app/src/components/PillButton.tsx
index d9d779d1..5a16516e 100644
--- a/extension/react-app/src/components/PillButton.tsx
+++ b/extension/react-app/src/components/PillButton.tsx
@@ -27,7 +27,6 @@ const GridDiv = styled.div`
height: 100%;
display: grid;
grid-gap: 0;
- grid-template-columns: 1fr 1fr;
align-items: center;
border-radius: ${defaultBorderRadius};
@@ -69,6 +68,7 @@ interface PillButtonProps {
editing: boolean;
pinned: boolean;
warning?: string;
+ onlyShowDelete?: boolean;
}
const PillButton = (props: PillButtonProps) => {
@@ -105,19 +105,25 @@ const PillButton = (props: PillButtonProps) => {
}}
>
{isHovered && (
-
- {
- client?.setEditingAtIndices([props.index]);
- }}
- >
-
-
+
+ {props.onlyShowDelete || (
+ {
+ client?.setEditingAtIndices([props.index]);
+ }}
+ >
+
+
+ )}
          {/*
--
cgit v1.2.3-70-g09d2
Date: Mon, 17 Jul 2023 10:19:54 -0500
Subject: use difflib to give edit change description
---
continuedev/src/continuedev/steps/core/core.py | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/steps/core/core.py
index d5a7cd9a..41988000 100644
--- a/continuedev/src/continuedev/steps/core/core.py
+++ b/continuedev/src/continuedev/steps/core/core.py
@@ -1,6 +1,7 @@
# These steps are depended upon by ContinueSDK
import os
import subprocess
+import difflib
from textwrap import dedent
from typing import Coroutine, List, Literal, Union
@@ -172,13 +173,12 @@ class DefaultModelEditCodeStep(Step):
if self._previous_contents.strip() == self._new_contents.strip():
description = "No edits were made"
else:
+ changes = '\n'.join(difflib.ndiff(self._previous_contents.splitlines(), self._new_contents.splitlines()))
description = await models.gpt3516k.complete(dedent(f"""\
- ```original
- {self._previous_contents}
- ```
+ Diff summary: "{self.user_input}"
- ```new
- {self._new_contents}
+ ```diff
+ {changes}
```
Please give brief a description of the changes made above using markdown bullet points. Be concise:"""))
--
cgit v1.2.3-70-g09d2
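Note: the change above feeds the model a line-by-line ndiff of the old and new contents instead of both full files. A minimal sketch of the diff text it builds; the before/after snippets are illustrative.

import difflib

previous_contents = "def add(a, b):\n    return a + b\n"
new_contents = "def add(a: int, b: int) -> int:\n    return a + b\n"

# Same call as in DefaultModelEditCodeStep.describe: line-by-line ndiff, joined
changes = "\n".join(difflib.ndiff(previous_contents.splitlines(),
                                  new_contents.splitlines()))
print(changes)
# "-" lines were removed, "+" lines were added, "?" lines mark intra-line changes;
# this text is what gets summarized into markdown bullet points.
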
From e5b15e38982300dd555006c010bf3e887bf4bd7a Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Mon, 17 Jul 2023 11:36:21 -0500
Subject: align on `code section`
---
continuedev/src/continuedev/core/policy.py | 2 +-
extension/react-app/src/components/ComboBox.tsx | 6 +++---
extension/react-app/src/components/PillButton.tsx | 4 ++--
3 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/continuedev/src/continuedev/core/policy.py b/continuedev/src/continuedev/core/policy.py
index bc897357..d007c92b 100644
--- a/continuedev/src/continuedev/core/policy.py
+++ b/continuedev/src/continuedev/core/policy.py
@@ -58,7 +58,7 @@ class DemoPolicy(Policy):
if history.get_current() is None:
return (
MessageStep(name="Welcome to Continue", message=dedent("""\
- - Highlight code and ask a question or give instructions
+ - Highlight code section and ask a question or give instructions
- Use `cmd+m` (Mac) / `ctrl+m` (Windows) to open Continue
- Use `/help` to ask questions about how to use Continue""")) >>
WelcomeStep() >>
diff --git a/extension/react-app/src/components/ComboBox.tsx b/extension/react-app/src/components/ComboBox.tsx
index dbebd534..8136399a 100644
--- a/extension/react-app/src/components/ComboBox.tsx
+++ b/extension/react-app/src/components/ComboBox.tsx
@@ -245,11 +245,11 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
props.onToggleAddContext();
}}
>
- Highlight to Add Context
+ Highlight code section
) : (
{
props.onToggleAddContext();
}}
@@ -261,7 +261,7 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
{
const target = e.target as HTMLTextAreaElement;
diff --git a/extension/react-app/src/components/PillButton.tsx b/extension/react-app/src/components/PillButton.tsx
index 5a16516e..6bfe6369 100644
--- a/extension/react-app/src/components/PillButton.tsx
+++ b/extension/react-app/src/components/PillButton.tsx
@@ -154,8 +154,8 @@ const PillButton = (props: PillButtonProps) => {
{props.editing
- ? "Editing this range (with rest of file as context)"
- : "Edit this range"}
+ ? "Editing this section (with entire file as context)"
+ : "Edit this section"}
Delete
{props.warning && (
--
cgit v1.2.3-70-g09d2
From 08221a0879b4a163eab6860524f255dbcb4743ae Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Mon, 17 Jul 2023 12:05:03 -0700
Subject: match vscode color theme
---
continuedev/src/continuedev/libs/util/dedent.py | 25 -----------
continuedev/src/continuedev/libs/util/strings.py | 49 ++++++++++++++++++++++
continuedev/src/continuedev/steps/chat.py | 8 ++--
continuedev/src/continuedev/steps/core/core.py | 17 ++------
extension/package-lock.json | 4 +-
extension/package.json | 2 +-
extension/react-app/src/components/ComboBox.tsx | 10 +++--
.../react-app/src/components/InputAndButton.tsx | 10 ++---
extension/react-app/src/components/PillButton.tsx | 9 +++-
.../react-app/src/components/StepContainer.tsx | 12 ++++--
extension/react-app/src/components/TextDialog.tsx | 14 ++++---
extension/react-app/src/components/index.ts | 23 ++++++----
extension/react-app/src/index.css | 4 +-
extension/react-app/src/pages/gui.tsx | 11 +++--
14 files changed, 119 insertions(+), 79 deletions(-)
delete mode 100644 continuedev/src/continuedev/libs/util/dedent.py
create mode 100644 continuedev/src/continuedev/libs/util/strings.py
diff --git a/continuedev/src/continuedev/libs/util/dedent.py b/continuedev/src/continuedev/libs/util/dedent.py
deleted file mode 100644
index e59c2e97..00000000
--- a/continuedev/src/continuedev/libs/util/dedent.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from typing import Tuple
-
-
-def dedent_and_get_common_whitespace(s: str) -> Tuple[str, str]:
- lines = s.splitlines()
- if len(lines) == 0:
- return "", ""
-
- # Longest common whitespace prefix
- lcp = lines[0].split(lines[0].strip())[0]
- # Iterate through the lines
- for i in range(1, len(lines)):
- # Empty lines are wildcards
- if lines[i].strip() == "":
- continue
- # Iterate through the leading whitespace characters of the current line
- for j in range(0, len(lcp)):
- # If it doesn't have the same whitespace as lcp, then update lcp
- if j >= len(lines[i]) or lcp[j] != lines[i][j]:
- lcp = lcp[:j]
- if lcp == "":
- return s, ""
- break
-
- return "\n".join(map(lambda x: x.lstrip(lcp), lines)), lcp
diff --git a/continuedev/src/continuedev/libs/util/strings.py b/continuedev/src/continuedev/libs/util/strings.py
new file mode 100644
index 00000000..f1fb8d0b
--- /dev/null
+++ b/continuedev/src/continuedev/libs/util/strings.py
@@ -0,0 +1,49 @@
+from typing import Tuple
+
+
+def dedent_and_get_common_whitespace(s: str) -> Tuple[str, str]:
+ lines = s.splitlines()
+ if len(lines) == 0:
+ return "", ""
+
+ # Longest common whitespace prefix
+ lcp = lines[0].split(lines[0].strip())[0]
+ # Iterate through the lines
+ for i in range(1, len(lines)):
+ # Empty lines are wildcards
+ if lines[i].strip() == "":
+ continue
+ # Iterate through the leading whitespace characters of the current line
+ for j in range(0, len(lcp)):
+ # If it doesn't have the same whitespace as lcp, then update lcp
+ if j >= len(lines[i]) or lcp[j] != lines[i][j]:
+ lcp = lcp[:j]
+ if lcp == "":
+ return s, ""
+ break
+
+ return "\n".join(map(lambda x: x.lstrip(lcp), lines)), lcp
+
+
+def remove_quotes_and_escapes(output: str) -> str:
+ """
+ Clean up the output of the completion API, removing unnecessary escapes and quotes
+ """
+ output = output.strip()
+
+ # Replace smart quotes
+ output = output.replace("“", '"')
+ output = output.replace("”", '"')
+ output = output.replace("‘", "'")
+ output = output.replace("’", "'")
+
+ # Remove escapes
+ output = output.replace('\\"', '"')
+ output = output.replace("\\'", "'")
+ output = output.replace("\\n", "\n")
+ output = output.replace("\\t", "\t")
+ output = output.replace("\\\\", "\\")
+ if (output.startswith('"') and output.endswith('"')) or (output.startswith("'") and output.endswith("'")):
+ output = output[1:-1]
+
+ return output
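Note: a short usage sketch of the two helpers in the new strings.py; the import assumes continuedev/src is on sys.path, and the inputs are illustrative.

from continuedev.libs.util.strings import (
    dedent_and_get_common_whitespace,
    remove_quotes_and_escapes,
)

# Strip the shared leading whitespace; the prefix is returned alongside the text
dedented, prefix = dedent_and_get_common_whitespace("    x = 1\n    y = 2")
print(repr(prefix))   # '    '
print(dedented)       # "x = 1\ny = 2"

# Clean up a model completion that came back quoted and escaped
print(remove_quotes_and_escapes('"Renamed the \\"foo\\" helper"'))
# Renamed the "foo" helper
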
diff --git a/continuedev/src/continuedev/steps/chat.py b/continuedev/src/continuedev/steps/chat.py
index 3751dec2..7c6b42db 100644
--- a/continuedev/src/continuedev/steps/chat.py
+++ b/continuedev/src/continuedev/steps/chat.py
@@ -3,6 +3,7 @@ from typing import Any, Coroutine, List
from pydantic import Field
+from ..libs.util.strings import remove_quotes_and_escapes
from .main import EditHighlightedCodeStep
from .core.core import MessageStep
from ..core.main import FunctionCall, Models
@@ -43,11 +44,8 @@ class SimpleChatStep(Step):
finally:
await generator.aclose()
- self.name = (await sdk.models.gpt35.complete(
- f"Write a short title for the following chat message: {self.description}")).strip()
-
- if self.name.startswith('"') and self.name.endswith('"'):
- self.name = self.name[1:-1]
+ self.name = remove_quotes_and_escapes(await sdk.models.gpt35.complete(
+ f"Write a short title for the following chat message: {self.description}"))
self.chat_context.append(ChatMessage(
role="assistant",
diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/steps/core/core.py
index d5a7cd9a..5b9b9fd5 100644
--- a/continuedev/src/continuedev/steps/core/core.py
+++ b/continuedev/src/continuedev/steps/core/core.py
@@ -12,7 +12,7 @@ from ...models.filesystem import FileSystem, RangeInFile, RangeInFileWithContent
from ...core.observation import Observation, TextObservation, TracebackObservation, UserInputObservation
from ...core.main import ChatMessage, ContinueCustomException, Step, SequentialStep
from ...libs.util.count_tokens import MAX_TOKENS_FOR_MODEL, DEFAULT_MAX_TOKENS
-from ...libs.util.dedent import dedent_and_get_common_whitespace
+from ...libs.util.strings import dedent_and_get_common_whitespace, remove_quotes_and_escapes
import difflib
@@ -157,17 +157,6 @@ class DefaultModelEditCodeStep(Step):
_new_contents: str = ""
_prompt_and_completion: str = ""
- def _cleanup_output(self, output: str) -> str:
- output = output.replace('\\"', '"')
- output = output.replace("\\'", "'")
- output = output.replace("\\n", "\n")
- output = output.replace("\\t", "\t")
- output = output.replace("\\\\", "\\")
- if output.startswith('"') and output.endswith('"'):
- output = output[1:-1]
-
- return output
-
async def describe(self, models: Models) -> Coroutine[str, None, None]:
if self._previous_contents.strip() == self._new_contents.strip():
description = "No edits were made"
@@ -183,9 +172,9 @@ class DefaultModelEditCodeStep(Step):
Please give brief a description of the changes made above using markdown bullet points. Be concise:"""))
name = await models.gpt3516k.complete(f"Write a very short title to describe this requested change (no quotes): '{self.user_input}'. This is the title:")
- self.name = self._cleanup_output(name)
+ self.name = remove_quotes_and_escapes(name)
- return f"{self._cleanup_output(description)}"
+ return f"{remove_quotes_and_escapes(description)}"
async def get_prompt_parts(self, rif: RangeInFileWithContents, sdk: ContinueSDK, full_file_contents: str):
# We don't know here all of the functions being passed in.
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 33f81dec..0e0125b0 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.176",
+ "version": "0.0.177",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.176",
+ "version": "0.0.177",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index e515ed36..8462bf68 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.176",
+ "version": "0.0.177",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
diff --git a/extension/react-app/src/components/ComboBox.tsx b/extension/react-app/src/components/ComboBox.tsx
index dbebd534..0ea8a3e1 100644
--- a/extension/react-app/src/components/ComboBox.tsx
+++ b/extension/react-app/src/components/ComboBox.tsx
@@ -6,6 +6,7 @@ import {
lightGray,
secondaryDark,
vscBackground,
+ vscForeground,
} from ".";
import CodeBlock from "./CodeBlock";
import PillButton from "./PillButton";
@@ -48,7 +49,7 @@ const MainTextInput = styled.textarea`
height: auto;
width: 100%;
background-color: ${secondaryDark};
- color: white;
+ color: ${vscForeground};
z-index: 1;
border: 1px solid transparent;
@@ -71,7 +72,7 @@ const Ul = styled.ul<{
position: absolute;
background: ${vscBackground};
background-color: ${secondaryDark};
- color: white;
+ color: ${vscForeground};
max-height: ${UlMaxHeight}px;
width: calc(100% - 16px);
overflow-y: scroll;
@@ -95,6 +96,7 @@ const Li = styled.li<{
selected: boolean;
isLastItem: boolean;
}>`
+ background-color: ${secondaryDark};
${({ highlighted }) => highlighted && "background: #ff000066;"}
${({ selected }) => selected && "font-weight: bold;"}
padding: 0.5rem 0.75rem;
@@ -218,7 +220,9 @@ const ComboBox = React.forwardRef((props: ComboBoxProps, ref) => {
? "Editing such a large range may be slow"
: undefined
}
- onlyShowDelete={highlightedCodeSections.length <= 1}
+ onlyShowDelete={
+ highlightedCodeSections.length <= 1 || section.editing
+ }
editing={section.editing}
pinned={section.pinned}
index={idx}
diff --git a/extension/react-app/src/components/InputAndButton.tsx b/extension/react-app/src/components/InputAndButton.tsx
index 0a8592f2..8019d014 100644
--- a/extension/react-app/src/components/InputAndButton.tsx
+++ b/extension/react-app/src/components/InputAndButton.tsx
@@ -1,6 +1,6 @@
import React, { useRef } from "react";
import styled from "styled-components";
-import { vscBackground } from ".";
+import { vscBackground, vscForeground } from ".";
interface InputAndButtonProps {
onUserInput: (input: string) => void;
@@ -16,7 +16,7 @@ const Input = styled.input`
padding: 0.5rem;
border: 1px solid white;
background-color: ${vscBackground};
- color: white;
+ color: ${vscForeground};
border-radius: 4px;
border-top-right-radius: 0;
border-bottom-right-radius: 0;
@@ -27,7 +27,7 @@ const Button = styled.button`
padding: 0.5rem;
border: 1px solid white;
background-color: ${vscBackground};
- color: white;
+ color: ${vscForeground};
border-radius: 4px;
border-top-left-radius: 0;
border-bottom-left-radius: 0;
@@ -35,8 +35,8 @@ const Button = styled.button`
cursor: pointer;
&:hover {
- background-color: white;
- color: black;
+ background-color: ${vscForeground};
+ color: ${vscBackground};
}
`;
diff --git a/extension/react-app/src/components/PillButton.tsx b/extension/react-app/src/components/PillButton.tsx
index 5a16516e..eba5cf8f 100644
--- a/extension/react-app/src/components/PillButton.tsx
+++ b/extension/react-app/src/components/PillButton.tsx
@@ -1,6 +1,11 @@
import { useContext, useState } from "react";
import styled from "styled-components";
-import { StyledTooltip, defaultBorderRadius, secondaryDark } from ".";
+import {
+ StyledTooltip,
+ defaultBorderRadius,
+ secondaryDark,
+ vscForeground,
+} from ".";
import {
Trash,
PaintBrush,
@@ -10,7 +15,7 @@ import { GUIClientContext } from "../App";
const Button = styled.button`
border: none;
- color: white;
+ color: ${vscForeground};
background-color: ${secondaryDark};
border-radius: ${defaultBorderRadius};
padding: 8px;
diff --git a/extension/react-app/src/components/StepContainer.tsx b/extension/react-app/src/components/StepContainer.tsx
index 93bdbc89..26bc8e33 100644
--- a/extension/react-app/src/components/StepContainer.tsx
+++ b/extension/react-app/src/components/StepContainer.tsx
@@ -6,6 +6,7 @@ import {
secondaryDark,
vscBackground,
vscBackgroundTransparent,
+ vscForeground,
} from ".";
import {
ChevronDown,
@@ -120,20 +121,22 @@ const StyledMarkdownPreview = styled(MarkdownPreview)`
}
code {
- color: #f69292;
+ color: #f78383;
word-wrap: break-word;
+ border-radius: ${defaultBorderRadius};
+ background-color: ${secondaryDark};
}
pre > code {
background-color: ${secondaryDark};
- color: white;
+ color: ${vscForeground};
}
background-color: ${vscBackground};
font-family: "Lexend", sans-serif;
font-size: 13px;
padding: 8px;
- color: white;
+ color: ${vscForeground};
`;
// #endregion
@@ -267,6 +270,9 @@ function StepContainer(props: StepContainerProps) {
) : (
)}
diff --git a/extension/react-app/src/components/TextDialog.tsx b/extension/react-app/src/components/TextDialog.tsx
index 646d6846..cba3852d 100644
--- a/extension/react-app/src/components/TextDialog.tsx
+++ b/extension/react-app/src/components/TextDialog.tsx
@@ -1,7 +1,7 @@
// Write a component that displays a dialog box with a text field and a button.
import React, { useEffect, useState } from "react";
import styled from "styled-components";
-import { Button, buttonColor, secondaryDark, vscBackground } from ".";
+import { Button, secondaryDark, vscBackground, vscForeground } from ".";
import { isMetaEquivalentKeyPressed } from "../util";
const ScreenCover = styled.div`
@@ -21,13 +21,13 @@ const DialogContainer = styled.div`
`;
const Dialog = styled.div`
- background-color: white;
+ color: ${vscForeground};
+ background-color: ${vscBackground};
border-radius: 8px;
padding: 8px;
display: flex;
flex-direction: column;
- /* box-shadow: 0 0 10px 0 rgba(255, 255, 255, 0.5); */
- border: 2px solid ${buttonColor};
+ box-shadow: 0 0 10px 0 ${vscForeground};
width: fit-content;
margin: auto;
`;
@@ -38,14 +38,16 @@ const TextArea = styled.textarea`
padding: 8px;
outline: 1px solid black;
resize: none;
+ background-color: ${secondaryDark};
+ color: ${vscForeground};
&:focus {
- outline: 1px solid ${buttonColor};
+ outline: 1px solid ${vscForeground};
}
`;
const P = styled.p`
- color: black;
+ color: ${vscForeground};
margin: 8px auto;
`;
diff --git a/extension/react-app/src/components/index.ts b/extension/react-app/src/components/index.ts
index 9ae0f097..cb5e7915 100644
--- a/extension/react-app/src/components/index.ts
+++ b/extension/react-app/src/components/index.ts
@@ -3,12 +3,16 @@ import styled, { keyframes } from "styled-components";
export const defaultBorderRadius = "5px";
export const lightGray = "rgb(100 100 100)";
-export const secondaryDark = "rgb(45 45 45)";
-export const vscBackground = "rgb(30 30 30)";
+// export const secondaryDark = "rgb(45 45 45)";
+// export const vscBackground = "rgb(30 30 30)";
export const vscBackgroundTransparent = "#1e1e1ede";
export const buttonColor = "rgb(113 28 59)";
export const buttonColorHover = "rgb(113 28 59 0.67)";
+export const secondaryDark = "var(--vscode-textBlockQuote-background)";
+export const vscBackground = "var(--vscode-editor-background)";
+export const vscForeground = "var(--vscode-editor-foreground)";
+
export const Button = styled.button`
padding: 10px 12px;
margin: 8px 0;
@@ -46,8 +50,8 @@ export const TextArea = styled.textarea`
resize: vertical;
padding: 4px;
- caret-color: white;
- color: white;
+ caret-color: ${vscForeground};
+ color: #{vscForeground};
&:focus {
outline: 1px solid ${buttonColor};
@@ -120,7 +124,7 @@ export const MainTextInput = styled.textarea`
border: 1px solid #ccc;
margin: 8px 8px;
background-color: ${vscBackground};
- color: white;
+ color: ${vscForeground};
outline: 1px solid orange;
resize: none;
`;
@@ -137,8 +141,9 @@ export const appear = keyframes`
`;
export const HeaderButton = styled.button<{ inverted: boolean | undefined }>`
- background-color: ${({ inverted }) => (inverted ? "white" : "transparent")};
- color: ${({ inverted }) => (inverted ? "black" : "white")};
+ background-color: ${({ inverted }) =>
+ inverted ? vscForeground : "transparent"};
+ color: ${({ inverted }) => (inverted ? vscBackground : vscForeground)};
border: none;
border-radius: ${defaultBorderRadius};
@@ -146,7 +151,9 @@ export const HeaderButton = styled.button<{ inverted: boolean | undefined }>`
&:hover {
background-color: ${({ inverted }) =>
- typeof inverted === "undefined" || inverted ? lightGray : "transparent"};
+ typeof inverted === "undefined" || inverted
+ ? secondaryDark
+ : "transparent"};
}
display: flex;
align-items: center;
diff --git a/extension/react-app/src/index.css b/extension/react-app/src/index.css
index 6e33c89c..bac7fe97 100644
--- a/extension/react-app/src/index.css
+++ b/extension/react-app/src/index.css
@@ -14,13 +14,13 @@ html,
body,
#root {
height: 100%;
- background-color: var(--vsc-background);
+ background-color: var(--vscode-editor-background);
font-family: "Lexend", sans-serif;
}
body {
padding: 0;
- color: white;
+ color: var(--vscode-editor-foreground);
padding: 0px;
margin: 0px;
height: 100%;
diff --git a/extension/react-app/src/pages/gui.tsx b/extension/react-app/src/pages/gui.tsx
index 64207487..c35cf21b 100644
--- a/extension/react-app/src/pages/gui.tsx
+++ b/extension/react-app/src/pages/gui.tsx
@@ -1,5 +1,9 @@
import styled from "styled-components";
-import { defaultBorderRadius } from "../components";
+import {
+ defaultBorderRadius,
+ vscBackground,
+ vscForeground,
+} from "../components";
import Loader from "../components/Loader";
import ContinueButton from "../components/ContinueButton";
import { FullState, HighlightedRangeContext } from "../../../schema/FullState";
@@ -371,12 +375,13 @@ function GUI(props: GUIProps) {
style={{
position: "fixed",
bottom: "50px",
- backgroundColor: "white",
- color: "black",
+ backgroundColor: vscBackground,
+ color: vscForeground,
borderRadius: defaultBorderRadius,
padding: "16px",
margin: "16px",
zIndex: 100,
+ boxShadow: `0px 0px 10px 0px ${vscForeground}`,
}}
hidden={!showDataSharingInfo}
>
--
cgit v1.2.3-70-g09d2
From 436225436ef8379687a80e0b9595ddd4b488d946 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Mon, 17 Jul 2023 12:24:22 -0700
Subject: disambiguate highlighted ranges with dirname
---
continuedev/src/continuedev/core/autopilot.py | 17 +++++++++++++++++
extension/react-app/src/components/StepContainer.tsx | 7 +------
2 files changed, 18 insertions(+), 6 deletions(-)
diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py
index 0696c360..fb8da2e8 100644
--- a/continuedev/src/continuedev/core/autopilot.py
+++ b/continuedev/src/continuedev/core/autopilot.py
@@ -166,6 +166,22 @@ class Autopilot(ContinueBaseModel):
if not any(map(lambda x: x.editing, self._highlighted_ranges)):
self._highlighted_ranges[0].editing = True
+ def _disambiguate_highlighted_ranges(self):
+ """If any files have the same name, also display their folder name"""
+ name_counts = {}
+ for rif in self._highlighted_ranges:
+ if rif.display_name in name_counts:
+ name_counts[rif.display_name] += 1
+ else:
+ name_counts[rif.display_name] = 1
+
+ for rif in self._highlighted_ranges:
+ if name_counts[rif.display_name] > 1:
+ rif.display_name = os.path.join(
+ os.path.basename(os.path.dirname(rif.range.filepath)), rif.display_name)
+ else:
+ rif.display_name = os.path.basename(rif.range.filepath)
+
async def handle_highlighted_code(self, range_in_files: List[RangeInFileWithContents]):
# Filter out rifs from ~/.continue/diffs folder
range_in_files = [
@@ -211,6 +227,7 @@ class Autopilot(ContinueBaseModel):
) for rif in range_in_files]
self._make_sure_is_editing_range()
+ self._disambiguate_highlighted_ranges()
await self.update_subscribers()
diff --git a/extension/react-app/src/components/StepContainer.tsx b/extension/react-app/src/components/StepContainer.tsx
index 26bc8e33..9ab7430c 100644
--- a/extension/react-app/src/components/StepContainer.tsx
+++ b/extension/react-app/src/components/StepContainer.tsx
@@ -53,12 +53,7 @@ const StepContainerDiv = styled.div<{ open: boolean }>`
`;
const HeaderDiv = styled.div<{ error: boolean; loading: boolean }>`
- background-color: ${(props) =>
- props.error
- ? "#522"
- : props.loading
- ? vscBackgroundTransparent
- : vscBackground};
+ background-color: ${(props) => (props.error ? "#522" : vscBackground)};
display: grid;
grid-template-columns: 1fr auto auto;
grid-gap: 8px;
--
cgit v1.2.3-70-g09d2
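Note: the disambiguation rule above only lengthens names that actually collide. A standalone sketch of the same idea; the paths are illustrative and the join uses the platform separator.

import os
from collections import Counter

paths = ["/repo/client/utils.py", "/repo/server/utils.py", "/repo/server/main.py"]

counts = Counter(os.path.basename(p) for p in paths)
display_names = [
    # Duplicated file names get their parent folder prepended; unique ones stay short
    os.path.join(os.path.basename(os.path.dirname(p)), os.path.basename(p))
    if counts[os.path.basename(p)] > 1
    else os.path.basename(p)
    for p in paths
]
print(display_names)  # ['client/utils.py', 'server/utils.py', 'main.py'] on POSIX
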
From 36a2b72db549e2dde5a28d06c87df036a4e3afa0 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Mon, 17 Jul 2023 12:48:10 -0700
Subject: float -> int hot fix
---
continuedev/src/continuedev/steps/core/core.py | 7 ++++---
extension/package-lock.json | 4 ++--
extension/package.json | 2 +-
3 files changed, 7 insertions(+), 6 deletions(-)
diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/steps/core/core.py
index ea09f475..2b049ecc 100644
--- a/continuedev/src/continuedev/steps/core/core.py
+++ b/continuedev/src/continuedev/steps/core/core.py
@@ -162,7 +162,8 @@ class DefaultModelEditCodeStep(Step):
if self._previous_contents.strip() == self._new_contents.strip():
description = "No edits were made"
else:
- changes = '\n'.join(difflib.ndiff(self._previous_contents.splitlines(), self._new_contents.splitlines()))
+ changes = '\n'.join(difflib.ndiff(
+ self._previous_contents.splitlines(), self._new_contents.splitlines()))
description = await models.gpt3516k.complete(dedent(f"""\
Diff summary: "{self.user_input}"
@@ -181,8 +182,8 @@ class DefaultModelEditCodeStep(Step):
# We care because if this prompt itself goes over the limit, then the entire message will have to be cut from the completion.
# Overflow won't happen, but prune_chat_messages in count_tokens.py will cut out this whole thing, instead of us cutting out only as many lines as we need.
model_to_use = sdk.models.default
- max_tokens = MAX_TOKENS_FOR_MODEL.get(
- model_to_use.name, DEFAULT_MAX_TOKENS) / 2
+ max_tokens = int(MAX_TOKENS_FOR_MODEL.get(
+ model_to_use.name, DEFAULT_MAX_TOKENS) / 2)
TOKENS_TO_BE_CONSIDERED_LARGE_RANGE = 1200
if model_to_use.count_tokens(rif.contents) > TOKENS_TO_BE_CONSIDERED_LARGE_RANGE:
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 0e0125b0..e67fa950 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.177",
+ "version": "0.0.178",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.177",
+ "version": "0.0.178",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 8462bf68..121423ed 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.177",
+ "version": "0.0.178",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
--
cgit v1.2.3-70-g09d2
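Note: the hot fix above matters because "/" always produces a float in Python 3, and the halved token budget is later used where an integer count is expected. The constants below match the ones in count_tokens.py; the model name is just an example.

DEFAULT_MAX_TOKENS = 2048
MAX_TOKENS_FOR_MODEL = {"gpt-4": 8192}

budget = MAX_TOKENS_FOR_MODEL.get("gpt-4", DEFAULT_MAX_TOKENS) / 2
print(budget, type(budget))   # 4096.0 <class 'float'>

budget = int(MAX_TOKENS_FOR_MODEL.get("gpt-4", DEFAULT_MAX_TOKENS) / 2)
print(budget, type(budget))   # 4096 <class 'int'> (equivalently, // 2)
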
From 96a48d3484b927db4625ece53e393b60d685783e Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Mon, 17 Jul 2023 13:30:47 -0700
Subject: support for azure endpoints
---
continuedev/src/continuedev/core/autopilot.py | 2 +-
continuedev/src/continuedev/core/config.py | 8 +++++++-
continuedev/src/continuedev/core/sdk.py | 2 +-
continuedev/src/continuedev/libs/llm/openai.py | 19 +++++++++++++++----
4 files changed, 24 insertions(+), 7 deletions(-)
diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py
index fb8da2e8..4e177ac9 100644
--- a/continuedev/src/continuedev/core/autopilot.py
+++ b/continuedev/src/continuedev/core/autopilot.py
@@ -36,7 +36,7 @@ def get_error_title(e: Exception) -> str:
elif isinstance(e, openai_errors.APIConnectionError):
return "The request failed. Please check your internet connection and try again. If this issue persists, you can use our API key for free by going to VS Code settings and changing the value of continue.OPENAI_API_KEY to \"\""
elif isinstance(e, openai_errors.InvalidRequestError):
- return 'Your API key does not have access to GPT-4. You can use ours for free by going to VS Code settings and changing the value of continue.OPENAI_API_KEY to ""'
+ return 'Invalid request sent to OpenAI. Please try again.'
elif e.__str__().startswith("Cannot connect to host"):
return "The request failed. Please check your internet connection and try again."
return e.__str__() or e.__repr__()
diff --git a/continuedev/src/continuedev/core/config.py b/continuedev/src/continuedev/core/config.py
index 91a47c8e..98615c64 100644
--- a/continuedev/src/continuedev/core/config.py
+++ b/continuedev/src/continuedev/core/config.py
@@ -67,13 +67,18 @@ DEFAULT_SLASH_COMMANDS = [
]
+class AzureInfo(BaseModel):
+ endpoint: str
+ engine: str
+ api_version: str
+
+
class ContinueConfig(BaseModel):
"""
A pydantic class for the continue config file.
"""
steps_on_startup: Optional[Dict[str, Dict]] = {}
disallowed_steps: Optional[List[str]] = []
- server_url: Optional[str] = None
allow_anonymous_telemetry: Optional[bool] = True
default_model: Literal["gpt-3.5-turbo", "gpt-3.5-turbo-16k",
"gpt-4", "ggml"] = 'gpt-4'
@@ -86,6 +91,7 @@ class ContinueConfig(BaseModel):
on_traceback: Optional[List[OnTracebackSteps]] = [
OnTracebackSteps(step_name="DefaultOnTracebackStep")]
system_message: Optional[str] = None
+ azure_openai_info: Optional[AzureInfo] = None
# Want to force these to be the slash commands for now
@validator('slash_commands', pre=True)
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index ac57c122..7e612d3b 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -56,7 +56,7 @@ class Models:
api_key = self.provider_keys["openai"]
if api_key == "":
return ProxyServer(self.sdk.ide.unique_id, model, system_message=self.system_message)
- return OpenAI(api_key=api_key, default_model=model, system_message=self.system_message)
+ return OpenAI(api_key=api_key, default_model=model, system_message=self.system_message, azure_info=self.sdk.config.azure_openai_info)
def __load_hf_inference_api_model(self, model: str) -> HuggingFaceInferenceAPI:
api_key = self.provider_keys["hf_inference_api"]
diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py
index d973f19e..33d10985 100644
--- a/continuedev/src/continuedev/libs/llm/openai.py
+++ b/continuedev/src/continuedev/libs/llm/openai.py
@@ -1,30 +1,41 @@
from functools import cached_property
-import time
from typing import Any, Coroutine, Dict, Generator, List, Union
+
from ...core.main import ChatMessage
import openai
from ..llm import LLM
-from ..util.count_tokens import DEFAULT_MAX_TOKENS, compile_chat_messages, CHAT_MODELS, DEFAULT_ARGS, count_tokens, prune_raw_prompt_from_top
+from ..util.count_tokens import compile_chat_messages, CHAT_MODELS, DEFAULT_ARGS, count_tokens, prune_raw_prompt_from_top
+from ...core.config import AzureInfo
class OpenAI(LLM):
api_key: str
default_model: str
- def __init__(self, api_key: str, default_model: str, system_message: str = None):
+ def __init__(self, api_key: str, default_model: str, system_message: str = None, azure_info: AzureInfo = None):
self.api_key = api_key
self.default_model = default_model
self.system_message = system_message
+ self.azure_info = azure_info
openai.api_key = api_key
+ # Using an Azure OpenAI deployment
+ if azure_info is not None:
+ openai.api_type = "azure"
+ openai.api_base = azure_info.endpoint
+ openai.api_version = azure_info.api_version
+
@cached_property
def name(self):
return self.default_model
@property
def default_args(self):
- return {**DEFAULT_ARGS, "model": self.default_model}
+ args = {**DEFAULT_ARGS, "model": self.default_model}
+ if self.azure_info is not None:
+ args["engine"] = self.azure_info.engine
+ return args
def count_tokens(self, text: str):
return count_tokens(self.default_model, text)
--
cgit v1.2.3-70-g09d2
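Note: the AzureInfo settings above map onto module-level attributes of the pre-1.0 openai Python client, plus an extra "engine" argument naming the deployment. A hedged sketch with placeholder values only.

import openai

# Placeholder values; in Continue these come from ContinueConfig.azure_openai_info
endpoint = "https://my-resource.openai.azure.com/"
engine = "my-gpt4-deployment"
api_version = "2023-05-15"

openai.api_type = "azure"
openai.api_base = endpoint
openai.api_version = api_version
openai.api_key = "<azure-openai-key>"

# With api_type="azure" the deployment is addressed via `engine`, which the patch
# simply merges into the default request args alongside `model`, e.g.:
args = {"model": "gpt-4", "engine": engine, "temperature": 0.5}
# openai.ChatCompletion.create(messages=[{"role": "user", "content": "hi"}], **args)
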
From d1819268fb3f6fadbb763ef98cf306ed33add8fb Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Mon, 17 Jul 2023 15:31:10 -0500
Subject: session start and end telemetry
---
continuedev/src/continuedev/server/ide.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/continuedev/src/continuedev/server/ide.py b/continuedev/src/continuedev/server/ide.py
index a91708ec..43538407 100644
--- a/continuedev/src/continuedev/server/ide.py
+++ b/continuedev/src/continuedev/server/ide.py
@@ -442,6 +442,7 @@ async def websocket_endpoint(websocket: WebSocket, session_id: str = None):
if session_id is not None:
session_manager.registered_ides[session_id] = ideProtocolServer
other_msgs = await ideProtocolServer.initialize(session_id)
+ capture_event(ideProtocolServer.unique_id, "session_started", { "session_id": ideProtocolServer.session_id })
for other_msg in other_msgs:
handle_msg(other_msg)
@@ -462,4 +463,5 @@ async def websocket_endpoint(websocket: WebSocket, session_id: str = None):
if websocket.client_state != WebSocketState.DISCONNECTED:
await websocket.close()
+ capture_event(ideProtocolServer.unique_id, "session_ended", { "session_id": ideProtocolServer.session_id })
session_manager.registered_ides.pop(ideProtocolServer.session_id)
--
cgit v1.2.3-70-g09d2
From 1c9034cddeab0c131babe741e9145cc276bd7521 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Mon, 17 Jul 2023 14:54:36 -0700
Subject: anthropic support
---
continuedev/src/continuedev/core/sdk.py | 6 +--
continuedev/src/continuedev/libs/llm/anthropic.py | 50 ++++++++++++++--------
.../src/continuedev/libs/util/count_tokens.py | 4 +-
.../react-app/src/components/StepContainer.tsx | 1 -
4 files changed, 39 insertions(+), 22 deletions(-)
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index d3501f08..280fefa8 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -11,7 +11,7 @@ from ..models.filesystem_edit import FileEdit, FileSystemEdit, AddFile, DeleteFi
from ..models.filesystem import RangeInFile
from ..libs.llm.hf_inference_api import HuggingFaceInferenceAPI
from ..libs.llm.openai import OpenAI
-from ..libs.llm.anthropic import Anthropic
+from ..libs.llm.anthropic import AnthropicLLM
from ..libs.llm.ggml import GGML
from .observation import Observation
from ..server.ide_protocol import AbstractIdeProtocolServer
@@ -66,9 +66,9 @@ class Models:
api_key = self.provider_keys["hf_inference_api"]
return HuggingFaceInferenceAPI(api_key=api_key, model=model, system_message=self.system_message)
- def __load_anthropic_model(self, model: str) -> Anthropic:
+ def __load_anthropic_model(self, model: str) -> AnthropicLLM:
api_key = self.provider_keys["anthropic"]
- return Anthropic(api_key=api_key, model=model)
+ return AnthropicLLM(api_key, model, self.system_message)
@cached_property
def claude2(self):
diff --git a/continuedev/src/continuedev/libs/llm/anthropic.py b/continuedev/src/continuedev/libs/llm/anthropic.py
index 2b8831f0..566f7150 100644
--- a/continuedev/src/continuedev/libs/llm/anthropic.py
+++ b/continuedev/src/continuedev/libs/llm/anthropic.py
@@ -3,7 +3,7 @@ from functools import cached_property
import time
from typing import Any, Coroutine, Dict, Generator, List, Union
from ...core.main import ChatMessage
-from anthropic import Anthropic, HUMAN_PROMPT, AI_PROMPT
+from anthropic import HUMAN_PROMPT, AI_PROMPT, AsyncAnthropic
from ..llm import LLM
from ..util.count_tokens import DEFAULT_MAX_TOKENS, compile_chat_messages, CHAT_MODELS, DEFAULT_ARGS, count_tokens, prune_raw_prompt_from_top
@@ -11,14 +11,14 @@ from ..util.count_tokens import DEFAULT_MAX_TOKENS, compile_chat_messages, CHAT_
class AnthropicLLM(LLM):
api_key: str
default_model: str
- anthropic: Anthropic
+ async_client: AsyncAnthropic
def __init__(self, api_key: str, default_model: str, system_message: str = None):
self.api_key = api_key
self.default_model = default_model
self.system_message = system_message
- self.anthropic = Anthropic(api_key)
+ self.async_client = AsyncAnthropic(api_key=api_key)
@cached_property
def name(self):
@@ -28,24 +28,39 @@ class AnthropicLLM(LLM):
def default_args(self):
return {**DEFAULT_ARGS, "model": self.default_model}
+ def _transform_args(self, args: Dict[str, Any]) -> Dict[str, Any]:
+ args = args.copy()
+ if "max_tokens" in args:
+ args["max_tokens_to_sample"] = args["max_tokens"]
+ del args["max_tokens"]
+ if "frequency_penalty" in args:
+ del args["frequency_penalty"]
+ if "presence_penalty" in args:
+ del args["presence_penalty"]
+ return args
+
def count_tokens(self, text: str):
return count_tokens(self.default_model, text)
- def __messages_to_prompt(self, messages: List[ChatMessage]) -> str:
+ def __messages_to_prompt(self, messages: List[Dict[str, str]]) -> str:
prompt = ""
+
+ # Anthropic prompt must start with a Human turn
+ if len(messages) > 0 and messages[0]["role"] != "user" and messages[0]["role"] != "system":
+ prompt += f"{HUMAN_PROMPT} Hello."
for msg in messages:
- prompt += f"{HUMAN_PROMPT if msg.role == 'user' else AI_PROMPT} {msg.content} "
+ prompt += f"{HUMAN_PROMPT if (msg['role'] == 'user' or msg['role'] == 'system') else AI_PROMPT} {msg['content']} "
+ prompt += AI_PROMPT
return prompt
async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = self.default_args.copy()
args.update(kwargs)
args["stream"] = True
+ args = self._transform_args(args)
- async for chunk in await self.anthropic.completions.create(
- model=args["model"],
- max_tokens_to_sample=args["max_tokens"],
+ async for chunk in await self.async_client.completions.create(
prompt=f"{HUMAN_PROMPT} {prompt} {AI_PROMPT}",
**args
):
@@ -55,25 +70,26 @@ class AnthropicLLM(LLM):
args = self.default_args.copy()
args.update(kwargs)
args["stream"] = True
+ args = self._transform_args(args)
messages = compile_chat_messages(
- args["model"], messages, args["max_tokens"], functions=args.get("functions", None))
- async for chunk in await self.anthropic.completions.create(
- model=args["model"],
- max_tokens_to_sample=args["max_tokens"],
+ args["model"], messages, args["max_tokens_to_sample"], functions=args.get("functions", None))
+ async for chunk in await self.async_client.completions.create(
prompt=self.__messages_to_prompt(messages),
**args
):
- yield chunk.completion
+ yield {
+ "role": "assistant",
+ "content": chunk.completion
+ }
async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
args = {**self.default_args, **kwargs}
+ args = self._transform_args(args)
messages = compile_chat_messages(
- args["model"], with_history, args["max_tokens"], prompt, functions=None)
- resp = (await self.anthropic.completions.create(
- model=args["model"],
- max_tokens_to_sample=args["max_tokens"],
+ args["model"], with_history, args["max_tokens_to_sample"], prompt, functions=None)
+ resp = (await self.async_client.completions.create(
prompt=self.__messages_to_prompt(messages),
**args
)).completion
diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py
index 1ca98fe6..1d5d6729 100644
--- a/continuedev/src/continuedev/libs/util/count_tokens.py
+++ b/continuedev/src/continuedev/libs/util/count_tokens.py
@@ -6,6 +6,7 @@ import tiktoken
aliases = {
"ggml": "gpt-3.5-turbo",
+ "claude-2": "gpt-3.5-turbo",
}
DEFAULT_MAX_TOKENS = 2048
MAX_TOKENS_FOR_MODEL = {
@@ -13,7 +14,8 @@ MAX_TOKENS_FOR_MODEL = {
"gpt-3.5-turbo-0613": 4096,
"gpt-3.5-turbo-16k": 16384,
"gpt-4": 8192,
- "ggml": 2048
+ "ggml": 2048,
+ "claude-2": 100000
}
CHAT_MODELS = {
"gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "gpt-3.5-turbo-0613"
diff --git a/extension/react-app/src/components/StepContainer.tsx b/extension/react-app/src/components/StepContainer.tsx
index 9ab7430c..93b90f0d 100644
--- a/extension/react-app/src/components/StepContainer.tsx
+++ b/extension/react-app/src/components/StepContainer.tsx
@@ -39,7 +39,6 @@ interface StepContainerProps {
const MainDiv = styled.div<{ stepDepth: number; inFuture: boolean }>`
opacity: ${(props) => (props.inFuture ? 0.3 : 1)};
animation: ${appear} 0.3s ease-in-out;
- /* padding-left: ${(props) => props.stepDepth * 20}px; */
overflow: hidden;
margin-left: 0px;
margin-right: 0px;
--
cgit v1.2.3-70-g09d2
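Note: two conversions carry most of the Anthropic adapter above. OpenAI-style argument names are remapped (max_tokens becomes max_tokens_to_sample, penalty args are dropped), and chat messages are flattened into alternating Human/Assistant turns. A standalone sketch, assuming the anthropic package is installed; the message content is illustrative.

from typing import Any, Dict, List
from anthropic import HUMAN_PROMPT, AI_PROMPT

def transform_args(args: Dict[str, Any]) -> Dict[str, Any]:
    args = dict(args)
    if "max_tokens" in args:
        args["max_tokens_to_sample"] = args.pop("max_tokens")
    args.pop("frequency_penalty", None)  # not supported by the completions API
    args.pop("presence_penalty", None)
    return args

def messages_to_prompt(messages: List[Dict[str, str]]) -> str:
    prompt = ""
    if messages and messages[0]["role"] not in ("user", "system"):
        prompt += f"{HUMAN_PROMPT} Hello."  # prompt must open with a Human turn
    for msg in messages:
        turn = HUMAN_PROMPT if msg["role"] in ("user", "system") else AI_PROMPT
        prompt += f"{turn} {msg['content']} "
    return prompt + AI_PROMPT  # close with the Assistant turn to be completed

print(transform_args({"model": "claude-2", "max_tokens": 1024, "presence_penalty": 0.0}))
print(messages_to_prompt([{"role": "user", "content": "Summarize this diff."}]))
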
From 043bdade5605a1c509a9f1927ebbe54db7d900f4 Mon Sep 17 00:00:00 2001
From: Ty Dunn
Date: Mon, 17 Jul 2023 19:25:27 -0500
Subject: commenting out old suggestion telemetry
---
extension/src/diffs.ts | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/extension/src/diffs.ts b/extension/src/diffs.ts
index 0bab326a..1e63c5f6 100644
--- a/extension/src/diffs.ts
+++ b/extension/src/diffs.ts
@@ -289,7 +289,7 @@ function recordAcceptReject(accepted: boolean, diffInfo: DiffInfo) {
});
// Send the suggestion to the server
- ideProtocolClient.sendAcceptRejectSuggestion(accepted);
+ // ideProtocolClient.sendAcceptRejectSuggestion(accepted);
// Write the updated suggestions back to the file
fs.writeFileSync(
--
cgit v1.2.3-70-g09d2
From dc64c73adb8c8a2aeb3210bc9f4ff1bd82c03de2 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Mon, 17 Jul 2023 21:09:30 -0700
Subject: show exact prompt/completion logs
---
continuedev/src/continuedev/core/main.py | 1 +
continuedev/src/continuedev/core/sdk.py | 28 +++++++++++--
continuedev/src/continuedev/libs/llm/openai.py | 47 +++++++++++++++++-----
.../src/continuedev/libs/llm/proxy_server.py | 33 +++++++++++----
.../src/continuedev/libs/util/count_tokens.py | 7 ++++
continuedev/src/continuedev/server/gui.py | 9 +++++
continuedev/src/continuedev/server/ide.py | 14 ++++++-
continuedev/src/continuedev/server/ide_protocol.py | 4 ++
.../src/continuedev/server/session_manager.py | 2 +-
extension/package-lock.json | 4 +-
extension/package.json | 2 +-
.../react-app/src/components/StepContainer.tsx | 17 +++++++-
.../src/hooks/ContinueGUIClientProtocol.ts | 2 +
.../react-app/src/hooks/useContinueGUIProtocol.ts | 4 ++
extension/react-app/src/pages/gui.tsx | 1 +
extension/src/continueIdeClient.ts | 38 +++++++++++++++++
16 files changed, 185 insertions(+), 28 deletions(-)
diff --git a/continuedev/src/continuedev/core/main.py b/continuedev/src/continuedev/core/main.py
index 88690c83..5931d978 100644
--- a/continuedev/src/continuedev/core/main.py
+++ b/continuedev/src/continuedev/core/main.py
@@ -102,6 +102,7 @@ class HistoryNode(ContinueBaseModel):
depth: int
deleted: bool = False
active: bool = True
+ logs: List[str] = []
def to_chat_messages(self) -> List[ChatMessage]:
if self.step.description is None or self.step.manage_own_chat_context:
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index 280fefa8..53214384 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -37,6 +37,25 @@ class Models:
model_providers: List[ModelProvider]
system_message: str
+ """
+ Better to have sdk.llm.stream_chat(messages, model="claude-2").
+ Then you also don't care that it' async.
+ And it's easier to add more models.
+ And intermediate shared code is easier to add.
+ And you can make constants like ContinueModels.GPT35 = "gpt-3.5-turbo"
+ PromptTransformer would be a good concept: You pass a prompt or list of messages and a model, then it outputs the prompt for that model.
+ Easy to reason about, can place anywhere.
+ And you can even pass a Prompt object to sdk.llm.stream_chat maybe, and it'll automatically be transformed for the given model.
+ This can all happen inside of Models?
+
+ class Prompt:
+ def __init__(self, ...info):
+ '''take whatever info is needed to describe the prompt'''
+
+ def to_string(self, model: str) -> str:
+ '''depending on the model, return the single prompt string'''
+ """
+
def __init__(self, sdk: "ContinueSDK", model_providers: List[ModelProvider]):
self.sdk = sdk
self.model_providers = model_providers
@@ -59,8 +78,8 @@ class Models:
def __load_openai_model(self, model: str) -> OpenAI:
api_key = self.provider_keys["openai"]
if api_key == "":
- return ProxyServer(self.sdk.ide.unique_id, model, system_message=self.system_message)
- return OpenAI(api_key=api_key, default_model=model, system_message=self.system_message, azure_info=self.sdk.config.azure_openai_info)
+ return ProxyServer(self.sdk.ide.unique_id, model, system_message=self.system_message, write_log=self.sdk.write_log)
+ return OpenAI(api_key=api_key, default_model=model, system_message=self.system_message, azure_info=self.sdk.config.azure_openai_info, write_log=self.sdk.write_log)
def __load_hf_inference_api_model(self, model: str) -> HuggingFaceInferenceAPI:
api_key = self.provider_keys["hf_inference_api"]
@@ -156,6 +175,9 @@ class ContinueSDK(AbstractContinueSDK):
def history(self) -> History:
return self.__autopilot.history
+ def write_log(self, message: str):
+ self.history.timeline[self.history.current_index].logs.append(message)
+
async def _ensure_absolute_path(self, path: str) -> str:
if os.path.isabs(path):
return path
@@ -263,7 +285,7 @@ class ContinueSDK(AbstractContinueSDK):
for rif in highlighted_code:
msg = ChatMessage(content=f"{preface} ({rif.filepath}):\n```\n{rif.contents}\n```",
- role="system", summary=f"{preface}: {rif.filepath}")
+ role="user", summary=f"{preface}: {rif.filepath}")
# Don't insert after latest user message or function call
i = -1
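Note: the write_log plumbing above hangs a list of log strings off each HistoryNode so the GUI can later show the exact prompt/completion pairs for a step. A minimal standalone sketch of that shape; dataclasses stand in for the real pydantic models.

from dataclasses import dataclass, field
from typing import List

@dataclass
class HistoryNode:
    step_name: str
    logs: List[str] = field(default_factory=list)

@dataclass
class History:
    timeline: List[HistoryNode] = field(default_factory=list)
    current_index: int = -1

history = History(timeline=[HistoryNode("DefaultModelEditCodeStep")], current_index=0)

def write_log(message: str) -> None:
    # Same idea as ContinueSDK.write_log: append to the currently active node
    history.timeline[history.current_index].logs.append(message)

write_log("Prompt:\n\n<User>\nRefactor this function\n")
write_log("Completion:\n\ndef refactored(): ...")
print(len(history.timeline[0].logs))  # 2 entries, surfaced via "show logs at index"
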
diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py
index 33d10985..64bb39a2 100644
--- a/continuedev/src/continuedev/libs/llm/openai.py
+++ b/continuedev/src/continuedev/libs/llm/openai.py
@@ -1,10 +1,11 @@
from functools import cached_property
-from typing import Any, Coroutine, Dict, Generator, List, Union
+import json
+from typing import Any, Callable, Coroutine, Dict, Generator, List, Union
from ...core.main import ChatMessage
import openai
from ..llm import LLM
-from ..util.count_tokens import compile_chat_messages, CHAT_MODELS, DEFAULT_ARGS, count_tokens, prune_raw_prompt_from_top
+from ..util.count_tokens import compile_chat_messages, CHAT_MODELS, DEFAULT_ARGS, count_tokens, format_chat_messages, prune_raw_prompt_from_top
from ...core.config import AzureInfo
@@ -12,11 +13,12 @@ class OpenAI(LLM):
api_key: str
default_model: str
- def __init__(self, api_key: str, default_model: str, system_message: str = None, azure_info: AzureInfo = None):
+ def __init__(self, api_key: str, default_model: str, system_message: str = None, azure_info: AzureInfo = None, write_log: Callable[[str], None] = None):
self.api_key = api_key
self.default_model = default_model
self.system_message = system_message
self.azure_info = azure_info
+ self.write_log = write_log
openai.api_key = api_key
@@ -46,18 +48,29 @@ class OpenAI(LLM):
args["stream"] = True
if args["model"] in CHAT_MODELS:
+ messages = compile_chat_messages(
+ args["model"], with_history, args["max_tokens"], prompt, functions=None, system_message=self.system_message)
+ self.write_log(f"Prompt: \n\n{format_chat_messages(messages)}")
+ completion = ""
async for chunk in await openai.ChatCompletion.acreate(
- messages=compile_chat_messages(
- args["model"], with_history, args["max_tokens"], prompt, functions=None, system_message=self.system_message),
+ messages=messages,
**args,
):
if "content" in chunk.choices[0].delta:
yield chunk.choices[0].delta.content
+ completion += chunk.choices[0].delta.content
else:
continue
+
+ self.write_log(f"Completion: \n\n{completion}")
else:
+ self.write_log(f"Prompt:\n\n{prompt}")
+ completion = ""
async for chunk in await openai.Completion.acreate(prompt=prompt, **args):
yield chunk.choices[0].text
+ completion += chunk.choices[0].text
+
+ self.write_log(f"Completion:\n\n{completion}")
async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = self.default_args.copy()
@@ -67,27 +80,39 @@ class OpenAI(LLM):
if not args["model"].endswith("0613") and "functions" in args:
del args["functions"]
+ messages = compile_chat_messages(
+ args["model"], messages, args["max_tokens"], functions=args.get("functions", None), system_message=self.system_message)
+ self.write_log(f"Prompt: \n\n{format_chat_messages(messages)}")
+ completion = ""
async for chunk in await openai.ChatCompletion.acreate(
- messages=compile_chat_messages(
- args["model"], messages, args["max_tokens"], functions=args.get("functions", None), system_message=self.system_message),
+ messages=messages,
**args,
):
yield chunk.choices[0].delta
+ if "content" in chunk.choices[0].delta:
+ completion += chunk.choices[0].delta.content
+ self.write_log(f"Completion: \n\n{completion}")
async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
args = {**self.default_args, **kwargs}
if args["model"] in CHAT_MODELS:
+ messages = compile_chat_messages(
+ args["model"], with_history, args["max_tokens"], prompt, functions=None, system_message=self.system_message)
+ self.write_log(f"Prompt: \n\n{format_chat_messages(messages)}")
resp = (await openai.ChatCompletion.acreate(
- messages=compile_chat_messages(
- args["model"], with_history, args["max_tokens"], prompt, functions=None, system_message=self.system_message),
+ messages=messages,
**args,
)).choices[0].message.content
+ self.write_log(f"Completion: \n\n{resp}")
else:
+ prompt = prune_raw_prompt_from_top(
+ args["model"], prompt, args["max_tokens"])
+ self.write_log(f"Prompt:\n\n{prompt}")
resp = (await openai.Completion.acreate(
- prompt=prune_raw_prompt_from_top(
- args["model"], prompt, args["max_tokens"]),
+ prompt=prompt,
**args,
)).choices[0].text
+ self.write_log(f"Completion:\n\n{resp}")
return resp
diff --git a/continuedev/src/continuedev/libs/llm/proxy_server.py b/continuedev/src/continuedev/libs/llm/proxy_server.py
index 3ec492f3..91b5842a 100644
--- a/continuedev/src/continuedev/libs/llm/proxy_server.py
+++ b/continuedev/src/continuedev/libs/llm/proxy_server.py
@@ -1,10 +1,11 @@
+
from functools import cached_property
import json
-from typing import Any, Coroutine, Dict, Generator, List, Literal, Union
+from typing import Any, Callable, Coroutine, Dict, Generator, List, Literal, Union
import aiohttp
from ...core.main import ChatMessage
from ..llm import LLM
-from ..util.count_tokens import DEFAULT_ARGS, DEFAULT_MAX_TOKENS, compile_chat_messages, CHAT_MODELS, count_tokens
+from ..util.count_tokens import DEFAULT_ARGS, DEFAULT_MAX_TOKENS, compile_chat_messages, CHAT_MODELS, count_tokens, format_chat_messages
import certifi
import ssl
@@ -19,12 +20,14 @@ class ProxyServer(LLM):
unique_id: str
name: str
default_model: Literal["gpt-3.5-turbo", "gpt-4"]
+ write_log: Callable[[str], None]
- def __init__(self, unique_id: str, default_model: Literal["gpt-3.5-turbo", "gpt-4"], system_message: str = None):
+ def __init__(self, unique_id: str, default_model: Literal["gpt-3.5-turbo", "gpt-4"], system_message: str = None, write_log: Callable[[str], None] = None):
self.unique_id = unique_id
self.default_model = default_model
self.system_message = system_message
self.name = default_model
+ self.write_log = write_log
@property
def default_args(self):
@@ -36,14 +39,19 @@ class ProxyServer(LLM):
async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
args = {**self.default_args, **kwargs}
+ messages = compile_chat_messages(
+ args["model"], with_history, args["max_tokens"], prompt, functions=None, system_message=self.system_message)
+ self.write_log(f"Prompt: \n\n{format_chat_messages(messages)}")
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/complete", json={
- "messages": compile_chat_messages(args["model"], with_history, args["max_tokens"], prompt, functions=None, system_message=self.system_message),
+ "messages": messages,
"unique_id": self.unique_id,
**args
}) as resp:
try:
- return await resp.text()
+ response_text = await resp.text()
+ self.write_log(f"Completion: \n\n{response_text}")
+ return response_text
except:
raise Exception(await resp.text())
@@ -51,6 +59,7 @@ class ProxyServer(LLM):
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
self.default_model, messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message)
+ self.write_log(f"Prompt: \n\n{format_chat_messages(messages)}")
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/stream_chat", json={
@@ -59,6 +68,7 @@ class ProxyServer(LLM):
**args
}) as resp:
# This is streaming application/json instead of text/event-stream
+ completion = ""
async for line in resp.content.iter_chunks():
if line[1]:
try:
@@ -67,14 +77,19 @@ class ProxyServer(LLM):
chunks = json_chunk.split("\n")
for chunk in chunks:
if chunk.strip() != "":
- yield json.loads(chunk)
+ loaded_chunk = json.loads(chunk)
+ yield loaded_chunk
+ if "content" in loaded_chunk:
+ completion += loaded_chunk["content"]
except:
raise Exception(str(line[0]))
+ self.write_log(f"Completion: \n\n{completion}")
async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
self.default_model, with_history, args["max_tokens"], prompt, functions=args.get("functions", None), system_message=self.system_message)
+ self.write_log(f"Prompt: \n\n{format_chat_messages(messages)}")
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/stream_complete", json={
@@ -82,9 +97,13 @@ class ProxyServer(LLM):
"unique_id": self.unique_id,
**args
}) as resp:
+ completion = ""
async for line in resp.content.iter_any():
if line:
try:
- yield line.decode("utf-8")
+ decoded_line = line.decode("utf-8")
+ yield decoded_line
+ completion += decoded_line
except:
raise Exception(str(line))
+ self.write_log(f"Completion: \n\n{completion}")
diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py
index 1d5d6729..13de7990 100644
--- a/continuedev/src/continuedev/libs/util/count_tokens.py
+++ b/continuedev/src/continuedev/libs/util/count_tokens.py
@@ -107,3 +107,10 @@ def compile_chat_messages(model: str, msgs: List[ChatMessage], max_tokens: int,
})
return history
+
+
+def format_chat_messages(messages: List[ChatMessage]) -> str:
+ formatted = ""
+ for msg in messages:
+ formatted += f"<{msg['role'].capitalize()}>\n{msg['content']}\n\n"
+ return formatted
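For reference, a quick standalone sketch of the log format this helper produces (the function is copied here for illustration; it is applied to the role/content dicts returned by `compile_chat_messages`, despite the `List[ChatMessage]` annotation):

```python
def format_chat_messages(messages) -> str:
    # One "<Role>" header per message, with a blank line between messages
    formatted = ""
    for msg in messages:
        formatted += f"<{msg['role'].capitalize()}>\n{msg['content']}\n\n"
    return formatted

print(format_chat_messages([
    {"role": "system", "content": "You are a helpful coding assistant."},
    {"role": "user", "content": "Write a hello world in Python."},
]))
# <System>
# You are a helpful coding assistant.
#
# <User>
# Write a hello world in Python.
```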
diff --git a/continuedev/src/continuedev/server/gui.py b/continuedev/src/continuedev/server/gui.py
index 4201353e..ae57c0b6 100644
--- a/continuedev/src/continuedev/server/gui.py
+++ b/continuedev/src/continuedev/server/gui.py
@@ -99,6 +99,8 @@ class GUIProtocolServer(AbstractGUIProtocolServer):
self.on_set_editing_at_indices(data["indices"])
elif message_type == "set_pinned_at_indices":
self.on_set_pinned_at_indices(data["indices"])
+ elif message_type == "show_logs_at_index":
+ self.on_show_logs_at_index(data["index"])
except Exception as e:
print(e)
@@ -166,6 +168,13 @@ class GUIProtocolServer(AbstractGUIProtocolServer):
indices), self.session.autopilot.continue_sdk.ide.unique_id
)
+ def on_show_logs_at_index(self, index: int):
+ name = f"continue_logs.txt"
+ logs = "\n\n############################################\n\n".join(
+ ["This is a log of the exact prompt/completion pairs sent/received from the LLM during this step"] + self.session.autopilot.continue_sdk.history.timeline[index].logs)
+ create_async_task(
+ self.session.autopilot.ide.showVirtualFile(name, logs))
+
@router.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket, session: Session = Depends(websocket_session)):
diff --git a/continuedev/src/continuedev/server/ide.py b/continuedev/src/continuedev/server/ide.py
index 43538407..aeff5623 100644
--- a/continuedev/src/continuedev/server/ide.py
+++ b/continuedev/src/continuedev/server/ide.py
@@ -224,6 +224,12 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
"open": open
})
+ async def showVirtualFile(self, name: str, contents: str):
+ await self._send_json("showVirtualFile", {
+ "name": name,
+ "contents": contents
+ })
+
async def setSuggestionsLocked(self, filepath: str, locked: bool = True):
# Lock suggestions in the file so they don't ruin the offset before others are inserted
await self._send_json("setSuggestionsLocked", {
@@ -288,6 +294,8 @@ class IdeProtocolServer(AbstractIdeProtocolServer):
pass
def __get_autopilot(self):
+ if self.session_id not in self.session_manager.sessions:
+ return None
return self.session_manager.sessions[self.session_id].autopilot
def onFileEdits(self, edits: List[FileEditWithFullContents]):
@@ -442,7 +450,8 @@ async def websocket_endpoint(websocket: WebSocket, session_id: str = None):
if session_id is not None:
session_manager.registered_ides[session_id] = ideProtocolServer
other_msgs = await ideProtocolServer.initialize(session_id)
- capture_event(ideProtocolServer.unique_id, "session_started", { "session_id": ideProtocolServer.session_id })
+ capture_event(ideProtocolServer.unique_id, "session_started", {
+ "session_id": ideProtocolServer.session_id})
for other_msg in other_msgs:
handle_msg(other_msg)
@@ -463,5 +472,6 @@ async def websocket_endpoint(websocket: WebSocket, session_id: str = None):
if websocket.client_state != WebSocketState.DISCONNECTED:
await websocket.close()
- capture_event(ideProtocolServer.unique_id, "session_ended", { "session_id": ideProtocolServer.session_id })
+ capture_event(ideProtocolServer.unique_id, "session_ended", {
+ "session_id": ideProtocolServer.session_id})
session_manager.registered_ides.pop(ideProtocolServer.session_id)
diff --git a/continuedev/src/continuedev/server/ide_protocol.py b/continuedev/src/continuedev/server/ide_protocol.py
index d0fb0bf8..0ae7e7fa 100644
--- a/continuedev/src/continuedev/server/ide_protocol.py
+++ b/continuedev/src/continuedev/server/ide_protocol.py
@@ -23,6 +23,10 @@ class AbstractIdeProtocolServer(ABC):
async def setFileOpen(self, filepath: str, open: bool = True):
"""Set whether a file is open"""
+ @abstractmethod
+ async def showVirtualFile(self, name: str, contents: str):
+ """Show a virtual file"""
+
@abstractmethod
async def setSuggestionsLocked(self, filepath: str, locked: bool = True):
"""Set whether suggestions are locked"""
diff --git a/continuedev/src/continuedev/server/session_manager.py b/continuedev/src/continuedev/server/session_manager.py
index 6d109ca6..90172a4e 100644
--- a/continuedev/src/continuedev/server/session_manager.py
+++ b/continuedev/src/continuedev/server/session_manager.py
@@ -100,7 +100,7 @@ class SessionManager:
if session_id not in self.sessions:
raise SessionNotFound(f"Session {session_id} not found")
if self.sessions[session_id].ws is None:
- print(f"Session {session_id} has no websocket")
+ # print(f"Session {session_id} has no websocket")
return
await self.sessions[session_id].ws.send_json({
diff --git a/extension/package-lock.json b/extension/package-lock.json
index e67fa950..107a7001 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.178",
+ "version": "0.0.179",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.178",
+ "version": "0.0.179",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 121423ed..89c6daf5 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.178",
+ "version": "0.0.179",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
diff --git a/extension/react-app/src/components/StepContainer.tsx b/extension/react-app/src/components/StepContainer.tsx
index 93b90f0d..bc8665fd 100644
--- a/extension/react-app/src/components/StepContainer.tsx
+++ b/extension/react-app/src/components/StepContainer.tsx
@@ -1,4 +1,4 @@
-import { useEffect, useRef, useState } from "react";
+import { useContext, useEffect, useRef, useState } from "react";
import styled, { keyframes } from "styled-components";
import {
appear,
@@ -13,12 +13,14 @@ import {
ChevronRight,
ArrowPath,
XMark,
+ MagnifyingGlass,
} from "@styled-icons/heroicons-outline";
import { StopCircle } from "@styled-icons/heroicons-solid";
import { HistoryNode } from "../../../schema/HistoryNode";
import HeaderButtonWithText from "./HeaderButtonWithText";
import MarkdownPreview from "@uiw/react-markdown-preview";
import { getMetaKeyLabel, isMetaEquivalentKeyPressed } from "../util";
+import { GUIClientContext } from "../App";
interface StepContainerProps {
historyNode: HistoryNode;
@@ -32,6 +34,7 @@ interface StepContainerProps {
onToggle: () => void;
isFirst: boolean;
isLast: boolean;
+ index: number;
}
// #region styled components
@@ -140,6 +143,7 @@ function StepContainer(props: StepContainerProps) {
const naturalLanguageInputRef = useRef(null);
const userInputRef = useRef(null);
const isUserInput = props.historyNode.step.name === "UserInputStep";
+ const client = useContext(GUIClientContext);
useEffect(() => {
if (userInputRef?.current) {
@@ -210,6 +214,17 @@ function StepContainer(props: StepContainerProps) {
*/}
<>
+ {(props.historyNode.logs as any)?.length > 0 && (
+ <HeaderButtonWithText
+ text="Logs"
+ onClick={(e) => {
+ e.stopPropagation();
+ client?.showLogsAtIndex(props.index);
+ }}
+ >
+ <MagnifyingGlass size="1.4em" />
+ </HeaderButtonWithText>
+ )}
<HeaderButtonWithText
onClick={(e) => {
e.stopPropagation();
diff --git a/extension/react-app/src/hooks/ContinueGUIClientProtocol.ts b/extension/react-app/src/hooks/ContinueGUIClientProtocol.ts
index a179c2bf..6c0df8fc 100644
--- a/extension/react-app/src/hooks/ContinueGUIClientProtocol.ts
+++ b/extension/react-app/src/hooks/ContinueGUIClientProtocol.ts
@@ -28,6 +28,8 @@ abstract class AbstractContinueGUIClientProtocol {
abstract setPinnedAtIndices(indices: number[]): void;
abstract toggleAddingHighlightedCode(): void;
+
+ abstract showLogsAtIndex(index: number): void;
}
export default AbstractContinueGUIClientProtocol;
diff --git a/extension/react-app/src/hooks/useContinueGUIProtocol.ts b/extension/react-app/src/hooks/useContinueGUIProtocol.ts
index 2060dd7f..fef5b2e1 100644
--- a/extension/react-app/src/hooks/useContinueGUIProtocol.ts
+++ b/extension/react-app/src/hooks/useContinueGUIProtocol.ts
@@ -86,6 +86,10 @@ class ContinueGUIClientProtocol extends AbstractContinueGUIClientProtocol {
toggleAddingHighlightedCode(): void {
this.messenger.send("toggle_adding_highlighted_code", {});
}
+
+ showLogsAtIndex(index: number): void {
+ this.messenger.send("show_logs_at_index", { index });
+ }
}
export default ContinueGUIClientProtocol;
diff --git a/extension/react-app/src/pages/gui.tsx b/extension/react-app/src/pages/gui.tsx
index c35cf21b..fccc9b4b 100644
--- a/extension/react-app/src/pages/gui.tsx
+++ b/extension/react-app/src/pages/gui.tsx
@@ -311,6 +311,7 @@ function GUI(props: GUIProps) {
)
) : (
<StepContainer
+ index={index}
diff --git a/extension/src/continueIdeClient.ts b/extension/src/continueIdeClient.ts
--- a/extension/src/continueIdeClient.ts
+++ b/extension/src/continueIdeClient.ts
+const continueVirtualDocumentScheme = "continue";
+ const documentContentProvider = new (class
+ implements vscode.TextDocumentContentProvider {
+ onDidChangeEmitter = new vscode.EventEmitter<vscode.Uri>();
+ onDidChange = this.onDidChangeEmitter.event;
+
+ provideTextDocumentContent(uri: vscode.Uri): string {
+ return uri.query;
+ }
+ })();
+ context.subscriptions.push(
+ vscode.workspace.registerTextDocumentContentProvider(
+ continueVirtualDocumentScheme,
+ documentContentProvider
+ )
+ );
}
async handleMessage(
@@ -200,6 +221,9 @@ class IdeProtocolClient {
this.openFile(data.filepath);
// TODO: Close file if False
break;
+ case "showVirtualFile":
+ this.showVirtualFile(data.name, data.contents);
+ break;
case "setSuggestionsLocked":
this.setSuggestionsLocked(data.filepath, data.locked);
break;
@@ -295,6 +319,20 @@ class IdeProtocolClient {
openEditorAndRevealRange(filepath, undefined, vscode.ViewColumn.One);
}
+ showVirtualFile(name: string, contents: string) {
+ vscode.workspace
+ .openTextDocument(
+ vscode.Uri.parse(
+ `${continueVirtualDocumentScheme}:${name}?${encodeURIComponent(
+ contents
+ )}`
+ )
+ )
+ .then((doc) => {
+ vscode.window.showTextDocument(doc, { preview: false });
+ });
+ }
+
setSuggestionsLocked(filepath: string, locked: boolean) {
editorSuggestionsLocked.set(filepath, locked);
// TODO: Rerender?
--
cgit v1.2.3-70-g09d2
From a7ab1918f8894c5e5f71e31a88a21680e6e1d2dc Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Tue, 18 Jul 2023 11:53:06 -0700
Subject: context length fix and cleanup
---
.../src/continuedev/libs/llm/proxy_server.py | 2 +-
.../src/continuedev/libs/util/count_tokens.py | 75 +++++++++++++++-------
continuedev/src/continuedev/steps/chat.py | 19 +++---
3 files changed, 63 insertions(+), 33 deletions(-)
diff --git a/continuedev/src/continuedev/libs/llm/proxy_server.py b/continuedev/src/continuedev/libs/llm/proxy_server.py
index 91b5842a..18e0e6f4 100644
--- a/continuedev/src/continuedev/libs/llm/proxy_server.py
+++ b/continuedev/src/continuedev/libs/llm/proxy_server.py
@@ -58,7 +58,7 @@ class ProxyServer(LLM):
async def stream_chat(self, messages: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, Generator[Union[Any, List, Dict], None, None]]:
args = {**self.default_args, **kwargs}
messages = compile_chat_messages(
- self.default_model, messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message)
+ args["model"], messages, args["max_tokens"], None, functions=args.get("functions", None), system_message=self.system_message)
self.write_log(f"Prompt: \n\n{format_chat_messages(messages)}")
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py
index 13de7990..c81d8aa4 100644
--- a/continuedev/src/continuedev/libs/util/count_tokens.py
+++ b/continuedev/src/continuedev/libs/util/count_tokens.py
@@ -46,9 +46,17 @@ def prune_raw_prompt_from_top(model: str, prompt: str, tokens_for_completion: in
return encoding.decode(tokens[-max_tokens:])
+def count_chat_message_tokens(model: str, chat_message: ChatMessage) -> int:
+ # Doing simpler, safer version of what is here:
+ # https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
+ # every message follows <|start|>{role/name}\n{content}<|end|>\n
+ TOKENS_PER_MESSAGE = 4
+ return count_tokens(model, chat_message.content) + TOKENS_PER_MESSAGE
+
+
def prune_chat_history(model: str, chat_history: List[ChatMessage], max_tokens: int, tokens_for_completion: int):
total_tokens = tokens_for_completion + \
- sum(count_tokens(model, message.content)
+ sum(count_chat_message_tokens(model, message)
for message in chat_history)
# 1. Replace beyond last 5 messages with summary
@@ -74,37 +82,58 @@ def prune_chat_history(model: str, chat_history: List[ChatMessage], max_tokens:
message.content = message.summary
i += 1
- # 4. Remove entire messages in the last 5
- while total_tokens > max_tokens and len(chat_history) > 0:
+ # 4. Remove entire messages in the last 5, except last 1
+ while total_tokens > max_tokens and len(chat_history) > 1:
message = chat_history.pop(0)
total_tokens -= count_tokens(model, message.content)
+ # 5. Truncate last message
+ if total_tokens > max_tokens and len(chat_history) > 0:
+ message = chat_history[0]
+ message.content = prune_raw_prompt_from_top(
+ model, message.content, tokens_for_completion)
+ total_tokens = max_tokens
+
return chat_history
+# In case we've missed weird edge cases
+TOKEN_BUFFER_FOR_SAFETY = 100
+
+
def compile_chat_messages(model: str, msgs: List[ChatMessage], max_tokens: int, prompt: Union[str, None] = None, functions: Union[List, None] = None, system_message: Union[str, None] = None) -> List[Dict]:
- prompt_tokens = count_tokens(model, prompt)
+ """
+ The total number of tokens is system_message + sum(msgs) + functions + prompt after it is converted to a message
+ """
+ if prompt is not None:
+ prompt_msg = ChatMessage(role="user", content=prompt, summary=prompt)
+ msgs += [prompt_msg]
+
+ if system_message is not None:
+ # NOTE: System message takes second precedence to user prompt, so it is placed just before
+ # but move back to start after processing
+ rendered_system_message = render_system_message(system_message)
+ system_chat_msg = ChatMessage(
+ role="system", content=rendered_system_message, summary=rendered_system_message)
+ # insert at second-to-last position
+ msgs.insert(-1, system_chat_msg)
+
+ # Add tokens from functions
+ function_tokens = 0
if functions is not None:
for function in functions:
- prompt_tokens += count_tokens(model, json.dumps(function))
-
- rendered_system_message = render_system_message(system_message)
-
- msgs = prune_chat_history(model,
- msgs, MAX_TOKENS_FOR_MODEL[model], prompt_tokens + max_tokens + count_tokens(model, rendered_system_message))
- history = []
- if system_message:
- history.append({
- "role": "system",
- "content": rendered_system_message
- })
- history += [msg.to_dict(with_functions=functions is not None)
- for msg in msgs]
- if prompt:
- history.append({
- "role": "user",
- "content": prompt
- })
+ function_tokens += count_tokens(model, json.dumps(function))
+
+ msgs = prune_chat_history(
+ model, msgs, MAX_TOKENS_FOR_MODEL[model], function_tokens + max_tokens + TOKEN_BUFFER_FOR_SAFETY)
+
+ history = [msg.to_dict(with_functions=functions is not None)
+ for msg in msgs]
+
+ # Move system message back to start
+ if system_message is not None and len(history) >= 2 and history[-2]["role"] == "system":
+ system_message_dict = history.pop(-2)
+ history.insert(0, system_message_dict)
return history
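To make the new token budgeting concrete, here is a small standalone sketch of the arithmetic `compile_chat_messages` now hands to `prune_chat_history`; the context-window, completion, and function-schema numbers below are examples, not values taken from this diff:

```python
# Illustrative arithmetic only; real values come from MAX_TOKENS_FOR_MODEL, the caller's
# max_tokens argument, and tiktoken-based count_tokens over the function JSON.
TOKENS_PER_MESSAGE = 4          # per-message overhead used by count_chat_message_tokens
TOKEN_BUFFER_FOR_SAFETY = 100   # slack for edge cases the estimate misses

context_window = 4096           # e.g. a gpt-3.5-turbo-sized model (example number)
completion_budget = 1024        # tokens reserved for the model's reply (example number)
function_tokens = 200           # count_tokens over json.dumps of each function (example number)

# prune_chat_history must fit system message + history + prompt-as-user-message into
# whatever remains after everything that is "reserved":
tokens_for_completion = function_tokens + completion_budget + TOKEN_BUFFER_FOR_SAFETY
history_budget = context_window - tokens_for_completion
print(history_budget)  # 2772 tokens available for the chat history
```

If the history estimate (content tokens plus the 4-token per-message overhead) still exceeds that budget, messages are summarized, then dropped oldest-first down to the last one, and finally the last remaining message is truncated from the top.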
diff --git a/continuedev/src/continuedev/steps/chat.py b/continuedev/src/continuedev/steps/chat.py
index 7c6b42db..8c03969e 100644
--- a/continuedev/src/continuedev/steps/chat.py
+++ b/continuedev/src/continuedev/steps/chat.py
@@ -35,23 +35,24 @@ class SimpleChatStep(Step):
if sdk.current_step_was_deleted():
# So that the message doesn't disappear
self.hide = False
- return
+ break
if "content" in chunk:
self.description += chunk["content"]
completion += chunk["content"]
await sdk.update_ui()
finally:
- await generator.aclose()
+ self.name = remove_quotes_and_escapes(await sdk.models.gpt35.complete(
+ f"Write a short title for the following chat message: {self.description}"))
- self.name = remove_quotes_and_escapes(await sdk.models.gpt35.complete(
- f"Write a short title for the following chat message: {self.description}"))
+ self.chat_context.append(ChatMessage(
+ role="assistant",
+ content=completion,
+ summary=self.name
+ ))
- self.chat_context.append(ChatMessage(
- role="assistant",
- content=completion,
- summary=self.name
- ))
+ # TODO: Never actually closing.
+ await generator.aclose()
class AddFileStep(Step):
--
cgit v1.2.3-70-g09d2
From 53ac7b93f456b471eaa7f03e015e2d8c0ef393e5 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Tue, 18 Jul 2023 14:02:03 -0700
Subject: error handle on invalid config file, don't immediately show loading
message
---
continuedev/src/continuedev/core/sdk.py | 19 ++++++++++-
extension/package-lock.json | 4 +--
extension/package.json | 2 +-
extension/schema/FullState.d.ts | 2 ++
extension/schema/History.d.ts | 2 ++
extension/schema/HistoryNode.d.ts | 2 ++
extension/src/activation/activate.ts | 48 ++++++++++++++++++++--------
extension/src/activation/environmentSetup.ts | 2 +-
extension/src/extension.ts | 12 +------
schema/json/FullState.json | 8 +++++
schema/json/History.json | 8 +++++
schema/json/HistoryNode.json | 8 +++++
12 files changed, 88 insertions(+), 29 deletions(-)
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index 53214384..37a51efa 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -15,7 +15,7 @@ from ..libs.llm.anthropic import AnthropicLLM
from ..libs.llm.ggml import GGML
from .observation import Observation
from ..server.ide_protocol import AbstractIdeProtocolServer
-from .main import Context, ContinueCustomException, History, Step, ChatMessage
+from .main import Context, ContinueCustomException, History, HistoryNode, Step, ChatMessage
from ..steps.core.core import *
from ..libs.llm.proxy_server import ProxyServer
@@ -155,6 +155,23 @@ class ContinueSDK(AbstractContinueSDK):
@classmethod
async def create(cls, autopilot: Autopilot) -> "ContinueSDK":
sdk = ContinueSDK(autopilot)
+
+ try:
+ config = sdk._load_config()
+ sdk.config = config
+ except Exception as e:
+ print(e)
+ sdk.config = ContinueConfig()
+ msg_step = MessageStep(
+ name="Invalid Continue Config File", message=e.__repr__())
+ msg_step.description = e.__repr__()
+ sdk.history.add_node(HistoryNode(
+ step=msg_step,
+ observation=None,
+ depth=0,
+ active=False
+ ))
+
sdk.models = await Models.create(sdk)
return sdk
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 107a7001..6818857b 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "continue",
- "version": "0.0.179",
+ "version": "0.0.181",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "continue",
- "version": "0.0.179",
+ "version": "0.0.181",
"license": "Apache-2.0",
"dependencies": {
"@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 89c6daf5..b37bb1b6 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
"displayName": "Continue",
"pricing": "Free",
"description": "The open-source coding autopilot",
- "version": "0.0.179",
+ "version": "0.0.181",
"publisher": "Continue",
"engines": {
"vscode": "^1.67.0"
diff --git a/extension/schema/FullState.d.ts b/extension/schema/FullState.d.ts
index abb0832d..1b7b1f3b 100644
--- a/extension/schema/FullState.d.ts
+++ b/extension/schema/FullState.d.ts
@@ -21,6 +21,7 @@ export type ManageOwnChatContext = boolean;
export type Depth = number;
export type Deleted = boolean;
export type Active = boolean;
+export type Logs = string[];
export type Timeline = HistoryNode[];
export type CurrentIndex = number;
export type Active1 = boolean;
@@ -69,6 +70,7 @@ export interface HistoryNode {
depth: Depth;
deleted?: Deleted;
active?: Active;
+ logs?: Logs;
[k: string]: unknown;
}
export interface Step {
diff --git a/extension/schema/History.d.ts b/extension/schema/History.d.ts
index 6eb8ad81..90124f4a 100644
--- a/extension/schema/History.d.ts
+++ b/extension/schema/History.d.ts
@@ -21,6 +21,7 @@ export type ManageOwnChatContext = boolean;
export type Depth = number;
export type Deleted = boolean;
export type Active = boolean;
+export type Logs = string[];
export type Timeline = HistoryNode[];
export type CurrentIndex = number;
@@ -41,6 +42,7 @@ export interface HistoryNode {
depth: Depth;
deleted?: Deleted;
active?: Active;
+ logs?: Logs;
[k: string]: unknown;
}
export interface Step {
diff --git a/extension/schema/HistoryNode.d.ts b/extension/schema/HistoryNode.d.ts
index bc77be89..5ad32061 100644
--- a/extension/schema/HistoryNode.d.ts
+++ b/extension/schema/HistoryNode.d.ts
@@ -21,6 +21,7 @@ export type ManageOwnChatContext = boolean;
export type Depth = number;
export type Deleted = boolean;
export type Active = boolean;
+export type Logs = string[];
/**
* A point in history, a list of which make up History
@@ -31,6 +32,7 @@ export interface HistoryNode1 {
depth: Depth;
deleted?: Deleted;
active?: Active;
+ logs?: Logs;
[k: string]: unknown;
}
export interface Step {
diff --git a/extension/src/activation/activate.ts b/extension/src/activation/activate.ts
index 5c6ffa02..8ea08e89 100644
--- a/extension/src/activation/activate.ts
+++ b/extension/src/activation/activate.ts
@@ -36,22 +36,44 @@ export async function activateExtension(context: vscode.ExtensionContext) {
})
.catch((e) => console.log("Error checking for extension updates: ", e));
- // Start the Python server
- await new Promise((resolve, reject) => {
- vscode.window.withProgress(
- {
- location: vscode.ProgressLocation.Notification,
- title:
- "Starting Continue Server... (it may take a minute to download Python packages)",
- cancellable: false,
- },
- async (progress, token) => {
- await startContinuePythonServer();
- resolve(null);
+ // Wrap the server start logic in a new Promise
+ const serverStartPromise = new Promise((resolve, reject) => {
+ let serverStarted = false;
+
+ // Start the server and set serverStarted to true when done
+ startContinuePythonServer().then(() => {
+ serverStarted = true;
+ resolve(null);
+ });
+
+ // Wait for 2 seconds
+ setTimeout(() => {
+ // If the server hasn't started after 2 seconds, show the notification
+ if (!serverStarted) {
+ vscode.window.withProgress(
+ {
+ location: vscode.ProgressLocation.Notification,
+ title:
+ "Starting Continue Server... (it may take a minute to download Python packages)",
+ cancellable: false,
+ },
+ async (progress, token) => {
+ // Wait for the server to start
+ while (!serverStarted) {
+ await new Promise((innerResolve) =>
+ setTimeout(innerResolve, 1000)
+ );
+ }
+ return Promise.resolve();
+ }
+ );
}
- );
+ }, 2000);
});
+ // Await the server start promise
+ await serverStartPromise;
+
// Register commands and providers
sendTelemetryEvent(TelemetryEvent.ExtensionActivated);
registerAllCodeLensProviders(context);
diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts
index 69a3b75a..c341db39 100644
--- a/extension/src/activation/environmentSetup.ts
+++ b/extension/src/activation/environmentSetup.ts
@@ -39,7 +39,7 @@ async function retryThenFail(
// Show corresponding error message depending on the platform
let msg =
- "Failed to set up Continue extension. Please email nate@continue.dev and we'll get this fixed ASAP!";
+ "Failed to set up Continue extension. Please email hi@continue.dev and we'll get this fixed ASAP!";
try {
switch (process.platform) {
case "win32":
diff --git a/extension/src/extension.ts b/extension/src/extension.ts
index 6959ec05..f2e580a1 100644
--- a/extension/src/extension.ts
+++ b/extension/src/extension.ts
@@ -17,15 +17,5 @@ async function dynamicImportAndActivate(context: vscode.ExtensionContext) {
}
export function activate(context: vscode.ExtensionContext) {
- // Only show progress if we have to setup
- vscode.window.withProgress(
- {
- location: vscode.ProgressLocation.Notification,
- title: "Setting up Continue extension...",
- cancellable: false,
- },
- async () => {
- dynamicImportAndActivate(context);
- }
- );
+ dynamicImportAndActivate(context);
}
diff --git a/schema/json/FullState.json b/schema/json/FullState.json
index 5a7e9d10..62ed337b 100644
--- a/schema/json/FullState.json
+++ b/schema/json/FullState.json
@@ -120,6 +120,14 @@
"title": "Active",
"default": true,
"type": "boolean"
+ },
+ "logs": {
+ "title": "Logs",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
},
"required": [
diff --git a/schema/json/History.json b/schema/json/History.json
index ee797412..56415520 100644
--- a/schema/json/History.json
+++ b/schema/json/History.json
@@ -120,6 +120,14 @@
"title": "Active",
"default": true,
"type": "boolean"
+ },
+ "logs": {
+ "title": "Logs",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
},
"required": [
diff --git a/schema/json/HistoryNode.json b/schema/json/HistoryNode.json
index d0e12ac5..81e239b3 100644
--- a/schema/json/HistoryNode.json
+++ b/schema/json/HistoryNode.json
@@ -120,6 +120,14 @@
"title": "Active",
"default": true,
"type": "boolean"
+ },
+ "logs": {
+ "title": "Logs",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
},
"required": [
--
cgit v1.2.3-70-g09d2
From 627f260cee108476e5335584e81f5e36f3e248cb Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Tue, 18 Jul 2023 16:31:39 -0700
Subject: CONTRIBUTING.md
---
CONTRIBUTING.md | 94 +++++++++++++++++++++
continuedev/src/continuedev/core/main.py | 6 +-
extension/react-app/src/App.tsx | 8 +-
.../src/hooks/AbstractContinueGUIClientProtocol.ts | 35 ++++++++
.../src/hooks/ContinueGUIClientProtocol.ts | 93 +++++++++++++++++----
.../react-app/src/hooks/useContinueGUIProtocol.ts | 95 ----------------------
extension/react-app/src/hooks/useWebsocket.ts | 2 +-
extension/src/activation/activate.ts | 14 +---
extension/src/continueIdeClient.ts | 10 +++
9 files changed, 222 insertions(+), 135 deletions(-)
create mode 100644 CONTRIBUTING.md
create mode 100644 extension/react-app/src/hooks/AbstractContinueGUIClientProtocol.ts
delete mode 100644 extension/react-app/src/hooks/useContinueGUIProtocol.ts
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000..7e49dc2d
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,94 @@
+# Contributing to Continue
+
+## Table of Contents
+
+- [Continue Architecture](#continue-architecture)
+- [Core Concepts](#core-concepts)
+ - [Step](#step)
+- [Continue VS Code Client](#continue-vs-code-client)
+- [Continue IDE Websockets Protocol](#continue-ide-websockets-protocol)
+- [Continue GUI Websockets Protocol](#continue-gui-websockets-protocol)
+- [Ways to Contribute](#ways-to-contribute)
+ - [Report Bugs](#report-bugs)
+ - [Suggest Enhancements](#suggest-enhancements)
+ - [Updating / Improving Documentation](#updating--improving-documentation)
+
+## Continue Architecture
+
+Continue consists of 3 components, designed so that Continue can easily be extended to work in any IDE:
+
+1. **Continue Server** - The Continue Server is responsible for keeping state, running the autopilot loop which takes actions, and communicating between the IDE and GUI.
+
+2. **Continue IDE Client** - The Continue IDE Client is a plugin for the IDE which implements the Continue IDE Protocol. This allows the server to request actions to be taken within the IDE, for example if `sdk.ide.setFileOpen("main.py")` is called on the server, it will communicate over websockets with the IDE, which will open the file `main.py`. The first IDE Client we have built is for VS Code, but we plan to build clients for other IDEs in the future. The IDE Client must 1. implement the websockets protocol, as is done [here](./extension/src/continueIdeClient.ts) for VS Code, 2. launch the Continue Server, like [here](./extension/src/activation/environmentSetup.ts), and 3. display the Continue GUI in a sidebar, like [here](./extension/src/debugPanel.ts).
+
+3. **Continue GUI** - The Continue GUI is a React application that gives the user control over Continue. It displays the history of Steps, shows what context is included in the current Step, and lets the user enter natural language or slash commands to initiate new Steps. The GUI communicates with the Continue Server over its own websocket connection.
+
+It is important that the IDE Client and GUI never communicate except when the IDE Client initially sets up the GUI. This ensures that the server is the source-of-truth for state, and that we can easily extend Continue to work in other IDEs.
+
+
+
+## Core Concepts
+
+All of Continue's logic happens inside of the server, and it is built around a few core concepts. Most of these are Pydantic Models defined in [core/main.py](./continuedev/src/continuedev/core/main.py).
+
+### `Step`
+
+Everything in Continue is a "Step". The `Step` class defines 2 methods:
+
+1. `async def run(self, sdk: ContinueSDK) -> Coroutine[Observation, None, None]` - This method defines what happens when the Step is run. It has access to the Continue SDK, which lets you take actions in the IDE, call LLMs, run nested Steps, and more. Optionally, a Step can return an `Observation` object, which a `Policy` can use to make decisions about what to do next.
+
+2. `async def describe(self, models: Models) -> Coroutine[str, None, None]` - After each Step is run, this method is called to asynchronously generate a summary title for the step. A `Models` object is passed so that you have access to LLMs to summarize for you.
+
+Steps are designed to be composable, so that you can easily build new Steps by combining existing ones. And because they are Pydantic models, they can instantly be exposed as tools usable by an LLM, for example with OpenAI's function-calling functionality (see [ChatWithFunctions](./continuedev/src/continuedev/steps/chat.py) for an example).
+
+Some of the most commonly used Steps are:
+
+- [`SimpleChatStep`](./continuedev/src/continuedev/steps/chat.py) - This is the default Step that is run when the user enters natural language input. It takes the user's input and runs it through the default LLM, then displays the result in the GUI.
+
+- [`EditHighlightedCodeStep`](./continuedev/src/continuedev/steps/core/core.py) - This is the Step run when a user highlights code, enters natural language, and presses CMD/CTRL+ENTER, or uses the slash command '/edit'. It opens a side-by-side diff editor, where updated code is streamed to fulfil the user's request.
+
+### `Autopilot`
+
+### `Observation`
+
+### `Policy`
+
+### Continue VS Code Client
+
+The starting point for the VS Code extension is [activate.ts](./extension/src/activation/activate.ts). The `activateExtension` function here will:
+
+1. Check whether the current version of the extension is up-to-date and, if not, display a notification
+
+2. Initialize the Continue IDE Client and establish a connection with the Continue Server
+
+3. Load the Continue GUI in the sidebar of the IDE and begin a new session
+
+### Continue IDE Websockets Protocol
+
+On the IDE side, this is implemented in [continueIdeClient.ts](./extension/src/continueIdeClient.ts). On the server side, this is implemented in [ide.py](./continuedev/src/continuedev/server/ide.py). You can see [ide_protocol.py](./continuedev/src/continuedev/server/ide_protocol.py) for the protocol definition.
+
+### Continue GUI Websockets Protocol
+
+On the GUI side, this is implemented in [ContinueGUIClientProtocol.ts](./extension/react-app/src/hooks/ContinueGUIClientProtocol.ts). On the server side, this is implemented in [gui.py](./continuedev/src/continuedev/server/gui.py). You can see [gui_protocol.py](./continuedev/src/continuedev/server/gui_protocol.py) or [AbstractContinueGUIClientProtocol.ts](./extension/react-app/src/hooks/AbstractContinueGUIClientProtocol.ts) for the protocol definition.
+
+When state is updated on the server, we currently send the entirety of the object over websockets to the GUI. This will of course have to be improved soon. The state object, `FullState`, is defined in [core/main.py](./continuedev/src/continuedev/core/main.py) and includes:
+
+- `history`, a record of previously run Steps. Displayed in order in the sidebar.
+- `active`, whether the autopilot is currently running a step. Displayed as a loader while step is running.
+- `user_input_queue`, the queue of user inputs that have not yet been processed due to waiting for previous Steps to complete. Displayed below the `active` loader until popped from the queue.
+- `default_model`, the default model used for completions. Displayed as a toggleable button on the bottom of the GUI.
+- `highlighted_ranges`, the ranges of code that have been selected to include as context. Displayed just above the main text input.
+- `slash_commands`, the list of available slash commands. Displayed in the main text input dropdown.
+- `adding_highlighted_code`, whether highlighting of new code for context is locked. Displayed as a button adjacent to `highlighted_ranges`.
+
+Updates are sent with `await sdk.update_ui()` when needed explicitly or `await autopilot.update_subscribers()` automatically between each Step. The GUI can listen for state updates with `ContinueGUIClientProtocol.onStateUpdate()`.
+
+## Ways to Contribute
+
+### Report Bugs
+
+### Suggest Enhancements
+
+### Updating / Improving Documentation
+
+Continue is continuously improving, but a feature isn't complete until it is reflected in the documentation!
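Tying the `Step` section above to code, here is a minimal sketch of a custom Step. The import paths and the step itself are illustrative, not part of the codebase; the SDK calls it uses (`sdk.ide.setFileOpen`, `sdk.models.gpt35.complete`, `sdk.update_ui`) are the ones referenced elsewhere in this document.

```python
# Illustrative import paths; adjust to wherever Step, ContinueSDK, and Models live in your checkout.
from continuedev.src.continuedev.core.main import Step
from continuedev.src.continuedev.core.sdk import ContinueSDK, Models


class ExplainFileStep(Step):
    """Toy Step: open a file in the IDE and ask an LLM to explain it."""

    filepath: str  # Steps are Pydantic models, so inputs are declared as fields

    async def describe(self, models: Models) -> str:
        # Called after run() to produce the title shown in the GUI timeline
        return f"Explained {self.filepath}"

    async def run(self, sdk: ContinueSDK):
        # IDE actions go through the SDK, e.g. the setFileOpen call mentioned above
        await sdk.ide.setFileOpen(self.filepath)
        # LLM access also goes through the SDK, the same pattern SimpleChatStep uses
        self.description = await sdk.models.gpt35.complete(
            f"Briefly explain what the file {self.filepath} is for."
        )
        # Push the updated description to the GUI; an Observation may optionally be returned
        await sdk.update_ui()
```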
diff --git a/continuedev/src/continuedev/core/main.py b/continuedev/src/continuedev/core/main.py
index 5931d978..50d01f8d 100644
--- a/continuedev/src/continuedev/core/main.py
+++ b/continuedev/src/continuedev/core/main.py
@@ -259,10 +259,8 @@ class Step(ContinueBaseModel):
def dict(self, *args, **kwargs):
d = super().dict(*args, **kwargs)
- if self.description is not None:
- d["description"] = self.description
- else:
- d["description"] = ""
+ # Make sure description is always a string
+ d["description"] = self.description or ""
return d
@validator("name", pre=True, always=True)
diff --git a/extension/react-app/src/App.tsx b/extension/react-app/src/App.tsx
index c9bd42e0..aa462171 100644
--- a/extension/react-app/src/App.tsx
+++ b/extension/react-app/src/App.tsx
@@ -2,7 +2,7 @@ import DebugPanel from "./components/DebugPanel";
import GUI from "./pages/gui";
import { createContext } from "react";
import useContinueGUIProtocol from "./hooks/useWebsocket";
-import ContinueGUIClientProtocol from "./hooks/useContinueGUIProtocol";
+import ContinueGUIClientProtocol from "./hooks/ContinueGUIClientProtocol";
export const GUIClientContext = createContext<
ContinueGUIClientProtocol | undefined
@@ -13,11 +13,7 @@ function App() {
return (
- <DebugPanel
- tabs={[
- { element: <GUI />, title: "GUI" }
- ]}
- />
+ <DebugPanel tabs={[{ element: <GUI />, title: "GUI" }]} />
);
}
diff --git a/extension/react-app/src/hooks/AbstractContinueGUIClientProtocol.ts b/extension/react-app/src/hooks/AbstractContinueGUIClientProtocol.ts
new file mode 100644
index 00000000..6c0df8fc
--- /dev/null
+++ b/extension/react-app/src/hooks/AbstractContinueGUIClientProtocol.ts
@@ -0,0 +1,35 @@
+abstract class AbstractContinueGUIClientProtocol {
+ abstract sendMainInput(input: string): void;
+
+ abstract reverseToIndex(index: number): void;
+
+ abstract sendRefinementInput(input: string, index: number): void;
+
+ abstract sendStepUserInput(input: string, index: number): void;
+
+ abstract onStateUpdate(state: any): void;
+
+ abstract onAvailableSlashCommands(
+ callback: (commands: { name: string; description: string }[]) => void
+ ): void;
+
+ abstract changeDefaultModel(model: string): void;
+
+ abstract sendClear(): void;
+
+ abstract retryAtIndex(index: number): void;
+
+ abstract deleteAtIndex(index: number): void;
+
+ abstract deleteContextAtIndices(indices: number[]): void;
+
+ abstract setEditingAtIndices(indices: number[]): void;
+
+ abstract setPinnedAtIndices(indices: number[]): void;
+
+ abstract toggleAddingHighlightedCode(): void;
+
+ abstract showLogsAtIndex(index: number): void;
+}
+
+export default AbstractContinueGUIClientProtocol;
diff --git a/extension/react-app/src/hooks/ContinueGUIClientProtocol.ts b/extension/react-app/src/hooks/ContinueGUIClientProtocol.ts
index 6c0df8fc..7d6c2a71 100644
--- a/extension/react-app/src/hooks/ContinueGUIClientProtocol.ts
+++ b/extension/react-app/src/hooks/ContinueGUIClientProtocol.ts
@@ -1,35 +1,92 @@
-abstract class AbstractContinueGUIClientProtocol {
- abstract sendMainInput(input: string): void;
+import AbstractContinueGUIClientProtocol from "./AbstractContinueGUIClientProtocol";
+import { Messenger, WebsocketMessenger } from "./messenger";
+import { VscodeMessenger } from "./vscodeMessenger";
- abstract reverseToIndex(index: number): void;
+class ContinueGUIClientProtocol extends AbstractContinueGUIClientProtocol {
+ messenger: Messenger;
+ // Server URL must contain the session ID param
+ serverUrlWithSessionId: string;
- abstract sendRefinementInput(input: string, index: number): void;
+ constructor(
+ serverUrlWithSessionId: string,
+ useVscodeMessagePassing: boolean
+ ) {
+ super();
+ this.serverUrlWithSessionId = serverUrlWithSessionId;
+ this.messenger = useVscodeMessagePassing
+ ? new VscodeMessenger(serverUrlWithSessionId)
+ : new WebsocketMessenger(serverUrlWithSessionId);
+ }
- abstract sendStepUserInput(input: string, index: number): void;
+ sendMainInput(input: string) {
+ this.messenger.send("main_input", { input });
+ }
- abstract onStateUpdate(state: any): void;
+ reverseToIndex(index: number) {
+ this.messenger.send("reverse_to_index", { index });
+ }
- abstract onAvailableSlashCommands(
+ sendRefinementInput(input: string, index: number) {
+ this.messenger.send("refinement_input", { input, index });
+ }
+
+ sendStepUserInput(input: string, index: number) {
+ this.messenger.send("step_user_input", { input, index });
+ }
+
+ onStateUpdate(callback: (state: any) => void) {
+ this.messenger.onMessageType("state_update", (data: any) => {
+ if (data.state) {
+ callback(data.state);
+ }
+ });
+ }
+
+ onAvailableSlashCommands(
callback: (commands: { name: string; description: string }[]) => void
- ): void;
+ ) {
+ this.messenger.onMessageType("available_slash_commands", (data: any) => {
+ if (data.commands) {
+ callback(data.commands);
+ }
+ });
+ }
- abstract changeDefaultModel(model: string): void;
+ changeDefaultModel(model: string) {
+ this.messenger.send("change_default_model", { model });
+ }
- abstract sendClear(): void;
+ sendClear() {
+ this.messenger.send("clear_history", {});
+ }
- abstract retryAtIndex(index: number): void;
+ retryAtIndex(index: number) {
+ this.messenger.send("retry_at_index", { index });
+ }
- abstract deleteAtIndex(index: number): void;
+ deleteAtIndex(index: number) {
+ this.messenger.send("delete_at_index", { index });
+ }
- abstract deleteContextAtIndices(indices: number[]): void;
+ deleteContextAtIndices(indices: number[]) {
+ this.messenger.send("delete_context_at_indices", { indices });
+ }
- abstract setEditingAtIndices(indices: number[]): void;
+ setEditingAtIndices(indices: number[]) {
+ this.messenger.send("set_editing_at_indices", { indices });
+ }
- abstract setPinnedAtIndices(indices: number[]): void;
+ setPinnedAtIndices(indices: number[]) {
+ this.messenger.send("set_pinned_at_indices", { indices });
+ }
- abstract toggleAddingHighlightedCode(): void;
+ toggleAddingHighlightedCode(): void {
+ this.messenger.send("toggle_adding_highlighted_code", {});
+ }
- abstract showLogsAtIndex(index: number): void;
+ showLogsAtIndex(index: number): void {
+ this.messenger.send("show_logs_at_index", { index });
+ }
}
-export default AbstractContinueGUIClientProtocol;
+export default ContinueGUIClientProtocol;
diff --git a/extension/react-app/src/hooks/useContinueGUIProtocol.ts b/extension/react-app/src/hooks/useContinueGUIProtocol.ts
deleted file mode 100644
index fef5b2e1..00000000
--- a/extension/react-app/src/hooks/useContinueGUIProtocol.ts
+++ /dev/null
@@ -1,95 +0,0 @@
-import AbstractContinueGUIClientProtocol from "./ContinueGUIClientProtocol";
-// import { Messenger, WebsocketMessenger } from "../../../src/util/messenger";
-import { Messenger, WebsocketMessenger } from "./messenger";
-import { VscodeMessenger } from "./vscodeMessenger";
-
-class ContinueGUIClientProtocol extends AbstractContinueGUIClientProtocol {
- messenger: Messenger;
- // Server URL must contain the session ID param
- serverUrlWithSessionId: string;
-
- constructor(
- serverUrlWithSessionId: string,
- useVscodeMessagePassing: boolean
- ) {
- super();
- this.serverUrlWithSessionId = serverUrlWithSessionId;
- if (useVscodeMessagePassing) {
- this.messenger = new VscodeMessenger(serverUrlWithSessionId);
- } else {
- this.messenger = new WebsocketMessenger(serverUrlWithSessionId);
- }
- }
-
- sendMainInput(input: string) {
- this.messenger.send("main_input", { input });
- }
-
- reverseToIndex(index: number) {
- this.messenger.send("reverse_to_index", { index });
- }
-
- sendRefinementInput(input: string, index: number) {
- this.messenger.send("refinement_input", { input, index });
- }
-
- sendStepUserInput(input: string, index: number) {
- this.messenger.send("step_user_input", { input, index });
- }
-
- onStateUpdate(callback: (state: any) => void) {
- this.messenger.onMessageType("state_update", (data: any) => {
- if (data.state) {
- callback(data.state);
- }
- });
- }
-
- onAvailableSlashCommands(
- callback: (commands: { name: string; description: string }[]) => void
- ) {
- this.messenger.onMessageType("available_slash_commands", (data: any) => {
- if (data.commands) {
- callback(data.commands);
- }
- });
- }
-
- changeDefaultModel(model: string) {
- this.messenger.send("change_default_model", { model });
- }
-
- sendClear() {
- this.messenger.send("clear_history", {});
- }
-
- retryAtIndex(index: number) {
- this.messenger.send("retry_at_index", { index });
- }
-
- deleteAtIndex(index: number) {
- this.messenger.send("delete_at_index", { index });
- }
-
- deleteContextAtIndices(indices: number[]) {
- this.messenger.send("delete_context_at_indices", { indices });
- }
-
- setEditingAtIndices(indices: number[]) {
- this.messenger.send("set_editing_at_indices", { indices });
- }
-
- setPinnedAtIndices(indices: number[]) {
- this.messenger.send("set_pinned_at_indices", { indices });
- }
-
- toggleAddingHighlightedCode(): void {
- this.messenger.send("toggle_adding_highlighted_code", {});
- }
-
- showLogsAtIndex(index: number): void {
- this.messenger.send("show_logs_at_index", { index });
- }
-}
-
-export default ContinueGUIClientProtocol;
diff --git a/extension/react-app/src/hooks/useWebsocket.ts b/extension/react-app/src/hooks/useWebsocket.ts
index e762666f..6b36be97 100644
--- a/extension/react-app/src/hooks/useWebsocket.ts
+++ b/extension/react-app/src/hooks/useWebsocket.ts
@@ -1,7 +1,7 @@
import React, { useEffect, useState } from "react";
import { RootStore } from "../redux/store";
import { useSelector } from "react-redux";
-import ContinueGUIClientProtocol from "./useContinueGUIProtocol";
+import ContinueGUIClientProtocol from "./ContinueGUIClientProtocol";
import { postVscMessage } from "../vscode";
function useContinueGUIProtocol(useVscodeMessagePassing: boolean = true) {
diff --git a/extension/src/activation/activate.ts b/extension/src/activation/activate.ts
index 8ea08e89..a7f6c55b 100644
--- a/extension/src/activation/activate.ts
+++ b/extension/src/activation/activate.ts
@@ -36,8 +36,8 @@ export async function activateExtension(context: vscode.ExtensionContext) {
})
.catch((e) => console.log("Error checking for extension updates: ", e));
- // Wrap the server start logic in a new Promise
- const serverStartPromise = new Promise((resolve, reject) => {
+ // Start the server and display loader if taking > 2 seconds
+ await new Promise((resolve) => {
let serverStarted = false;
// Start the server and set serverStarted to true when done
@@ -71,15 +71,6 @@ export async function activateExtension(context: vscode.ExtensionContext) {
}, 2000);
});
- // Await the server start promise
- await serverStartPromise;
-
- // Register commands and providers
- sendTelemetryEvent(TelemetryEvent.ExtensionActivated);
- registerAllCodeLensProviders(context);
- registerAllCommands(context);
- registerQuickFixProvider();
-
// Initialize IDE Protocol Client
const serverUrl = getContinueServerUrl();
ideProtocolClient = new IdeProtocolClient(
@@ -87,6 +78,7 @@ export async function activateExtension(context: vscode.ExtensionContext) {
context
);
+ // Register the Continue GUI as a sidebar webview, and begin a new session
{
const sessionIdPromise = await ideProtocolClient.getSessionId();
const provider = new ContinueGUIWebviewViewProvider(sessionIdPromise);
diff --git a/extension/src/continueIdeClient.ts b/extension/src/continueIdeClient.ts
index 14a8df72..a1370a01 100644
--- a/extension/src/continueIdeClient.ts
+++ b/extension/src/continueIdeClient.ts
@@ -16,6 +16,10 @@ import fs = require("fs");
import { WebsocketMessenger } from "./util/messenger";
import { diffManager } from "./diffs";
import path = require("path");
+import { sendTelemetryEvent, TelemetryEvent } from "./telemetry";
+import { registerAllCodeLensProviders } from "./lang-server/codeLens";
+import { registerAllCommands } from "./commands";
+import registerQuickFixProvider from "./lang-server/codeActions";
const continueVirtualDocumentScheme = "continue";
@@ -76,6 +80,12 @@ class IdeProtocolClient {
this._serverUrl = serverUrl;
this._newWebsocketMessenger();
+ // Register commands and providers
+ sendTelemetryEvent(TelemetryEvent.ExtensionActivated);
+ registerAllCodeLensProviders(context);
+ registerAllCommands(context);
+ registerQuickFixProvider();
+
// Setup listeners for any file changes in open editors
// vscode.workspace.onDidChangeTextDocument((event) => {
// if (this._makingEdit === 0) {
--
cgit v1.2.3-70-g09d2
From 90c76f81b4465d389ecef81aafa4d566b914669f Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Tue, 18 Jul 2023 19:05:35 -0700
Subject: fix "can't edit twice" bug
---
extension/src/diffs.ts | 13 +++++++++++--
1 file changed, 11 insertions(+), 2 deletions(-)
diff --git a/extension/src/diffs.ts b/extension/src/diffs.ts
index 1e63c5f6..1130a06a 100644
--- a/extension/src/diffs.ts
+++ b/extension/src/diffs.ts
@@ -38,6 +38,15 @@ class DiffManager {
constructor() {
this.setupDirectory();
+
+ // Listen for file closes, and if it's a diff file, clean up
+ vscode.workspace.onDidCloseTextDocument((document) => {
+ const newFilepath = document.uri.fsPath;
+ const diffInfo = this.diffs.get(newFilepath);
+ if (diffInfo) {
+ this.cleanUpDiff(diffInfo, false);
+ }
+ });
}
private escapeFilepath(filepath: string): string {
@@ -160,9 +169,9 @@ class DiffManager {
return newFilepath;
}
- cleanUpDiff(diffInfo: DiffInfo) {
+ cleanUpDiff(diffInfo: DiffInfo, hideEditor: boolean = true) {
// Close the editor, remove the record, delete the file
- if (diffInfo.editor) {
+ if (hideEditor && diffInfo.editor) {
vscode.window.showTextDocument(diffInfo.editor.document);
vscode.commands.executeCommand("workbench.action.closeActiveEditor");
}
--
cgit v1.2.3-70-g09d2
From eb90c79f618eab774af958ebee212150dead2467 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Tue, 18 Jul 2023 19:37:17 -0700
Subject: filepath disambiguation fix
---
continuedev/src/continuedev/core/autopilot.py | 22 +++++++++++++++-------
1 file changed, 15 insertions(+), 7 deletions(-)
diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py
index 4e177ac9..afbfc7ed 100644
--- a/continuedev/src/continuedev/core/autopilot.py
+++ b/continuedev/src/continuedev/core/autopilot.py
@@ -100,9 +100,14 @@ class Autopilot(ContinueBaseModel):
self.continue_sdk.update_default_model(model)
async def clear_history(self):
+ # Reset history
self.history = History.from_empty()
self._main_user_input_queue = []
self._active = False
+
+ # Also remove all context
+ self._highlighted_ranges = []
+
await self.update_subscribers()
def on_update(self, callback: Coroutine["FullState", None, None]):
@@ -168,19 +173,22 @@ class Autopilot(ContinueBaseModel):
def _disambiguate_highlighted_ranges(self):
"""If any files have the same name, also display their folder name"""
- name_counts = {}
+ name_status: Dict[str, set] = {
+ } # basename -> set of full paths with that basename
for rif in self._highlighted_ranges:
- if rif.display_name in name_counts:
- name_counts[rif.display_name] += 1
+ basename = os.path.basename(rif.range.filepath)
+ if basename in name_status:
+ name_status[basename].add(rif.range.filepath)
else:
- name_counts[rif.display_name] = 1
+ name_status[basename] = {rif.range.filepath}
for rif in self._highlighted_ranges:
- if name_counts[rif.display_name] > 1:
+ basename = os.path.basename(rif.range.filepath)
+ if len(name_status[basename]) > 1:
rif.display_name = os.path.join(
- os.path.basename(os.path.dirname(rif.range.filepath)), rif.display_name)
+ os.path.basename(os.path.dirname(rif.range.filepath)), basename)
else:
- rif.display_name = os.path.basename(rif.range.filepath)
+ rif.display_name = basename
async def handle_highlighted_code(self, range_in_files: List[RangeInFileWithContents]):
# Filter out rifs from ~/.continue/diffs folder
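The effect of the rewritten disambiguation is easier to see in isolation. Below is a standalone sketch of the same grouping logic (the paths are made up): it keys on distinct full paths per basename, rather than counting repeated display names, and only prepends the parent folder when a basename is genuinely ambiguous.

```python
import os
from collections import defaultdict

def disambiguate(filepaths):
    # Group full paths by basename; only basenames shared by more than one
    # distinct path get their parent directory prepended.
    by_basename = defaultdict(set)
    for path in filepaths:
        by_basename[os.path.basename(path)].add(path)

    display_names = {}
    for path in filepaths:
        basename = os.path.basename(path)
        if len(by_basename[basename]) > 1:
            display_names[path] = os.path.join(
                os.path.basename(os.path.dirname(path)), basename)
        else:
            display_names[path] = basename
    return display_names

print(disambiguate([
    "/repo/server/main.py",
    "/repo/client/main.py",
    "/repo/client/utils.py",
]))
# {'/repo/server/main.py': 'server/main.py',
#  '/repo/client/main.py': 'client/main.py',
#  '/repo/client/utils.py': 'utils.py'}
```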
--
cgit v1.2.3-70-g09d2
From db1e35497de924c001f421d1d3277f02258b55db Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Tue, 18 Jul 2023 21:10:08 -0700
Subject: psutil profiling, temperature in config.json
---
continuedev/poetry.lock | 29 +++++++++++++++++++++-
continuedev/pyproject.toml | 1 +
continuedev/src/continuedev/core/config.py | 5 ++++
.../src/continuedev/libs/llm/proxy_server.py | 8 ++++--
continuedev/src/continuedev/server/main.py | 25 ++++++++++++++++++-
continuedev/src/continuedev/steps/chat.py | 3 ++-
continuedev/src/continuedev/steps/core/core.py | 2 +-
7 files changed, 67 insertions(+), 6 deletions(-)
diff --git a/continuedev/poetry.lock b/continuedev/poetry.lock
index e8927fe7..1cd4a591 100644
--- a/continuedev/poetry.lock
+++ b/continuedev/poetry.lock
@@ -1171,6 +1171,33 @@ dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"]
sentry = ["django", "sentry-sdk"]
test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest"]
+[[package]]
+name = "psutil"
+version = "5.9.5"
+description = "Cross-platform lib for process and system monitoring in Python."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"},
+ {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"},
+ {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"},
+ {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"},
+ {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"},
+ {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"},
+ {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"},
+ {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"},
+ {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"},
+ {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"},
+ {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"},
+ {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"},
+ {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"},
+ {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"},
+]
+
+[package.extras]
+test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+
[[package]]
name = "pydantic"
version = "1.10.7"
@@ -2015,4 +2042,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
[metadata]
lock-version = "2.0"
python-versions = "^3.8.1"
-content-hash = "87dbf6d1e56ce6ba81a01a59c0de2d3717925bac9639710bf3ff3ce30f5f5e2c"
+content-hash = "3fcd19c11b9c338a181e591b56e21d59c7834abff431fb9f40cc1ea874b64557"
diff --git a/continuedev/pyproject.toml b/continuedev/pyproject.toml
index 6a646cbe..0abc9504 100644
--- a/continuedev/pyproject.toml
+++ b/continuedev/pyproject.toml
@@ -26,6 +26,7 @@ jsonschema = "^4.17.3"
directory-tree = "^0.0.3.1"
anthropic = "^0.3.4"
chevron = "^0.14.0"
+psutil = "^5.9.5"
[tool.poetry.scripts]
typegen = "src.continuedev.models.generate_json_schema:main"
diff --git a/continuedev/src/continuedev/core/config.py b/continuedev/src/continuedev/core/config.py
index 6af0878d..70c4876e 100644
--- a/continuedev/src/continuedev/core/config.py
+++ b/continuedev/src/continuedev/core/config.py
@@ -82,6 +82,7 @@ class ContinueConfig(BaseModel):
allow_anonymous_telemetry: Optional[bool] = True
default_model: Literal["gpt-3.5-turbo", "gpt-3.5-turbo-16k",
"gpt-4", "claude-2", "ggml"] = 'gpt-4'
+ temperature: Optional[float] = 0.5
custom_commands: Optional[List[CustomCommand]] = [CustomCommand(
name="test",
description="This is an example custom command. Use /config to edit it and create more",
@@ -98,6 +99,10 @@ class ContinueConfig(BaseModel):
def default_slash_commands_validator(cls, v):
return DEFAULT_SLASH_COMMANDS
+ @validator('temperature', pre=True)
+ def temperature_validator(cls, v):
+ return max(0.0, min(1.0, v))
+
def load_config(config_file: str) -> ContinueConfig:
"""
diff --git a/continuedev/src/continuedev/libs/llm/proxy_server.py b/continuedev/src/continuedev/libs/llm/proxy_server.py
index 18e0e6f4..bd50fe02 100644
--- a/continuedev/src/continuedev/libs/llm/proxy_server.py
+++ b/continuedev/src/continuedev/libs/llm/proxy_server.py
@@ -1,8 +1,10 @@
from functools import cached_property
import json
+import traceback
from typing import Any, Callable, Coroutine, Dict, Generator, List, Literal, Union
import aiohttp
+from ..util.telemetry import capture_event
from ...core.main import ChatMessage
from ..llm import LLM
from ..util.count_tokens import DEFAULT_ARGS, DEFAULT_MAX_TOKENS, compile_chat_messages, CHAT_MODELS, count_tokens, format_chat_messages
@@ -81,8 +83,10 @@ class ProxyServer(LLM):
yield loaded_chunk
if "content" in loaded_chunk:
completion += loaded_chunk["content"]
- except:
- raise Exception(str(line[0]))
+ except Exception as e:
+ capture_event(self.unique_id, "proxy_server_parse_error", {
+ "error_title": "Proxy server stream_chat parsing failed", "error_message": '\n'.join(traceback.format_exception(e))})
+
self.write_log(f"Completion: \n\n{completion}")
async def stream_complete(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
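
The proxy_server.py hunk above replaces a bare except that aborted the stream with a handler that records the parse failure through capture_event and keeps going. The sketch below shows the same pattern in isolation; parse_stream and the printing capture_event stub are hypothetical simplifications of the real aiohttp streaming loop and telemetry helper, and traceback.format_exc() is used instead of format_exception(e) so the snippet also runs on Python versions before 3.10:

    import json
    import traceback

    def capture_event(unique_id: str, event: str, properties: dict) -> None:
        # Hypothetical stand-in for the telemetry helper; the real one forwards to analytics.
        print(f"[telemetry] {event}: {properties['error_title']}")

    def parse_stream(lines, unique_id="anonymous"):
        """Yield parsed JSON chunks; log malformed lines instead of killing the stream."""
        for line in lines:
            try:
                yield json.loads(line)
            except Exception:
                capture_event(unique_id, "proxy_server_parse_error", {
                    "error_title": "Proxy server stream_chat parsing failed",
                    "error_message": traceback.format_exc(),
                })

    chunks = list(parse_stream(['{"content": "Hello"}', "not-json", '{"content": "!"}']))
    print(len(chunks))  # 2 -- the malformed line is logged and skipped, not fatal
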
diff --git a/continuedev/src/continuedev/server/main.py b/continuedev/src/continuedev/server/main.py
index aa093853..42dc0cc1 100644
--- a/continuedev/src/continuedev/server/main.py
+++ b/continuedev/src/continuedev/server/main.py
@@ -1,5 +1,6 @@
+import time
+import psutil
import os
-import sys
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from .ide import router as ide_router
@@ -51,9 +52,31 @@ def cleanup():
session_manager.persist_session(session_id)
+def cpu_usage_report():
+ process = psutil.Process(os.getpid())
+ # Call cpu_percent once to start measurement, but ignore the result
+ process.cpu_percent(interval=None)
+ # Wait for a short period of time
+ time.sleep(1)
+ # Call cpu_percent again to get the CPU usage over the interval
+ cpu_usage = process.cpu_percent(interval=None)
+ print(f"CPU usage: {cpu_usage}%")
+
+
atexit.register(cleanup)
+
if __name__ == "__main__":
try:
+ # import threading
+
+ # def cpu_usage_loop():
+ # while True:
+ # cpu_usage_report()
+ # time.sleep(2)
+
+ # cpu_thread = threading.Thread(target=cpu_usage_loop)
+ # cpu_thread.start()
+
run_server()
except Exception as e:
cleanup()
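
cpu_usage_report relies on psutil's two-call measurement pattern: the first cpu_percent(interval=None) call only opens a measurement window (its return value is meaningless), and a second call after a short sleep reports utilization over that window. A runnable sketch of the same helper, assuming the psutil dependency added to pyproject.toml above:

    import os
    import time

    import psutil

    def cpu_usage_report(interval: float = 1.0) -> float:
        """Return this process's CPU usage (%) measured over `interval` seconds."""
        process = psutil.Process(os.getpid())
        process.cpu_percent(interval=None)  # first call starts the window; discard the result
        time.sleep(interval)
        return process.cpu_percent(interval=None)  # second call reports usage for the window

    if __name__ == "__main__":
        print(f"CPU usage: {cpu_usage_report()}%")

The commented-out cpu_usage_loop in the hunk suggests this was intended for periodic profiling from a background thread; it is left disabled in the patch.
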
diff --git a/continuedev/src/continuedev/steps/chat.py b/continuedev/src/continuedev/steps/chat.py
index 8c03969e..aade1ea1 100644
--- a/continuedev/src/continuedev/steps/chat.py
+++ b/continuedev/src/continuedev/steps/chat.py
@@ -29,7 +29,8 @@ class SimpleChatStep(Step):
completion = ""
messages = self.messages or await sdk.get_chat_context()
- generator = sdk.models.default.stream_chat(messages, temperature=0.5)
+ generator = sdk.models.default.stream_chat(
+ messages, temperature=sdk.config.temperature)
try:
async for chunk in generator:
if sdk.current_step_was_deleted():
diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/steps/core/core.py
index 2b049ecc..4afc36e8 100644
--- a/continuedev/src/continuedev/steps/core/core.py
+++ b/continuedev/src/continuedev/steps/core/core.py
@@ -492,7 +492,7 @@ Please output the code to be inserted at the cursor in order to fulfill the user
role="user", content=f"```\n{rif.contents}\n```\n\nUser request: \"{self.user_input}\"\n\nThis is the code after changing to perfectly comply with the user request. It does not include any placeholder code, only real implementations:\n\n```\n", summary=self.user_input)]
generator = model_to_use.stream_chat(
- messages, temperature=0, max_tokens=max_tokens)
+ messages, temperature=sdk.config.temperature, max_tokens=max_tokens)
try:
async for chunk in generator:
--
cgit v1.2.3-70-g09d2
From 4adef09e64e7638e458fc27b522c9962423dccc0 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Tue, 18 Jul 2023 21:17:13 -0700
Subject: templated custom commands
---
continuedev/src/continuedev/core/policy.py | 18 ++++--------------
continuedev/src/continuedev/libs/util/count_tokens.py | 4 ++--
continuedev/src/continuedev/libs/util/templating.py | 10 +++++-----
continuedev/src/continuedev/steps/custom_command.py | 8 +++++---
4 files changed, 16 insertions(+), 24 deletions(-)
diff --git a/continuedev/src/continuedev/core/policy.py b/continuedev/src/continuedev/core/policy.py
index d007c92b..1000f0f4 100644
--- a/continuedev/src/continuedev/core/policy.py
+++ b/continuedev/src/continuedev/core/policy.py
@@ -1,22 +1,12 @@
from textwrap import dedent
-from typing import List, Tuple, Type, Union
+from typing import Union
+from ..steps.chat import SimpleChatStep
from ..steps.welcome import WelcomeStep
from .config import ContinueConfig
-from ..steps.chroma import AnswerQuestionChroma, EditFileChroma, CreateCodebaseIndexChroma
from ..steps.steps_on_startup import StepsOnStartupStep
-from ..recipes.CreatePipelineRecipe.main import CreatePipelineRecipe
-from ..recipes.DeployPipelineAirflowRecipe.main import DeployPipelineAirflowRecipe
-from ..recipes.AddTransformRecipe.main import AddTransformRecipe
-from .main import Step, Validator, History, Policy
-from .observation import Observation, TracebackObservation, UserInputObservation
-from ..steps.main import EditHighlightedCodeStep, SolveTracebackStep
-from ..recipes.WritePytestsRecipe.main import WritePytestsRecipe
-from ..recipes.ContinueRecipeRecipe.main import ContinueStepStep
-from ..steps.comment_code import CommentCodeStep
-from ..steps.react import NLDecisionStep
-from ..steps.chat import SimpleChatStep, ChatWithFunctions, EditFileStep, AddFileStep
-from ..recipes.DDtoBQRecipe.main import DDtoBQRecipe
+from .main import Step, History, Policy
+from .observation import UserInputObservation
from ..steps.core.core import MessageStep
from ..libs.util.step_name_to_steps import get_step_from_name
from ..steps.custom_command import CustomCommandStep
diff --git a/continuedev/src/continuedev/libs/util/count_tokens.py b/continuedev/src/continuedev/libs/util/count_tokens.py
index c81d8aa4..987aa722 100644
--- a/continuedev/src/continuedev/libs/util/count_tokens.py
+++ b/continuedev/src/continuedev/libs/util/count_tokens.py
@@ -1,7 +1,7 @@
import json
from typing import Dict, List, Union
from ...core.main import ChatMessage
-from .templating import render_system_message
+from .templating import render_templated_string
import tiktoken
aliases = {
@@ -112,7 +112,7 @@ def compile_chat_messages(model: str, msgs: List[ChatMessage], max_tokens: int,
if system_message is not None:
# NOTE: System message takes second precedence to user prompt, so it is placed just before
# but move back to start after processing
- rendered_system_message = render_system_message(system_message)
+ rendered_system_message = render_templated_string(system_message)
system_chat_msg = ChatMessage(
role="system", content=rendered_system_message, summary=rendered_system_message)
# insert at second-to-last position
diff --git a/continuedev/src/continuedev/libs/util/templating.py b/continuedev/src/continuedev/libs/util/templating.py
index ebfc2e31..bb922ad7 100644
--- a/continuedev/src/continuedev/libs/util/templating.py
+++ b/continuedev/src/continuedev/libs/util/templating.py
@@ -16,19 +16,19 @@ def escape_var(var: str) -> str:
return var.replace(os.path.sep, '').replace('.', '')
-def render_system_message(system_message: str) -> str:
+def render_templated_string(template: str) -> str:
"""
- Render system message with mustache syntax.
+ Render system message or other templated string with mustache syntax.
Right now it only supports rendering absolute file paths as their contents.
"""
- vars = get_vars_in_template(system_message)
+ vars = get_vars_in_template(template)
args = {}
for var in vars:
if var.startswith(os.path.sep):
# Escape vars which are filenames, because mustache doesn't allow / in variable names
escaped_var = escape_var(var)
- system_message = system_message.replace(
+ template = template.replace(
var, escaped_var)
if os.path.exists(var):
@@ -36,4 +36,4 @@ def render_system_message(system_message: str) -> str:
else:
args[escaped_var] = ''
- return chevron.render(system_message, args)
+ return chevron.render(template, args)
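
render_templated_string treats mustache variables that look like absolute paths as file references: the path is escaped (separators and dots stripped, since chevron does not allow them in variable names), the template is rewritten to use the escaped name, and the file's contents become the substitution value. The condensed sketch below follows that flow; the regex stands in for the repo's get_vars_in_template helper, and the file-reading branch (elided in the hunk) is inferred from the docstring's "rendering absolute file paths as their contents":

    import os
    import re

    import chevron  # mustache renderer already used by the repo

    def escape_var(var: str) -> str:
        # chevron disallows path separators and dots in variable names
        return var.replace(os.path.sep, "").replace(".", "")

    def render_templated_string(template: str) -> str:
        """Render a mustache template, expanding absolute-path variables to file contents."""
        vars = re.findall(r"{{\s*([^}]+?)\s*}}", template)  # simplified variable extraction
        args = {}
        for var in vars:
            if var.startswith(os.path.sep):
                escaped_var = escape_var(var)
                template = template.replace(var, escaped_var)
                if os.path.exists(var):
                    with open(var) as f:
                        args[escaped_var] = f.read()
                else:
                    args[escaped_var] = ""
        return chevron.render(template, args)

    # Hypothetical example path; any absolute file path works.
    print(render_templated_string("Summarize this file:\n{{ /etc/hostname }}"))

Renaming the helper from render_system_message makes it reusable for the custom-command prompts in the next file, not just the system message.
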
diff --git a/continuedev/src/continuedev/steps/custom_command.py b/continuedev/src/continuedev/steps/custom_command.py
index 5a56efb0..d96ac8e2 100644
--- a/continuedev/src/continuedev/steps/custom_command.py
+++ b/continuedev/src/continuedev/steps/custom_command.py
@@ -1,7 +1,7 @@
+from ..libs.util.templating import render_templated_string
from ..core.main import Step
from ..core.sdk import ContinueSDK
-from ..steps.core.core import UserInputStep
-from ..steps.chat import ChatWithFunctions, SimpleChatStep
+from ..steps.chat import SimpleChatStep
class CustomCommandStep(Step):
@@ -15,7 +15,9 @@ class CustomCommandStep(Step):
return self.prompt
async def run(self, sdk: ContinueSDK):
- prompt_user_input = f"Task: {self.prompt}. Additional info: {self.user_input}"
+ task = render_templated_string(self.prompt)
+
+ prompt_user_input = f"Task: {task}. Additional info: {self.user_input}"
messages = await sdk.get_chat_context()
# Find the last chat message with this slash command and replace it with the user input
for i in range(len(messages) - 1, -1, -1):
--
cgit v1.2.3-70-g09d2
From a7c57e1d1e4a0eff3e4b598f8bf0448ea6068353 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Tue, 18 Jul 2023 22:14:39 -0700
Subject: pass through anthropic system message
---
continuedev/src/continuedev/libs/llm/anthropic.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/continuedev/src/continuedev/libs/llm/anthropic.py b/continuedev/src/continuedev/libs/llm/anthropic.py
index 566f7150..c82895c6 100644
--- a/continuedev/src/continuedev/libs/llm/anthropic.py
+++ b/continuedev/src/continuedev/libs/llm/anthropic.py
@@ -73,7 +73,7 @@ class AnthropicLLM(LLM):
args = self._transform_args(args)
messages = compile_chat_messages(
- args["model"], messages, args["max_tokens_to_sample"], functions=args.get("functions", None))
+ args["model"], messages, args["max_tokens_to_sample"], functions=args.get("functions", None), system_message=self.system_message)
async for chunk in await self.async_client.completions.create(
prompt=self.__messages_to_prompt(messages),
**args
@@ -88,7 +88,7 @@ class AnthropicLLM(LLM):
args = self._transform_args(args)
messages = compile_chat_messages(
- args["model"], with_history, args["max_tokens_to_sample"], prompt, functions=None)
+ args["model"], with_history, args["max_tokens_to_sample"], prompt, functions=None, system_message=self.system_message)
resp = (await self.async_client.completions.create(
prompt=self.__messages_to_prompt(messages),
**args
--
cgit v1.2.3-70-g09d2
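
The anthropic.py change threads self.system_message into compile_chat_messages for both the streaming and non-streaming paths, so Anthropic requests now honor the configured system message instead of silently dropping it. As a toy illustration of the idea only (the real compile_chat_messages also renders the message as a template, enforces token limits, and positions it more carefully, as the count_tokens.py hunk above shows):

    from typing import Dict, List, Optional

    def with_system_message(messages: List[Dict[str, str]],
                            system_message: Optional[str]) -> List[Dict[str, str]]:
        # Toy illustration: prepend the system message when one is configured.
        if not system_message:
            return messages
        return [{"role": "system", "content": system_message}] + messages

    compiled = with_system_message([{"role": "user", "content": "Explain this diff."}],
                                   "Answer concisely.")
    print(compiled[0]["role"])  # "system"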