-rw-r--r--  continuedev/src/continuedev/core/autopilot.py        |   2
-rw-r--r--  continuedev/src/continuedev/libs/llm/openai.py       |   4
-rw-r--r--  continuedev/src/continuedev/server/main.py           |   5
-rw-r--r--  continuedev/src/continuedev/steps/chat.py            |   2
-rw-r--r--  continuedev/src/continuedev/steps/react.py           |   6
-rw-r--r--  extension/package-lock.json                          |   4
-rw-r--r--  extension/package.json                               |   2
-rw-r--r--  extension/scripts/continuedev-0.1.1-py3-none-any.whl | bin 84332 -> 85071 bytes
-rw-r--r--  extension/src/activation/environmentSetup.ts         |  33
-rw-r--r--  extension/src/continueIdeClient.ts                   |  18

10 files changed, 47 insertions(+), 29 deletions(-)
diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py
index 73f46a37..ee249c0b 100644
--- a/continuedev/src/continuedev/core/autopilot.py
+++ b/continuedev/src/continuedev/core/autopilot.py
@@ -120,7 +120,7 @@ class Autopilot(ContinueBaseModel):
         # If a parent step is deleted/cancelled, don't run this step
         last_depth = self._step_depth
         i = self.history.current_index
-        while i >= 0 and self.history.timeline[i].depth > last_depth:
+        while i >= 0 and self.history.timeline[i].depth == last_depth + 1:
             if self.history.timeline[i].deleted:
                 return None
             last_depth = self.history.timeline[i].depth
diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py
index 22c28b20..5d65eb22 100644
--- a/continuedev/src/continuedev/libs/llm/openai.py
+++ b/continuedev/src/continuedev/libs/llm/openai.py
@@ -83,14 +83,14 @@ class OpenAI(LLM):
     def with_system_message(self, system_message: Union[str, None]):
         return OpenAI(api_key=self.api_key, default_model=self.default_model, system_message=system_message)
 
-    def stream_chat(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
+    async def stream_chat(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
         self.completion_count += 1
         args = {"max_tokens": DEFAULT_MAX_TOKENS, "temperature": 0.5, "top_p": 1,
                 "frequency_penalty": 0, "presence_penalty": 0} | kwargs
         args["stream"] = True
         args["model"] = "gpt-3.5-turbo"
 
-        for chunk in openai.ChatCompletion.create(
+        async for chunk in await openai.ChatCompletion.acreate(
             messages=self.compile_chat_messages(with_history, prompt),
             **args,
         ):
diff --git a/continuedev/src/continuedev/server/main.py b/continuedev/src/continuedev/server/main.py
index b8bfe325..fb6ead02 100644
--- a/continuedev/src/continuedev/server/main.py
+++ b/continuedev/src/continuedev/server/main.py
@@ -40,10 +40,7 @@ args = parser.parse_args()
 
 
 def run_server():
-    if os.path.exists("logging.yaml"):
-        uvicorn.run(app, host="0.0.0.0", port=args.port, log_level="info")
-    else:
-        uvicorn.run(app, host="0.0.0.0", port=args.port, log_level="info")
+    uvicorn.run(app, host="0.0.0.0", port=args.port)
 
 
 if __name__ == "__main__":
diff --git a/continuedev/src/continuedev/steps/chat.py b/continuedev/src/continuedev/steps/chat.py
index 90514ad6..fd7457d9 100644
--- a/continuedev/src/continuedev/steps/chat.py
+++ b/continuedev/src/continuedev/steps/chat.py
@@ -11,6 +11,8 @@ class SimpleChatStep(Step):
     async def run(self, sdk: ContinueSDK):
         self.description = f"```{self.user_input}```\n\n"
 
+        await sdk.update_ui()
+
         async for chunk in sdk.models.default.stream_chat(self.user_input, with_history=await sdk.get_chat_context()):
             self.description += chunk
             await sdk.update_ui()
diff --git a/continuedev/src/continuedev/steps/react.py b/continuedev/src/continuedev/steps/react.py
index 4d310fc8..cddb8b42 100644
--- a/continuedev/src/continuedev/steps/react.py
+++ b/continuedev/src/continuedev/steps/react.py
@@ -10,7 +10,8 @@ class NLDecisionStep(Step):
     default_step: Union[Step, None] = None
     steps: List[Tuple[Step, str]]
 
-    hide: bool = True
+    hide: bool = False
+    name: str = "Deciding what to do next"
 
     async def run(self, sdk: ContinueSDK):
         step_descriptions = "\n".join([
@@ -36,4 +37,7 @@ class NLDecisionStep(Step):
 
         step_to_run = step_to_run or self.default_step or self.steps[0]
 
+        self.hide = True
+        await sdk.update_ui()
+
         await sdk.run_step(step_to_run)
diff --git a/extension/package-lock.json b/extension/package-lock.json
index e4387636..c5d97cfb 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "continue",
-  "version": "0.0.54",
+  "version": "0.0.55",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "continue",
-      "version": "0.0.54",
+      "version": "0.0.55",
       "license": "Apache-2.0",
       "dependencies": {
         "@electron/rebuild": "^3.2.10",
diff --git a/extension/package.json b/extension/package.json
index 0cbe1d27..036d13f0 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -14,7 +14,7 @@
   "displayName": "Continue",
   "pricing": "Free",
   "description": "Refine code 10x faster",
-  "version": "0.0.54",
+  "version": "0.0.55",
   "publisher": "Continue",
   "engines": {
     "vscode": "^1.74.0"
diff --git a/extension/scripts/continuedev-0.1.1-py3-none-any.whl b/extension/scripts/continuedev-0.1.1-py3-none-any.whl
index 2e45a933..9d371e17 100644
--- a/extension/scripts/continuedev-0.1.1-py3-none-any.whl
+++ b/extension/scripts/continuedev-0.1.1-py3-none-any.whl
Binary files differ
diff --git a/extension/src/activation/environmentSetup.ts b/extension/src/activation/environmentSetup.ts
index bc071461..593b727e 100644
--- a/extension/src/activation/environmentSetup.ts
+++ b/extension/src/activation/environmentSetup.ts
@@ -161,6 +161,21 @@ function writeEnvFile(path: string, key: string, value: string) {
   fs.writeFileSync(path, newEnvFile);
 }
 
+async function checkServerRunning(serverUrl: string): Promise<boolean> {
+  // Check if already running by calling /health
+  try {
+    const response = await fetch(serverUrl + "/health");
+    if (response.status === 200) {
+      console.log("Continue python server already running");
+      return true;
+    } else {
+      return false;
+    }
+  } catch (e) {
+    return false;
+  }
+}
+
 export async function startContinuePythonServer() {
   await setupPythonEnv();
 
@@ -172,14 +187,7 @@ export async function startContinuePythonServer() {
 
   console.log("Starting Continue python server...");
 
-  // Check if already running by calling /health
-  try {
-    const response = await fetch(serverUrl + "/health");
-    if (response.status === 200) {
-      console.log("Continue python server already running");
-      return;
-    }
-  } catch (e) {}
+  if (await checkServerRunning(serverUrl)) return;
 
   let activateCmd = ". env/bin/activate";
   let pythonCmd = "python3";
@@ -193,7 +201,7 @@ export async function startContinuePythonServer() {
     "scripts"
   )} && ${activateCmd} && cd .. && ${pythonCmd} -m scripts.run_continue_server`;
 
-  return new Promise((resolve, reject) => {
+  return new Promise(async (resolve, reject) => {
     try {
       const child = spawn(command, {
         shell: true,
@@ -213,7 +221,12 @@ export async function startContinuePythonServer() {
       });
     } catch (e) {
       console.log("Failed to start Continue python server", e);
-      reject();
+      // If failed, check if it's because the server is already running (might have happened just after we checked above)
+      if (await checkServerRunning(serverUrl)) {
+        resolve(null);
+      } else {
+        reject();
+      }
     }
   });
 }
diff --git a/extension/src/continueIdeClient.ts b/extension/src/continueIdeClient.ts
index b4937ac4..3b5de93f 100644
--- a/extension/src/continueIdeClient.ts
+++ b/extension/src/continueIdeClient.ts
@@ -158,15 +158,17 @@ class IdeProtocolClient {
       });
       editor.setDecorations(decorationType, [range]);
 
-      // Listen for changes to cursor position
-      const cursorDisposable = vscode.window.onDidChangeTextEditorSelection(
-        (event) => {
-          if (event.textEditor.document.uri.fsPath === rangeInFile.filepath) {
-            cursorDisposable.dispose();
-            editor.setDecorations(decorationType, []);
+      // Listen for changes to cursor position and then remove the decoration (but keep for at least 2 seconds)
+      setTimeout(() => {
+        const cursorDisposable = vscode.window.onDidChangeTextEditorSelection(
+          (event) => {
+            if (event.textEditor.document.uri.fsPath === rangeInFile.filepath) {
+              cursorDisposable.dispose();
+              editor.setDecorations(decorationType, []);
+            }
           }
-        }
-      );
+        );
+      }, 2000);
     }
   }
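
A note on the openai.py change above: stream_chat is now an async generator built on openai.ChatCompletion.acreate with stream=True, and callers such as SimpleChatStep consume it with async for. The following is a minimal, self-contained sketch of that streaming pattern against the pre-1.0 openai SDK; the helper name stream_reply, the hard-coded model, and the prompt are illustrative only, and it assumes OPENAI_API_KEY is set in the environment.

import asyncio

import openai  # pre-1.0 SDK; reads OPENAI_API_KEY from the environment


async def stream_reply(prompt: str) -> str:
    # Same acreate(stream=True) pattern that the new stream_chat uses
    text = ""
    async for chunk in await openai.ChatCompletion.acreate(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
        stream=True,
    ):
        # Each chunk carries a partial "delta"; the first one may only hold the role
        delta = chunk["choices"][0]["delta"]
        text += delta.get("content", "")
    return text


if __name__ == "__main__":
    print(asyncio.run(stream_reply("Say hello")))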
