author     Nate Sesti <sestinj@gmail.com>  2023-06-15 20:46:10 -0700
committer  Nate Sesti <sestinj@gmail.com>  2023-06-15 20:46:10 -0700
commit     f4664228b0d2a612f5c0cfb35062ae51d57e8a08 (patch)
tree       f789399c3d49d2b45d8f60c08f9c37db546b28b8 /continuedev/src
parent     f1cffbd1284983840d6c2f3ef8bc1fdc36fac89d (diff)
cancel steps without running children
Diffstat (limited to 'continuedev/src')
-rw-r--r--  continuedev/src/continuedev/core/autopilot.py        | 19
-rw-r--r--  continuedev/src/continuedev/core/main.py             |  5
-rw-r--r--  continuedev/src/continuedev/libs/llm/proxy_server.py |  5
-rw-r--r--  continuedev/src/continuedev/steps/core/core.py       |  6
-rw-r--r--  continuedev/src/continuedev/steps/main.py            |  4
5 files changed, 28 insertions, 11 deletions
diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py
index 703a73af..b8f2695d 100644
--- a/continuedev/src/continuedev/core/autopilot.py
+++ b/continuedev/src/continuedev/core/autopilot.py
@@ -99,9 +99,19 @@ class Autopilot(ContinueBaseModel):
async def delete_at_index(self, index: int):
self.history.timeline[index].step.hide = True
+ self.history.timeline[index].deleted = True
await self.update_subscribers()
async def _run_singular_step(self, step: "Step", is_future_step: bool = False) -> Coroutine[Observation, None, None]:
+ # If a parent step is deleted/cancelled, don't run this step
+ last_depth = self._step_depth
+ i = self.history.current_index
+ while i >= 0 and self.history.timeline[i].depth > last_depth:
+ if self.history.timeline[i].deleted:
+ return None
+ last_depth = self.history.timeline[i].depth
+ i -= 1
+
capture_event(self.continue_sdk.ide.unique_id, 'step run', {
'step_name': step.name, 'params': step.dict()})
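
The guard added at the top of _run_singular_step reads naturally as a small predicate over the timeline. A minimal sketch, using only the depth and deleted fields this patch relies on; the Node dataclass and the function name are illustrative stand-ins:

from dataclasses import dataclass
from typing import List

@dataclass
class Node:
    depth: int
    deleted: bool = False

def ancestor_was_deleted(timeline: List[Node], current_index: int, step_depth: int) -> bool:
    # Same loop as the diff: walk backwards from the current node while the
    # depth stays greater than the last depth seen, and report a hit if any
    # node on that walk has been marked deleted.
    last_depth = step_depth
    i = current_index
    while i >= 0 and timeline[i].depth > last_depth:
        if timeline[i].deleted:
            return True
        last_depth = timeline[i].depth
        i -= 1
    return False

When the predicate is true, _run_singular_step bails out with None before doing any work, which is what gives the commit its "cancel steps without running children" behaviour.
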
@@ -114,7 +124,7 @@ class Autopilot(ContinueBaseModel):
await self._run_singular_step(manualEditsStep)
# Update history - do this first so we get top-first tree ordering
- self.history.add_node(HistoryNode(
+ index_of_history_node = self.history.add_node(HistoryNode(
step=step, observation=None, depth=self._step_depth))
# Call all subscribed callbacks
@@ -127,6 +137,10 @@ class Autopilot(ContinueBaseModel):
try:
observation = await step(self.continue_sdk)
except Exception as e:
+ if self.history.timeline[index_of_history_node].deleted:
+ # If step was deleted/cancelled, don't show error or allow retry
+ return None
+
caught_error = True
is_continue_custom_exception = issubclass(
@@ -176,8 +190,7 @@ class Autopilot(ContinueBaseModel):
# Add observation to history, unless already attached error observation
if not caught_error:
- self.history.get_last_at_depth(
- self._step_depth, include_current=True).observation = observation
+ self.history.timeline[index_of_history_node].observation = observation
await self.update_subscribers()
# Update its description
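
Two related changes in this file work together: add_node now returns the index of the freshly inserted HistoryNode, and that index is used both to attach the step's observation to the exact node created for it and to silence errors from steps that were cancelled while they were running. A condensed sketch of the error path; apart from timeline and deleted, the names here are placeholders for the relevant slice of _run_singular_step:

async def run_step(step, history, index_of_history_node, sdk):
    try:
        return await step(sdk)
    except Exception:
        if history.timeline[index_of_history_node].deleted:
            # The user cancelled this step mid-flight: drop the error silently,
            # with no error display and no retry.
            return None
        raise  # otherwise let the normal error handling take over
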
diff --git a/continuedev/src/continuedev/core/main.py b/continuedev/src/continuedev/core/main.py
index f6b26d69..97ef9793 100644
--- a/continuedev/src/continuedev/core/main.py
+++ b/continuedev/src/continuedev/core/main.py
@@ -18,6 +18,7 @@ class HistoryNode(ContinueBaseModel):
step: "Step"
observation: Union[Observation, None]
depth: int
+ deleted: bool = False
def to_chat_messages(self) -> List[ChatMessage]:
if self.step.description is None:
@@ -37,9 +38,11 @@ class History(ContinueBaseModel):
msgs += node.to_chat_messages()
return msgs
- def add_node(self, node: HistoryNode):
+ def add_node(self, node: HistoryNode) -> int:
+ """ Add node and return the index where it was added """
self.timeline.insert(self.current_index + 1, node)
self.current_index += 1
+ return self.current_index
def get_current(self) -> Union[HistoryNode, None]:
if self.current_index < 0:
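
The model side of the change is small: HistoryNode grows a deleted flag and add_node reports where the node landed, so callers can address it later without re-deriving its position. A condensed sketch assuming pydantic-style models as used elsewhere in this repo, with the step and observation fields omitted:

from typing import List
from pydantic import BaseModel

class HistoryNode(BaseModel):
    depth: int
    deleted: bool = False          # set when the step is cancelled via delete_at_index

class History(BaseModel):
    timeline: List[HistoryNode] = []
    current_index: int = -1

    def add_node(self, node: HistoryNode) -> int:
        """Add node and return the index where it was added."""
        self.timeline.insert(self.current_index + 1, node)
        self.current_index += 1
        return self.current_index

# Usage mirroring delete_at_index: flag the node, callers check the flag later.
history = History()
idx = history.add_node(HistoryNode(depth=0))
history.timeline[idx].deleted = True
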
diff --git a/continuedev/src/continuedev/libs/llm/proxy_server.py b/continuedev/src/continuedev/libs/llm/proxy_server.py
index b2948f9a..5f8efac6 100644
--- a/continuedev/src/continuedev/libs/llm/proxy_server.py
+++ b/continuedev/src/continuedev/libs/llm/proxy_server.py
@@ -73,7 +73,10 @@ class ProxyServer(LLM):
"model": self.default_model,
"unique_id": self.unique_id,
}) as resp:
- return json.loads(await resp.text())
+ try:
+ return json.loads(await resp.text())
+ except json.JSONDecodeError:
+ raise Exception(await resp.text())
async def stream_chat(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
async with aiohttp.ClientSession() as session:
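
The proxy change covers the case where the server answers with something other than JSON, for example a plain-text or HTML error body: rather than letting json.loads raise a bare JSONDecodeError, the raw body is surfaced. A standalone sketch of the pattern; the URL and payload are placeholders, not the real proxy endpoint:

import json
import aiohttp

async def post_and_parse(url: str, payload: dict):
    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=payload) as resp:
            text = await resp.text()   # read the body once, reuse it in both branches
            try:
                return json.loads(text)
            except json.JSONDecodeError:
                # Non-JSON body: raise the raw text so the real error is readable upstream.
                raise Exception(text)
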
diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/steps/core/core.py
index eb45d1d3..59af5f38 100644
--- a/continuedev/src/continuedev/steps/core/core.py
+++ b/continuedev/src/continuedev/steps/core/core.py
@@ -72,7 +72,7 @@ class ShellCommandsStep(Step):
return f"Error when running shell commands:\n```\n{self._err_text}\n```"
cmds_str = "\n".join(self.cmds)
- return models.gpt35.complete(f"{cmds_str}\n\nSummarize what was done in these shell commands, using markdown bullet points:")
+ return await models.gpt35.complete(f"{cmds_str}\n\nSummarize what was done in these shell commands, using markdown bullet points:")
async def run(self, sdk: ContinueSDK) -> Coroutine[Observation, None, None]:
cwd = await sdk.ide.getWorkspaceDirectory() if self.cwd is None else self.cwd
@@ -151,10 +151,8 @@ class DefaultModelEditCodeStep(Step):
_prompt_and_completion: str = ""
async def describe(self, models: Models) -> Coroutine[str, None, None]:
- description = models.gpt35.complete(
+ description = await models.gpt35.complete(
f"{self._prompt_and_completion}\n\nPlease give brief a description of the changes made above using markdown bullet points. Be concise and only mention changes made to the commit before, not prefix or suffix:")
- # self.name = models.gpt35.complete(
- # f"Write a short title for this description: {description}")
return description
async def run(self, sdk: ContinueSDK) -> Coroutine[Observation, None, None]:
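
The hunks in this file, and the matching ones in steps/main.py below, are all the same fix: complete() on these model wrappers is a coroutine, so without the await describe() handed back a coroutine object instead of the summary string. A tiny self-contained illustration with a stand-in async complete():

import asyncio

async def complete(prompt: str) -> str:        # stand-in for models.gpt35.complete
    return f"summary of: {prompt}"

async def describe_wrong():
    return complete("ls -la")                  # coroutine object, not a str

async def describe_right():
    return await complete("ls -la")            # the actual string

async def main():
    wrong = await describe_wrong()
    print(type(wrong).__name__)                # "coroutine" -- unusable as a description
    wrong.close()                              # avoid the "never awaited" warning
    print(await describe_right())              # "summary of: ls -la"

asyncio.run(main())
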
diff --git a/continuedev/src/continuedev/steps/main.py b/continuedev/src/continuedev/steps/main.py
index 3968c4a3..b61aa3fe 100644
--- a/continuedev/src/continuedev/steps/main.py
+++ b/continuedev/src/continuedev/steps/main.py
@@ -213,7 +213,7 @@ class StarCoderEditHighlightedCodeStep(Step):
_prompt_and_completion: str = ""
async def describe(self, models: Models) -> Coroutine[str, None, None]:
- return models.gpt35.complete(f"{self._prompt_and_completion}\n\nPlease give brief a description of the changes made above using markdown bullet points:")
+ return await models.gpt35.complete(f"{self._prompt_and_completion}\n\nPlease give brief a description of the changes made above using markdown bullet points:")
async def run(self, sdk: ContinueSDK) -> Coroutine[Observation, None, None]:
range_in_files = await sdk.ide.getHighlightedCode()
@@ -247,7 +247,7 @@ class StarCoderEditHighlightedCodeStep(Step):
segs = full_file_contents.split(rif.contents)
prompt = f"<file_prefix>{segs[0]}<file_suffix>{segs[1]}" + prompt
- completion = str((await sdk.models.starcoder()).complete(prompt))
+ completion = str(await sdk.models.starcoder.complete(prompt))
eot_token = "<|endoftext|>"
completion = completion.removesuffix(eot_token)
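
For reference, the prompt framing and cleanup around the StarCoder call can be read on their own: the highlighted region is cut out of the file, the remainder becomes prefix/suffix context, and the model's end-of-text marker is stripped from the completion. In this sketch only the tag strings, the split on the highlighted contents, and the removesuffix call come from the hunk above; the helper names are illustrative, and str.removesuffix needs Python 3.9+:

EOT_TOKEN = "<|endoftext|>"

def frame_prompt(full_file_contents: str, highlighted: str, instruction: str) -> str:
    # Assumes the highlighted text occurs verbatim in the file, as the step does.
    prefix, suffix = full_file_contents.split(highlighted, 1)
    return f"<file_prefix>{prefix}<file_suffix>{suffix}" + instruction

def strip_eot(completion: str) -> str:
    return completion.removesuffix(EOT_TOKEN)
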