Diffstat (limited to 'continuedev/src')
19 files changed, 402 insertions, 81 deletions
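A set of short, self-contained Python sketches of the main patterns introduced in this commit follows the diff below.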
| diff --git a/continuedev/src/continuedev/core/abstract_sdk.py b/continuedev/src/continuedev/core/abstract_sdk.py index 417971cd..3b85708d 100644 --- a/continuedev/src/continuedev/core/abstract_sdk.py +++ b/continuedev/src/continuedev/core/abstract_sdk.py @@ -88,6 +88,6 @@ class AbstractContinueSDK(ABC):      def add_chat_context(self, content: str, role: ChatMessageRole = "assistent"):          pass -    @abstractproperty -    def chat_context(self) -> List[ChatMessage]: +    @abstractmethod +    async def get_chat_context(self) -> List[ChatMessage]:          pass diff --git a/continuedev/src/continuedev/core/autopilot.py b/continuedev/src/continuedev/core/autopilot.py index c979d53a..1642003c 100644 --- a/continuedev/src/continuedev/core/autopilot.py +++ b/continuedev/src/continuedev/core/autopilot.py @@ -40,6 +40,12 @@ class Autopilot(ContinueBaseModel):      def get_full_state(self) -> FullState:          return FullState(history=self.history, active=self._active, user_input_queue=self._main_user_input_queue) +    async def clear_history(self): +        self.history = History.from_empty() +        self._main_user_input_queue = [] +        self._active = False +        await self.update_subscribers() +      def on_update(self, callback: Coroutine["FullState", None, None]):          """Subscribe to changes to state"""          self._on_update_callbacks.append(callback) @@ -88,6 +94,10 @@ class Autopilot(ContinueBaseModel):      async def retry_at_index(self, index: int):          self._retry_queue.post(str(index), None) +    async def delete_at_index(self, index: int): +        self.history.timeline[index].step.hide = True +        await self.update_subscribers() +      async def _run_singular_step(self, step: "Step", is_future_step: bool = False) -> Coroutine[Observation, None, None]:          capture_event(              'step run', {'step_name': step.name, 'params': step.dict()}) diff --git a/continuedev/src/continuedev/core/policy.py b/continuedev/src/continuedev/core/policy.py index 8e43bf55..00b5427c 100644 --- a/continuedev/src/continuedev/core/policy.py +++ b/continuedev/src/continuedev/core/policy.py @@ -11,6 +11,8 @@ from ..steps.main import EditHighlightedCodeStep, SolveTracebackStep, RunCodeSte  from ..recipes.WritePytestsRecipe.main import WritePytestsRecipe  from ..recipes.ContinueRecipeRecipe.main import ContinueStepStep  from ..steps.comment_code import CommentCodeStep +from ..steps.react import NLDecisionStep +from ..steps.chat import SimpleChatStep  from ..recipes.DDtoBQRecipe.main import DDtoBQRecipe  from ..steps.core.core import MessageStep @@ -30,6 +32,11 @@ class DemoPolicy(Policy):          observation = history.get_current().observation          if observation is not None and isinstance(observation, UserInputObservation):              # This could be defined with ObservationTypePolicy. Ergonomics not right though. 
+            user_input = observation.user_input +            if "/pytest" in user_input.lower(): +                return WritePytestsRecipe(instructions=user_input) +            elif "/dlt" in user_input.lower() or " dlt" in user_input.lower(): +                return CreatePipelineRecipe()              if "/pytest" in observation.user_input.lower():                  return WritePytestsRecipe(instructions=observation.user_input)              elif "/dlt" in observation.user_input.lower(): @@ -42,13 +49,21 @@ class DemoPolicy(Policy):                  return AddTransformRecipe()              elif "/comment" in observation.user_input.lower():                  return CommentCodeStep() -            elif "/ask" in observation.user_input: -                return AnswerQuestionChroma(question=" ".join(observation.user_input.split(" ")[1:])) -            elif "/edit" in observation.user_input: -                return EditFileChroma(request=" ".join(observation.user_input.split(" ")[1:])) -            elif "/step" in observation.user_input: -                return ContinueStepStep(prompt=" ".join(observation.user_input.split(" ")[1:])) -            return EditHighlightedCodeStep(user_input=observation.user_input) +            elif "/ask" in user_input: +                return AnswerQuestionChroma(question=" ".join(user_input.split(" ")[1:])) +            elif "/edit" in user_input: +                return EditFileChroma(request=" ".join(user_input.split(" ")[1:])) +            elif "/step" in user_input: +                return ContinueStepStep(prompt=" ".join(user_input.split(" ")[1:])) +            # return EditHighlightedCodeStep(user_input=user_input) +            return NLDecisionStep(user_input=user_input, steps=[ +                (EditHighlightedCodeStep(user_input=user_input), +                 "Edit the highlighted code"), +                # AnswerQuestionChroma(question=user_input), +                # EditFileChroma(request=user_input), +                (SimpleChatStep(user_input=user_input), +                 "Respond to the user with a chat message"), +            ], default_step=EditHighlightedCodeStep(user_input=user_input))          state = history.get_current() diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py index 59bfc0f2..2849b0c8 100644 --- a/continuedev/src/continuedev/core/sdk.py +++ b/continuedev/src/continuedev/core/sdk.py @@ -141,6 +141,14 @@ class ContinueSDK(AbstractContinueSDK):          self.history.timeline[self.history.current_index].step.chat_context.append(              ChatMessage(content=content, role=role)) -    @property -    def chat_context(self) -> List[ChatMessage]: -        return self.history.to_chat_history() +    async def get_chat_context(self) -> List[ChatMessage]: +        history_context = self.history.to_chat_history() +        highlighted_code = await self.ide.getHighlightedCode() +        for rif in highlighted_code: +            code = await self.ide.readRangeInFile(rif) +            history_context.append(ChatMessage( +                content=f"The following code is highlighted:\n```\n{code}\n```", role="user")) +        return history_context + +    async def update_ui(self): +        await self.__autopilot.update_subscribers() diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py index 6a537afd..9b8d3447 100644 --- a/continuedev/src/continuedev/libs/llm/openai.py +++ b/continuedev/src/continuedev/libs/llm/openai.py @@ -23,7 +23,7 @@ class 
OpenAI(LLM):      def with_system_message(self, system_message: Union[str, None]):          return OpenAI(api_key=self.api_key, system_message=system_message) -    def stream_chat(self, messages, **kwargs) -> Generator[Union[Any, List, Dict], None, None]: +    def stream_chat(self, prompt, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:          self.completion_count += 1          args = {"max_tokens": DEFAULT_MAX_TOKENS, "temperature": 0.5, "top_p": 1,                  "frequency_penalty": 0, "presence_penalty": 0} | kwargs @@ -31,7 +31,7 @@ class OpenAI(LLM):          args["model"] = "gpt-3.5-turbo"          for chunk in openai.ChatCompletion.create( -            messages=messages, +            messages=self.compile_chat_messages(with_history, prompt),              **args,          ):              if "content" in chunk.choices[0].delta: @@ -39,7 +39,21 @@ class OpenAI(LLM):              else:                  continue -    def stream_complete(self, prompt: str, **kwargs) -> Generator[Union[Any, List, Dict], None, None]: +    def compile_chat_messages(self, msgs: List[ChatMessage], prompt: str) -> List[Dict]: +        history = [] +        if self.system_message: +            history.append({ +                "role": "system", +                "content": self.system_message +            }) +        history += [msg.dict() for msg in msgs] +        history.append({ +            "role": "user", +            "content": prompt +        }) +        return history + +    def stream_complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Generator[Union[Any, List, Dict], None, None]:          self.completion_count += 1          args = {"model": self.default_model, "max_tokens": DEFAULT_MAX_TOKENS, "temperature": 0.5,                  "top_p": 1, "frequency_penalty": 0, "presence_penalty": 0, "suffix": None} | kwargs @@ -47,10 +61,7 @@ class OpenAI(LLM):          if args["model"] == "gpt-3.5-turbo":              generator = openai.ChatCompletion.create( -                messages=[{ -                    "role": "user", -                    "content": prompt -                }], +                messages=self.compile_chat_messages(with_history, prompt),                  **args,              )              for chunk in generator: @@ -71,19 +82,8 @@ class OpenAI(LLM):                  "frequency_penalty": 0, "presence_penalty": 0, "stream": False} | kwargs          if args["model"] == "gpt-3.5-turbo": -            messages = [] -            if self.system_message: -                messages.append({ -                    "role": "system", -                    "content": self.system_message -                }) -            messages += [msg.dict() for msg in with_history] -            messages.append({ -                "role": "user", -                "content": prompt -            })              resp = openai.ChatCompletion.create( -                messages=messages, +                messages=self.compile_chat_messages(with_history, prompt),                  **args,              ).choices[0].message.content          else: diff --git a/continuedev/src/continuedev/libs/util/calculate_diff.py b/continuedev/src/continuedev/libs/util/calculate_diff.py new file mode 100644 index 00000000..ff0a135f --- /dev/null +++ b/continuedev/src/continuedev/libs/util/calculate_diff.py @@ -0,0 +1,166 @@ +import difflib +from typing import List +from ...models.main import Position, Range +from ...models.filesystem import FileEdit +from diff_match_patch 
import diff_match_patch + + +def calculate_diff_match_patch(filepath: str, original: str, updated: str) -> List[FileEdit]: +    dmp = diff_match_patch() +    diffs = dmp.diff_main(original, updated) +    dmp.diff_cleanupSemantic(diffs) + +    replacements = [] + +    current_index = 0 +    deleted_length = 0 + +    for diff in diffs: +        if diff[0] == diff_match_patch.DIFF_EQUAL: +            current_index += len(diff[1]) +            deleted_length = 0 +        elif diff[0] == diff_match_patch.DIFF_INSERT: +            current_index += deleted_length +            replacements.append((current_index, current_index, diff[1])) +            current_index += len(diff[1]) +            deleted_length = 0 +        elif diff[0] == diff_match_patch.DIFF_DELETE: +            replacements.append( +                (current_index, current_index + len(diff[1]), '')) +            deleted_length += len(diff[1]) +        elif diff[0] == diff_match_patch.DIFF_REPLACE: +            replacements.append( +                (current_index, current_index + len(diff[1]), '')) +            current_index += deleted_length +            replacements.append((current_index, current_index, diff[2])) +            current_index += len(diff[2]) +            deleted_length = 0 + +    return [FileEdit(filepath=filepath, range=Range.from_indices(original, r[0], r[1]), replacement=r[2]) for r in replacements] + + +def calculate_diff(filepath: str, original: str, updated: str) -> List[FileEdit]: +    s = difflib.SequenceMatcher(None, original, updated) +    offset = 0  # The indices are offset by previous deletions/insertions +    edits = [] +    for tag, i1, i2, j1, j2 in s.get_opcodes(): +        i1, i2, j1, j2 = i1 + offset, i2 + offset, j1 + offset, j2 + offset +        replacement = updated[j1:j2] +        if tag == "equal": +            pass +        elif tag == "delete": +            edits.append(FileEdit.from_deletion( +                filepath, Range.from_indices(original, i1, i2))) +            offset -= i2 - i1 +        elif tag == "insert": +            edits.append(FileEdit.from_insertion( +                filepath, Position.from_index(original, i1), replacement)) +            offset += j2 - j1 +        elif tag == "replace": +            edits.append(FileEdit(filepath=filepath, range=Range.from_indices( +                original, i1, i2), replacement=replacement)) +            offset += (j2 - j1) - (i2 - i1) +        else: +            raise Exception("Unexpected difflib.SequenceMatcher tag: " + tag) + +    return edits + + +def calculate_diff2(filepath: str, original: str, updated: str) -> List[FileEdit]: +    # original_lines = original.splitlines() +    # updated_lines = updated.splitlines() +    # offset = 0 +    # while len(original_lines) and len(updated_lines) and original_lines[0] == updated_lines[0]: +    #     original_lines = original_lines[1:] +    #     updated_lines = updated_lines[1:] + +    # while len(original_lines) and len(updated_lines) and original_lines[-1] == updated_lines[-1]: +    #     original_lines = original_lines[:-1] +    #     updated_lines = updated_lines[:-1] + +    # original = "\n".join(original_lines) +    # updated = "\n".join(updated_lines) + +    edits = [] +    max_iterations = 1000 +    i = 0 +    while not original == updated: +        # TODO - For some reason it can't handle a single newline at the end of the file? 
+        s = difflib.SequenceMatcher(None, original, updated) +        opcodes = s.get_opcodes() +        for edit_index in range(len(opcodes)): +            tag, i1, i2, j1, j2 = s.get_opcodes()[edit_index] +            replacement = updated[j1:j2] +            if tag == "equal": +                continue +            elif tag == "delete": +                edits.append(FileEdit.from_deletion( +                    filepath, Range.from_indices(original, i1, i2))) +            elif tag == "insert": +                edits.append(FileEdit.from_insertion( +                    filepath, Position.from_index(original, i1), replacement)) +            elif tag == "replace": +                edits.append(FileEdit(filepath=filepath, range=Range.from_indices( +                    original, i1, i2), replacement=replacement)) +            else: +                raise Exception( +                    "Unexpected difflib.SequenceMatcher tag: " + tag) +            break + +        original = apply_edit_to_str(original, edits[-1]) + +        i += 1 +        if i > max_iterations: +            raise Exception("Max iterations reached") + +    return edits + + +def read_range_in_str(s: str, r: Range) -> str: +    lines = s.splitlines()[r.start.line:r.end.line + 1] +    if len(lines) == 0: +        return "" + +    lines[0] = lines[0][r.start.character:] +    lines[-1] = lines[-1][:r.end.character + 1] +    return "\n".join(lines) + + +def apply_edit_to_str(s: str, edit: FileEdit) -> str: +    original = read_range_in_str(s, edit.range) + +    # Split lines and deal with some edge cases (could obviously be nicer) +    lines = s.splitlines() +    if s.startswith("\n"): +        lines.insert(0, "") +    if s.endswith("\n"): +        lines.append("") + +    if len(lines) == 0: +        lines = [""] + +    end = Position(line=edit.range.end.line, +                   character=edit.range.end.character) +    if edit.range.end.line == len(lines) and edit.range.end.character == 0: +        end = Position(line=edit.range.end.line - 1, +                       character=len(lines[min(len(lines) - 1, edit.range.end.line - 1)])) + +    before_lines = lines[:edit.range.start.line] +    after_lines = lines[end.line + 1:] +    between_str = lines[min(len(lines) - 1, edit.range.start.line)][:edit.range.start.character] + \ +        edit.replacement + \ +        lines[min(len(lines) - 1, end.line)][end.character + 1:] + +    new_range = Range( +        start=edit.range.start, +        end=Position( +            line=edit.range.start.line + +            len(edit.replacement.splitlines()) - 1, +            character=edit.range.start.character + +            len(edit.replacement.splitlines() +                [-1]) if edit.replacement != "" else 0 +        ) +    ) + +    lines = before_lines + between_str.splitlines() + after_lines +    return "\n".join(lines) diff --git a/continuedev/src/continuedev/libs/util/copy_codebase.py b/continuedev/src/continuedev/libs/util/copy_codebase.py index af957a34..97143faf 100644 --- a/continuedev/src/continuedev/libs/util/copy_codebase.py +++ b/continuedev/src/continuedev/libs/util/copy_codebase.py @@ -3,13 +3,12 @@ from pathlib import Path  from typing import Iterable, List, Union  from watchdog.observers import Observer  from watchdog.events import PatternMatchingEventHandler -from ..models.main import FileEdit, DeleteDirectory, DeleteFile, AddDirectory, AddFile, FileSystemEdit, Position, Range, RenameFile, RenameDirectory, SequentialFileSystemEdit -from ..models.filesystem import FileSystem 
-from ..libs.main import Autopilot -from ..libs.map_path import map_path -from ..libs.steps.main import ManualEditAction +from ...models.main import FileEdit, DeleteDirectory, DeleteFile, AddDirectory, AddFile, FileSystemEdit, RenameFile, RenameDirectory, SequentialFileSystemEdit +from ...models.filesystem import FileSystem +from ...core.autopilot import Autopilot +from .map_path import map_path +from ...core.sdk import ManualEditStep  import shutil -import difflib  def create_copy(orig_root: str, copy_root: str = None, ignore: Iterable[str] = []): @@ -36,33 +35,6 @@ def create_copy(orig_root: str, copy_root: str = None, ignore: Iterable[str] = [                  os.symlink(child, map_path(child)) -def calculate_diff(filepath: str, original: str, updated: str) -> List[FileEdit]: -    s = difflib.SequenceMatcher(None, original, updated) -    offset = 0  # The indices are offset by previous deletions/insertions -    edits = [] -    for tag, i1, i2, j1, j2 in s.get_opcodes(): -        i1, i2, j1, j2 = i1 + offset, i2 + offset, j1 + offset, j2 + offset -        replacement = updated[j1:j2] -        if tag == "equal": -            pass -        elif tag == "delete": -            edits.append(FileEdit.from_deletion( -                filepath, Range.from_indices(original, i1, i2))) -            offset -= i2 - i1 -        elif tag == "insert": -            edits.append(FileEdit.from_insertion( -                filepath, Position.from_index(original, i1), replacement)) -            offset += j2 - j1 -        elif tag == "replace": -            edits.append(FileEdit(filepath, Range.from_indices( -                original, i1, i2), replacement)) -            offset += (j2 - j1) - (i2 + i1) -        else: -            raise Exception("Unexpected difflib.SequenceMatcher tag: " + tag) - -    return edits - -  # The whole usage of watchdog here should only be specific to RealFileSystem, you want to have a different "Observer" class for VirtualFileSystem, which would depend on being sent notifications  class CopyCodebaseEventHandler(PatternMatchingEventHandler):      def __init__(self, ignore_directories: List[str], ignore_patterns: List[str], autopilot: Autopilot, orig_root: str, copy_root: str, filesystem: FileSystem): diff --git a/continuedev/src/continuedev/models/filesystem_edit.py b/continuedev/src/continuedev/models/filesystem_edit.py index 8e74b819..b06ca2b3 100644 --- a/continuedev/src/continuedev/models/filesystem_edit.py +++ b/continuedev/src/continuedev/models/filesystem_edit.py @@ -30,8 +30,8 @@ class FileEdit(AtomicFileSystemEdit):          return FileEdit(map_path(self.filepath, orig_root, copy_root), self.range, self.replacement)      @staticmethod -    def from_deletion(filepath: str, start: Position, end: Position) -> "FileEdit": -        return FileEdit(filepath, Range(start, end), "") +    def from_deletion(filepath: str, range: Range) -> "FileEdit": +        return FileEdit(filepath=filepath, range=range, replacement="")      @staticmethod      def from_insertion(filepath: str, position: Position, content: str) -> "FileEdit": diff --git a/continuedev/src/continuedev/server/gui.py b/continuedev/src/continuedev/server/gui.py index b873a88f..e8b52004 100644 --- a/continuedev/src/continuedev/server/gui.py +++ b/continuedev/src/continuedev/server/gui.py @@ -77,6 +77,10 @@ class GUIProtocolServer(AbstractGUIProtocolServer):                  self.on_reverse_to_index(data["index"])              elif message_type == "retry_at_index":                  self.on_retry_at_index(data["index"]) +     
       elif message_type == "clear_history": +                self.on_clear_history() +            elif message_type == "delete_at_index": +                self.on_delete_at_index(data["index"])          except Exception as e:              print(e) @@ -106,6 +110,12 @@ class GUIProtocolServer(AbstractGUIProtocolServer):          asyncio.create_task(              self.session.autopilot.retry_at_index(index)) +    def on_clear_history(self): +        asyncio.create_task(self.session.autopilot.clear_history()) + +    def on_delete_at_index(self, index: int): +        asyncio.create_task(self.session.autopilot.delete_at_index(index)) +  @router.websocket("/ws")  async def websocket_endpoint(websocket: WebSocket, session: Session = Depends(websocket_session)): diff --git a/continuedev/src/continuedev/server/gui_protocol.py b/continuedev/src/continuedev/server/gui_protocol.py index 287f9e3b..889c6761 100644 --- a/continuedev/src/continuedev/server/gui_protocol.py +++ b/continuedev/src/continuedev/server/gui_protocol.py @@ -30,3 +30,11 @@ class AbstractGUIProtocolServer(ABC):      @abstractmethod      def on_retry_at_index(self, index: int):          """Called when the user requests a retry at a previous index""" + +    @abstractmethod +    def on_clear_history(self): +        """Called when the user requests to clear the history""" + +    @abstractmethod +    def on_delete_at_index(self, index: int): +        """Called when the user requests to delete a step at a given index""" diff --git a/continuedev/src/continuedev/server/session_manager.py b/continuedev/src/continuedev/server/session_manager.py index 0dbfaf38..ebea08a5 100644 --- a/continuedev/src/continuedev/server/session_manager.py +++ b/continuedev/src/continuedev/server/session_manager.py @@ -28,6 +28,7 @@ class DemoAutopilot(Autopilot):      cumulative_edit_string = ""      def handle_manual_edits(self, edits: List[FileEditWithFullContents]): +        return          for edit in edits:              self.cumulative_edit_string += edit.fileEdit.replacement              self._manual_edits_buffer.append(edit) diff --git a/continuedev/src/continuedev/server/state_manager.py b/continuedev/src/continuedev/server/state_manager.py new file mode 100644 index 00000000..c9bd760b --- /dev/null +++ b/continuedev/src/continuedev/server/state_manager.py @@ -0,0 +1,21 @@ +from typing import Any, List, Tuple, Union +from fastapi import WebSocket +from pydantic import BaseModel +from ..core.main import FullState + +# State updates represented as (path, replacement) pairs +StateUpdate = Tuple[List[Union[str, int]], Any] + + +class StateManager: +    """ +    A class that acts as the source of truth for state, ingesting changes to the entire object and streaming only the updated portions to client. 
+    """ + +    def __init__(self, ws: WebSocket): +        self.ws = ws + +    def _send_update(self, updates: List[StateUpdate]): +        self.ws.send_json( +            [update.dict() for update in updates] +        ) diff --git a/continuedev/src/continuedev/steps/chat.py b/continuedev/src/continuedev/steps/chat.py new file mode 100644 index 00000000..aadcfa8e --- /dev/null +++ b/continuedev/src/continuedev/steps/chat.py @@ -0,0 +1,19 @@ +from textwrap import dedent +from typing import List +from ..core.main import Step +from ..core.sdk import ContinueSDK +from .core.core import MessageStep + + +class SimpleChatStep(Step): +    user_input: str +    name: str = "Chat" + +    async def run(self, sdk: ContinueSDK): +        self.description = "" +        for chunk in sdk.models.gpt35.stream_chat(self.user_input, with_history=await sdk.get_chat_context()): +            self.description += chunk +            await sdk.update_ui() + +        self.name = sdk.models.gpt35.complete( +            f"Write a short title for the following chat message: {self.description}").strip() diff --git a/continuedev/src/continuedev/steps/core/core.py b/continuedev/src/continuedev/steps/core/core.py index 392339c6..8dc2478b 100644 --- a/continuedev/src/continuedev/steps/core/core.py +++ b/continuedev/src/continuedev/steps/core/core.py @@ -3,8 +3,10 @@ import os  import subprocess  from textwrap import dedent  from typing import Coroutine, List, Union -from ...libs.llm.prompt_utils import MarkdownStyleEncoderDecoder +from ...models.main import Range +from ...libs.util.calculate_diff import calculate_diff2, apply_edit_to_str +from ...libs.llm.prompt_utils import MarkdownStyleEncoderDecoder  from ...models.filesystem_edit import EditDiff, FileEdit, FileEditWithFullContents, FileSystemEdit  from ...models.filesystem import FileSystem, RangeInFile, RangeInFileWithContents  from ...core.observation import Observation, TextObservation, TracebackObservation, UserInputObservation @@ -85,7 +87,7 @@ class ShellCommandsStep(Step):                      {output}                      ``` -                    This is a brief summary of the error followed by a suggestion on how it can be fixed:"""), with_history=sdk.chat_context) +                    This is a brief summary of the error followed by a suggestion on how it can be fixed:"""), with_history=await sdk.get_chat_context())                  sdk.raise_exception(                      title="Error while running query", message=output, with_step=MessageStep(name=f"Suggestion to solve error {AI_ASSISTED_STRING}", message=f"{suggestion}\n\nYou can click the retry button on the failed step to try again.") @@ -149,7 +151,11 @@ class Gpt35EditCodeStep(Step):      _prompt_and_completion: str = ""      async def describe(self, models: Models) -> Coroutine[str, None, None]: -        return models.gpt35.complete(f"{self._prompt_and_completion}\n\nPlease give brief a description of the changes made above using markdown bullet points:") +        description = models.gpt35.complete( +            f"{self._prompt_and_completion}\n\nPlease give brief a description of the changes made above using markdown bullet points. 
Be concise and only mention changes made to the commit before, not prefix or suffix:") +        self.name = models.gpt35.complete( +            f"Write a short title for this description: {description}") +        return description      async def run(self, sdk: ContinueSDK) -> Coroutine[Observation, None, None]:          rif_with_contents = [] @@ -174,11 +180,40 @@ class Gpt35EditCodeStep(Step):              self._prompt_and_completion += prompt + completion -            await sdk.ide.applyFileSystemEdit( -                FileEdit(filepath=rif.filepath, range=rif.range, replacement=completion)) -            await sdk.ide.saveFile(rif.filepath) +            # Calculate diff, open file, apply edits, and highlight changed lines +            edits = calculate_diff2( +                rif.filepath, rif.contents, completion.removesuffix("\n")) +              await sdk.ide.setFileOpen(rif.filepath) +            lines_to_highlight = set() +            for edit in edits: +                edit.range.start.line += rif.range.start.line +                edit.range.start.character += rif.range.start.character +                edit.range.end.line += rif.range.start.line +                edit.range.end.character += rif.range.start.character if edit.range.end.line == 0 else 0 + +                for line in range(edit.range.start.line, edit.range.end.line + 1 + len(edit.replacement.splitlines()) - (edit.range.end.line - edit.range.start.line + 1)): +                    lines_to_highlight.add(line) + +                await sdk.ide.applyFileSystemEdit(edit) + +            current_start = None +            last_line = None +            for line in sorted(list(lines_to_highlight)): +                if current_start is None: +                    current_start = line +                elif line != last_line + 1: +                    await sdk.ide.highlightCode(RangeInFile(filepath=edit.filepath, range=Range.from_shorthand(current_start, 0, last_line, 0))) +                    current_start = line + +                last_line = line + +            if current_start is not None: +                await sdk.ide.highlightCode(RangeInFile(filepath=edit.filepath, range=Range.from_shorthand(current_start, 0, last_line, 0))) + +            await sdk.ide.saveFile(rif.filepath) +  class EditFileStep(Step):      filepath: str diff --git a/continuedev/src/continuedev/steps/draft/migration.py b/continuedev/src/continuedev/steps/draft/migration.py index f3b36b5e..7c4b7eb5 100644 --- a/continuedev/src/continuedev/steps/draft/migration.py +++ b/continuedev/src/continuedev/steps/draft/migration.py @@ -13,7 +13,7 @@ class MigrationStep(Step):          recent_edits = await sdk.ide.get_recent_edits(self.edited_file)          recent_edits_string = "\n\n".join(              map(lambda x: x.to_string(), recent_edits)) -        description = await sdk.models.gpt35.complete(f"{recent_edits_string}\n\nGenerate a short description of the migration made in the above changes:\n") +        description = sdk.models.gpt35.complete(f"{recent_edits_string}\n\nGenerate a short description of the migration made in the above changes:\n")          await sdk.run([              "cd libs",              "poetry run alembic revision --autogenerate -m " + description, diff --git a/continuedev/src/continuedev/steps/find_and_replace.py b/continuedev/src/continuedev/steps/find_and_replace.py index fec33997..690872c0 100644 --- a/continuedev/src/continuedev/steps/find_and_replace.py +++ b/continuedev/src/continuedev/steps/find_and_replace.py @@ -10,7 +10,7 @@ 
class FindAndReplaceStep(Step):      replacement: str      async def describe(self, models: Models): -        return f"Replace all instances of `{self.pattern}` with `{self.replacement}` in `{self.filepath}`" +        return f"Replaced all instances of `{self.pattern}` with `{self.replacement}` in `{self.filepath}`"      async def run(self, sdk: ContinueSDK):          file_content = await sdk.ide.readFile(self.filepath) diff --git a/continuedev/src/continuedev/steps/input/nl_multiselect.py b/continuedev/src/continuedev/steps/input/nl_multiselect.py index c3c832f5..36c489c7 100644 --- a/continuedev/src/continuedev/steps/input/nl_multiselect.py +++ b/continuedev/src/continuedev/steps/input/nl_multiselect.py @@ -23,5 +23,6 @@ class NLMultiselectStep(Step):          if first_try is not None:              return first_try -        gpt_parsed = await sdk.models.gpt35.complete(f"These are the available options are: [{', '.join(self.options)}]. The user requested {user_response}. This is the exact string from the options array that they selected:") +        gpt_parsed = sdk.models.gpt35.complete( +            f"These are the available options are: [{', '.join(self.options)}]. The user requested {user_response}. This is the exact string from the options array that they selected:")          return extract_option(gpt_parsed) or self.options[0] diff --git a/continuedev/src/continuedev/steps/main.py b/continuedev/src/continuedev/steps/main.py index 24335b4f..9634c726 100644 --- a/continuedev/src/continuedev/steps/main.py +++ b/continuedev/src/continuedev/steps/main.py @@ -16,6 +16,7 @@ from ..core.sdk import ContinueSDK, Models  from ..core.observation import Observation  import subprocess  from .core.core import Gpt35EditCodeStep +from ..libs.util.calculate_diff import calculate_diff2  class SetupContinueWorkspaceStep(Step): @@ -216,7 +217,8 @@ class StarCoderEditHighlightedCodeStep(Step):      async def run(self, sdk: ContinueSDK) -> Coroutine[Observation, None, None]:          range_in_files = await sdk.ide.getHighlightedCode() -        if len(range_in_files) == 0: +        found_highlighted_code = len(range_in_files) > 0 +        if not found_highlighted_code:              # Get the full contents of all open files              files = await sdk.ide.getOpenFiles()              contents = {} @@ -239,15 +241,29 @@ class StarCoderEditHighlightedCodeStep(Step):          for rif in rif_with_contents:              prompt = self._prompt.format(                  code=rif.contents, user_request=self.user_input) -            completion = str(sdk.models.starcoder.complete(prompt)) + +            if found_highlighted_code: +                full_file_contents = await sdk.ide.readFile(rif.filepath) +                segs = full_file_contents.split(rif.contents) +                prompt = f"<file_prefix>{segs[0]}<file_suffix>{segs[1]}" + prompt + +            completion = str((await sdk.models.starcoder()).complete(prompt))              eot_token = "<|endoftext|>" -            if completion.endswith(eot_token): -                completion = completion[:completion.rindex(eot_token)] +            completion = completion.removesuffix(eot_token) + +            if found_highlighted_code: +                rif.contents = segs[0] + rif.contents + segs[1] +                completion = segs[0] + completion + segs[1]              self._prompt_and_completion += prompt + completion -            await sdk.ide.applyFileSystemEdit( -                FileEdit(filepath=rif.filepath, range=rif.range, replacement=completion)) +          
  edits = calculate_diff2( +                rif.filepath, rif.contents, completion.removesuffix("\n")) +            for edit in edits: +                await sdk.ide.applyFileSystemEdit(edit) + +            # await sdk.ide.applyFileSystemEdit( +            #     FileEdit(filepath=rif.filepath, range=rif.range, replacement=completion))              await sdk.ide.saveFile(rif.filepath)              await sdk.ide.setFileOpen(rif.filepath) diff --git a/continuedev/src/continuedev/steps/react.py b/continuedev/src/continuedev/steps/react.py new file mode 100644 index 00000000..d825d424 --- /dev/null +++ b/continuedev/src/continuedev/steps/react.py @@ -0,0 +1,39 @@ +from textwrap import dedent +from typing import List, Union, Tuple +from ..core.main import Step +from ..core.sdk import ContinueSDK +from .core.core import MessageStep + + +class NLDecisionStep(Step): +    user_input: str +    default_step: Union[Step, None] = None +    steps: List[Tuple[Step, str]] + +    hide: bool = True + +    async def run(self, sdk: ContinueSDK): +        step_descriptions = "\n".join([ +            f"- {step[0].name}: {step[1]}" +            for step in self.steps +        ]) +        prompt = dedent(f"""\ +            The following steps are available, in the format "- [step name]: [step description]": +            {step_descriptions} +             +            The user gave the following input: +             +            {self.user_input} +             +            Select the step which should be taken next to satisfy the user input. Say only the name of the selected step. You must choose one:""") + +        resp = sdk.models.gpt35.complete(prompt).lower() + +        step_to_run = None +        for step in self.steps: +            if step[0].name.lower() in resp: +                step_to_run = step[0] + +        step_to_run = step_to_run or self.default_step or self.steps[0] + +        await sdk.run_step(step_to_run) | 
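
The autopilot.py hunk adds clear_history and delete_at_index, both of which mutate state and then notify subscribers. Below is a minimal sketch of that publish-on-mutation pattern; the timeline and node types are simplified stand-ins for the real History/Step models, not the continuedev classes themselves.

import asyncio
from dataclasses import dataclass, field
from typing import Awaitable, Callable, List


@dataclass
class TimelineNode:
    description: str
    hide: bool = False  # steps are hidden rather than removed, as in delete_at_index


@dataclass
class MiniAutopilot:
    timeline: List[TimelineNode] = field(default_factory=list)
    _callbacks: List[Callable[[List[TimelineNode]], Awaitable[None]]] = field(default_factory=list)

    def on_update(self, callback: Callable[[List[TimelineNode]], Awaitable[None]]) -> None:
        self._callbacks.append(callback)

    async def _update_subscribers(self) -> None:
        for callback in self._callbacks:
            await callback(self.timeline)

    async def clear_history(self) -> None:
        self.timeline = []
        await self._update_subscribers()

    async def delete_at_index(self, index: int) -> None:
        self.timeline[index].hide = True
        await self._update_subscribers()


async def demo() -> None:
    autopilot = MiniAutopilot(timeline=[TimelineNode("step 1"), TimelineNode("step 2")])

    async def printer(timeline: List[TimelineNode]) -> None:
        print([(node.description, node.hide) for node in timeline])

    autopilot.on_update(printer)
    await autopilot.delete_at_index(0)   # [('step 1', True), ('step 2', False)]
    await autopilot.clear_history()      # []


asyncio.run(demo())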
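
DemoPolicy in policy.py now pulls observation.user_input into a local variable and checks slash commands ("/pytest", "/dlt", "/comment", ...) before falling through to NLDecisionStep. A rough sketch of that routing shape, using plain functions as hypothetical stand-ins for the recipe and step classes:

from typing import Callable, Dict

# Hypothetical stand-ins for the recipe/step constructors used in DemoPolicy
def write_pytests(user_input: str) -> str:
    return f"WritePytestsRecipe(instructions={user_input!r})"

def create_pipeline(user_input: str) -> str:
    return "CreatePipelineRecipe()"

def nl_decision(user_input: str) -> str:
    return f"NLDecisionStep(user_input={user_input!r})"

COMMANDS: Dict[str, Callable[[str], str]] = {
    "/pytest": write_pytests,
    "/dlt": create_pipeline,
}

def next_step(user_input: str) -> str:
    lowered = user_input.lower()
    for command, make_step in COMMANDS.items():
        if command in lowered:
            return make_step(user_input)
    # No slash command matched: let a natural-language decision step choose
    return nl_decision(user_input)

print(next_step("/pytest for the parser module"))
print(next_step("rename this variable to something clearer"))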
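
compile_chat_messages in openai.py centralizes the system-message / history / prompt assembly that stream_chat, stream_complete, and complete previously duplicated. The same assembly in isolation, with plain dicts in place of the ChatMessage model:

from typing import Dict, List, Optional

def compile_chat_messages(system_message: Optional[str],
                          history: List[Dict[str, str]],
                          prompt: str) -> List[Dict[str, str]]:
    messages: List[Dict[str, str]] = []
    if system_message:
        messages.append({"role": "system", "content": system_message})
    messages.extend(history)  # prior user/assistant turns
    messages.append({"role": "user", "content": prompt})
    return messages

print(compile_chat_messages(
    "You are a concise assistant.",
    [{"role": "user", "content": "hi"}, {"role": "assistant", "content": "hello"}],
    "Summarize our conversation.",
))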
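
calculate_diff in the new libs/util/calculate_diff.py walks difflib.SequenceMatcher opcodes and keeps a running offset so that each FileEdit refers to the string as already patched by the preceding edits. A stripped-down version that returns plain (start, end, replacement) character ranges instead of FileEdit objects:

import difflib
from typing import List, Tuple


def calculate_char_edits(original: str, updated: str) -> List[Tuple[int, int, str]]:
    """Return (start, end, replacement) triples that, applied in order
    (each edit shifting the indices of the ones after it), turn `original` into `updated`."""
    matcher = difflib.SequenceMatcher(None, original, updated)
    offset = 0  # indices drift as earlier edits grow or shrink the string
    edits: List[Tuple[int, int, str]] = []
    for tag, i1, i2, j1, j2 in matcher.get_opcodes():
        if tag == "equal":
            continue
        edits.append((i1 + offset, i2 + offset, updated[j1:j2]))
        offset += (j2 - j1) - (i2 - i1)
    return edits


def apply_char_edits(original: str, edits: List[Tuple[int, int, str]]) -> str:
    for start, end, replacement in edits:
        original = original[:start] + replacement + original[end:]
    return original


before = "def greet():\n    print('hi')\n"
after = "def greet(name):\n    print(f'hi {name}')\n"
edits = calculate_char_edits(before, after)
assert apply_char_edits(before, edits) == after
print(edits)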
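
filesystem_edit.py changes FileEdit.from_deletion to take a single Range and to construct the model with keyword arguments. A minimal pydantic sketch of that shape; the from_insertion body is an assumption here (only its signature appears in the diff), and the real models carry more fields and methods:

from pydantic import BaseModel


class Position(BaseModel):
    line: int
    character: int


class Range(BaseModel):
    start: Position
    end: Position


class FileEdit(BaseModel):
    filepath: str
    range: Range
    replacement: str

    @staticmethod
    def from_deletion(filepath: str, range: Range) -> "FileEdit":
        return FileEdit(filepath=filepath, range=range, replacement="")

    @staticmethod
    def from_insertion(filepath: str, position: Position, content: str) -> "FileEdit":
        # Assumed body: an insertion is a zero-width range replaced by `content`
        return FileEdit(filepath=filepath,
                        range=Range(start=position, end=position),
                        replacement=content)


print(FileEdit.from_deletion(
    "main.py",
    Range(start=Position(line=3, character=0), end=Position(line=5, character=0)),
))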
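
gui.py handles the two new message types by scheduling autopilot coroutines with asyncio.create_task rather than awaiting them inline. A sketch of that fire-and-forget dispatch, with a hypothetical handler table in place of the if/elif chain and a stand-in autopilot:

import asyncio
from typing import Any, Awaitable, Callable, Dict


class StubAutopilot:
    # Stand-in for the real Autopilot; mirrors the two methods added in the diff
    async def clear_history(self) -> None:
        print("history cleared")

    async def delete_at_index(self, index: int) -> None:
        print(f"deleted step {index}")


def handle_message(autopilot: StubAutopilot, message_type: str, data: Dict[str, Any]) -> None:
    handlers: Dict[str, Callable[[], Awaitable[None]]] = {
        "clear_history": lambda: autopilot.clear_history(),
        "delete_at_index": lambda: autopilot.delete_at_index(data["index"]),
    }
    if message_type in handlers:
        # Fire-and-forget, as the GUI server does with asyncio.create_task
        asyncio.create_task(handlers[message_type]())


async def demo() -> None:
    autopilot = StubAutopilot()
    handle_message(autopilot, "delete_at_index", {"index": 2})
    handle_message(autopilot, "clear_history", {})
    await asyncio.sleep(0)  # give the scheduled tasks a chance to run


asyncio.run(demo())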
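
SimpleChatStep in the new steps/chat.py streams the model's reply chunk by chunk, appending to its description and awaiting sdk.update_ui() after every chunk. The consume-and-update loop in isolation, with a fake token generator standing in for gpt35.stream_chat:

import asyncio
from typing import Iterator


def fake_stream_chat(prompt: str) -> Iterator[str]:
    # Stand-in for models.gpt35.stream_chat(prompt, with_history=...)
    yield from ["Sure", ", ", "here is ", "an ", "answer."]


async def update_ui(partial: str) -> None:
    print(f"UI now shows: {partial!r}")


async def run_chat_step(prompt: str) -> str:
    description = ""
    for chunk in fake_stream_chat(prompt):
        description += chunk
        await update_ui(description)  # the real step awaits sdk.update_ui() per chunk
    return description


print(asyncio.run(run_chat_step("explain this code")))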
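
The Gpt35EditCodeStep changes collect edited line numbers into lines_to_highlight and then emit one highlightCode call per contiguous run. That grouping logic is easy to pull out on its own:

from typing import Iterable, List, Tuple

def contiguous_ranges(lines: Iterable[int]) -> List[Tuple[int, int]]:
    """Collapse line numbers into inclusive (start, end) runs,
    mirroring the highlight loop in Gpt35EditCodeStep."""
    ranges: List[Tuple[int, int]] = []
    current_start = None
    last_line = None
    for line in sorted(set(lines)):
        if current_start is None:
            current_start = line
        elif line != last_line + 1:
            ranges.append((current_start, last_line))
            current_start = line
        last_line = line
    if current_start is not None:
        ranges.append((current_start, last_line))
    return ranges

print(contiguous_ranges({3, 4, 5, 9, 10, 14}))  # [(3, 5), (9, 10), (14, 14)]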
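
StarCoderEditHighlightedCodeStep now wraps the prompt with the rest of the file when code is highlighted, splitting the full contents around the highlighted region. A sketch of that prompt assembly; the <file_prefix>/<file_suffix> markers come from the diff, while the template string below is a made-up placeholder, not the step's real _prompt:

def build_starcoder_prompt(full_file: str, highlighted: str,
                           template: str, user_request: str) -> str:
    prompt = template.format(code=highlighted, user_request=user_request)
    if highlighted and highlighted in full_file:
        # Wrap with the rest of the file, as the diff does via full_file_contents.split(rif.contents)
        prefix, suffix = full_file.split(highlighted, 1)
        prompt = f"<file_prefix>{prefix}<file_suffix>{suffix}" + prompt
    return prompt


TEMPLATE = "<commit_before>{code}<commit_msg>{user_request}<commit_after>"  # placeholder template
full_file = "import math\n\n\ndef area(r):\n    return 3.14 * r * r\n"
highlighted = "def area(r):\n    return 3.14 * r * r\n"
print(build_starcoder_prompt(full_file, highlighted, TEMPLATE, "use math.pi"))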
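
NLDecisionStep in the new steps/react.py lists the candidate steps, asks the model to name one, and falls back to a default if nothing matches. The selection logic with the LLM call stubbed out as a plain function:

from typing import Callable, List, Optional, Tuple


def choose_step(user_input: str,
                steps: List[Tuple[str, str]],
                complete: Callable[[str], str],
                default: Optional[str] = None) -> str:
    """steps is a list of (step name, description); complete is the LLM call."""
    step_descriptions = "\n".join(f"- {name}: {description}" for name, description in steps)
    prompt = (
        'The following steps are available, in the format "- [step name]: [step description]":\n'
        f"{step_descriptions}\n\n"
        "The user gave the following input:\n\n"
        f"{user_input}\n\n"
        "Select the step which should be taken next to satisfy the user input. "
        "Say only the name of the selected step. You must choose one:"
    )
    response = complete(prompt).lower()
    chosen = next((name for name, _ in steps if name.lower() in response), None)
    return chosen or default or steps[0][0]


# Stubbed LLM that always picks the chat step
print(choose_step(
    "what does this function do?",
    [("Edit Highlighted Code", "Edit the highlighted code"),
     ("Chat", "Respond to the user with a chat message")],
    complete=lambda prompt: "chat",
    default="Edit Highlighted Code",
))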
