Diffstat (limited to 'continuedev/src')
-rw-r--r--  continuedev/src/continuedev/core/agent.py                   |  1
-rw-r--r--  continuedev/src/continuedev/core/sdk.py                     | 32
-rw-r--r--  continuedev/src/continuedev/libs/llm/openai.py              | 10
-rw-r--r--  continuedev/src/continuedev/models/generate_json_schema.py  |  8
4 files changed, 38 insertions, 13 deletions
diff --git a/continuedev/src/continuedev/core/agent.py b/continuedev/src/continuedev/core/agent.py
index 509a54b3..6d1f542e 100644
--- a/continuedev/src/continuedev/core/agent.py
+++ b/continuedev/src/continuedev/core/agent.py
@@ -17,7 +17,6 @@ class Agent(ContinueBaseModel):
     policy: Policy
     ide: AbstractIdeProtocolServer
     history: History = History.from_empty()
-    continue_sdk: "ContinueSDK"
     _on_update_callbacks: List[Callable[[FullState], None]] = []
 
     _active: bool = False
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index ff62a2b1..3559e9d7 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -1,5 +1,6 @@
+import os
 from typing import Coroutine, Union
-from ..models.filesystem_edit import FileSystemEdit
+from ..models.filesystem_edit import FileSystemEdit, AddFile, DeleteFile, AddDirectory, DeleteDirectory
 from ..models.filesystem import RangeInFile
 from ..libs.llm import LLM
 from .observation import Observation
@@ -37,11 +38,16 @@ class ContinueSDK:
     def history(self) -> History:
         return self.__agent.history
 
+    async def _ensure_absolute_path(self, path: str) -> str:
+        if os.path.isabs(path):
+            return path
+        return os.path.join(await self.ide.getWorkspaceDirectory(), path)
+
     async def run_step(self, step: Step) -> Coroutine[Observation, None, None]:
         return await self.__agent._run_singular_step(step)
 
     async def apply_filesystem_edit(self, edit: FileSystemEdit):
-        await self.run_step(FileSystemEditStep(edit=edit))
+        return await self.run_step(FileSystemEditStep(edit=edit))
 
     async def wait_for_user_input(self) -> str:
         return await self.__agent.wait_for_user_input()
@@ -51,12 +57,26 @@ class ContinueSDK:
     async def run(self, commands: List[str] | str, cwd: str = None):
         commands = commands if isinstance(commands, List) else [commands]
-        return self.run_step(ShellCommandsStep(commands=commands, cwd=cwd))
+        return await self.run_step(ShellCommandsStep(commands=commands, cwd=cwd))
 
     async def edit_file(self, filename: str, prompt: str):
-        await self.ide.setFileOpen(filename)
-        contents = await self.ide.readFile(filename)
+        filepath = await self._ensure_absolute_path(filename)
+
+        await self.ide.setFileOpen(filepath)
+        contents = await self.ide.readFile(filepath)
         await self.run_step(EditCodeStep(
-            range_in_files=[RangeInFile.from_entire_file(filename, contents)],
+            range_in_files=[RangeInFile.from_entire_file(filepath, contents)],
             prompt=f'Here is the code before:\n\n{{code}}\n\nHere is the user request:\n\n{prompt}\n\nHere is the code edited to perfectly solve the user request:\n\n'
         ))
+
+    async def add_file(self, filename: str, content: str | None):
+        return await self.run_step(FileSystemEditStep(edit=AddFile(filename=filename, content=content)))
+
+    async def delete_file(self, filename: str):
+        return await self.run_step(FileSystemEditStep(edit=DeleteFile(filepath=filename)))
+
+    async def add_directory(self, path: str):
+        return await self.run_step(FileSystemEditStep(edit=AddDirectory(path=path)))
+
+    async def delete_directory(self, path: str):
+        return await self.run_step(FileSystemEditStep(edit=DeleteDirectory(path=path)))
diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py
index bb745e75..10801465 100644
--- a/continuedev/src/continuedev/libs/llm/openai.py
+++ b/continuedev/src/continuedev/libs/llm/openai.py
@@ -6,6 +6,8 @@ import aiohttp
 from ..llm import LLM
 from pydantic import BaseModel, validator
 
+DEFAULT_MAX_TOKENS = 2048
+
 
 class OpenAI(LLM):
     api_key: str
@@ -22,7 +24,7 @@ class OpenAI(LLM):
 
     def stream_chat(self, messages, **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
         self.completion_count += 1
-        args = {"max_tokens": 512, "temperature": 0.5, "top_p": 1,
+        args = {"max_tokens": DEFAULT_MAX_TOKENS, "temperature": 0.5, "top_p": 1,
                 "frequency_penalty": 0, "presence_penalty": 0} | kwargs
         args["stream"] = True
         args["model"] = "gpt-3.5-turbo"
@@ -38,7 +40,7 @@ class OpenAI(LLM):
 
     def stream_complete(self, prompt: str, **kwargs) -> Generator[Union[Any, List, Dict], None, None]:
         self.completion_count += 1
-        args = {"model": self.default_model, "max_tokens": 512, "temperature": 0.5,
+        args = {"model": self.default_model, "max_tokens": DEFAULT_MAX_TOKENS, "temperature": 0.5,
                 "top_p": 1, "frequency_penalty": 0, "presence_penalty": 0, "suffix": None} | kwargs
         args["stream"] = True
 
@@ -64,7 +66,7 @@ class OpenAI(LLM):
         t1 = time.time()
 
         self.completion_count += 1
-        args = {"model": self.default_model, "max_tokens": 512, "temperature": 0.5, "top_p": 1,
+        args = {"model": self.default_model, "max_tokens": DEFAULT_MAX_TOKENS, "temperature": 0.5, "top_p": 1,
                 "frequency_penalty": 0, "presence_penalty": 0, "stream": False} | kwargs
 
         if args["model"] == "gpt-3.5-turbo":
@@ -132,7 +134,7 @@ class OpenAI(LLM):
 
     def parallel_complete(self, prompts: list[str], suffixes: Union[list[str], None] = None, **kwargs) -> list[str]:
         self.completion_count += len(prompts)
-        args = {"model": self.default_model, "max_tokens": 512, "temperature": 0.5,
+        args = {"model": self.default_model, "max_tokens": DEFAULT_MAX_TOKENS, "temperature": 0.5,
                 "top_p": 1, "frequency_penalty": 0, "presence_penalty": 0} | kwargs
 
         async def fn():
diff --git a/continuedev/src/continuedev/models/generate_json_schema.py b/continuedev/src/continuedev/models/generate_json_schema.py
index 07337029..080787a5 100644
--- a/continuedev/src/continuedev/models/generate_json_schema.py
+++ b/continuedev/src/continuedev/models/generate_json_schema.py
@@ -19,7 +19,7 @@ RENAMES = {
     "ExampleClass": "RenamedName"
 }
 
-SCHEMA_DIR = "schema/json"
+SCHEMA_DIR = "../schema/json"
 
 
 def clear_schemas():
@@ -28,7 +28,7 @@ def clear_schemas():
             os.remove(os.path.join(SCHEMA_DIR, filename))
 
 
-if __name__ == "__main__":
+def main():
     clear_schemas()
     for model in MODELS_TO_GENERATE:
         title = RENAMES.get(model.__name__, model.__name__)
@@ -40,3 +40,7 @@ if __name__ == "__main__":
 
         with open(f"{SCHEMA_DIR}/{title}.json", "w") as f:
             f.write(json)
+
+
+if __name__ == "__main__":
+    main()
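
For context, a minimal sketch of how the ContinueSDK convenience methods introduced in core/sdk.py could be called from step code. It assumes an already-constructed ContinueSDK instance named `sdk` (such as the one step implementations receive); the coroutine `scaffold_docs` and the file paths are hypothetical and are not part of this commit.

    # Illustrative sketch only: exercises the helpers added to ContinueSDK above.
    async def scaffold_docs(sdk):
        # add_directory/add_file each wrap a one-off FileSystemEditStep run.
        await sdk.add_directory("docs")
        await sdk.add_file("docs/overview.md", "# Overview\n")

        # edit_file resolves a relative path against the workspace root via
        # _ensure_absolute_path, opens the file in the IDE, and runs an
        # EditCodeStep over its full contents with the given prompt.
        await sdk.edit_file("docs/overview.md", "Add a short project description")

        # run wraps shell commands in a ShellCommandsStep; a single string is
        # promoted to a one-element list.
        await sdk.run("git add docs")

        # The delete helpers mirror the add helpers.
        await sdk.delete_file("docs/overview.md")
        await sdk.delete_directory("docs")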
