summaryrefslogtreecommitdiff
path: root/continuedev
diff options
context:
space:
mode:
Diffstat (limited to 'continuedev')
-rw-r--r--continuedev/poetry.lock38
-rw-r--r--continuedev/pyproject.toml1
-rw-r--r--continuedev/requirements.txt5
-rw-r--r--continuedev/src/continuedev/core/sdk.py2
-rw-r--r--continuedev/src/continuedev/libs/constants/default_config.py10
-rw-r--r--continuedev/src/continuedev/libs/llm/__init__.py3
-rw-r--r--continuedev/src/continuedev/libs/llm/replicate.py63
-rw-r--r--continuedev/src/continuedev/plugins/context_providers/url.py34
-rw-r--r--continuedev/src/continuedev/plugins/policies/default.py3
-rw-r--r--continuedev/src/continuedev/plugins/steps/core/core.py4
-rw-r--r--continuedev/src/continuedev/plugins/steps/share_session.py51
11 files changed, 200 insertions, 14 deletions
diff --git a/continuedev/poetry.lock b/continuedev/poetry.lock
index 3754d121..d3140756 100644
--- a/continuedev/poetry.lock
+++ b/continuedev/poetry.lock
@@ -907,13 +907,13 @@ files = [
[[package]]
name = "openai"
-version = "0.27.6"
+version = "0.27.8"
description = "Python client library for the OpenAI API"
optional = false
python-versions = ">=3.7.1"
files = [
- {file = "openai-0.27.6-py3-none-any.whl", hash = "sha256:1f07ed06f1cfc6c25126107193726fe4cf476edcc4e1485cd9eb708f068f2606"},
- {file = "openai-0.27.6.tar.gz", hash = "sha256:63ca9f6ac619daef8c1ddec6d987fe6aa1c87a9bfdce31ff253204d077222375"},
+ {file = "openai-0.27.8-py3-none-any.whl", hash = "sha256:e0a7c2f7da26bdbe5354b03c6d4b82a2f34bd4458c7a17ae1a7092c3e397e03c"},
+ {file = "openai-0.27.8.tar.gz", hash = "sha256:2483095c7db1eee274cebac79e315a986c4e55207bb4fa7b82d185b3a2ed9536"},
]
[package.dependencies]
@@ -928,6 +928,17 @@ embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "
wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"]
[[package]]
+name = "packaging"
+version = "23.1"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
+ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
+]
+
+[[package]]
name = "pkgutil-resolve-name"
version = "1.3.10"
description = "Resolve a name to an object."
@@ -1275,6 +1286,25 @@ files = [
]
[[package]]
+name = "replicate"
+version = "0.11.0"
+description = "Python client for Replicate"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "replicate-0.11.0-py3-none-any.whl", hash = "sha256:fbb8815068864dc822cd4fa7b6103d6f4089d6ef122abd6c3441ca0f0f110c46"},
+ {file = "replicate-0.11.0.tar.gz", hash = "sha256:4d54b5838c1552a6f76cc37c3af8d9a7998105382082d672acad31636ad443b5"},
+]
+
+[package.dependencies]
+packaging = "*"
+pydantic = ">1"
+requests = ">2"
+
+[package.extras]
+dev = ["black", "mypy", "pytest", "responses", "ruff"]
+
+[[package]]
name = "requests"
version = "2.29.0"
description = "Python HTTP for Humans."
@@ -1819,4 +1849,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
[metadata]
lock-version = "2.0"
python-versions = "^3.8.1"
-content-hash = "19ea56d05ab21d19f2fee6f837f7c8915ca1c597c392a539f43010dd0a4f6a78"
+content-hash = "5500ea86b06a96f5fe45939500936911e622043a67a3a5c3d02473463ff2fd6c"
diff --git a/continuedev/pyproject.toml b/continuedev/pyproject.toml
index a3c058bd..90ff0572 100644
--- a/continuedev/pyproject.toml
+++ b/continuedev/pyproject.toml
@@ -30,6 +30,7 @@ meilisearch-python-async = "^1.4.8"
socksio = "^1.0.0"
ripgrepy = "^2.0.0"
bs4 = "^0.0.1"
+replicate = "^0.11.0"
[tool.poetry.scripts]
typegen = "src.continuedev.models.generate_json_schema:main"
diff --git a/continuedev/requirements.txt b/continuedev/requirements.txt
index a7e40fed..9d541de9 100644
--- a/continuedev/requirements.txt
+++ b/continuedev/requirements.txt
@@ -7,7 +7,7 @@ uvicorn==0.21.1
python-dotenv==1.0.0
nest-asyncio==1.5.6
websockets==11.0.2
-urllib3==1.25.11 # Locked to this version per: https://github.com/Significant-Gravitas/Auto-GPT/issues/1909
+urllib3==1.26.15
posthog==3.0.1
tiktoken==0.4.0
jsonref==1.1.0
@@ -19,4 +19,5 @@ psutil==5.9.5
pygithub==1.59.0
meilisearch-python-async==1.4.8
socksio==1.0.0
-ripgrepy==2.0.0 \ No newline at end of file
+ripgrepy==2.0.0
+replicate==0.11.0 \ No newline at end of file
diff --git a/continuedev/src/continuedev/core/sdk.py b/continuedev/src/continuedev/core/sdk.py
index 0fa9d3a6..024d5cea 100644
--- a/continuedev/src/continuedev/core/sdk.py
+++ b/continuedev/src/continuedev/core/sdk.py
@@ -55,7 +55,7 @@ class ContinueSDK(AbstractContinueSDK):
formatted_err = '\n'.join(traceback.format_exception(e))
msg_step = MessageStep(
name="Invalid Continue Config File", message=formatted_err)
- msg_step.description = f"Falling back to default config settings.\n```\n{formatted_err}\n```\n\nIt's possible this error was caused by an update to the Continue config format. If you'd like to see the new recommended default `config.py`, check [here](https://github.com/continuedev/continue/blob/main/continuedev/src/continuedev/libs/constants/default_config.py)."
+ msg_step.description = f"Falling back to default config settings due to the following error in `~/.continue/config.py`.\n```\n{formatted_err}\n```\n\nIt's possible this was caused by an update to the Continue config format. If you'd like to see the new recommended default `config.py`, check [here](https://github.com/continuedev/continue/blob/main/continuedev/src/continuedev/libs/constants/default_config.py)."
sdk.history.add_node(HistoryNode(
step=msg_step,
observation=None,
diff --git a/continuedev/src/continuedev/libs/constants/default_config.py b/continuedev/src/continuedev/libs/constants/default_config.py
index dbd2c8eb..e2b033b7 100644
--- a/continuedev/src/continuedev/libs/constants/default_config.py
+++ b/continuedev/src/continuedev/libs/constants/default_config.py
@@ -20,9 +20,11 @@ from continuedev.src.continuedev.plugins.steps.open_config import OpenConfigStep
from continuedev.src.continuedev.plugins.steps.clear_history import ClearHistoryStep
from continuedev.src.continuedev.plugins.steps.feedback import FeedbackStep
from continuedev.src.continuedev.plugins.steps.comment_code import CommentCodeStep
+from continuedev.src.continuedev.plugins.steps.share_session import ShareSessionStep
from continuedev.src.continuedev.plugins.steps.main import EditHighlightedCodeStep
from continuedev.src.continuedev.plugins.context_providers.search import SearchContextProvider
from continuedev.src.continuedev.plugins.context_providers.diff import DiffContextProvider
+from continuedev.src.continuedev.plugins.context_providers.url import URLContextProvider
class CommitMessageStep(Step):
\"\"\"
@@ -108,6 +110,11 @@ config = ContinueConfig(
name="clear",
description="Clear step history",
step=ClearHistoryStep,
+ ),
+ SlashCommand(
+ name="share",
+ description="Download and share the session transcript",
+ step=ShareSessionStep,
)
],
@@ -124,7 +131,8 @@ config = ContinueConfig(
# serper_api_key="<your serper.dev api key>"
# )
SearchContextProvider(),
- DiffContextProvider()
+ DiffContextProvider(),
+ URLContextProvider()
],
# Policies hold the main logic that decides which Step to take next
diff --git a/continuedev/src/continuedev/libs/llm/__init__.py b/continuedev/src/continuedev/libs/llm/__init__.py
index 40edb99b..70c67856 100644
--- a/continuedev/src/continuedev/libs/llm/__init__.py
+++ b/continuedev/src/continuedev/libs/llm/__init__.py
@@ -12,6 +12,9 @@ class LLM(ContinueBaseModel, ABC):
system_message: Optional[str] = None
+ class Config:
+ arbitrary_types_allowed = True
+
@abstractproperty
def name(self):
"""Return the name of the LLM."""
diff --git a/continuedev/src/continuedev/libs/llm/replicate.py b/continuedev/src/continuedev/libs/llm/replicate.py
new file mode 100644
index 00000000..235fd906
--- /dev/null
+++ b/continuedev/src/continuedev/libs/llm/replicate.py
@@ -0,0 +1,63 @@
+from abc import abstractproperty
+from typing import List, Optional
+import replicate
+import concurrent.futures
+
+from ..util.count_tokens import DEFAULT_ARGS, count_tokens
+from ...core.main import ChatMessage
+from . import LLM
+
+
+class ReplicateLLM(LLM):
+ api_key: str
+ model: str = "nateraw/stablecode-completion-alpha-3b-4k:e82ebe958f0a5be6846d1a82041925767edb1d1f162596c643e48fbea332b1bb"
+ max_context_length: int = 2048
+
+ _client: replicate.Client = None
+
+ @property
+ def name(self):
+ return self.model
+
+ @property
+ def context_length(self):
+ return self.max_context_length
+
+ @property
+ def default_args(self):
+ return {**DEFAULT_ARGS, "model": self.name, "max_tokens": 1024}
+
+ def count_tokens(self, text: str):
+ return count_tokens(self.name, text)
+
+ async def start(self):
+ self._client = replicate.Client(api_token=self.api_key)
+
+ async def stop(self):
+ pass
+
+ async def complete(self, prompt: str, with_history: List[ChatMessage] = None, **kwargs):
+ def helper():
+ output = self._client.run(self.model, input={"message": prompt})
+ completion = ''
+ for item in output:
+ completion += item
+
+ return completion
+
+ with concurrent.futures.ThreadPoolExecutor() as executor:
+ future = executor.submit(helper)
+ completion = future.result()
+
+ return completion
+
+ async def stream_complete(self, prompt, with_history: List[ChatMessage] = None, **kwargs):
+ for item in self._client.run(self.model, input={"message": prompt}):
+ yield item
+
+ async def stream_chat(self, messages: List[ChatMessage] = None, **kwargs):
+ for item in self._client.run(self.model, input={"message": messages[-1].content}):
+ yield {
+ "content": item,
+ "role": "assistant"
+ }
diff --git a/continuedev/src/continuedev/plugins/context_providers/url.py b/continuedev/src/continuedev/plugins/context_providers/url.py
index 9274d84a..32c1d69c 100644
--- a/continuedev/src/continuedev/plugins/context_providers/url.py
+++ b/continuedev/src/continuedev/plugins/context_providers/url.py
@@ -1,4 +1,4 @@
-from typing import List
+from typing import List, Optional
from .util import remove_meilisearch_disallowed_chars
from ...core.main import ContextItem, ContextItemDescription, ContextItemId
@@ -8,9 +8,25 @@ from ...core.context import ContextProvider
class URLContextProvider(ContextProvider):
title = "url"
+ url: Optional[str] = None
+ display_name: Optional[str] = None
URL_CONTEXT_ITEM_ID = "url"
@property
+ def optional_url_item_id(self) -> str:
+ return remove_meilisearch_disallowed_chars(self.url)
+
+ @property
+ def optional_url_item(self) -> ContextItem:
+ cp = self.BASE_CONTEXT_ITEM.copy()
+ if self.display_name:
+ cp.description.name = self.display_name
+ cp.description.description = f"Contents of {self.url}"
+ cp.description.id.item_id = self.optional_url_item_id
+
+ return cp
+
+ @property
def BASE_CONTEXT_ITEM(self):
return ContextItem(
content="",
@@ -33,14 +49,22 @@ class URLContextProvider(ContextProvider):
return soup.get_text()
async def provide_context_items(self, workspace_dir: str) -> List[ContextItem]:
- return [self.BASE_CONTEXT_ITEM]
+ items = [self.BASE_CONTEXT_ITEM]
+ if self.url:
+ items.append(self.optional_url_item)
+
+ return items
async def get_item(self, id: ContextItemId, query: str) -> ContextItem:
+ if id.item_id == self.optional_url_item_id:
+ item = self.optional_url_item
+ item.content = self._get_url_text_contents(self.url)
+ return item
+
if not id.item_id == self.URL_CONTEXT_ITEM_ID:
raise Exception("Invalid item id")
- query = query.lstrip("url ")
- url = query.strip()
+ url = query.lstrip("url ").strip()
content = self._get_url_text_contents(url)
ctx_item = self.BASE_CONTEXT_ITEM.copy()
@@ -48,5 +72,5 @@ class URLContextProvider(ContextProvider):
ctx_item.description.name = url.replace(
"https://", "").replace("http://", "")
ctx_item.description.id.item_id = remove_meilisearch_disallowed_chars(
- query)
+ url)
return ctx_item
diff --git a/continuedev/src/continuedev/plugins/policies/default.py b/continuedev/src/continuedev/plugins/policies/default.py
index 0d74fa3f..2382f33a 100644
--- a/continuedev/src/continuedev/plugins/policies/default.py
+++ b/continuedev/src/continuedev/plugins/policies/default.py
@@ -56,7 +56,8 @@ class DefaultPolicy(Policy):
MessageStep(name="Welcome to Continue", message=dedent("""\
- Highlight code section and ask a question or give instructions
- Use `cmd+m` (Mac) / `ctrl+m` (Windows) to open Continue
- - Use `/help` to ask questions about how to use Continue""")) >>
+ - Use `/help` to ask questions about how to use Continue
+ - [Customize Continue](https://continue.dev/docs/customization) (e.g. use your own API key) by typing '/config'.""")) >>
WelcomeStep() >>
StepsOnStartupStep())
diff --git a/continuedev/src/continuedev/plugins/steps/core/core.py b/continuedev/src/continuedev/plugins/steps/core/core.py
index 4476c7ae..78174bf6 100644
--- a/continuedev/src/continuedev/plugins/steps/core/core.py
+++ b/continuedev/src/continuedev/plugins/steps/core/core.py
@@ -10,6 +10,7 @@ import difflib
from pydantic import validator
from ....libs.llm.ggml import GGML
+# from ....libs.llm.replicate import ReplicateLLM
from ....models.main import Range
from ....libs.llm.maybe_proxy_openai import MaybeProxyOpenAI
from ....models.filesystem_edit import EditDiff, FileEdit, FileEditWithFullContents, FileSystemEdit
@@ -512,6 +513,9 @@ Please output the code to be inserted at the cursor in order to fulfill the user
if isinstance(model_to_use, GGML):
messages = [ChatMessage(
role="user", content=f"```\n{rif.contents}\n```\n\nUser request: \"{self.user_input}\"\n\nThis is the code after changing to perfectly comply with the user request. It does not include any placeholder code, only real implementations:\n\n```\n", summary=self.user_input)]
+ # elif isinstance(model_to_use, ReplicateLLM):
+ # messages = [ChatMessage(
+ # role="user", content=f"// Previous implementation\n\n{rif.contents}\n\n// Updated implementation (after following directions: {self.user_input})\n\n", summary=self.user_input)]
generator = model_to_use.stream_chat(
messages, temperature=sdk.config.temperature, max_tokens=max_tokens)
diff --git a/continuedev/src/continuedev/plugins/steps/share_session.py b/continuedev/src/continuedev/plugins/steps/share_session.py
new file mode 100644
index 00000000..de8659bd
--- /dev/null
+++ b/continuedev/src/continuedev/plugins/steps/share_session.py
@@ -0,0 +1,51 @@
+import json
+import os
+import time
+from typing import Optional
+
+
+from ...core.sdk import ContinueSDK
+from ...core.main import Step, FullState
+from ...libs.util.paths import getSessionFilePath, getGlobalFolderPath
+from ...server.session_manager import session_manager
+
+
+class ShareSessionStep(Step):
+
+ session_id: Optional[str] = None
+
+ async def run(self, sdk: ContinueSDK):
+ if self.session_id is None:
+ self.session_id = sdk.ide.session_id
+
+ await session_manager.persist_session(self.session_id)
+ time.sleep(0.5)
+
+ # Load the session data and format as a markdown file
+ session_filepath = getSessionFilePath(self.session_id)
+ with open(session_filepath, 'r') as f:
+ session_state = FullState(**json.load(f))
+
+ import datetime
+ date_created = datetime.datetime.fromtimestamp(
+ float(session_state.session_info.date_created)).strftime('%Y-%m-%d %H:%M:%S')
+ content = f"This is a session transcript from [Continue](https://continue.dev) on {date_created}.\n\n"
+
+ for node in session_state.history.timeline[:-2]:
+ if node.step.hide:
+ continue # skip steps marked hidden
+
+ content += f"## {node.step.name}\n"
+ content += f"{node.step.description}\n\n"
+
+ # Save to a markdown file
+ save_filepath = os.path.join(
+ getGlobalFolderPath(), f"{session_state.session_info.title}.md")
+
+ with open(save_filepath, 'w') as f:
+ f.write(content)
+
+ # Open the file
+ await sdk.ide.setFileOpen(save_filepath)
+
+ self.description = f"The session transcript has been saved to a markdown file at {save_filepath}."