summaryrefslogtreecommitdiff
path: root/continuedev
diff options
context:
space:
mode:
authorNate Sesti <sestinj@gmail.com>2023-09-11 18:33:17 -0700
committerNate Sesti <sestinj@gmail.com>2023-09-11 18:33:17 -0700
commite9afb41bed9a723876cf1cf95d636b2ea498a6b3 (patch)
tree35387d7f3df1d28d742b3289c7ff930caa457d8e /continuedev
parent0c9482681f28720dcf75b2ab9d1bbf4d148912d7 (diff)
downloadsncontinue-e9afb41bed9a723876cf1cf95d636b2ea498a6b3.tar.gz
sncontinue-e9afb41bed9a723876cf1cf95d636b2ea498a6b3.tar.bz2
sncontinue-e9afb41bed9a723876cf1cf95d636b2ea498a6b3.zip
docs: :memo: working on autogenerated docs
Diffstat (limited to 'continuedev')
-rw-r--r--continuedev/src/continuedev/core/context.py1
-rw-r--r--continuedev/src/continuedev/libs/llm/openai.py11
-rw-r--r--continuedev/src/continuedev/models/reference/test.py64
-rw-r--r--continuedev/src/continuedev/plugins/policies/default.py4
4 files changed, 77 insertions, 3 deletions
diff --git a/continuedev/src/continuedev/core/context.py b/continuedev/src/continuedev/core/context.py
index 25f6be14..c9768a97 100644
--- a/continuedev/src/continuedev/core/context.py
+++ b/continuedev/src/continuedev/core/context.py
@@ -192,6 +192,7 @@ class ContextManager:
requires_query=provider.requires_query,
)
for provider in self.context_providers.values()
+ if provider.title != "code"
]
async def get_selected_items(self) -> List[ContextItem]:
diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py
index 857dc52d..70594973 100644
--- a/continuedev/src/continuedev/libs/llm/openai.py
+++ b/continuedev/src/continuedev/libs/llm/openai.py
@@ -2,6 +2,7 @@ from typing import Callable, List, Literal, Optional
import certifi
import openai
+from pydantic import Field
from ...core.main import ChatMessage
from ..llm import LLM
@@ -26,7 +27,15 @@ MAX_TOKENS_FOR_MODEL = {
class OpenAI(LLM):
- api_key: str
+ """
+ The OpenAI class can be used to access OpenAI models like gpt-4 and gpt-3.5-turbo.
+
+ If you are running a local model with an OpenAI-compatible API, you can also use the OpenAI class by changing the `api_base` argument.
+ """
+
+ api_key: str = Field(
+ description="OpenAI API key",
+ )
"OpenAI API key"
verify_ssl: Optional[bool] = None
diff --git a/continuedev/src/continuedev/models/reference/test.py b/continuedev/src/continuedev/models/reference/test.py
new file mode 100644
index 00000000..2d1db3e1
--- /dev/null
+++ b/continuedev/src/continuedev/models/reference/test.py
@@ -0,0 +1,64 @@
+import importlib
+import json
+from textwrap import dedent # noqa: F401
+
+LLM_MODULES = [
+ ("openai", "OpenAI"),
+ ("anthropic", "AnthropicLLM"),
+ ("ggml", "GGML"),
+ ("llamacpp", "LlamaCpp"),
+ ("text_gen_interface", "TextGenUI"),
+ ("ollama", "Ollama"),
+ ("queued", "QueuedLLM"),
+ ("replicate", "ReplicateLLM"),
+ ("together", "TogetherLLM"),
+]
+
+
+def import_llm_module(module_name, module_title):
+ module_name = f"continuedev.src.continuedev.libs.llm.{module_name}"
+ module = importlib.import_module(module_name)
+ obj = getattr(module, module_title)
+ return obj
+
+
+def llm_docs_from_schema(schema, filename):
+ # Generate markdown docs
+ markdown_docs = dedent(
+ f"""\
+import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx';
+
+# {schema['title']}
+
+{dedent(schema.get("description", ""))}
+
+[View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/{filename}.py)
+
+## Properties
+
+"""
+ )
+
+ for prop, details in schema["properties"].items():
+ required = prop in schema.get("required", [])
+ if not required:
+ continue
+ required = "true" if required else "false"
+ markdown_docs += f"<ClassPropertyRef name='{prop}' details='{json.dumps(details)}' required={{{required}}}/>"
+
+ for prop, details in schema["properties"].items():
+ required = prop in schema.get("required", [])
+ if required:
+ continue
+ required = "true" if required else "false"
+ markdown_docs += f"<ClassPropertyRef name='{prop}' details='{json.dumps(details)}' required={{{required}}}/>"
+
+ return markdown_docs
+
+
+for module_name, module_title in LLM_MODULES:
+ obj = import_llm_module(module_name, module_title)
+ schema = obj.schema()
+ markdown_docs = llm_docs_from_schema(schema, module_name)
+ with open(f"docs/docs/reference/Models/{module_name}.md", "w") as f:
+ f.write(markdown_docs)
diff --git a/continuedev/src/continuedev/plugins/policies/default.py b/continuedev/src/continuedev/plugins/policies/default.py
index 550defa9..26b6bd48 100644
--- a/continuedev/src/continuedev/plugins/policies/default.py
+++ b/continuedev/src/continuedev/plugins/policies/default.py
@@ -18,7 +18,7 @@ def parse_slash_command(inp: str, config: ContinueConfig) -> Union[None, Step]:
Parses a slash command, returning the command name and the rest of the input.
"""
if inp.startswith("/"):
- command_name = inp.split(" ")[0]
+ command_name = inp.split(" ")[0].strip()
after_command = " ".join(inp.split(" ")[1:])
for slash_command in config.slash_commands:
@@ -35,7 +35,7 @@ def parse_slash_command(inp: str, config: ContinueConfig) -> Union[None, Step]:
def parse_custom_command(inp: str, config: ContinueConfig) -> Union[None, Step]:
- command_name = inp.split(" ")[0]
+ command_name = inp.split(" ")[0].strip()
after_command = " ".join(inp.split(" ")[1:])
for custom_cmd in config.custom_commands:
if custom_cmd.name == command_name[1:]: