From e9afb41bed9a723876cf1cf95d636b2ea498a6b3 Mon Sep 17 00:00:00 2001 From: Nate Sesti Date: Mon, 11 Sep 2023 18:33:17 -0700 Subject: docs: :memo: working on autogenerated docs --- .vscode/launch.json | 7 + continuedev/src/continuedev/core/context.py | 1 + continuedev/src/continuedev/libs/llm/openai.py | 11 +- .../src/continuedev/models/reference/test.py | 64 +++++++ .../src/continuedev/plugins/policies/default.py | 4 +- docs/docs/context-providers.md | 186 --------------------- docs/docs/customization/context-providers.md | 186 +++++++++++++++++++++ docs/docs/customization/intro.md | 10 ++ docs/docs/customization/models.md | 0 docs/docs/customization/other-configuration.md | 1 + docs/docs/customization/slash-commands.md | 0 docs/docs/reference/Context Providers/intro.md | 1 + docs/docs/reference/Models/anthropic.md | 11 ++ docs/docs/reference/Models/ggml.md | 11 ++ docs/docs/reference/Models/llamacpp.md | 11 ++ docs/docs/reference/Models/ollama.md | 11 ++ docs/docs/reference/Models/openai.md | 13 ++ docs/docs/reference/Models/queued.md | 11 ++ docs/docs/reference/Models/replicate.md | 11 ++ docs/docs/reference/Models/text_gen_interface.md | 11 ++ docs/docs/reference/Models/together.md | 11 ++ docs/sidebars.js | 33 +++- docs/src/components/ClassPropertyRef.tsx | 26 +++ test.py | 21 --- 24 files changed, 440 insertions(+), 212 deletions(-) create mode 100644 continuedev/src/continuedev/models/reference/test.py delete mode 100644 docs/docs/context-providers.md create mode 100644 docs/docs/customization/context-providers.md create mode 100644 docs/docs/customization/intro.md create mode 100644 docs/docs/customization/models.md create mode 100644 docs/docs/customization/other-configuration.md create mode 100644 docs/docs/customization/slash-commands.md create mode 100644 docs/docs/reference/Context Providers/intro.md create mode 100644 docs/docs/reference/Models/anthropic.md create mode 100644 docs/docs/reference/Models/ggml.md create mode 100644 docs/docs/reference/Models/llamacpp.md create mode 100644 docs/docs/reference/Models/ollama.md create mode 100644 docs/docs/reference/Models/openai.md create mode 100644 docs/docs/reference/Models/queued.md create mode 100644 docs/docs/reference/Models/replicate.md create mode 100644 docs/docs/reference/Models/text_gen_interface.md create mode 100644 docs/docs/reference/Models/together.md create mode 100644 docs/src/components/ClassPropertyRef.tsx delete mode 100644 test.py diff --git a/.vscode/launch.json b/.vscode/launch.json index e264c367..674c23a4 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -25,6 +25,13 @@ "justMyCode": true, "subProcess": true }, + { + "name": "Reference", + "type": "python", + "request": "launch", + "module": "continuedev.src.continuedev.models.reference.test", + "justMyCode": true + }, { "name": "Server", "type": "python", diff --git a/continuedev/src/continuedev/core/context.py b/continuedev/src/continuedev/core/context.py index 25f6be14..c9768a97 100644 --- a/continuedev/src/continuedev/core/context.py +++ b/continuedev/src/continuedev/core/context.py @@ -192,6 +192,7 @@ class ContextManager: requires_query=provider.requires_query, ) for provider in self.context_providers.values() + if provider.title != "code" ] async def get_selected_items(self) -> List[ContextItem]: diff --git a/continuedev/src/continuedev/libs/llm/openai.py b/continuedev/src/continuedev/libs/llm/openai.py index 857dc52d..70594973 100644 --- a/continuedev/src/continuedev/libs/llm/openai.py +++ 
b/continuedev/src/continuedev/libs/llm/openai.py @@ -2,6 +2,7 @@ from typing import Callable, List, Literal, Optional import certifi import openai +from pydantic import Field from ...core.main import ChatMessage from ..llm import LLM @@ -26,7 +27,15 @@ MAX_TOKENS_FOR_MODEL = { class OpenAI(LLM): - api_key: str + """ + The OpenAI class can be used to access OpenAI models like gpt-4 and gpt-3.5-turbo. + + If you are running a local model with an OpenAI-compatible API, you can also use the OpenAI class by changing the `api_base` argument. + """ + + api_key: str = Field( + description="OpenAI API key", + ) "OpenAI API key" verify_ssl: Optional[bool] = None diff --git a/continuedev/src/continuedev/models/reference/test.py b/continuedev/src/continuedev/models/reference/test.py new file mode 100644 index 00000000..2d1db3e1 --- /dev/null +++ b/continuedev/src/continuedev/models/reference/test.py @@ -0,0 +1,64 @@ +import importlib +import json +from textwrap import dedent # noqa: F401 + +LLM_MODULES = [ + ("openai", "OpenAI"), + ("anthropic", "AnthropicLLM"), + ("ggml", "GGML"), + ("llamacpp", "LlamaCpp"), + ("text_gen_interface", "TextGenUI"), + ("ollama", "Ollama"), + ("queued", "QueuedLLM"), + ("replicate", "ReplicateLLM"), + ("together", "TogetherLLM"), +] + + +def import_llm_module(module_name, module_title): + module_name = f"continuedev.src.continuedev.libs.llm.{module_name}" + module = importlib.import_module(module_name) + obj = getattr(module, module_title) + return obj + + +def llm_docs_from_schema(schema, filename): + # Generate markdown docs + markdown_docs = dedent( + f"""\ +import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx'; + +# {schema['title']} + +{dedent(schema.get("description", ""))} + +[View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/{filename}.py) + +## Properties + +""" + ) + + for prop, details in schema["properties"].items(): + required = prop in schema.get("required", []) + if not required: + continue + required = "true" if required else "false" + markdown_docs += f"<ClassPropertyRef name='{prop}' details='{json.dumps(details)}' required={{{required}}}/>\n" + + for prop, details in schema["properties"].items(): + required = prop in schema.get("required", []) + if required: + continue + required = "true" if required else "false" + markdown_docs += f"<ClassPropertyRef name='{prop}' details='{json.dumps(details)}' required={{{required}}}/>\n" + + return markdown_docs + + +for module_name, module_title in LLM_MODULES: + obj = import_llm_module(module_name, module_title) + schema = obj.schema() + markdown_docs = llm_docs_from_schema(schema, module_name) + with open(f"docs/docs/reference/Models/{module_name}.md", "w") as f: + f.write(markdown_docs) diff --git a/continuedev/src/continuedev/plugins/policies/default.py b/continuedev/src/continuedev/plugins/policies/default.py index 550defa9..26b6bd48 100644 --- a/continuedev/src/continuedev/plugins/policies/default.py +++ b/continuedev/src/continuedev/plugins/policies/default.py @@ -18,7 +18,7 @@ def parse_slash_command(inp: str, config: ContinueConfig) -> Union[None, Step]: Parses a slash command, returning the command name and the rest of the input.
""" if inp.startswith("/"): - command_name = inp.split(" ")[0] + command_name = inp.split(" ")[0].strip() after_command = " ".join(inp.split(" ")[1:]) for slash_command in config.slash_commands: @@ -35,7 +35,7 @@ def parse_slash_command(inp: str, config: ContinueConfig) -> Union[None, Step]: def parse_custom_command(inp: str, config: ContinueConfig) -> Union[None, Step]: - command_name = inp.split(" ")[0] + command_name = inp.split(" ")[0].strip() after_command = " ".join(inp.split(" ")[1:]) for custom_cmd in config.custom_commands: if custom_cmd.name == command_name[1:]: diff --git a/docs/docs/context-providers.md b/docs/docs/context-providers.md deleted file mode 100644 index 3147f90e..00000000 --- a/docs/docs/context-providers.md +++ /dev/null @@ -1,186 +0,0 @@ -# Context Providers - -Context Providers allow you to type '@' and see a dropdown of content that can all be fed to the LLM as context. Every context provider is a plugin, which means if you want to reference some source of information that you don't see here, you can request (or build!) a new context provider. - -As an example, say you are working on solving a new GitHub Issue. You type '@issue' and select the one you are working on. Continue can now see the issue title and contents. You also know that the issue is related to the files 'readme.md' and 'helloNested.py', so you type '@readme' and '@hello' to find and select them. Now these 3 "Context Items" are displayed above the input. - -![Context Items](/img/context-provider-example.png) - -When you enter your next input, Continue will see the full contents of each of these items, and can use them to better answer your questions throughout the conversation. - -## Built-in Context Providers - -To use any of the built-in context providers, open `~/.continue/config.py` (can do this with the '/config' slash command). For each context provider, you will - -1. Import the context provider at the top of the file -2. Add it to the `context_providers` list in the `ContinueConfig` object - -Setup instructions are below for each (showing the import and config object). You can also see examples by opening `~/.continue/config.py`. - -### GitHub - -Type '@issue' to reference the title and contents of a GitHub issue. - -```python -from continuedev.src.continuedev.plugins.context_providers.github import GitHubIssuesContextProvider -``` - -```python -GitHubIssuesContextProvider( - repo_name="continuedev/continue", # change to whichever repo you want to use - auth_token="", -) -``` - -### Codebase Search - -Type '@search' to reference the results of codebase search, just like the results you would get from VS Code search. - -```python -from continuedev.src.continuedev.plugins.context_providers.search import SearchContextProvider -``` - -```python -SearchContextProvider() # No arguments necessary -``` - -### URLs - -Type '@url' to reference the contents of a URL. You can either reference preset URLs, or reference one dynamically by typing '@url https://example.com'. The text contents of the page will be fetched and used as context. - -```python -from continuedev.src.continuedev.plugins.context_providers.url import URLContextProvider -``` - -```python -URLContextProvider(preset_urls=["https://continue.dev/docs/customization"]) -``` - -### Git Diff - -Type '@diff' to reference all of the changes you've made to your current branch. This is useful if you want to summarize what you've done or ask for a general review of your work before committing. 
- -```python -from continuedev.src.continuedev.plugins.context_providers.diff import DiffContextProvider -``` - -```python -DiffContextProvider() -``` - -### File Tree - -Type '@tree' to reference the contents of your current workspace. The LLM will be able to see the nested directory structure of your project. - -```python -from continuedev.src.continuedev.plugins.context_providers.filetree import FileTreeContextProvider -``` - -```python -FileTreeContextProvider() -``` - -### Google - -Type '@google' to reference the results of a Google search. For example, type "@google python tutorial" if you want to search and discuss ways of learning Python. - -```python -from continuedev.src.continuedev.plugins.context_providers.google import GoogleContextProvider -``` - -```python -GoogleContextProvider( - serper_api_key="" -) -``` - -Note: You can get an API key for free at [serper.dev](https://serper.dev). - -### Terminal - -Type '@terminal' to reference the contents of your IDE's terminal. - -```python -from continuedev.src.continuedev.plugins.context_providers.terminal import TerminalContextProvider -``` - -```python -TerminalContextProvider() -``` - -### Requesting Context Providers - -Not seeing what you want? Create an issue [here](https://github.com/continuedev/continue/issues/new?assignees=TyDunn&labels=enhancement&projects=&template=feature-request-%F0%9F%92%AA.md&title=) to request a new ContextProvider. - -## Building Your Own Context Provider - -### Introductory Example - -As an example, here is the `GitHubIssuesContextProvider`, which lets you search all open GitHub Issues in a repo: - -```python -class GitHubIssuesContextProvider(ContextProvider): - """ - The GitHubIssuesContextProvider is a ContextProvider that allows you to search GitHub issues in a repo. - """ - - title = "issues" - repo_name: str - auth_token: str - - async def provide_context_items(self) -> List[ContextItem]: - auth = Auth.Token(self.auth_token) - gh = Github(auth=auth) - - repo = gh.get_repo(self.repo_name) - issues = repo.get_issues().get_page(0) - - return [ContextItem( - content=issue.body, - description=ContextItemDescription( - name=f"Issue #{issue.number}", - description=issue.title, - id=ContextItemId( - provider_title=self.title, - item_id=issue.id - ) - ) - ) for issue in issues] -``` - -It can then be set in the `ContinueConfig` like so: - -```python -config = ContinueConfig( - ... - context_providers=[ - GitHubIssuesContextProvider( - repo_name="my-github-username-or-org/my-github-repo", - auth_token="my-github-auth-token" - ) - ] -) -``` - -This example is a situation where you request all of the data (issues in this case) beforehand, and store them in the ContextProvider. - -### Dynamic Context Providers - -There are other scenarios where you might want to just get information on demand, for example by typing '@url https://continue.dev/docs/context-providers' and having the ContextProvider fetch the contents of that URL dynamically. For this case, you can implement the `DynamicContextProvider` class like this: - -```python -from continuedev.src.continuedev.plugins.context_providers.dynamic import DynamicContextProvider - -class ExampleDynamicProvider(DynamicProvider): - title = "example" - name = "Example" - description = "Example description" - - async def get_content(self, query: str) -> str: - return f"Example content for '{query}'" - - async def setup(self): - print("Example setup") -``` - -The `setup` method optionally allows you to do any setup when Continue is first loaded. 
The `get_content` method takes the query (which would be 'https://continue.dev/docs/context-providers' in the example above) and returns the content that will be used as context. diff --git a/docs/docs/customization/context-providers.md b/docs/docs/customization/context-providers.md new file mode 100644 index 00000000..3147f90e --- /dev/null +++ b/docs/docs/customization/context-providers.md @@ -0,0 +1,186 @@ +# Context Providers + +Context Providers allow you to type '@' and see a dropdown of content that can all be fed to the LLM as context. Every context provider is a plugin, which means if you want to reference some source of information that you don't see here, you can request (or build!) a new context provider. + +As an example, say you are working on solving a new GitHub Issue. You type '@issue' and select the one you are working on. Continue can now see the issue title and contents. You also know that the issue is related to the files 'readme.md' and 'helloNested.py', so you type '@readme' and '@hello' to find and select them. Now these 3 "Context Items" are displayed above the input. + +![Context Items](/img/context-provider-example.png) + +When you enter your next input, Continue will see the full contents of each of these items, and can use them to better answer your questions throughout the conversation. + +## Built-in Context Providers + +To use any of the built-in context providers, open `~/.continue/config.py` (can do this with the '/config' slash command). For each context provider, you will + +1. Import the context provider at the top of the file +2. Add it to the `context_providers` list in the `ContinueConfig` object + +Setup instructions are below for each (showing the import and config object). You can also see examples by opening `~/.continue/config.py`. + +### GitHub + +Type '@issue' to reference the title and contents of a GitHub issue. + +```python +from continuedev.src.continuedev.plugins.context_providers.github import GitHubIssuesContextProvider +``` + +```python +GitHubIssuesContextProvider( + repo_name="continuedev/continue", # change to whichever repo you want to use + auth_token="", +) +``` + +### Codebase Search + +Type '@search' to reference the results of codebase search, just like the results you would get from VS Code search. + +```python +from continuedev.src.continuedev.plugins.context_providers.search import SearchContextProvider +``` + +```python +SearchContextProvider() # No arguments necessary +``` + +### URLs + +Type '@url' to reference the contents of a URL. You can either reference preset URLs, or reference one dynamically by typing '@url https://example.com'. The text contents of the page will be fetched and used as context. + +```python +from continuedev.src.continuedev.plugins.context_providers.url import URLContextProvider +``` + +```python +URLContextProvider(preset_urls=["https://continue.dev/docs/customization"]) +``` + +### Git Diff + +Type '@diff' to reference all of the changes you've made to your current branch. This is useful if you want to summarize what you've done or ask for a general review of your work before committing. + +```python +from continuedev.src.continuedev.plugins.context_providers.diff import DiffContextProvider +``` + +```python +DiffContextProvider() +``` + +### File Tree + +Type '@tree' to reference the contents of your current workspace. The LLM will be able to see the nested directory structure of your project. 
+ +```python +from continuedev.src.continuedev.plugins.context_providers.filetree import FileTreeContextProvider +``` + +```python +FileTreeContextProvider() +``` + +### Google + +Type '@google' to reference the results of a Google search. For example, type "@google python tutorial" if you want to search and discuss ways of learning Python. + +```python +from continuedev.src.continuedev.plugins.context_providers.google import GoogleContextProvider +``` + +```python +GoogleContextProvider( + serper_api_key="" +) +``` + +Note: You can get an API key for free at [serper.dev](https://serper.dev). + +### Terminal + +Type '@terminal' to reference the contents of your IDE's terminal. + +```python +from continuedev.src.continuedev.plugins.context_providers.terminal import TerminalContextProvider +``` + +```python +TerminalContextProvider() +``` + +### Requesting Context Providers + +Not seeing what you want? Create an issue [here](https://github.com/continuedev/continue/issues/new?assignees=TyDunn&labels=enhancement&projects=&template=feature-request-%F0%9F%92%AA.md&title=) to request a new ContextProvider. + +## Building Your Own Context Provider + +### Introductory Example + +As an example, here is the `GitHubIssuesContextProvider`, which lets you search all open GitHub Issues in a repo: + +```python +class GitHubIssuesContextProvider(ContextProvider): + """ + The GitHubIssuesContextProvider is a ContextProvider that allows you to search GitHub issues in a repo. + """ + + title = "issues" + repo_name: str + auth_token: str + + async def provide_context_items(self) -> List[ContextItem]: + auth = Auth.Token(self.auth_token) + gh = Github(auth=auth) + + repo = gh.get_repo(self.repo_name) + issues = repo.get_issues().get_page(0) + + return [ContextItem( + content=issue.body, + description=ContextItemDescription( + name=f"Issue #{issue.number}", + description=issue.title, + id=ContextItemId( + provider_title=self.title, + item_id=issue.id + ) + ) + ) for issue in issues] +``` + +It can then be set in the `ContinueConfig` like so: + +```python +config = ContinueConfig( + ... + context_providers=[ + GitHubIssuesContextProvider( + repo_name="my-github-username-or-org/my-github-repo", + auth_token="my-github-auth-token" + ) + ] +) +``` + +This example is a situation where you request all of the data (issues in this case) beforehand, and store them in the ContextProvider. + +### Dynamic Context Providers + +There are other scenarios where you might want to just get information on demand, for example by typing '@url https://continue.dev/docs/context-providers' and having the ContextProvider fetch the contents of that URL dynamically. For this case, you can implement the `DynamicContextProvider` class like this: + +```python +from continuedev.src.continuedev.plugins.context_providers.dynamic import DynamicContextProvider + +class ExampleDynamicProvider(DynamicProvider): + title = "example" + name = "Example" + description = "Example description" + + async def get_content(self, query: str) -> str: + return f"Example content for '{query}'" + + async def setup(self): + print("Example setup") +``` + +The `setup` method optionally allows you to do any setup when Continue is first loaded. The `get_content` method takes the query (which would be 'https://continue.dev/docs/context-providers' in the example above) and returns the content that will be used as context. 
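A dynamic provider is registered like any other context provider. Here is a minimal sketch of the relevant piece of `~/.continue/config.py`, assuming the `ExampleDynamicProvider` class defined above and the same `context_providers` list used in the `GitHubIssuesContextProvider` example; the `ContinueConfig` import path mirrors the default config file, and everything else is illustrative:

```python
from continuedev.src.continuedev.core.config import ContinueConfig

config = ContinueConfig(
    # ... other settings unchanged ...
    context_providers=[
        # Assumes ExampleDynamicProvider from the section above is defined or imported here.
        # After this, typing '@example some query' calls get_content("some query")
        # and the returned string is attached to the prompt as context.
        ExampleDynamicProvider(),
    ],
)
```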
diff --git a/docs/docs/customization/intro.md b/docs/docs/customization/intro.md new file mode 100644 index 00000000..a82b5dbf --- /dev/null +++ b/docs/docs/customization/intro.md @@ -0,0 +1,10 @@ +# Customizing Continue + +Continue can be deeply customized by editing the `ContinueConfig` object in `~/.continue/config.py` (`%userprofile%\.continue\config.py` for Windows) on your machine. This file is created the first time you run Continue. + +Currently, you can customize the following: + +- [Models](./models.md) - Use Continue with any LLM, including local models, Azure OpenAI service, and any OpenAI-compatible API. +- [Context Providers](./context-providers.md) - Define which sources you want to collect context from to share with the LLM. Just type '@' to easily add attachments to your prompt. +- [Slash Commands](./slash-commands.md) - Call custom prompts or programs written with our SDK by typing `/` in the prompt. +- [Other Configuration](./other-configuration.md) - Configure other settings like the system message, temperature, and more. diff --git a/docs/docs/customization/models.md b/docs/docs/customization/models.md new file mode 100644 index 00000000..e69de29b diff --git a/docs/docs/customization/other-configuration.md b/docs/docs/customization/other-configuration.md new file mode 100644 index 00000000..088b2aac --- /dev/null +++ b/docs/docs/customization/other-configuration.md @@ -0,0 +1 @@ +# Other Configuration diff --git a/docs/docs/customization/slash-commands.md b/docs/docs/customization/slash-commands.md new file mode 100644 index 00000000..e69de29b diff --git a/docs/docs/reference/Context Providers/intro.md b/docs/docs/reference/Context Providers/intro.md new file mode 100644 index 00000000..1e0981f1 --- /dev/null +++ b/docs/docs/reference/Context Providers/intro.md @@ -0,0 +1 @@ +# Intro diff --git a/docs/docs/reference/Models/anthropic.md b/docs/docs/reference/Models/anthropic.md new file mode 100644 index 00000000..1aa31324 --- /dev/null +++ b/docs/docs/reference/Models/anthropic.md @@ -0,0 +1,11 @@ +import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx'; + +# AnthropicLLM + + + +[View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/anthropic.py) + +## Properties + + \ No newline at end of file diff --git a/docs/docs/reference/Models/ggml.md b/docs/docs/reference/Models/ggml.md new file mode 100644 index 00000000..dafc8870 --- /dev/null +++ b/docs/docs/reference/Models/ggml.md @@ -0,0 +1,11 @@ +import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx'; + +# GGML + + + +[View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/ggml.py) + +## Properties + + \ No newline at end of file diff --git a/docs/docs/reference/Models/llamacpp.md b/docs/docs/reference/Models/llamacpp.md new file mode 100644 index 00000000..7ce75574 --- /dev/null +++ b/docs/docs/reference/Models/llamacpp.md @@ -0,0 +1,11 @@ +import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx'; + +# LlamaCpp + + + +[View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/llamacpp.py) + +## Properties + + \ No newline at end of file diff --git a/docs/docs/reference/Models/ollama.md b/docs/docs/reference/Models/ollama.md new file mode 100644 index 00000000..ef058119 --- /dev/null +++ b/docs/docs/reference/Models/ollama.md @@ -0,0 +1,11 @@ +import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx'; + +# Ollama 
+ + + +[View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/ollama.py) + +## Properties + + \ No newline at end of file diff --git a/docs/docs/reference/Models/openai.md b/docs/docs/reference/Models/openai.md new file mode 100644 index 00000000..d325ca2f --- /dev/null +++ b/docs/docs/reference/Models/openai.md @@ -0,0 +1,13 @@ +import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx'; + +# OpenAI + +The OpenAI class can be used to access OpenAI models like gpt-4 and gpt-3.5-turbo. + +If you are running a local model with an OpenAI-compatible API, you can also use the OpenAI class by changing the `api_base` argument. + +[View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/openai.py) + +## Properties + + \ No newline at end of file diff --git a/docs/docs/reference/Models/queued.md b/docs/docs/reference/Models/queued.md new file mode 100644 index 00000000..6888a4e5 --- /dev/null +++ b/docs/docs/reference/Models/queued.md @@ -0,0 +1,11 @@ +import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx'; + +# QueuedLLM + + + +[View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/queued.py) + +## Properties + + \ No newline at end of file diff --git a/docs/docs/reference/Models/replicate.md b/docs/docs/reference/Models/replicate.md new file mode 100644 index 00000000..4f05cdfa --- /dev/null +++ b/docs/docs/reference/Models/replicate.md @@ -0,0 +1,11 @@ +import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx'; + +# ReplicateLLM + + + +[View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/replicate.py) + +## Properties + + \ No newline at end of file diff --git a/docs/docs/reference/Models/text_gen_interface.md b/docs/docs/reference/Models/text_gen_interface.md new file mode 100644 index 00000000..a59a4166 --- /dev/null +++ b/docs/docs/reference/Models/text_gen_interface.md @@ -0,0 +1,11 @@ +import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx'; + +# TextGenUI + + + +[View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/text_gen_interface.py) + +## Properties + + \ No newline at end of file diff --git a/docs/docs/reference/Models/together.md b/docs/docs/reference/Models/together.md new file mode 100644 index 00000000..e436644c --- /dev/null +++ b/docs/docs/reference/Models/together.md @@ -0,0 +1,11 @@ +import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx'; + +# TogetherLLM + + + +[View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/together.py) + +## Properties + + \ No newline at end of file diff --git a/docs/sidebars.js b/docs/sidebars.js index 6d205bab..2121fea6 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -18,11 +18,40 @@ const sidebars = { "getting-started", "how-to-use-continue", "how-continue-works", - "customization", - "context-providers", + { + type: "category", + label: "Customization", + collapsible: true, + collapsed: false, + items: [ + "customization/models", + "customization/context-providers", + "customization/slash-commands", + "customization/other-configuration", + ], + }, "collecting-data", "telemetry", "troubleshooting", + { + type: "category", + label: "Walkthroughs", + collapsible: true, + collapsed: false, + items: ["walkthroughs/codellama"], + }, + { + type: "category", + label: 
"Reference", + collapsible: true, + collapsed: false, + items: [ + { + type: "autogenerated", + dirName: "reference", + }, + ], + }, ], }; diff --git a/docs/src/components/ClassPropertyRef.tsx b/docs/src/components/ClassPropertyRef.tsx new file mode 100644 index 00000000..46664c4c --- /dev/null +++ b/docs/src/components/ClassPropertyRef.tsx @@ -0,0 +1,26 @@ +import React from "react"; + +interface ClassPropertyRefProps { + name: string; + details: string; + required: boolean; +} + +export default function ClassPropertyRef(props: ClassPropertyRefProps) { + const details = JSON.parse(props.details); + + return ( + <> +
<div> + <h4> + {props.name} + </h4> + <span> + {props.required && "REQUIRED"} + </span> + {details.type && `(${details.type})`} + </div> + <p>{details.description}</p> + </> + ); +} diff --git a/test.py b/test.py deleted file mode 100644 index 5bd57e0e..00000000 --- a/test.py +++ /dev/null @@ -1,21 +0,0 @@ -import unittest - - -def sort_numbers(numbers): - for i in range(len(numbers)): - for j in range(i + 1, len(numbers)): - if numbers[i] > numbers[j]: - numbers[i], numbers[j] = numbers[j], numbers[i] - return numbers[:-1] # Error here: We're not returning the last number - - -class TestSortNumbers(unittest.TestCase): - def test_sort_numbers(self): - self.assertEqual(sort_numbers([3, 2, 1]), [1, 2, 3]) # This test will fail - self.assertEqual( - sort_numbers([4, 2, 5, 1, 3]), [1, 2, 3, 4, 5] - ) # This test will fail - - -if __name__ == "__main__": - unittest.main() -- cgit v1.2.3-70-g09d2
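The docstring added to `OpenAI` in this patch notes that a local model exposing an OpenAI-compatible API can be used by changing the `api_base` argument. A minimal sketch of what that looks like in `~/.continue/config.py`: only `api_key` and `api_base` are grounded in the diff above (the field and the docstring); the `model` field, the `Models(default=...)` wrapper, and the import paths are assumptions about the default config of this period.

```python
from continuedev.src.continuedev.core.config import ContinueConfig
from continuedev.src.continuedev.core.models import Models
from continuedev.src.continuedev.libs.llm.openai import OpenAI

config = ContinueConfig(
    models=Models(
        default=OpenAI(
            api_key="EMPTY",  # most local OpenAI-compatible servers ignore the key
            model="gpt-3.5-turbo",  # whatever model name the local server expects
            api_base="http://localhost:8000/v1",  # the OpenAI-compatible endpoint
        )
    ),
)
```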