Diffstat (limited to 'docs')
 docs/docs/reference/Models/openai.md | 3 +--
 docs/docs/reference/config.md        | 2 +-
 docs/docusaurus.config.js            | 5 +++++
 3 files changed, 7 insertions(+), 3 deletions(-)
diff --git a/docs/docs/reference/Models/openai.md b/docs/docs/reference/Models/openai.md
index 4eb4906f..0ade1a8f 100644
--- a/docs/docs/reference/Models/openai.md
+++ b/docs/docs/reference/Models/openai.md
@@ -23,7 +23,6 @@ config = ContinueConfig(
Options for serving models locally with an OpenAI-compatible server include:
-- [LM Studio](https://lmstudio.ai/)
- [text-gen-webui](https://github.com/oobabooga/text-generation-webui/tree/main/extensions/openai#setup--installation)
- [FastChat](https://github.com/lm-sys/FastChat/blob/main/docs/openai_api.md)
- [LocalAI](https://localai.io/basics/getting_started/)
@@ -37,4 +36,4 @@ Options for serving models locally with an OpenAI-compatible server include:
### Inherited Properties
-<ClassPropertyRef name='model' details='{&quot;title&quot;: &quot;Model&quot;, &quot;description&quot;: &quot;The name of the model to be used (e.g. gpt-4, codellama)&quot;, &quot;type&quot;: &quot;string&quot;}' required={true} default=""/><ClassPropertyRef name='api_key' details='{&quot;title&quot;: &quot;Api Key&quot;, &quot;description&quot;: &quot;OpenAI API key&quot;, &quot;type&quot;: &quot;string&quot;}' required={true} default=""/><ClassPropertyRef name='title' details='{&quot;title&quot;: &quot;Title&quot;, &quot;description&quot;: &quot;A title that will identify this model in the model selection dropdown&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default=""/><ClassPropertyRef name='system_message' details='{&quot;title&quot;: &quot;System Message&quot;, &quot;description&quot;: &quot;A system message that will always be followed by the LLM&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default=""/><ClassPropertyRef name='context_length' details='{&quot;title&quot;: &quot;Context Length&quot;, &quot;description&quot;: &quot;The maximum context length of the LLM in tokens, as counted by count_tokens.&quot;, &quot;default&quot;: 2048, &quot;type&quot;: &quot;integer&quot;}' required={false} default="2048"/><ClassPropertyRef name='unique_id' details='{&quot;title&quot;: &quot;Unique Id&quot;, &quot;description&quot;: &quot;The unique ID of the user.&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default=""/><ClassPropertyRef name='timeout' details='{&quot;title&quot;: &quot;Timeout&quot;, &quot;description&quot;: &quot;Set the timeout for each request to the LLM. If you are running a local LLM that takes a while to respond, you might want to set this to avoid timeouts.&quot;, &quot;default&quot;: 300, &quot;type&quot;: &quot;integer&quot;}' required={false} default="300"/><ClassPropertyRef name='prompt_templates' details='{&quot;title&quot;: &quot;Prompt Templates&quot;, &quot;description&quot;: &quot;A dictionary of prompt templates that can be used to customize the behavior of the LLM in certain situations. For example, set the \&quot;edit\&quot; key in order to change the prompt that is used for the /edit slash command. Each value in the dictionary is a string templated in mustache syntax, and filled in at runtime with the variables specific to the situation. See the documentation for more information.&quot;, &quot;default&quot;: {}, &quot;type&quot;: &quot;object&quot;}' required={false} default="{}"/>
+<ClassPropertyRef name='model' details='{&quot;title&quot;: &quot;Model&quot;, &quot;description&quot;: &quot;The name of the model to be used (e.g. gpt-4, codellama)&quot;, &quot;type&quot;: &quot;string&quot;}' required={true} default=""/><ClassPropertyRef name='api_key' details='{&quot;title&quot;: &quot;Api Key&quot;, &quot;description&quot;: &quot;OpenAI API key&quot;, &quot;type&quot;: &quot;string&quot;}' required={true} default=""/><ClassPropertyRef name='title' details='{&quot;title&quot;: &quot;Title&quot;, &quot;description&quot;: &quot;A title that will identify this model in the model selection dropdown&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default=""/><ClassPropertyRef name='system_message' details='{&quot;title&quot;: &quot;System Message&quot;, &quot;description&quot;: &quot;A system message that will always be followed by the LLM&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default=""/><ClassPropertyRef name='context_length' details='{&quot;title&quot;: &quot;Context Length&quot;, &quot;description&quot;: &quot;The maximum context length of the LLM in tokens, as counted by count_tokens.&quot;, &quot;default&quot;: 2048, &quot;type&quot;: &quot;integer&quot;}' required={false} default="2048"/><ClassPropertyRef name='unique_id' details='{&quot;title&quot;: &quot;Unique Id&quot;, &quot;description&quot;: &quot;The unique ID of the user.&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default=""/><ClassPropertyRef name='timeout' details='{&quot;title&quot;: &quot;Timeout&quot;, &quot;description&quot;: &quot;Set the timeout for each request to the LLM. If you are running a local LLM that takes a while to respond, you might want to set this to avoid timeouts.&quot;, &quot;default&quot;: 300, &quot;type&quot;: &quot;integer&quot;}' required={false} default="300"/><ClassPropertyRef name='prompt_templates' details='{&quot;title&quot;: &quot;Prompt Templates&quot;, &quot;description&quot;: &quot;A dictionary of prompt templates that can be used to customize the behavior of the LLM in certain situations. For example, set the \&quot;edit\&quot; key in order to change the prompt that is used for the /edit slash command. Each value in the dictionary is a string templated in mustache syntax, and filled in at runtime with the variables specific to the situation. See the documentation for more information.&quot;, &quot;default&quot;: {}, &quot;type&quot;: &quot;object&quot;}' required={false} default="{}"/>
\ No newline at end of file
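
The openai.md hunks above drop LM Studio from the list of OpenAI-compatible local servers and otherwise leave the documented `OpenAI` properties (`model`, `api_key`, `context_length`, `timeout`, `prompt_templates`) unchanged. For orientation, a minimal `~/.continue/config.py` built only from those documented properties might look like the sketch below; the import paths and the `Models(default=...)` wiring are assumptions drawn from the config reference, not from this diff.

```python
# Sketch of ~/.continue/config.py using the OpenAI properties documented above.
# NOTE: the import paths and the Models(default=...) shape are assumptions,
# not something this diff specifies.
from continuedev.src.continuedev.core.config import ContinueConfig
from continuedev.src.continuedev.core.models import Models
from continuedev.src.continuedev.libs.llm.openai import OpenAI

config = ContinueConfig(
    models=Models(
        default=OpenAI(
            api_key="<OPENAI_API_KEY>",  # required property per the reference
            model="gpt-4",               # required property per the reference
            context_length=2048,         # documented default: 2048
            timeout=300,                 # documented default: 300; raise for slow local models
        )
    )
)
```
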
diff --git a/docs/docs/reference/config.md b/docs/docs/reference/config.md
index 27612924..b26d8cde 100644
--- a/docs/docs/reference/config.md
+++ b/docs/docs/reference/config.md
@@ -8,7 +8,7 @@ Continue can be deeply customized by editing the `ContinueConfig` object in `~/.
## Properties
-<ClassPropertyRef name='steps_on_startup' details='{&quot;title&quot;: &quot;Steps On Startup&quot;, &quot;description&quot;: &quot;Steps that will be automatically run at the beginning of a new session&quot;, &quot;default&quot;: [], &quot;type&quot;: &quot;array&quot;, &quot;items&quot;: {&quot;$ref&quot;: &quot;#/definitions/Step&quot;}}' required={false} default="[]"/><ClassPropertyRef name='disallowed_steps' details='{&quot;title&quot;: &quot;Disallowed Steps&quot;, &quot;description&quot;: &quot;Steps that are not allowed to be run, and will be skipped if attempted&quot;, &quot;default&quot;: [], &quot;type&quot;: &quot;array&quot;, &quot;items&quot;: {&quot;type&quot;: &quot;string&quot;}}' required={false} default="[]"/><ClassPropertyRef name='allow_anonymous_telemetry' details='{&quot;title&quot;: &quot;Allow Anonymous Telemetry&quot;, &quot;description&quot;: &quot;If this field is set to True, we will collect anonymous telemetry as described in the documentation page on telemetry. If set to False, we will not collect any data.&quot;, &quot;default&quot;: true, &quot;type&quot;: &quot;boolean&quot;}' required={false} default="True"/><ClassPropertyRef name='models' details='{&quot;title&quot;: &quot;Models&quot;, &quot;description&quot;: &quot;Configuration for the models used by Continue. Read more about how to configure models in the documentation.&quot;, &quot;default&quot;: {&quot;default&quot;: {&quot;title&quot;: null, &quot;system_message&quot;: null, &quot;context_length&quot;: 2048, &quot;model&quot;: &quot;gpt-4&quot;, &quot;timeout&quot;: 300, &quot;prompt_templates&quot;: {}, &quot;api_key&quot;: null, &quot;llm&quot;: null, &quot;class_name&quot;: &quot;MaybeProxyOpenAI&quot;}, &quot;small&quot;: null, &quot;medium&quot;: {&quot;title&quot;: null, &quot;system_message&quot;: null, &quot;context_length&quot;: 2048, &quot;model&quot;: &quot;gpt-3.5-turbo&quot;, &quot;timeout&quot;: 300, &quot;prompt_templates&quot;: {}, &quot;api_key&quot;: null, &quot;llm&quot;: null, &quot;class_name&quot;: &quot;MaybeProxyOpenAI&quot;}, &quot;large&quot;: null, &quot;edit&quot;: null, &quot;chat&quot;: null, &quot;unused&quot;: []}, &quot;allOf&quot;: [{&quot;$ref&quot;: &quot;#/definitions/Models&quot;}]}' required={false} default="{&#x27;default&#x27;: {&#x27;title&#x27;: None, &#x27;system_message&#x27;: None, &#x27;context_length&#x27;: 2048, &#x27;model&#x27;: &#x27;gpt-4&#x27;, &#x27;timeout&#x27;: 300, &#x27;prompt_templates&#x27;: {}, &#x27;api_key&#x27;: None, &#x27;llm&#x27;: None, &#x27;class_name&#x27;: &#x27;MaybeProxyOpenAI&#x27;}, &#x27;small&#x27;: None, &#x27;medium&#x27;: {&#x27;title&#x27;: None, &#x27;system_message&#x27;: None, &#x27;context_length&#x27;: 2048, &#x27;model&#x27;: &#x27;gpt-3.5-turbo&#x27;, &#x27;timeout&#x27;: 300, &#x27;prompt_templates&#x27;: {}, &#x27;api_key&#x27;: None, &#x27;llm&#x27;: None, &#x27;class_name&#x27;: &#x27;MaybeProxyOpenAI&#x27;}, &#x27;large&#x27;: None, &#x27;edit&#x27;: None, &#x27;chat&#x27;: None, &#x27;unused&#x27;: []}"/><ClassPropertyRef name='temperature' details='{&quot;title&quot;: &quot;Temperature&quot;, &quot;description&quot;: &quot;The temperature parameter for sampling from the LLM. Higher temperatures will result in more random output, while lower temperatures will result in more predictable output. 
This value ranges from 0 to 1.&quot;, &quot;default&quot;: 0.5, &quot;type&quot;: &quot;number&quot;}' required={false} default="0.5"/><ClassPropertyRef name='custom_commands' details='{&quot;title&quot;: &quot;Custom Commands&quot;, &quot;description&quot;: &quot;An array of custom commands that allow you to reuse prompts. Each has name, description, and prompt properties. When you enter /&lt;name&gt; in the text input, it will act as a shortcut to the prompt.&quot;, &quot;default&quot;: [{&quot;name&quot;: &quot;test&quot;, &quot;prompt&quot;: &quot;Write a comprehensive set of unit tests for the selected code. It should setup, run tests that check for correctness including important edge cases, and teardown. Ensure that the tests are complete and sophisticated. Give the tests just as chat output, don&#x27;t edit any file.&quot;, &quot;description&quot;: &quot;This is an example custom command. Use /config to edit it and create more&quot;}], &quot;type&quot;: &quot;array&quot;, &quot;items&quot;: {&quot;$ref&quot;: &quot;#/definitions/CustomCommand&quot;}}' required={false} default="[{&#x27;name&#x27;: &#x27;test&#x27;, &#x27;prompt&#x27;: &quot;Write a comprehensive set of unit tests for the selected code. It should setup, run tests that check for correctness including important edge cases, and teardown. Ensure that the tests are complete and sophisticated. Give the tests just as chat output, don&#x27;t edit any file.&quot;, &#x27;description&#x27;: &#x27;This is an example custom command. Use /config to edit it and create more&#x27;}]"/><ClassPropertyRef name='slash_commands' details='{&quot;title&quot;: &quot;Slash Commands&quot;, &quot;description&quot;: &quot;An array of slash commands that let you map custom Steps to a shortcut.&quot;, &quot;default&quot;: [], &quot;type&quot;: &quot;array&quot;, &quot;items&quot;: {&quot;$ref&quot;: &quot;#/definitions/SlashCommand&quot;}}' required={false} default="[]"/><ClassPropertyRef name='on_traceback' details='{&quot;title&quot;: &quot;On Traceback&quot;, &quot;description&quot;: &quot;The step that will be run when a traceback is detected (when you use the shortcut cmd+shift+R)&quot;, &quot;allOf&quot;: [{&quot;$ref&quot;: &quot;#/definitions/Step&quot;}]}' required={false} default=""/><ClassPropertyRef name='system_message' details='{&quot;title&quot;: &quot;System Message&quot;, &quot;description&quot;: &quot;A system message that will always be followed by the LLM&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default=""/><ClassPropertyRef name='policy_override' details='{&quot;title&quot;: &quot;Policy Override&quot;, &quot;description&quot;: &quot;A Policy object that can be used to override the default behavior of Continue, for example in order to build custom agents that take multiple steps at a time.&quot;, &quot;allOf&quot;: [{&quot;$ref&quot;: &quot;#/definitions/Policy&quot;}]}' required={false} default=""/><ClassPropertyRef name='context_providers' details='{&quot;title&quot;: &quot;Context Providers&quot;, &quot;description&quot;: &quot;A list of ContextProvider objects that can be used to provide context to the LLM by typing &#x27;@&#x27;. 
Read more about ContextProviders in the documentation.&quot;, &quot;default&quot;: [], &quot;type&quot;: &quot;array&quot;, &quot;items&quot;: {&quot;$ref&quot;: &quot;#/definitions/ContextProvider&quot;}}' required={false} default="[]"/><ClassPropertyRef name='user_token' details='{&quot;title&quot;: &quot;User Token&quot;, &quot;description&quot;: &quot;An optional token to identify the user.&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default=""/><ClassPropertyRef name='data_server_url' details='{&quot;title&quot;: &quot;Data Server Url&quot;, &quot;description&quot;: &quot;The URL of the server where development data is sent. No data is sent unless a valid user token is provided.&quot;, &quot;default&quot;: &quot;https://us-west1-autodebug.cloudfunctions.net&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default="https://us-west1-autodebug.cloudfunctions.net"/>
+<ClassPropertyRef name='steps_on_startup' details='{&quot;title&quot;: &quot;Steps On Startup&quot;, &quot;description&quot;: &quot;Steps that will be automatically run at the beginning of a new session&quot;, &quot;default&quot;: [], &quot;type&quot;: &quot;array&quot;, &quot;items&quot;: {&quot;$ref&quot;: &quot;#/definitions/Step&quot;}}' required={false} default="[]"/><ClassPropertyRef name='disallowed_steps' details='{&quot;title&quot;: &quot;Disallowed Steps&quot;, &quot;description&quot;: &quot;Steps that are not allowed to be run, and will be skipped if attempted&quot;, &quot;default&quot;: [], &quot;type&quot;: &quot;array&quot;, &quot;items&quot;: {&quot;type&quot;: &quot;string&quot;}}' required={false} default="[]"/><ClassPropertyRef name='allow_anonymous_telemetry' details='{&quot;title&quot;: &quot;Allow Anonymous Telemetry&quot;, &quot;description&quot;: &quot;If this field is set to True, we will collect anonymous telemetry as described in the documentation page on telemetry. If set to False, we will not collect any data.&quot;, &quot;default&quot;: true, &quot;type&quot;: &quot;boolean&quot;}' required={false} default="True"/><ClassPropertyRef name='models' details='{&quot;title&quot;: &quot;Models&quot;, &quot;description&quot;: &quot;Configuration for the models used by Continue. Read more about how to configure models in the documentation.&quot;, &quot;default&quot;: {&quot;default&quot;: {&quot;title&quot;: null, &quot;system_message&quot;: null, &quot;context_length&quot;: 2048, &quot;model&quot;: &quot;gpt-4&quot;, &quot;timeout&quot;: 300, &quot;prompt_templates&quot;: {}, &quot;api_key&quot;: null, &quot;llm&quot;: null, &quot;class_name&quot;: &quot;MaybeProxyOpenAI&quot;}, &quot;small&quot;: null, &quot;medium&quot;: {&quot;title&quot;: null, &quot;system_message&quot;: null, &quot;context_length&quot;: 2048, &quot;model&quot;: &quot;gpt-3.5-turbo&quot;, &quot;timeout&quot;: 300, &quot;prompt_templates&quot;: {}, &quot;api_key&quot;: null, &quot;llm&quot;: null, &quot;class_name&quot;: &quot;MaybeProxyOpenAI&quot;}, &quot;large&quot;: null, &quot;edit&quot;: null, &quot;chat&quot;: null, &quot;unused&quot;: []}, &quot;allOf&quot;: [{&quot;$ref&quot;: &quot;#/definitions/Models&quot;}]}' required={false} default="{&#x27;default&#x27;: {&#x27;title&#x27;: None, &#x27;system_message&#x27;: None, &#x27;context_length&#x27;: 2048, &#x27;model&#x27;: &#x27;gpt-4&#x27;, &#x27;timeout&#x27;: 300, &#x27;prompt_templates&#x27;: {}, &#x27;api_key&#x27;: None, &#x27;llm&#x27;: None, &#x27;class_name&#x27;: &#x27;MaybeProxyOpenAI&#x27;}, &#x27;small&#x27;: None, &#x27;medium&#x27;: {&#x27;title&#x27;: None, &#x27;system_message&#x27;: None, &#x27;context_length&#x27;: 2048, &#x27;model&#x27;: &#x27;gpt-3.5-turbo&#x27;, &#x27;timeout&#x27;: 300, &#x27;prompt_templates&#x27;: {}, &#x27;api_key&#x27;: None, &#x27;llm&#x27;: None, &#x27;class_name&#x27;: &#x27;MaybeProxyOpenAI&#x27;}, &#x27;large&#x27;: None, &#x27;edit&#x27;: None, &#x27;chat&#x27;: None, &#x27;unused&#x27;: []}"/><ClassPropertyRef name='temperature' details='{&quot;title&quot;: &quot;Temperature&quot;, &quot;description&quot;: &quot;The temperature parameter for sampling from the LLM. Higher temperatures will result in more random output, while lower temperatures will result in more predictable output. 
This value ranges from 0 to 1.&quot;, &quot;default&quot;: 0.5, &quot;type&quot;: &quot;number&quot;}' required={false} default="0.5"/><ClassPropertyRef name='custom_commands' details='{&quot;title&quot;: &quot;Custom Commands&quot;, &quot;description&quot;: &quot;An array of custom commands that allow you to reuse prompts. Each has name, description, and prompt properties. When you enter /&lt;name&gt; in the text input, it will act as a shortcut to the prompt.&quot;, &quot;default&quot;: [{&quot;name&quot;: &quot;test&quot;, &quot;prompt&quot;: &quot;Write a comprehensive set of unit tests for the selected code. It should setup, run tests that check for correctness including important edge cases, and teardown. Ensure that the tests are complete and sophisticated. Give the tests just as chat output, don&#x27;t edit any file.&quot;, &quot;description&quot;: &quot;This is an example custom command. Use /config to edit it and create more&quot;}], &quot;type&quot;: &quot;array&quot;, &quot;items&quot;: {&quot;$ref&quot;: &quot;#/definitions/CustomCommand&quot;}}' required={false} default="[{&#x27;name&#x27;: &#x27;test&#x27;, &#x27;prompt&#x27;: &quot;Write a comprehensive set of unit tests for the selected code. It should setup, run tests that check for correctness including important edge cases, and teardown. Ensure that the tests are complete and sophisticated. Give the tests just as chat output, don&#x27;t edit any file.&quot;, &#x27;description&#x27;: &#x27;This is an example custom command. Use /config to edit it and create more&#x27;}]"/><ClassPropertyRef name='slash_commands' details='{&quot;title&quot;: &quot;Slash Commands&quot;, &quot;description&quot;: &quot;An array of slash commands that let you map custom Steps to a shortcut.&quot;, &quot;default&quot;: [], &quot;type&quot;: &quot;array&quot;, &quot;items&quot;: {&quot;$ref&quot;: &quot;#/definitions/SlashCommand&quot;}}' required={false} default="[]"/><ClassPropertyRef name='on_traceback' details='{&quot;title&quot;: &quot;On Traceback&quot;, &quot;description&quot;: &quot;The step that will be run when a traceback is detected (when you use the shortcut cmd+shift+R)&quot;, &quot;allOf&quot;: [{&quot;$ref&quot;: &quot;#/definitions/Step&quot;}]}' required={false} default=""/><ClassPropertyRef name='system_message' details='{&quot;title&quot;: &quot;System Message&quot;, &quot;description&quot;: &quot;A system message that will always be followed by the LLM&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default=""/><ClassPropertyRef name='policy_override' details='{&quot;title&quot;: &quot;Policy Override&quot;, &quot;description&quot;: &quot;A Policy object that can be used to override the default behavior of Continue, for example in order to build custom agents that take multiple steps at a time.&quot;, &quot;allOf&quot;: [{&quot;$ref&quot;: &quot;#/definitions/Policy&quot;}]}' required={false} default=""/><ClassPropertyRef name='context_providers' details='{&quot;title&quot;: &quot;Context Providers&quot;, &quot;description&quot;: &quot;A list of ContextProvider objects that can be used to provide context to the LLM by typing &#x27;@&#x27;. 
Read more about ContextProviders in the documentation.&quot;, &quot;default&quot;: [], &quot;type&quot;: &quot;array&quot;, &quot;items&quot;: {&quot;$ref&quot;: &quot;#/definitions/ContextProvider&quot;}}' required={false} default="[]"/><ClassPropertyRef name='user_token' details='{&quot;title&quot;: &quot;User Token&quot;, &quot;description&quot;: &quot;An optional token to identify the user.&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default=""/><ClassPropertyRef name='data_server_url' details='{&quot;title&quot;: &quot;Data Server Url&quot;, &quot;description&quot;: &quot;The URL of the server where development data is sent. No data is sent unless a valid user token is provided.&quot;, &quot;default&quot;: &quot;https://us-west1-autodebug.cloudfunctions.net&quot;, &quot;type&quot;: &quot;string&quot;}' required={false} default="https://us-west1-autodebug.cloudfunctions.net"/><ClassPropertyRef name='disable_summaries' details='{&quot;title&quot;: &quot;Disable Summaries&quot;, &quot;description&quot;: &quot;If set to `True`, Continue will not generate summaries for each Step. This can be useful if you want to save on compute.&quot;, &quot;default&quot;: false, &quot;type&quot;: &quot;boolean&quot;}' required={false} default="False"/>
### Inherited Properties
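
The config.md hunk documents one additional `ContinueConfig` property, `disable_summaries` (boolean, default `False`), alongside the existing ones such as `allow_anonymous_telemetry` and `temperature`. A hedged sketch of setting it in `~/.continue/config.py`, reusing the assumed import path from the previous sketch:

```python
# Sketch only — the ContinueConfig import path is an assumption, not part of this diff.
from continuedev.src.continuedev.core.config import ContinueConfig

config = ContinueConfig(
    allow_anonymous_telemetry=True,  # documented default: True
    temperature=0.5,                 # documented default: 0.5 (ranges from 0 to 1)
    disable_summaries=True,          # newly documented flag: skip per-Step summaries to save compute
)
```
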
diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js
index e6f8a646..b28a0ade 100644
--- a/docs/docusaurus.config.js
+++ b/docs/docusaurus.config.js
@@ -146,10 +146,15 @@ const config = {
"@docusaurus/plugin-client-redirects",
{
redirects: [
+ // Redirects from old docs
{
from: "/customization",
to: "/customization/overview",
},
+ {
+ from: "/getting-started",
+ to: "/quickstart",
+ },
],
},
],