Diffstat (limited to 'docs/docs/reference/Models/replicate.md')
-rw-r--r-- | docs/docs/reference/Models/replicate.md | 22 |
1 file changed, 21 insertions(+), 1 deletion(-)
diff --git a/docs/docs/reference/Models/replicate.md b/docs/docs/reference/Models/replicate.md
index 4f05cdfa..0c93a758 100644
--- a/docs/docs/reference/Models/replicate.md
+++ b/docs/docs/reference/Models/replicate.md
@@ -2,10 +2,30 @@ import ClassPropertyRef from '@site/src/components/ClassPropertyRef.tsx';
 
 # ReplicateLLM
 
+Replicate is a great option for newly released language models or models that you've deployed through their platform. Sign up for an account [here](https://replicate.ai/), copy your API key, and then select any model from the [Replicate Streaming List](https://replicate.com/collections/streaming-language-models). Change `~/.continue/config.py` to look like this:
+```python
+from continuedev.src.continuedev.core.models import Models
+from continuedev.src.continuedev.libs.llm.replicate import ReplicateLLM
+
+config = ContinueConfig(
+    ...
+    models=Models(
+        default=ReplicateLLM(
+            model="replicate/codellama-13b-instruct:da5676342de1a5a335b848383af297f592b816b950a43d251a0a9edd0113604b",
+            api_key="my-replicate-api-key")
+    )
+)
+```
+
+If you don't specify the `model` parameter, it will default to `replicate/llama-2-70b-chat:58d078176e02c219e11eb4da5a02a7830a283b14cf8f94537af893ccff5ee781`.
 
 [View the source](https://github.com/continuedev/continue/tree/main/continuedev/src/continuedev/libs/llm/replicate.py)
 
 ## Properties
 
 
-<ClassPropertyRef name='api_key' details='{"title": "Api Key", "type": "string"}' required={true}/><ClassPropertyRef name='title' details='{"title": "Title", "type": "string"}' required={false}/><ClassPropertyRef name='system_message' details='{"title": "System Message", "type": "string"}' required={false}/><ClassPropertyRef name='context_length' details='{"title": "Context Length", "default": 2048, "type": "integer"}' required={false}/><ClassPropertyRef name='unique_id' details='{"title": "Unique Id", "type": "string"}' required={false}/><ClassPropertyRef name='model' details='{"title": "Model", "default": "replicate/llama-2-70b-chat:58d078176e02c219e11eb4da5a02a7830a283b14cf8f94537af893ccff5ee781", "type": "string"}' required={false}/><ClassPropertyRef name='timeout' details='{"title": "Timeout", "default": 300, "type": "integer"}' required={false}/><ClassPropertyRef name='prompt_templates' details='{"title": "Prompt Templates", "default": {"edit": "[INST] Consider the following code:\n```\n{{code_to_edit}}\n```\nEdit the code to perfectly satisfy the following user request:\n{{user_input}}\nOutput nothing except for the code. No code block, no English explanation, no start/end tags.\n[/INST]"}, "type": "object"}' required={false}/>
\ No newline at end of file
+
+
+### Inherited Properties
+
+<ClassPropertyRef name='api_key' details='{"title": "Api Key", "description": "Replicate API key", "type": "string"}' required={true} default=""/><ClassPropertyRef name='title' details='{"title": "Title", "description": "A title that will identify this model in the model selection dropdown", "type": "string"}' required={false} default=""/><ClassPropertyRef name='system_message' details='{"title": "System Message", "description": "A system message that will always be followed by the LLM", "type": "string"}' required={false} default=""/><ClassPropertyRef name='context_length' details='{"title": "Context Length", "description": "The maximum context length of the LLM in tokens, as counted by count_tokens.", "default": 2048, "type": "integer"}' required={false} default="2048"/><ClassPropertyRef name='unique_id' details='{"title": "Unique Id", "description": "The unique ID of the user.", "type": "string"}' required={false} default=""/><ClassPropertyRef name='model' details='{"title": "Model", "description": "The name of the model to be used (e.g. gpt-4, codellama)", "default": "replicate/llama-2-70b-chat:58d078176e02c219e11eb4da5a02a7830a283b14cf8f94537af893ccff5ee781", "type": "string"}' required={false} default="replicate/llama-2-70b-chat:58d078176e02c219e11eb4da5a02a7830a283b14cf8f94537af893ccff5ee781"/><ClassPropertyRef name='timeout' details='{"title": "Timeout", "description": "Set the timeout for each request to the LLM. If you are running a local LLM that takes a while to respond, you might want to set this to avoid timeouts.", "default": 300, "type": "integer"}' required={false} default="300"/><ClassPropertyRef name='prompt_templates' details='{"title": "Prompt Templates", "description": "A dictionary of prompt templates that can be used to customize the behavior of the LLM in certain situations. For example, set the \"edit\" key in order to change the prompt that is used for the /edit slash command. Each value in the dictionary is a string templated in mustache syntax, and filled in at runtime with the variables specific to the situation. See the documentation for more information.", "default": {"edit": "[INST] Consider the following code:\n```\n{{code_to_edit}}\n```\nEdit the code to perfectly satisfy the following user request:\n{{user_input}}\nOutput nothing except for the code. No code block, no English explanation, no start/end tags.\n[/INST]"}, "type": "object"}' required={false} default="{'edit': '[INST] Consider the following code:\n```\n{{code_to_edit}}\n```\nEdit the code to perfectly satisfy the following user request:\n{{user_input}}\nOutput nothing except for the code. No code block, no English explanation, no start/end tags.\n[/INST]'}"/>
\ No newline at end of file
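The added snippet calls `config = ContinueConfig(...)` but, because of the `...` ellipsis, never shows where `ContinueConfig` is imported from. A minimal sketch of a complete `~/.continue/config.py` follows; the `continuedev.src.continuedev.core.config` import path is an assumption inferred from the other imports in the patch, not something the patch itself confirms.

```python
# Sketch of a full ~/.continue/config.py with ReplicateLLM as the default model.
# Assumption: ContinueConfig is importable from continuedev.src.continuedev.core.config;
# check the linked source if your installed version differs.
from continuedev.src.continuedev.core.config import ContinueConfig  # assumed path
from continuedev.src.continuedev.core.models import Models
from continuedev.src.continuedev.libs.llm.replicate import ReplicateLLM

config = ContinueConfig(
    models=Models(
        default=ReplicateLLM(
            # Any model from the Replicate streaming collection works here;
            # omit `model` to fall back to the llama-2-70b-chat default noted above.
            model="replicate/codellama-13b-instruct:da5676342de1a5a335b848383af297f592b816b950a43d251a0a9edd0113604b",
            api_key="my-replicate-api-key",
        )
    )
)
```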
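Among the inherited properties, `prompt_templates` is a dictionary of mustache-syntax templates keyed by situation (for example `"edit"` for the `/edit` slash command). A hedged sketch of overriding that key is shown below; the template body is illustrative, while the `{{code_to_edit}}` and `{{user_input}}` placeholders come from the default value documented in the property itself.

```python
# Sketch: customize the "edit" prompt template on ReplicateLLM.
# The template text here is an example, not the shipped default; only the
# {{code_to_edit}} and {{user_input}} placeholders are taken from the docs above.
from continuedev.src.continuedev.libs.llm.replicate import ReplicateLLM

EDIT_TEMPLATE = (
    "[INST] Rewrite the following code to satisfy the request.\n"
    "```\n{{code_to_edit}}\n```\n"
    "Request: {{user_input}}\n"
    "Return only the updated code, with no explanation. [/INST]"
)

replicate_llm = ReplicateLLM(
    api_key="my-replicate-api-key",
    prompt_templates={"edit": EDIT_TEMPLATE},
)
```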