From 932fadb0187d939f5f746ab7118528e04bf64bec Mon Sep 17 00:00:00 2001 From: Nate Sesti Date: Tue, 1 Aug 2023 01:34:05 -0700 Subject: docs: :memo: ollama customization docs --- docs/docs/customization.md | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'docs') diff --git a/docs/docs/customization.md b/docs/docs/customization.md index 60764527..4226b4d3 100644 --- a/docs/docs/customization.md +++ b/docs/docs/customization.md @@ -53,6 +53,10 @@ config = ContinueConfig( Continue will automatically prompt you for your Anthropic API key, which must have access to Claude 2. You can request early access [here](https://www.anthropic.com/earlyaccess). +### Run Llama-2 locally with Ollama + +[Ollama](https://ollama.ai/) is a Mac application that makes it easy to locally run open-source models, including Llama-2. Download the app from the website, and it will walk you through setup in a couple of minutes. You can also read more in their [README](https://github.com/jmorganca/ollama). Configure Continue by importing `from continuedev.libs.llm.ollama import Ollama` and setting `default=Ollama(model="llama-2")`. + ### Local models with ggml See our [5 minute quickstart](https://github.com/continuedev/ggml-server-example) to run any model locally with ggml. While these models don't yet perform as well, they are free, entirely private, and run offline. 
-- cgit v1.2.3-70-g09d2 From 2447182803877ac2d117d8353f652d62cc63d352 Mon Sep 17 00:00:00 2001 From: Nate Sesti Date: Tue, 1 Aug 2023 09:13:55 -0700 Subject: docs: :memo: make ollama docs more clear --- docs/docs/customization.md | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) (limited to 'docs') diff --git a/docs/docs/customization.md b/docs/docs/customization.md index 4226b4d3..22fcbb3d 100644 --- a/docs/docs/customization.md +++ b/docs/docs/customization.md @@ -55,7 +55,18 @@ Continue will automatically prompt you for your Anthropic API key, which must ha ### Run Llama-2 locally with Ollama -[Ollama](https://ollama.ai/) is a Mac application that makes it easy to locally run open-source models, including Llama-2. Download the app from the website, and it will walk you through setup in a couple of minutes. You can also read more in their [README](https://github.com/jmorganca/ollama). Configure Continue by importing `from continuedev.libs.llm.ollama import Ollama` and setting `default=Ollama(model="llama-2")`. +[Ollama](https://ollama.ai/) is a Mac application that makes it easy to locally run open-source models, including Llama-2. Download the app from the website, and it will walk you through setup in a couple of minutes. You can also read more in their [README](https://github.com/jmorganca/ollama). Continue can then be configured to use the `Ollama` LLM class: + +```python +from continuedev.libs.llm.ollama import Ollama + +config = ContinueConfig( + ... 
+ models=Models( + default=Ollama(model="llama2") + ) +) +``` ### Local models with ggml -- cgit v1.2.3-70-g09d2 From c4d88a8d22622d7c63ca19ba1945dd82dbc3e008 Mon Sep 17 00:00:00 2001 From: Nate Sesti Date: Tue, 1 Aug 2023 10:55:38 -0700 Subject: docs: :memo: custom policies documentation --- docs/docs/customization.md | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) (limited to 'docs') diff --git a/docs/docs/customization.md b/docs/docs/customization.md index 22fcbb3d..a09c4ac5 100644 --- a/docs/docs/customization.md +++ b/docs/docs/customization.md @@ -192,3 +192,42 @@ config = ContinueConfig( ] ) ``` + +## Custom Policies + +Policies can be used to deeply change the behavior of Continue, or to build agents that take longer sequences of actions on their own. The [`DefaultPolicy`](https://github.com/continuedev/continue/blob/main/continuedev/src/continuedev/plugins/policies/default.py) handles the parsing of slash commands, and otherwise always chooses the `SimpleChatStep`, but you could customize by for example always taking a "review" step after editing code. 
To do so, create a new `Policy` subclass that implements the `next` method: + +```python +class ReviewEditsPolicy(Policy): + + default_step: Step = SimpleChatStep() + + def next(self, config: ContinueConfig, history: History) -> Step: + # Get the last step + last_step = history.get_current() + + # If it edited code, then review the changes + if isinstance(last_step, EditHighlightedCodeStep): + return ReviewStep() # Not implemented + + # Otherwise, choose between EditHighlightedCodeStep and SimpleChatStep based on slash command + observation = last_step.observation + if observation is not None and isinstance(observation, UserInputObservation): + user_input = observation.user_input + if user_input.startswith("/edit"): + return EditHighlightedCodeStep(user_input=user_input[5:]) + else: + return SimpleChatStep() + + # Otherwise, fall back to the default step + return self.default_step.copy() +``` + +Then, in `~/.continue/config.py`, override the default policy: + +```python +config = ContinueConfig( + ... + policy_override=ReviewEditsPolicy() +) +``` -- cgit v1.2.3-70-g09d2