summaryrefslogtreecommitdiff
path: root/continuedev/src/continuedev/server
diff options
context:
space:
mode:
authorNate Sesti <33237525+sestinj@users.noreply.github.com>2023-09-29 20:20:45 -0700
committerGitHub <noreply@github.com>2023-09-29 20:20:45 -0700
commit0dfdd4c52a9d686af54346ade35e0bcff226c8b9 (patch)
treed4f98c7809ddfc7ed14e3be36fe921cc418a8917 /continuedev/src/continuedev/server
parent64558321addcc80de9137cf9c9ef1bf7ed85ffa5 (diff)
downloadsncontinue-0dfdd4c52a9d686af54346ade35e0bcff226c8b9.tar.gz
sncontinue-0dfdd4c52a9d686af54346ade35e0bcff226c8b9.tar.bz2
sncontinue-0dfdd4c52a9d686af54346ade35e0bcff226c8b9.zip
Model config UI (#522)
* feat: :sparkles: improved model selection * feat: :sparkles: add max_tokens option to LLM class * docs: :memo: update reference with max_tokens * feat: :loud_sound: add context to dev data logging * feat: :sparkles: final work on model config ui
Diffstat (limited to 'continuedev/src/continuedev/server')
-rw-r--r--continuedev/src/continuedev/server/gui.py21
1 file changed, 16 insertions, 5 deletions
diff --git a/continuedev/src/continuedev/server/gui.py b/continuedev/src/continuedev/server/gui.py
index 10f6974f..cc6bc911 100644
--- a/continuedev/src/continuedev/server/gui.py
+++ b/continuedev/src/continuedev/server/gui.py
@@ -10,6 +10,7 @@ from uvicorn.main import Server
from ..core.main import ContextItem
from ..core.models import ALL_MODEL_ROLES, MODEL_CLASSES, MODEL_MODULE_NAMES
+from ..libs.llm.prompts.chat import llama2_template_messages, template_alpaca_messages
from ..libs.util.create_async_task import create_async_task
from ..libs.util.edit_config import (
add_config_import,
@@ -323,7 +324,22 @@ class GUIProtocolServer:
existing_saved_models.add(display_llm_class(val))
models.__setattr__(role, None)
+ # Add the requisite import to config.py
+ add_config_import(
+ f"from continuedev.src.continuedev.libs.llm.{MODEL_MODULE_NAMES[model_class]} import {model_class}"
+ )
+ if "template_messages" in model:
+ add_config_import(
+ f"from continuedev.src.continuedev.libs.llm.prompts.chat import {model['template_messages']}"
+ )
+
# Set and start the new default model
+
+ if "template_messages" in model:
+ model["template_messages"] = {
+ "llama2_template_messages": llama2_template_messages,
+ "template_alpaca_messages": template_alpaca_messages,
+ }[model["template_messages"]]
new_model = MODEL_CLASSES[model_class](**model)
models.default = new_model
await self.session.autopilot.continue_sdk.start_model(models.default)
@@ -343,11 +359,6 @@ class GUIProtocolServer:
create_obj_node("Models", models_args),
)
- # Add the requisite import to config.py
- add_config_import(
- f"from continuedev.src.continuedev.libs.llm.{MODEL_MODULE_NAMES[model_class]} import {model_class}"
- )
-
# Set all roles (in-memory) to the new default model
for role in ALL_MODEL_ROLES:
if role != "default":