From e976d60974a7837967d03807605cbf2e7b4f3f9a Mon Sep 17 00:00:00 2001
From: Nate Sesti <33237525+sestinj@users.noreply.github.com>
Date: Sat, 23 Sep 2023 13:06:00 -0700
Subject: UI Redesign and fixing many details (#496)

* feat: :lipstick: start of major design upgrade
* feat: :lipstick: model selection page
* feat: :lipstick: use shortcut to add highlighted code as ctx
* feat: :lipstick: better display of errors
* feat: :lipstick: ui for learning keyboard shortcuts, more details
* refactor: :construction: testing slash commands ui
* Truncate continue.log
* refactor: :construction: refactoring client_session, ui, more
* feat: :bug: layout fixes
* refactor: :lipstick: ui to enter OpenAI Key
* refactor: :truck: rename MaybeProxyOpenAI -> OpenAIFreeTrial
* starting help center
* removing old shortcut docs
* fix: :bug: fix model setting logic to avoid overwrites
* feat: :lipstick: tutorial and model descriptions
* refactor: :truck: rename unused -> saved
* refactor: :truck: rename model roles
* feat: :lipstick: edit indicator
* refactor: :lipstick: move +, folder icons
* feat: :lipstick: tab to clear all context
* fix: :bug: context providers ui fixes
* fix: :bug: fix lag when stopping step
* fix: :bug: don't override system message for models
* fix: :bug: fix continue button cursor
* feat: :lipstick: title bar
* fix: :bug: updates to code highlighting logic and more
* fix: :bug: fix renaming of summarize model role
* feat: :lipstick: help page and better session title
* feat: :lipstick: more help page / ui improvements
* feat: :lipstick: set session title
* fix: :bug: small fixes for changing sessions
* fix: :bug: perfecting the highlighting code and ctx interactions
* style: :lipstick: sticky headers for scroll, ollama warming
* fix: :bug: fix toggle bug

---------

Co-authored-by: Ty Dunn
---
 extension/schema/LLM.d.ts | 61 ++++++++++++++++++++++++++++++++++++++++++------
 1 file changed, 55 insertions(+), 6 deletions(-)

(limited to 'extension/schema/LLM.d.ts')

diff --git a/extension/schema/LLM.d.ts b/extension/schema/LLM.d.ts
index 255c752e..31d38456 100644
--- a/extension/schema/LLM.d.ts
+++ b/extension/schema/LLM.d.ts
@@ -6,15 +6,64 @@
  */
 
 export type LLM = LLM1;
-export type RequiresApiKey = string;
-export type RequiresUniqueId = boolean;
-export type RequiresWriteLog = boolean;
+/**
+ * A title that will identify this model in the model selection dropdown
+ */
+export type Title = string;
+/**
+ * A system message that will always be followed by the LLM
+ */
 export type SystemMessage = string;
+/**
+ * The maximum context length of the LLM in tokens, as counted by count_tokens.
+ */
+export type ContextLength = number;
+/**
+ * The unique ID of the user.
+ */
+export type UniqueId = string;
+/**
+ * The name of the model to be used (e.g. gpt-4, codellama)
+ */
+export type Model = string;
+/**
+ * Tokens that will stop the completion.
+ */
+export type StopTokens = string[];
+/**
+ * Set the timeout for each request to the LLM. If you are running a local LLM that takes a while to respond, you might want to set this to avoid timeouts.
+ */
+export type Timeout = number;
+/**
+ * Whether to verify SSL certificates for requests.
+ */
+export type VerifySsl = boolean;
+/**
+ * Path to a custom CA bundle to use when making the HTTP request
+ */
+export type CaBundlePath = string;
+/**
+ * The API key for the LLM provider.
+ */
+export type ApiKey = string;
 
 export interface LLM1 {
-  requires_api_key?: RequiresApiKey;
-  requires_unique_id?: RequiresUniqueId;
-  requires_write_log?: RequiresWriteLog;
+  title?: Title;
   system_message?: SystemMessage;
+  context_length?: ContextLength;
+  unique_id?: UniqueId;
+  model: Model;
+  stop_tokens?: StopTokens;
+  timeout?: Timeout;
+  verify_ssl?: VerifySsl;
+  ca_bundle_path?: CaBundlePath;
+  prompt_templates?: PromptTemplates;
+  api_key?: ApiKey;
+  [k: string]: unknown;
+}
+/**
+ * A dictionary of prompt templates that can be used to customize the behavior of the LLM in certain situations. For example, set the "edit" key in order to change the prompt that is used for the /edit slash command. Each value in the dictionary is a string templated in mustache syntax, and filled in at runtime with the variables specific to the situation. See the documentation for more information.
+ */
+export interface PromptTemplates {
   [k: string]: unknown;
 }
--
cgit v1.2.3-70-g09d2
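
As a minimal sketch (not part of the patch itself) of what the regenerated schema accepts: a model configuration satisfying the new LLM1 interface needs only `model`; every other field is optional. All concrete values below, the import path, and the mustache variable names are hypothetical placeholders, not confirmed by the schema.

// example-model.ts (illustrative only)
import type { LLM } from "./extension/schema/LLM";

const exampleModel: LLM = {
  title: "GPT-4",                      // label shown in the model selection dropdown
  model: "gpt-4",                      // the model name, e.g. gpt-4 or codellama
  system_message: "Always answer concisely.",
  context_length: 8192,                // maximum context length in tokens
  stop_tokens: ["</s>"],               // tokens that stop the completion
  timeout: 300,                        // request timeout (unit assumed to be seconds)
  verify_ssl: true,                    // verify SSL certificates for requests
  api_key: "sk-...",                   // placeholder, not a real key
  prompt_templates: {
    // Overrides the prompt used by the /edit slash command; each value is a
    // mustache-syntax template. These variable names are illustrative only.
    edit: "Rewrite this code:\n{{{code_to_edit}}}\nInstructions: {{{user_input}}}",
  },
};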