| author | Nate Sesti <33237525+sestinj@users.noreply.github.com> | 2023-09-28 01:02:52 -0700 | 
|---|---|---|
| committer | GitHub <noreply@github.com> | 2023-09-28 01:02:52 -0700 | 
| commit | 95363a5b52f3bf73531ac76b00178fa79ca97661 (patch) | |
| tree | 9b9c1614556f1f0d21f363e6a9fe950069affb5d /extension/schema/ContinueConfig.d.ts | |
| parent | d4acf4bb11dbd7d3d6210e2949d21143d721e81e (diff) | |
Past input (#513)
* feat: :construction: use ComboBox in place of UserInputContainer
* feat: :construction: adding context to previous inputs steps
* feat: :sparkles: preview context items on click
* feat: :construction: more work on context items ui
* style: :construction: working out the details of ctx item buttons
* feat: :sparkles: getting the final details
* fix: :bug: fix height of ctx items bar
* fix: :bug: last couple of details
* fix: :bug: pass model param through to hf inference api
* fix: :loud_sound: better logging for timeout
* feat: :sparkles: option to set the meilisearch url
* fix: :bug: fix height of past inputs
Diffstat (limited to 'extension/schema/ContinueConfig.d.ts')
| Mode | File | Lines |
|---|---|---|
| -rw-r--r-- | extension/schema/ContinueConfig.d.ts | 13 |

1 file changed, 8 insertions, 5 deletions
```diff
diff --git a/extension/schema/ContinueConfig.d.ts b/extension/schema/ContinueConfig.d.ts
index 92f6e047..64aa5c02 100644
--- a/extension/schema/ContinueConfig.d.ts
+++ b/extension/schema/ContinueConfig.d.ts
@@ -72,10 +72,14 @@ export type VerifySsl = boolean;
  */
 export type CaBundlePath = string;
 /**
+ * Proxy URL to use when making the HTTP request
+ */
+export type Proxy = string;
+/**
  * The API key for the LLM provider.
  */
 export type ApiKey = string;
-export type Unused = LLM[];
+export type Saved = LLM[];
 /**
  * The temperature parameter for sampling from the LLM. Higher temperatures will result in more random output, while lower temperatures will result in more predictable output. This value ranges from 0 to 1.
  */
@@ -205,12 +209,10 @@ export interface FunctionCall {
  */
 export interface Models1 {
   default: LLM;
-  small?: LLM;
-  medium?: LLM;
-  large?: LLM;
+  summarize?: LLM;
   edit?: LLM;
   chat?: LLM;
-  unused?: Unused;
+  saved?: Saved;
   sdk?: ContinueSDK;
   [k: string]: unknown;
 }
@@ -224,6 +226,7 @@ export interface LLM {
   timeout?: Timeout;
   verify_ssl?: VerifySsl;
   ca_bundle_path?: CaBundlePath;
+  proxy?: Proxy;
   prompt_templates?: PromptTemplates;
   api_key?: ApiKey;
   [k: string]: unknown;
```
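In short, the schema change collapses the `small`/`medium`/`large` model slots into a single `summarize` slot, renames the `unused` list to `saved`, and lets each `LLM` entry carry a `proxy` URL. As a rough sketch, a config object matching the updated `Models1`/`LLM` shapes might look like the following; only the keys `summarize`, `saved`, and `proxy` come from the diff above, while the model names, URLs, and other values are hypothetical placeholders.

```typescript
// Hypothetical fragment mirroring the updated Models1 / LLM shapes.
// Field names `summarize`, `saved`, and `proxy` come from the schema diff;
// all concrete values below are made-up examples, not Continue defaults.
const models = {
  default: {
    model: "gpt-4",                    // hypothetical model name
    api_key: "<YOUR_API_KEY>",
    proxy: "http://localhost:8888",    // new: proxy URL used for HTTP requests
    verify_ssl: true,
  },
  summarize: {                         // replaces the old small/medium/large slots
    model: "gpt-3.5-turbo",            // hypothetical
  },
  edit: {
    model: "gpt-4",                    // hypothetical
  },
  saved: [                             // renamed from `unused`
    { model: "codellama-7b" },         // hypothetical
  ],
};
```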
