-rw-r--r-- | continuedev/src/continuedev/libs/llm/hf_tgi.py                |  2
-rw-r--r-- | continuedev/src/continuedev/libs/llm/llamacpp.py              |  8
-rw-r--r-- | continuedev/src/continuedev/plugins/context_providers/file.py |  2
-rw-r--r-- | extension/src/continueIdeClient.ts                            | 10
4 files changed, 15 insertions(+), 7 deletions(-)
diff --git a/continuedev/src/continuedev/libs/llm/hf_tgi.py b/continuedev/src/continuedev/libs/llm/hf_tgi.py
index f106f83f..5c7e0239 100644
--- a/continuedev/src/continuedev/libs/llm/hf_tgi.py
+++ b/continuedev/src/continuedev/libs/llm/hf_tgi.py
@@ -52,7 +52,7 @@ class HuggingFaceTGI(LLM):
         ) as client_session:
             async with client_session.post(
                 f"{self.server_url}",
-                json={"inputs": prompt, **self._transform_args(args)},
+                json={"inputs": prompt, **args},
             ) as resp:
                 async for line in resp.content.iter_any():
                     if line:
diff --git a/continuedev/src/continuedev/libs/llm/llamacpp.py b/continuedev/src/continuedev/libs/llm/llamacpp.py
index 7940c4c9..84eb85fd 100644
--- a/continuedev/src/continuedev/libs/llm/llamacpp.py
+++ b/continuedev/src/continuedev/libs/llm/llamacpp.py
@@ -87,7 +87,7 @@ class LlamaCpp(LLM):
                 f"{self.server_url}/completion",
                 json={
                     "prompt": prompt,
-                    **self._transform_args(args),
+                    **args,
                 },
                 headers={"Content-Type": "application/json"},
             ) as resp:
@@ -107,7 +107,7 @@ class LlamaCpp(LLM):
             self.model,
             messages,
             self.context_length,
-            args["max_tokens"],
+            args["n_predict"] if "n_predict" in args else 1024,
             None,
             functions=args.get("functions", None),
             system_message=self.system_message,
@@ -123,7 +123,7 @@ class LlamaCpp(LLM):
         ) as client_session:
             async with client_session.post(
                 f"{self.server_url}/completion",
-                json={"prompt": prompt, **self._transform_args(args)},
+                json={"prompt": prompt, **args},
                 headers=headers,
             ) as resp:
                 async for line in resp.content:
@@ -170,7 +170,7 @@ class LlamaCpp(LLM):
         ) as client_session:
             async with client_session.post(
                 f"{self.server_url}/completion",
-                json={"prompt": prompt, **self._transform_args(args)},
+                json={"prompt": prompt, **args},
                 headers={"Content-Type": "application/json"},
             ) as resp:
                 json_resp = await resp.json()
diff --git a/continuedev/src/continuedev/plugins/context_providers/file.py b/continuedev/src/continuedev/plugins/context_providers/file.py
index 859088b8..a300a4a9 100644
--- a/continuedev/src/continuedev/plugins/context_providers/file.py
+++ b/continuedev/src/continuedev/plugins/context_providers/file.py
@@ -123,7 +123,7 @@ class FileContextProvider(ContextProvider):
         )
 
     async def provide_context_items(self, workspace_dir: str) -> List[ContextItem]:
-        contents = await self.sdk.ide.listDirectoryContents(workspace_dir, True)
+        contents = await self.sdk.ide.listDirectoryContents(workspace_dir, False)
         if contents is None:
             return []
 
diff --git a/extension/src/continueIdeClient.ts b/extension/src/continueIdeClient.ts
index 539eb10d..582caa53 100644
--- a/extension/src/continueIdeClient.ts
+++ b/extension/src/continueIdeClient.ts
@@ -72,7 +72,8 @@ class IdeProtocolClient {
       this.handleMessage(messageType, data, messenger).catch((err) => {
         vscode.window
           .showErrorMessage(
-            "Error handling message from Continue server: " + err.message,
+            `Error handling message (${messageType}) from Continue server: ` +
+              err.message,
             "View Logs"
           )
           .then((selection) => {
@@ -613,6 +614,13 @@ class IdeProtocolClient {
     let contents: string | undefined;
     if (typeof contents === "undefined") {
       try {
+        const fileStats = await vscode.workspace.fs.stat(
+          uriFromFilePath(filepath)
+        );
+        if (fileStats.size > 1000000) {
+          return "";
+        }
+
         contents = await vscode.workspace.fs
           .readFile(uriFromFilePath(filepath))
           .then((bytes) => new TextDecoder().decode(bytes));
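
A note on the llamacpp.py hunks: llama.cpp's HTTP server names its token-limit option n_predict rather than the OpenAI-style max_tokens, which is why the prompt-sizing call now reads args["n_predict"] with a fallback of 1024, and why the request bodies can spread **args directly instead of renaming keys first. The following is a minimal sketch of a direct call against the /completion endpoint; the server URL, option values, and helper name are illustrative assumptions, not part of this commit:

import asyncio

import aiohttp


async def complete(prompt: str, server_url: str = "http://localhost:8080") -> str:
    # llama.cpp's /completion endpoint takes the prompt plus generation
    # options (n_predict, temperature, ...) flat in the JSON body.
    async with aiohttp.ClientSession() as client_session:
        async with client_session.post(
            f"{server_url}/completion",
            json={"prompt": prompt, "n_predict": 1024},
            headers={"Content-Type": "application/json"},
        ) as resp:
            # The non-streaming response carries the generated text
            # under the "content" key.
            json_resp = await resp.json()
            return json_resp["content"]


print(asyncio.run(complete("def fib(n):")))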
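
Similarly, the continueIdeClient.ts hunk stats a file before reading it so that anything over the 1,000,000-byte cutoff is skipped rather than read and decoded. The same guard pattern in Python, as a rough sketch (the function name and constant are assumptions for illustration):

import os

MAX_FILE_SIZE = 1_000_000  # bytes; same cutoff as the TypeScript change


def read_file_guarded(filepath: str) -> str:
    # Stat first, mirroring the vscode.workspace.fs.stat() check above:
    # oversized files come back as an empty string instead of being read.
    if os.stat(filepath).st_size > MAX_FILE_SIZE:
        return ""
    with open(filepath, encoding="utf-8", errors="ignore") as f:
        return f.read()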