summaryrefslogtreecommitdiff
path: root/continuedev
diff options
context:
space:
mode:
author Nate Sesti <sestinj@gmail.com> 2023-09-02 11:27:30 -0700
committer Nate Sesti <sestinj@gmail.com> 2023-09-02 11:27:30 -0700
commit b9bdf5894c1c68b60d1919ae07b0f5909b00dec2 (patch)
tree 5dad407b6297c2b0030efff44dd8604eb3e64315 /continuedev
parent 45bd5535363414d9c814d03e77e336b675223fb4 (diff)
download sncontinue-b9bdf5894c1c68b60d1919ae07b0f5909b00dec2.tar.gz
sncontinue-b9bdf5894c1c68b60d1919ae07b0f5909b00dec2.tar.bz2
sncontinue-b9bdf5894c1c68b60d1919ae07b0f5909b00dec2.zip
fix: :bug: a number of bug fixes
Diffstat (limited to 'continuedev')
-rw-r--r-- continuedev/src/continuedev/libs/llm/hf_tgi.py | 2
-rw-r--r-- continuedev/src/continuedev/libs/llm/llamacpp.py | 8
-rw-r--r-- continuedev/src/continuedev/plugins/context_providers/file.py | 2
3 files changed, 6 insertions, 6 deletions
diff --git a/continuedev/src/continuedev/libs/llm/hf_tgi.py b/continuedev/src/continuedev/libs/llm/hf_tgi.py
index f106f83f..5c7e0239 100644
--- a/continuedev/src/continuedev/libs/llm/hf_tgi.py
+++ b/continuedev/src/continuedev/libs/llm/hf_tgi.py
@@ -52,7 +52,7 @@ class HuggingFaceTGI(LLM):
) as client_session:
async with client_session.post(
f"{self.server_url}",
- json={"inputs": prompt, **self._transform_args(args)},
+ json={"inputs": prompt, **args},
) as resp:
async for line in resp.content.iter_any():
if line:
diff --git a/continuedev/src/continuedev/libs/llm/llamacpp.py b/continuedev/src/continuedev/libs/llm/llamacpp.py
index 7940c4c9..84eb85fd 100644
--- a/continuedev/src/continuedev/libs/llm/llamacpp.py
+++ b/continuedev/src/continuedev/libs/llm/llamacpp.py
@@ -87,7 +87,7 @@ class LlamaCpp(LLM):
f"{self.server_url}/completion",
json={
"prompt": prompt,
- **self._transform_args(args),
+ **args,
},
headers={"Content-Type": "application/json"},
) as resp:
@@ -107,7 +107,7 @@ class LlamaCpp(LLM):
self.model,
messages,
self.context_length,
- args["max_tokens"],
+ args["n_predict"] if "n_predict" in args else 1024,
None,
functions=args.get("functions", None),
system_message=self.system_message,
@@ -123,7 +123,7 @@ class LlamaCpp(LLM):
) as client_session:
async with client_session.post(
f"{self.server_url}/completion",
- json={"prompt": prompt, **self._transform_args(args)},
+ json={"prompt": prompt, **args},
headers=headers,
) as resp:
async for line in resp.content:
@@ -170,7 +170,7 @@ class LlamaCpp(LLM):
) as client_session:
async with client_session.post(
f"{self.server_url}/completion",
- json={"prompt": prompt, **self._transform_args(args)},
+ json={"prompt": prompt, **args},
headers={"Content-Type": "application/json"},
) as resp:
json_resp = await resp.json()
diff --git a/continuedev/src/continuedev/plugins/context_providers/file.py b/continuedev/src/continuedev/plugins/context_providers/file.py
index 859088b8..a300a4a9 100644
--- a/continuedev/src/continuedev/plugins/context_providers/file.py
+++ b/continuedev/src/continuedev/plugins/context_providers/file.py
@@ -123,7 +123,7 @@ class FileContextProvider(ContextProvider):
)
async def provide_context_items(self, workspace_dir: str) -> List[ContextItem]:
- contents = await self.sdk.ide.listDirectoryContents(workspace_dir, True)
+ contents = await self.sdk.ide.listDirectoryContents(workspace_dir, False)
if contents is None:
return []