author     Nate Sesti <sestinj@gmail.com>    2023-07-02 12:47:08 -0700
committer  Nate Sesti <sestinj@gmail.com>    2023-07-02 12:47:08 -0700
commit     03499fc0b7efa75581c6c3eff40b47f2e4db58e0 (patch)
tree       999467a1ea6588c3c2d4d1c71ab97970bc7b7348 /continuedev
parent     4871ff4f6e579570613887b2570ae6dd86d83684 (diff)
ssl certificate fix
Diffstat (limited to 'continuedev')
-rw-r--r--  continuedev/pyproject.toml                             |  2
-rw-r--r--  continuedev/src/continuedev/libs/llm/proxy_server.py  | 11
2 files changed, 9 insertions, 4 deletions
diff --git a/continuedev/pyproject.toml b/continuedev/pyproject.toml
index e33627e7..6727e29a 100644
--- a/continuedev/pyproject.toml
+++ b/continuedev/pyproject.toml
@@ -6,7 +6,7 @@ authors = ["Nate Sesti <sestinj@gmail.com>"]
readme = "README.md"
[tool.poetry.dependencies]
-python = "^3.9"
+python = "^3.8"
diff-match-patch = "^20230430"
fastapi = "^0.95.1"
typer = "^0.7.0"
diff --git a/continuedev/src/continuedev/libs/llm/proxy_server.py b/continuedev/src/continuedev/libs/llm/proxy_server.py
index bd831ad9..69c96ee8 100644
--- a/continuedev/src/continuedev/libs/llm/proxy_server.py
+++ b/continuedev/src/continuedev/libs/llm/proxy_server.py
@@ -5,6 +5,11 @@ import aiohttp
from ...core.main import ChatMessage
from ..llm import LLM
from ..util.count_tokens import DEFAULT_ARGS, DEFAULT_MAX_TOKENS, compile_chat_messages, CHAT_MODELS, count_tokens
+import certifi
+import ssl
+
+ca_bundle_path = certifi.where()
+ssl_context = ssl.create_default_context(cafile=ca_bundle_path)
# SERVER_URL = "http://127.0.0.1:8080"
SERVER_URL = "https://proxy-server-l6vsfbzhba-uw.a.run.app"
@@ -31,7 +36,7 @@ class ProxyServer(LLM):
async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]:
args = self.default_args | kwargs
- async with aiohttp.ClientSession() as session:
+ async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/complete", json={
"messages": compile_chat_messages(args["model"], with_history, prompt, functions=None),
"unique_id": self.unique_id,
@@ -47,7 +52,7 @@ class ProxyServer(LLM):
messages = compile_chat_messages(
self.default_model, messages, None, functions=args.get("functions", None))
- async with aiohttp.ClientSession() as session:
+ async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/stream_chat", json={
"messages": messages,
"unique_id": self.unique_id,
@@ -71,7 +76,7 @@ class ProxyServer(LLM):
messages = compile_chat_messages(
self.default_model, with_history, prompt, functions=args.get("functions", None))
- async with aiohttp.ClientSession() as session:
+ async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session:
async with session.post(f"{SERVER_URL}/stream_complete", json={
"messages": messages,
"unique_id": self.unique_id,