diff options
author | Nate Sesti <sestinj@gmail.com> | 2023-07-02 12:47:08 -0700 |
---|---|---|
committer | Nate Sesti <sestinj@gmail.com> | 2023-07-02 12:47:08 -0700 |
commit | a86933aec5243584def582150bbba1a926be9fda (patch) | |
tree | b324b2f416aeb32e408cf84816952c1d72ccefd0 /continuedev/src | |
parent | 452af6002addd67d7047ce4faa39a680ef2b4439 (diff) | |
download | sncontinue-a86933aec5243584def582150bbba1a926be9fda.tar.gz sncontinue-a86933aec5243584def582150bbba1a926be9fda.tar.bz2 sncontinue-a86933aec5243584def582150bbba1a926be9fda.zip |
ssl certificate fix
Diffstat (limited to 'continuedev/src')
-rw-r--r-- | continuedev/src/continuedev/libs/llm/proxy_server.py | 11 |
1 file changed, 8 insertions, 3 deletions
diff --git a/continuedev/src/continuedev/libs/llm/proxy_server.py b/continuedev/src/continuedev/libs/llm/proxy_server.py index bd831ad9..69c96ee8 100644 --- a/continuedev/src/continuedev/libs/llm/proxy_server.py +++ b/continuedev/src/continuedev/libs/llm/proxy_server.py @@ -5,6 +5,11 @@ import aiohttp from ...core.main import ChatMessage from ..llm import LLM from ..util.count_tokens import DEFAULT_ARGS, DEFAULT_MAX_TOKENS, compile_chat_messages, CHAT_MODELS, count_tokens +import certifi +import ssl + +ca_bundle_path = certifi.where() +ssl_context = ssl.create_default_context(cafile=ca_bundle_path) # SERVER_URL = "http://127.0.0.1:8080" SERVER_URL = "https://proxy-server-l6vsfbzhba-uw.a.run.app" @@ -31,7 +36,7 @@ class ProxyServer(LLM): async def complete(self, prompt: str, with_history: List[ChatMessage] = [], **kwargs) -> Coroutine[Any, Any, str]: args = self.default_args | kwargs - async with aiohttp.ClientSession() as session: + async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session: async with session.post(f"{SERVER_URL}/complete", json={ "messages": compile_chat_messages(args["model"], with_history, prompt, functions=None), "unique_id": self.unique_id, @@ -47,7 +52,7 @@ class ProxyServer(LLM): messages = compile_chat_messages( self.default_model, messages, None, functions=args.get("functions", None)) - async with aiohttp.ClientSession() as session: + async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session: async with session.post(f"{SERVER_URL}/stream_chat", json={ "messages": messages, "unique_id": self.unique_id, @@ -71,7 +76,7 @@ class ProxyServer(LLM): messages = compile_chat_messages( self.default_model, with_history, prompt, functions=args.get("functions", None)) - async with aiohttp.ClientSession() as session: + async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl_context=ssl_context)) as session: async with 
session.post(f"{SERVER_URL}/stream_complete", json={ "messages": messages, "unique_id": self.unique_id, |