diff options
| author | Nate Sesti <sestinj@gmail.com> | 2023-08-27 23:27:03 -0700 |
|---|---|---|
| committer | Nate Sesti <sestinj@gmail.com> | 2023-08-27 23:27:03 -0700 |
| commit | e839427b02fedba1f869e9f1260b9c2095611d23 (patch) | |
| tree | 2ef9966dfd719c03e565a85dbafc1c893bf533d9 /continuedev/src/continuedev/libs/llm/prompts | |
| parent | 5c09b8077588a447d6eaac9b7f624571be3ddb1d (diff) | |
| download | sncontinue-e839427b02fedba1f869e9f1260b9c2095611d23.tar.gz sncontinue-e839427b02fedba1f869e9f1260b9c2095611d23.tar.bz2 sncontinue-e839427b02fedba1f869e9f1260b9c2095611d23.zip | |
refactor: :zap: use code llama / llama2 prompt for TogetherLLM
Diffstat (limited to 'continuedev/src/continuedev/libs/llm/prompts')
| -rw-r--r-- | continuedev/src/continuedev/libs/llm/prompts/chat.py | 56 |
1 file changed, 56 insertions, 0 deletions
diff --git a/continuedev/src/continuedev/libs/llm/prompts/chat.py b/continuedev/src/continuedev/libs/llm/prompts/chat.py new file mode 100644 index 00000000..110dfaae --- /dev/null +++ b/continuedev/src/continuedev/libs/llm/prompts/chat.py @@ -0,0 +1,56 @@ +from textwrap import dedent + +from ....core.main import ChatMessage + + +def llama2_template_messages(msgs: ChatMessage) -> str: + if len(msgs) == 0: + return "" + + prompt = "" + has_system = msgs[0]["role"] == "system" + + if has_system and msgs[0]["content"].strip() == "": + has_system = False + msgs = msgs[1:] + + if has_system: + system_message = dedent( + f"""\ + <<SYS>> + {msgs[0]["content"]} + <</SYS>> + + """ + ) + if len(msgs) > 1: + prompt += f"[INST] {system_message}{msgs[1]['content']} [/INST]" + else: + prompt += f"[INST] {system_message} [/INST]" + return + + for i in range(2 if has_system else 0, len(msgs)): + if msgs[i]["role"] == "user": + prompt += f"[INST] {msgs[i]['content']} [/INST]" + else: + prompt += msgs[i]["content"] + + return prompt + + +def code_llama_template_messages(msgs: ChatMessage) -> str: + return f"[INST] {msgs[-1]['content']}\n[/INST]" + + +def extra_space_template_messages(msgs: ChatMessage) -> str: + return f" {msgs[-1]['content']}" + + +def code_llama_python_template_messages(msgs: ChatMessage) -> str: + return dedent( + f"""\ + [INST] + You are an expert Python programmer and personal assistant, here is your task: {msgs[-1]['content']} + Your answer should start with a [PYTHON] tag and end with a [/PYTHON] tag. + [/INST]""" + ) |
