blob: c7c208c0afd06bdd36f169b97dd069b10ed1e6b4 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
|
from textwrap import dedent
from ....core.main import ChatMessage
def llama2_template_messages(msgs: "List[ChatMessage]") -> str:
    """Format a chat transcript into a Llama-2 [INST]/<<SYS>> prompt string.

    Args:
        msgs: Ordered chat messages; each is a dict-like with "role"
            ("system" / "user" / "assistant") and "content" keys.

    Returns:
        A single prompt string. User turns are wrapped in ``[INST] ... [/INST]``;
        assistant turns are appended verbatim. A leading system message is
        embedded in a ``<<SYS>>`` block inside the first ``[INST]``.
    """
    if not msgs:
        return ""

    # Work on a shallow copy so we never mutate the caller's list
    # (the original implementation popped from the argument in place).
    msgs = list(msgs)

    # These models aren't trained to handle an assistant message coming first,
    # and typically these are just introduction messages from Continue.
    if msgs[0]["role"] == "assistant":
        msgs.pop(0)
        if not msgs:
            # The introduction was the only message — nothing left to format.
            return ""

    prompt = ""
    has_system = msgs[0]["role"] == "system"

    # A blank system message carries no instruction; treat it as absent.
    if has_system and msgs[0]["content"].strip() == "":
        has_system = False
        msgs = msgs[1:]

    if has_system:
        # Built directly rather than via textwrap.dedent: dedent runs AFTER
        # f-string interpolation, so multi-line system content would defeat it.
        # The output is identical to the original template.
        system_message = f"<<SYS>>\n{msgs[0]['content']}\n<</SYS>>\n"
        if len(msgs) > 1:
            # Fold the first user turn into the same [INST] as the system block.
            prompt += f"[INST] {system_message}{msgs[1]['content']} [/INST]"
        else:
            # BUG FIX: this branch previously ended with a bare `return`,
            # yielding None despite the declared `-> str` return type.
            return f"[INST] {system_message} [/INST]"

    # When a system block was emitted, msgs[0] (system) and msgs[1] (first
    # user turn) are already consumed, so continue from index 2.
    for i in range(2 if has_system else 0, len(msgs)):
        if msgs[i]["role"] == "user":
            prompt += f"[INST] {msgs[i]['content']} [/INST]"
        else:
            prompt += msgs[i]["content"]
    return prompt
def code_llama_template_messages(msgs: "List[ChatMessage]") -> str:
    """Format only the most recent message as a CodeLlama [INST] prompt.

    Earlier turns in ``msgs`` are deliberately ignored; only the last
    message's content is wrapped. The annotation is fixed from the original
    (a single ``ChatMessage``) to a list, which is what the indexing requires.
    """
    return f"[INST] {msgs[-1]['content']}\n[/INST]"
def extra_space_template_messages(msgs: "List[ChatMessage]") -> str:
    """Return the last message's content prefixed with a single space.

    Used for models that expect a leading space before the prompt text.
    The annotation is fixed from the original (a single ``ChatMessage``)
    to a list, which is what the ``msgs[-1]`` indexing requires.
    """
    return f" {msgs[-1]['content']}"
def code_llama_python_template_messages(msgs: "List[ChatMessage]") -> str:
    """Wrap the latest message in a CodeLlama-Python instruction prompt.

    Only the last message's content is used as the task description; the
    model is instructed to answer inside [PYTHON]...[/PYTHON] tags.
    The annotation is fixed from the original (a single ``ChatMessage``)
    to a list, which is what the ``msgs[-1]`` indexing requires.
    """
    # Template lines are kept at column 0 so dedent is a no-op and the
    # emitted prompt has no leading whitespace on any line.
    return dedent(
        f"""\
[INST]
You are an expert Python programmer and personal assistant, here is your task: {msgs[-1]['content']}
Your answer should start with a [PYTHON] tag and end with a [/PYTHON] tag.
[/INST]"""
    )
|