forked from facebookresearch/fairseq2
-
Notifications
You must be signed in to change notification settings - Fork 0
/
chatbot.py
98 lines (64 loc) · 2.56 KB
/
chatbot.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
from argparse import ArgumentParser
from pathlib import Path
from typing import Optional
import torch
from fairseq2.assets import default_asset_store
from fairseq2.data.text import load_text_tokenizer
from fairseq2.generation import (
Chatbot,
ChatMessage,
SamplingSequenceGenerator,
TopPSampler,
)
from fairseq2.models import create_chatbot, load_model
from fairseq2.models.decoder import DecoderModel
from fairseq2.typing import Device
def run_chatbot(
    checkpoint_dir: Optional[Path] = None,
    model_name: str = "llama2_7b_chat",
    device: str = "cuda:0",
) -> None:
    """Load a LLaMA chat model and start an interactive console chat session.

    :param checkpoint_dir: If provided, overrides the asset card's checkpoint
        and tokenizer paths with ``consolidated.pth`` / ``tokenizer.model``
        files under this directory.
    :param model_name: Name of the model asset card to retrieve and load.
    :param device: Device string on which to place the model.

    :raises ValueError: If the loaded model is not a decoder model.
    """
    model_card = default_asset_store.retrieve_card(model_name)

    if checkpoint_dir is not None:
        # Point the card at local files instead of the default assets.
        model_card.field("checkpoint").set(checkpoint_dir / "consolidated.pth")
        model_card.field("tokenizer").set(checkpoint_dir / "tokenizer.model")

    model = load_model(model_card, dtype=torch.float16, device=Device(device))

    if not isinstance(model, DecoderModel):
        raise ValueError("The model must be a decoder model.")

    tokenizer = load_text_tokenizer(model_card)

    # Nucleus (top-p) sampling with p=0.8.
    sampler = TopPSampler(p=0.8)

    generator = SamplingSequenceGenerator(
        model, sampler, temperature=0.6, max_gen_len=1024
    )

    # compat
    chatbot = create_chatbot(generator, tokenizer)  # type: ignore[arg-type]

    do_run_chatbot(model_name, chatbot)
def do_run_chatbot(name: str, chatbot: Chatbot) -> None:
    """Drive a console chat loop against *chatbot*.

    Optionally collects a system prompt (when the chatbot supports one),
    then repeatedly reads user messages until the user types 'bye'.
    """
    history = []

    if chatbot.supports_system_prompt:
        system_prompt = input("System Prompt (press enter to skip): ")
        if system_prompt:
            history.append(ChatMessage(role="system", content=system_prompt))
        print()

    print("You can end the chat by typing 'bye'.\n")

    while True:
        user_input = input("You> ")
        if user_input == "bye":
            break

        history.append(ChatMessage(role="user", content=user_input))

        print(f"\n{name}> ", end="")

        # The chatbot streams its reply to stdout; we also keep the message
        # object so the growing dialog is passed back on the next turn.
        reply, _ = chatbot(history, stdout=True)

        print("\n")

        history.append(reply)

    print(f"\n{name}> Bye!")
def main() -> None:
    """Parse command-line arguments and launch the chatbot."""
    parser = ArgumentParser(prog="chatbot", description="A basic LLaMA chatbot")

    # Pass help= directly instead of mutating the returned action's .help
    # attribute after the fact — same help text, idiomatic argparse usage.
    parser.add_argument(
        "-c",
        "--checkpoint-dir",
        metavar="DIR",
        dest="checkpoint_dir",
        type=Path,
        help="path to the model checkpoint directory",
    )

    args = parser.parse_args()

    run_chatbot(args.checkpoint_dir)
# Run the chatbot only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()