|
|
|
|
|
|
|
|
|
|
|
from typing import Tuple |
|
|
import os |
|
|
import sys |
|
|
import torch |
|
|
import fire |
|
|
import time |
|
|
import json |
|
|
|
|
|
from tqdm import tqdm |
|
|
|
|
|
from pathlib import Path |
|
|
|
|
|
from fairscale.nn.model_parallel.initialize import initialize_model_parallel |
|
|
|
|
|
from llama import ModelArgs, Transformer, Tokenizer, LLaMA |
|
|
|
|
|
|
|
|
def setup_model_parallel() -> Tuple[int, int]:
    """Initialize torch.distributed and fairscale model parallelism.

    Reads ``LOCAL_RANK`` and ``WORLD_SIZE`` from the environment (as set by
    a torchrun-style launcher; both fall back to -1 when absent) and returns
    them as ``(local_rank, world_size)``.
    """
    rank = int(os.environ.get("LOCAL_RANK", -1))
    size = int(os.environ.get("WORLD_SIZE", -1))

    torch.distributed.init_process_group("nccl")
    initialize_model_parallel(size)
    torch.cuda.set_device(rank)

    # Seed all processes identically so sampling is reproducible across ranks.
    torch.manual_seed(1)
    return rank, size
|
|
|
|
|
|
|
|
def load(ckpt_dir: str, tokenizer_path: str, local_rank: int, world_size: int) -> LLaMA:
    """Load this rank's LLaMA checkpoint shard and build a generator.

    Each model-parallel rank loads exactly one ``*.pth`` shard from
    ``ckpt_dir``, so the number of shards must equal ``world_size``.

    Args:
        ckpt_dir: directory holding the sharded ``*.pth`` checkpoints and
            the ``params.json`` model config.
        tokenizer_path: path to the SentencePiece tokenizer model file.
        local_rank: this process's rank; selects which shard to load.
        world_size: total number of model-parallel processes.

    Returns:
        A ready-to-sample ``LLaMA`` generator.
    """
    start_time = time.time()
    checkpoints = sorted(Path(ckpt_dir).glob("*.pth"))
    assert (
        world_size == len(checkpoints)
    ), f"Loading a checkpoint for MP={len(checkpoints)} but world size is {world_size}"
    ckpt_path = checkpoints[local_rank]
    print("Loading")
    checkpoint = torch.load(ckpt_path, map_location="cpu")
    with open(Path(ckpt_dir) / "params.json", "r") as f:
        params = json.load(f)  # parse the stream directly; no intermediate string

    # max_seq_len=1536 matches the max_gen_len used at generation time.
    model_args: ModelArgs = ModelArgs(max_seq_len=1536, max_batch_size=32, **params)
    tokenizer = Tokenizer(model_path=tokenizer_path)
    model_args.vocab_size = tokenizer.n_words

    # Build the model with fp16 parameters on GPU; restore the float32
    # default even if construction fails, so later allocations elsewhere
    # are not silently created as cuda.HalfTensor.
    torch.set_default_tensor_type(torch.cuda.HalfTensor)
    try:
        model = Transformer(model_args)
    finally:
        torch.set_default_tensor_type(torch.FloatTensor)
    # strict=False: each shard holds only this rank's slice of the weights.
    model.load_state_dict(checkpoint, strict=False)

    generator = LLaMA(model, tokenizer)
    print(f"Loaded in {time.time() - start_time:.2f} seconds")
    return generator
|
|
|
|
|
|
|
|
def read(lang, base_dir="/data/wyt/in-context/xglm", n_shots=16):
    """Build a few-shot translation prompt and load the test sentences.

    Ranks the dev-set examples by their recorded BLEU score (best first),
    concatenates the top ``n_shots`` source/target pairs after an
    instruction line, and reads the WMT19 test sources.

    Args:
        lang: language pair tag such as ``"de-en"`` or ``"en-de"``; the
            first/last two characters are used as source/target suffixes.
        base_dir: root directory holding ``bleu_scores.txt`` and the
            ``corpus/`` tree (default keeps the original hard-coded path).
        n_shots: number of demonstration pairs to include in the prompt.

    Returns:
        Tuple of ``(demonstration, test_src_list)``.
    """
    # Map each dev-set index to its BLEU score; lines are "idx\tscore".
    score_map = dict()
    with open("%s/bleu_scores.txt" % base_dir, "r") as f:
        for line in f:
            fields = line.split("\t")
            score_map[int(fields[0])] = float(fields[1])

    # Dev-set indices sorted best-first by BLEU score.
    ranked = sorted(score_map.items(), key=lambda kv: kv[1], reverse=True)
    top_k = [idx for idx, _ in ranked]

    with open("%s/corpus/dev.%s" % (base_dir, lang[:2]), "r") as f:
        src_list = [line.strip() for line in f]
    with open("%s/corpus/dev.%s" % (base_dir, lang[-2:]), "r") as f:
        tgt_list = [line.strip() for line in f]

    # Test sources; skip blank separator lines.
    with open("%s/corpus/wmt19/test.%s.%s" % (base_dir, lang, lang[:2]), "r") as f:
        test_src_list = [line.strip() for line in f if line != "\n"]

    demonstration = "Translate these sentences from German to English:\n" if lang == "de-en" else "Translate these sentences from English to German:\n"
    for idx in top_k[:n_shots]:
        demonstration = demonstration + "%s\n%s\n" % (src_list[idx], tgt_list[idx])
    return demonstration, test_src_list
|
|
|
|
|
|
|
|
def main(ckpt_dir: str, tokenizer_path: str, lang: str, temperature: float = 0.8, top_p: float = 0.95):
    """Run batched few-shot translation and append raw outputs to disk.

    Args:
        ckpt_dir: directory with the sharded model checkpoints.
        tokenizer_path: path to the SentencePiece tokenizer model.
        lang: language pair tag, e.g. ``"de-en"`` or ``"en-de"``.
        temperature: sampling temperature (also encoded in the output name).
        top_p: nucleus-sampling threshold.
    """
    local_rank, world_size = setup_model_parallel()
    if local_rank > 0:
        sys.stdout = open(os.devnull, 'w')  # silence all non-master ranks

    print("temperature: ", temperature)
    generator = load(ckpt_dir, tokenizer_path, local_rank, world_size)

    demonstration, test_src_list = read(lang)
    step_len = 8  # generation batch size

    # Only rank 0 writes results; open the output file once instead of
    # re-opening (and never closing) it on every batch.
    out_f = open("%s.%s.temp%02d.raw" % (lang, lang[-2:], temperature * 10), 'a') if local_rank == 0 else None
    try:
        for i in tqdm(range(0, len(test_src_list), step_len)):
            prompt_list = [demonstration + test_src + "\n" for test_src in test_src_list[i:i+step_len]]
            results = generator.generate(prompt_list, max_gen_len=1536, temperature=temperature, top_p=top_p)

            if out_f is not None:
                for res_idx, result in enumerate(results):
                    # The model output echoes the prompt: locate the source
                    # sentence and take the very next line as the translation.
                    start = result.find(test_src_list[i + res_idx])
                    if start == -1:
                        tgt = ""
                    else:
                        lines = result[start:].split("\n")
                        tgt = lines[1] if len(lines) > 1 else ""
                    out_f.write(tgt + "\n")
    finally:
        if out_f is not None:
            out_f.close()
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # fire exposes main's signature as a CLI, e.g.
    #   python script.py --ckpt_dir ... --tokenizer_path ... --lang de-en
    fire.Fire(main)
|
|
|