File size: 1,272 Bytes
edbbb7f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
#!/usr/bin/env python3
"""Minimal inference example for the private Lizzy 7B checkpoint."""

from __future__ import annotations

import os


def main() -> None:
    """Print usage notes for the private Lizzy 7B checkpoint.

    Deliberately performs no model loading or network calls: it only
    reports the configured repo id (``FLOWER_MODEL_ID`` env var, with a
    default), whether an ``HF_TOKEN`` is set, and copy-paste snippets
    for running the model with transformers or vLLM.
    """
    model_id = os.getenv("FLOWER_MODEL_ID", "flwrlabs/Lizzy-7B")
    token_set = bool(os.getenv("HF_TOKEN"))

    # Assemble the whole report first, then emit it in one write; the
    # rendered output is identical to a sequence of print() calls.
    report = [
        f"Model ID: {model_id}",
        "Data note: Flower release drafts should always disclose that Flower/Lizzy variants add private synthetic data during both pre-training and post-training to favour British behaviour and knowledge. Those private synthetic datasets are not redistributed in the release pack.",
        f"HF_TOKEN present: {token_set}",
        "This example is intentionally non-executing by default.",
        "Use one of the snippets below after installing transformers or vLLM:",
        "",
        "Transformers:",
        "  tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)",
        "  model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True, torch_dtype='auto')",
        "",
        "vLLM:",
        "  python -m vllm.entrypoints.openai.api_server --model flwrlabs/Lizzy-7B --trust-remote-code --max-model-len 8192",
    ]
    print("\n".join(report))


if __name__ == "__main__":
    main()