# hello-world / test_model.py
# Author: Chiedo John
# Initial commit (c125a8a)
from model import HelloWorldModel, HelloWorldConfig
from transformers import PreTrainedTokenizerFast
import torch
def main() -> None:
    """Smoke-test the HelloWorld model artifacts in the current directory.

    Loads the config, model weights, and tokenizer from local files, then
    exercises generation and a tokenize/decode round trip, printing each
    step's result.

    Raises:
        FileNotFoundError: if the expected artifact files are missing.
        RuntimeError: if the checkpoint does not match the model definition.
    """
    print("Loading configuration...")
    config = HelloWorldConfig.from_pretrained(".")

    print("Loading model...")
    model = HelloWorldModel(config)
    # weights_only=True restricts torch.load to tensor data, avoiding
    # arbitrary-code execution from an untrusted pickle payload.
    model.load_state_dict(
        torch.load("pytorch_model.bin", map_location="cpu", weights_only=True)
    )
    # Switch to inference mode (disables dropout / batch-norm updates).
    model.eval()

    print("Loading tokenizer...")
    tokenizer = PreTrainedTokenizerFast(tokenizer_file="tokenizer.json")

    print("\nTesting model generation...")
    output = model.generate_hello_world()
    print(f"Model output: {output}")

    print("\nTesting tokenization...")
    text = "Hello World"
    tokens = tokenizer.encode(text)
    print(f"Tokenized '{text}': {tokens}")
    # Round-trip: decoding the encoded ids should recover the input text.
    decoded = tokenizer.decode(tokens)
    print(f"Decoded back: {decoded}")

    print("\nModel test completed successfully!")


if __name__ == "__main__":
    main()