Text Generation
Transformers
Safetensors
English
bolmo
custom_code
benjamin committed on
Commit
1ff0ce4
·
verified ·
1 Parent(s): 3eb1093

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +2 -2
README.md CHANGED
@@ -50,11 +50,11 @@ device = "cuda"
50
  bolmo = AutoModelForCausalLM.from_pretrained("allenai/Bolmo-7B", trust_remote_code=True).to(device)
51
  tokenizer = AutoTokenizer.from_pretrained("allenai/Bolmo-7B", trust_remote_code=True)
52
 
53
- message = ["Who would win in a fight - a dinosaur or a cow named Moo Moo?"]
54
  input_ids = tokenizer(message, return_tensors="pt")["input_ids"].to(device)
55
 
56
  # `max_new_tokens` is the amount of bytes to generate
57
- response = bolmo.generate(input_ids, max_new_tokens=100, do_sample=True)
58
  print(tokenizer.decode(response[0], skip_special_tokens=True))
59
  ```
60
 
 
50
  bolmo = AutoModelForCausalLM.from_pretrained("allenai/Bolmo-7B", trust_remote_code=True).to(device)
51
  tokenizer = AutoTokenizer.from_pretrained("allenai/Bolmo-7B", trust_remote_code=True)
52
 
53
+ message = ["Language modeling is "]
54
  input_ids = tokenizer(message, return_tensors="pt")["input_ids"].to(device)
55
 
56
  # `max_new_tokens` is the amount of bytes to generate
57
+ response = bolmo.generate(input_ids, max_new_tokens=256, do_sample=True, temperature=0.1)
58
  print(tokenizer.decode(response[0], skip_special_tokens=True))
59
  ```
60