akshitkr commited on
Commit
ac533fb
·
verified ·
1 Parent(s): 84e0b00

fix python code markdown

Browse files
Files changed (1) hide show
  1. README.md +3 -6
README.md CHANGED
@@ -7,21 +7,18 @@ Model trained on the TinyStories Dataset, see https://arxiv.org/abs/2305.07759

Before (lines 7–27):

    ------ EXAMPLE USAGE ---

    from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

    model = AutoModelForCausalLM.from_pretrained('roneneldan/TinyStories-1M')

    tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-125M")

    prompt = "Once upon a time there was"

    input_ids = tokenizer.encode(prompt, return_tensors="pt")

    # Generate completion
    output = model.generate(input_ids, max_length = 1000, num_beams=1)

    # Decode the completion
    output_text = tokenizer.decode(output[0], skip_special_tokens=True)

    # Print the generated text
    print(output_text)

After (lines 7–24) — the example is now wrapped in a fenced ```python code block so it renders as code:

    ------ EXAMPLE USAGE ---

    ```python
    from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

    model = AutoModelForCausalLM.from_pretrained('roneneldan/TinyStories-1M')

    tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-125M")

    prompt = "Once upon a time there was"

    input_ids = tokenizer.encode(prompt, return_tensors="pt")

    # Generate completion
    output = model.generate(input_ids, max_length = 1000, num_beams=1)

    # Decode the completion
    output_text = tokenizer.decode(output[0], skip_special_tokens=True)

    # Print the generated text
    print(output_text)
    ```