DaveKevin committed on
Commit
a7e084b
·
verified ·
1 Parent(s): 9b6f34c

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +3 -3
README.md CHANGED
@@ -9,15 +9,15 @@ base_model:
9
  ```
10
  from transformers import AutoModelForCausalLM, AutoTokenizer
11
  import torch
12
- model = AutoModelForCausalLM.from_pretrained("phronetic-ai/Qwen3-0.6B-ToolGRPO", torch_dtype=torch.bfloat16)
13
- tokeniser = AutoTokenizer.from_pretrained("phronetic-ai/Qwen3-0.6B-ToolGRPO")
14
  ```
15
 
16
 
17
  #### Inference
18
  ```
19
  from transformers import AutoModelForCausalLM, AutoTokenizer
20
- model_name = "phronetic-ai/Qwen3-0.6B-ToolGRPO"
21
  # load the tokenizer and the model
22
  tokenizer = AutoTokenizer.from_pretrained(model_name)
23
  model = AutoModelForCausalLM.from_pretrained(
 
9
  ```
10
  from transformers import AutoModelForCausalLM, AutoTokenizer
11
  import torch
12
+ model = AutoModelForCausalLM.from_pretrained("phronetic-ai/RZN-T", torch_dtype=torch.bfloat16)
13
+ tokeniser = AutoTokenizer.from_pretrained("phronetic-ai/RZN-T")
14
  ```
15
 
16
 
17
  #### Inference
18
  ```
19
  from transformers import AutoModelForCausalLM, AutoTokenizer
20
+ model_name = "phronetic-ai/RZN-T"
21
  # load the tokenizer and the model
22
  tokenizer = AutoTokenizer.from_pretrained(model_name)
23
  model = AutoModelForCausalLM.from_pretrained(