Text Generation
Transformers
Safetensors
English
altarthedeer committed on
Commit
06d96f3
·
verified ·
1 Parent(s): f1a1e60

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +10 -6
README.md CHANGED
@@ -60,8 +60,11 @@ Specifically, we used:
60
  To load the model with HuggingFace, use the following snippet:
61
  ```
62
  from transformers import AutoModelForCausalLM
 
 
 
 
63
 
64
- sft_model = AutoModelForCausalLM.from_pretrained("pm-25/llama3-8b-sft")
65
  ```
66
 
67
  ### via Playpen
@@ -78,7 +81,7 @@ Before evaluation, the model must be registered in the model_registry.json file
78
  {
79
  "model_name": "llama3-8b-sft",
80
  "backend": "huggingface_local",
81
- "huggingface_id": "pm-25/llama3-8b-sft",
82
  "release_date": "2025-08-22",
83
  "open_weight": true,
84
  "parameters": "8B",
@@ -89,10 +92,11 @@ Before evaluation, the model must be registered in the model_registry.json file
89
  "url": "https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE"
90
  },
91
  "model_config": {
92
- "requires_api_key": true,
93
- "premade_chat_template": true,
94
- "eos_to_cull": "<\|eot_id\|>"
95
- }
 
96
  }
97
  ```
98
 
 
60
  To load the model with HuggingFace, use the following snippet:
61
  ```
62
  from transformers import AutoModelForCausalLM
63
+ from peft import PeftModel
64
+
65
+ model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-3.1-8B-Instruct")
66
+ model = PeftModel.from_pretrained(model, "pm-25/llama3-8b-sft")
67
 
 
68
  ```
69
 
70
  ### via Playpen
 
81
  {
82
  "model_name": "llama3-8b-sft",
83
  "backend": "huggingface_local",
84
+ "huggingface_id": "meta-llama/Meta-Llama-3.1-8B-Instruct",
85
  "release_date": "2025-08-22",
86
  "open_weight": true,
87
  "parameters": "8B",
 
92
  "url": "https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE"
93
  },
94
  "model_config": {
95
+ "peft_model": "pm-25/llama3-8b-sft",
96
+ "requires_api_key": true,
97
+ "premade_chat_template": true,
98
+ "eos_to_cull": "<\|eot_id\|>"
99
+ }
100
  }
101
  ```
102