Harley-ml committed on
Commit
7e36bac
·
verified ·
1 Parent(s): 76e6988

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +4 -4
README.md CHANGED
@@ -167,7 +167,7 @@ PROMPT = ">>> "
167
 
168
  def load_tokenizer(path: str):
169
  print("Loading tokenizer...", path)
170
- tok = AutoTokenizer.from_pretrained(path, use_fast=True, local_files_only=True)
171
  if tok.pad_token is None:
172
  if getattr(tok, "eos_token", None) is not None:
173
  tok.add_special_tokens({"pad_token": tok.eos_token})
@@ -181,14 +181,14 @@ def load_model(path: str, device: str):
181
  model = None
182
  try:
183
  desired_dtype = torch.float16 if device.startswith("cuda") else torch.float32
184
- model = MistralForCausalLM.from_pretrained(path, local_files_only=True, dtype=desired_dtype)
185
  print("Loaded with dtype arg.")
186
  except TypeError:
187
- model = MistralForCausalLM.from_pretrained(path, local_files_only=True)
188
  print("Loaded without dtype; will convert.")
189
  except Exception as e:
190
  print("Load warning, retrying without dtype:", e)
191
- model = MistralForCausalLM.from_pretrained(path, local_files_only=True)
192
 
193
  try:
194
  model.to(device)
 
167
 
168
  def load_tokenizer(path: str):
169
  print("Loading tokenizer...", path)
170
+ tok = AutoTokenizer.from_pretrained(path, use_fast=True, local_files_only=False)
171
  if tok.pad_token is None:
172
  if getattr(tok, "eos_token", None) is not None:
173
  tok.add_special_tokens({"pad_token": tok.eos_token})
 
181
  model = None
182
  try:
183
  desired_dtype = torch.float16 if device.startswith("cuda") else torch.float32
184
+ model = MistralForCausalLM.from_pretrained(path, local_files_only=False, dtype=desired_dtype)
185
  print("Loaded with dtype arg.")
186
  except TypeError:
187
+ model = MistralForCausalLM.from_pretrained(path, local_files_only=False)
188
  print("Loaded without dtype; will convert.")
189
  except Exception as e:
190
  print("Load warning, retrying without dtype:", e)
191
+ model = MistralForCausalLM.from_pretrained(path, local_files_only=False)
192
 
193
  try:
194
  model.to(device)