Deva1211 commited on
Commit
4e432fc
·
verified ·
1 Parent(s): 8e85724

Replaced all previous model references with TheBloke/alpaca-lora-65B-GPTQ

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -25,9 +25,9 @@ except Exception as e:
25
  try:
26
  # Second try: Use a smaller, more compatible model
27
  print("🔄 Falling back to Mistral-7B-Instruct-v0.1 (more compatible)...")
28
- tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
29
  model = AutoModelForCausalLM.from_pretrained(
30
- "mistralai/Mistral-7B-Instruct-v0.1",
31
  device_map="auto",
32
  torch_dtype=torch.float16,
33
  low_cpu_mem_usage=True,
 
25
  try:
26
  # Second try: Use a smaller, more compatible model
27
  print("🔄 Falling back to Mistral-7B-Instruct-v0.1 (more compatible)...")
28
+ tokenizer = AutoTokenizer.from_pretrained("TheBloke/alpaca-lora-65B-GPTQ")
29
  model = AutoModelForCausalLM.from_pretrained(
30
+ "TheBloke/alpaca-lora-65B-GPTQ",
31
  device_map="auto",
32
  torch_dtype=torch.float16,
33
  low_cpu_mem_usage=True,