Update app.py
app.py CHANGED
@@ -31,30 +31,19 @@ if device == "cuda":
         torch_dtype=torch.float16,
         device_map="auto",
     )
-    model = PeftModel.from_pretrained(
-        model, LORA_WEIGHTS, torch_dtype=torch.float16, force_download=True
-    )
+
 elif device == "mps":
     model = LlamaForCausalLM.from_pretrained(
         BASE_MODEL,
         device_map={"": device},
         torch_dtype=torch.float16,
     )
-    model = PeftModel.from_pretrained(
-        model,
-        LORA_WEIGHTS,
-        device_map={"": device},
-        torch_dtype=torch.float16,
-    )
+
 else:
     model = LlamaForCausalLM.from_pretrained(
         BASE_MODEL, device_map={"": device}, low_cpu_mem_usage=True
     )
-    model = PeftModel.from_pretrained(
-        model,
-        LORA_WEIGHTS,
-        device_map={"": device},
-    )
+
 
 
 def generate_prompt(instruction, input=None):
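
The removed lines layered the LoRA adapter weights on top of the base Llama checkpoint; after this change only the plain LlamaForCausalLM is loaded, so generation runs without the adapter. Below is a minimal sketch of the loading path the CUDA branch used before the change. It assumes the elided opening line of the removed call was peft's PeftModel.from_pretrained (consistent with the model / LORA_WEIGHTS arguments shown), and it uses placeholder values for BASE_MODEL and LORA_WEIGHTS, which app.py defines earlier.

import torch
from peft import PeftModel
from transformers import LlamaForCausalLM

# Placeholder values; the real ones are set earlier in app.py.
BASE_MODEL = "path/to/base-llama"
LORA_WEIGHTS = "path/to/lora-adapter"

# Load the base checkpoint, exactly as the kept lines still do.
model = LlamaForCausalLM.from_pretrained(
    BASE_MODEL,
    torch_dtype=torch.float16,
    device_map="auto",
)

# The step this commit removes: wrap the base model with the LoRA adapter.
# Without it, `model` stays a plain LlamaForCausalLM.
model = PeftModel.from_pretrained(
    model,
    LORA_WEIGHTS,
    torch_dtype=torch.float16,
)

With the PeftModel wrapper gone, the app presumably serves the base model's outputs rather than those of the fine-tuned adapter.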