Spaces:
Sleeping
Sleeping
Auto commit at 06-2025-08 23:55:35
Browse files
- app.py +2 -1
- modeling.py +2 -2
app.py
CHANGED
|
@@ -55,7 +55,8 @@ try:
         token=HF_TOKEN,
         torch_dtype=torch.float16,
         trust_remote_code=True,
-        device_map="auto",
+        device_map="auto",
+        low_cpu_mem_usage=False
     )
     print(" ✅ 커스텀 모델 로딩 완료")
 else:
modeling.py
CHANGED
|
@@ -119,11 +119,11 @@ class DynamicCAbstractor(nn.Module):
         # ✨ 최적화: 모든 레이어를 bfloat16으로 초기화
         self.pos_emb = build_pos_embeds(config, num_input_tokens, config.encoder_hidden_size)
         if self.pos_emb is not None:
-            self.pos_emb = self.pos_emb.to(torch.bfloat16)
+            self.pos_emb.data = self.pos_emb.data.to(torch.bfloat16)

         self.eos_tokens = build_eos_tokens(config, config.output_hidden_size)
         if self.eos_tokens is not None:
-            self.eos_tokens = self.eos_tokens.to(torch.bfloat16)
+            self.eos_tokens.data = self.eos_tokens.data.to(torch.bfloat16)

         self.prenorm = build_prenorm(config)
         if self.prenorm is not None: