Hugging Face Space — status: Running
Commit: "Implement proper adapter switching instead of multiple models"
File changed: backend/models/character_manager.py
@@ -259,21 +259,21 @@ class CharacterManager:
     with open(temp_config_file, 'w') as f:
         json.dump(config_data, f, indent=2)

-    # (15 lines removed: the previous per-character model-loading code that
-    #  created a separate model instance for every adapter; the exact removed
-    #  content was not preserved in this page capture)
+    # Use single model with adapter switching approach
+    if not hasattr(self, 'peft_model'):
+        # First adapter - create the PEFT model
+        self.peft_model = PeftModel.from_pretrained(
+            self.base_model,
+            temp_dir,
+            adapter_name=character_id,
+            is_trainable=False,
+            torch_dtype=torch.float32,
+        )
+        model_with_adapter = self.peft_model
+    else:
+        # Subsequent adapters - load as additional adapters
+        self.peft_model.load_adapter(temp_dir, adapter_name=character_id)
+        model_with_adapter = self.peft_model

     self.character_models[character_id] = model_with_adapter
     logger.info(f"✅ Successfully loaded LoRA adapter for {character_id} with dedicated model instance")

@@ -333,6 +333,15 @@ class CharacterManager:

     # Get character-specific model and prompt
     model = self.character_models[character_id]
+
+    # If using PEFT model with multiple adapters, switch to the correct one
+    if hasattr(self, 'peft_model') and hasattr(self.peft_model, 'set_adapter'):
+        try:
+            self.peft_model.set_adapter(character_id)
+            model = self.peft_model
+        except Exception as e:
+            logger.warning(f"Failed to switch adapter to {character_id}: {e}")
+
     system_prompt = self.character_prompts.get(character_id, "")

     # Build conversation context