convaiinnovations committed on
Commit
c1c4be0
·
verified ·
1 Parent(s): 8a52161

Upload continuous_learning_session.py

Browse files
Files changed (1) hide show
  1. continuous_learning_session.py +2 -2
continuous_learning_session.py CHANGED
@@ -102,8 +102,8 @@ class ContinuousLearningSession:
102
  # If it's a python list
103
  adapter_params = [p for layer in self.model.flux_layers for p in layer.parameters()]
104
 
105
- # Switch to SGD to save memory (Adam uses 2x states, causing OOM on T4)
106
- self.optimizer = optim.SGD(controller_params + adapter_params, lr=1e-3, momentum=0.9)
107
 
108
  self.model.train() # Enable gradients for Controller/Adapters
109
 
 
102
  # If it's a python list
103
  adapter_params = [p for layer in self.model.flux_layers for p in layer.parameters()]
104
 
105
+ # Switch back to Adam (Better convergence, relying on GC/Env for memory safety)
106
+ self.optimizer = optim.Adam(controller_params + adapter_params, lr=1e-4)
107
 
108
  self.model.train() # Enable gradients for Controller/Adapters
109