b2u committed on
Commit
334eca1
·
1 Parent(s): 24a494f

changing LoRA settings

Browse files
Files changed (1) hide show
  1. model.py +5 -5
model.py CHANGED
@@ -207,8 +207,8 @@ class T5Model(LabelStudioMLBase):
207
 
208
  # Configure LoRA
209
  lora_config = LoraConfig(
210
- r=int(os.getenv('LORA_R', '16')),
211
- lora_alpha=int(os.getenv('LORA_ALPHA', '16')),
212
  target_modules=os.getenv('LORA_TARGET_MODULES', 'q,v').split(','),
213
  lora_dropout=float(os.getenv('LORA_DROPOUT', '0.1')),
214
  bias="none",
@@ -225,11 +225,11 @@ class T5Model(LabelStudioMLBase):
225
 
226
  # Training loop
227
  logger.info("Starting training loop...")
228
- optimizer = torch.optim.AdamW(model.parameters(), lr=float(os.getenv('LEARNING_RATE', '1e-4')))
229
-
230
- num_epochs = int(os.getenv('NUM_EPOCHS', '3'))
231
 
 
232
 
 
233
  # Add LoRA settings logging here
234
  logger.info("Current LoRA Configuration:")
235
  logger.info(f" - Rank (r): {lora_config.r}")
 
207
 
208
  # Configure LoRA
209
  lora_config = LoraConfig(
210
+ r=int(os.getenv('LORA_R', '4')),
211
+ lora_alpha=int(os.getenv('LORA_ALPHA', '8')),
212
  target_modules=os.getenv('LORA_TARGET_MODULES', 'q,v').split(','),
213
  lora_dropout=float(os.getenv('LORA_DROPOUT', '0.1')),
214
  bias="none",
 
225
 
226
  # Training loop
227
  logger.info("Starting training loop...")
228
+ optimizer = torch.optim.AdamW(model.parameters(), lr=float(os.getenv('LEARNING_RATE', '1e-5')))
 
 
229
 
230
+ num_epochs = int(os.getenv('NUM_EPOCHS', '6'))
231
 
232
+
233
  # Add LoRA settings logging here
234
  logger.info("Current LoRA Configuration:")
235
  logger.info(f" - Rank (r): {lora_config.r}")