File size: 1,605 Bytes
c8b77b5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
"""
Gradient Descent and Backpropagation Training System
====================================================

This module provides a comprehensive implementation of gradient descent optimization
algorithms and backpropagation for neural network training, specifically designed
for the MangoMAS multi-agent system.

Key Components:
- Optimizers: SGD, Adam, AdamW with proper mathematical implementations
- Backpropagation: Chain rule-based gradient computation
- Training Loop: Complete training orchestration with monitoring
- Loss Functions: Various loss implementations for different tasks
- Monitoring: Comprehensive gradient and training metrics tracking

Usage:
    from src.training.gradient_descent import GradientDescentTrainer
    trainer = GradientDescentTrainer()
    results = trainer.train_agent(agent_spec)
"""

from .optimizers import SGD, Adam, AdamW, Optimizer
from .backpropagation import BackpropagationEngine
from .training_loop import GradientDescentTrainer
from .loss_functions import CrossEntropyLoss, KLDivergenceLoss, LossFunction
from .monitoring import GradientMonitor, TrainingMonitor
from .model_wrapper import ModelWrapper
from .schedulers import LinearScheduler, CosineScheduler, StepScheduler

# Package version; bump when the public API declared in __all__ changes.
__version__ = "1.0.0"
__author__ = "MangoMAS Team"

# Public API: names re-exported from the submodules imported above.
# Keep this list in sync with the `from .<module> import ...` lines.
__all__ = [
    # Optimizers (.optimizers)
    "SGD",
    "Adam",
    "AdamW",
    "Optimizer",
    # Gradient computation (.backpropagation)
    "BackpropagationEngine",
    # Training orchestration (.training_loop)
    "GradientDescentTrainer",
    # Loss functions (.loss_functions)
    "CrossEntropyLoss",
    "KLDivergenceLoss",
    "LossFunction",
    # Metrics tracking (.monitoring)
    "GradientMonitor",
    "TrainingMonitor",
    # Model adapter (.model_wrapper)
    "ModelWrapper",
    # Learning-rate schedulers (.schedulers)
    "LinearScheduler",
    "CosineScheduler",
    "StepScheduler",
]