| """BitTransformerLM: Bit-native transformer with reversible layers and telemetry.""" |
|
|
| |
| from .model import ( |
| BitTransformerLM, |
| PositionalEncoding, |
| ReversibleLoggingTransformerEncoderLayer, |
| diffusion_inference, |
| example_training_step, |
| example_usage, |
| infer_long_sequence, |
| ) |
|
|
| |
| from .bit_io import bits_to_text, infer_text, text_to_bits |
| from .compression import ( |
| compress_bits, |
| decompress_bits, |
| model_output_decompress, |
| pack_bits, |
| unpack_bits, |
| ) |
| from .parity import enforce_parity |
|
|
| |
| from .optimization import adjust_learning_rate, configure_optimizer |
| from .training import train_loop |
|
|
| |
| from .collapse import collapse_submodel, save_distilled_model |
| from .distil import TelemetryLog, distill_step |
| from .scale import expand_model |
|
|
| |
| from .distributed import make_pipeline, wrap_fsdp |
|
|
| |
| from .quantization import convert_qat_fx, prepare_qat_fx, quantize_dynamic |
|
|
| |
| from .safety import demo_hil_safety, hil_safe_inference, safe_sample_with_retry |
| from .telemetry import TelemetrySynthesizer, detect_metric_drift |
|
|
| |
| from .config import ( |
| DataConfig, |
| ExperimentConfig, |
| ModelConfig, |
| SafetyConfig, |
| TrainingConfig, |
| get_config_from_env, |
| get_large_config, |
| get_medium_config, |
| get_small_config, |
| ) |
|
|
| |
| from .cli import dashboard_cli, infer_cli, train_cli |
| from .cli_standards import BitTransformerCLI |
|
|
| |
| from .dashboard import plot_telemetry |
| from .dashboard_app import run_dashboard |
| from .hf_checkpoint import download_checkpoint, hf_login, save_checkpoint |
| from .torch_utils import cpu_autocast |
| from .utils import load_model, save_model, set_dropout |
|
|
# Explicit public API of the package, grouped by subsystem.
# Every name listed here is re-exported by the imports above; keep the two in sync.
__all__ = [
    # Core model and inference entry points (.model)
    "BitTransformerLM",
    "PositionalEncoding",
    "ReversibleLoggingTransformerEncoderLayer",
    "diffusion_inference",
    "example_training_step",
    "example_usage",
    "infer_long_sequence",
    # Bit-level I/O, compression, and parity (.bit_io, .compression, .parity)
    "bits_to_text",
    "compress_bits",
    "decompress_bits",
    "enforce_parity",
    "infer_text",
    "model_output_decompress",
    "pack_bits",
    "text_to_bits",
    "unpack_bits",
    # Optimization and training loops (.optimization, .training)
    "adjust_learning_rate",
    "configure_optimizer",
    "train_loop",
    # Distillation, collapse, and scaling (.collapse, .distil, .scale)
    "collapse_submodel",
    "distill_step",
    "expand_model",
    "save_distilled_model",
    "TelemetryLog",
    # Distributed execution helpers (.distributed)
    "make_pipeline",
    "wrap_fsdp",
    # Quantization (.quantization)
    "convert_qat_fx",
    "prepare_qat_fx",
    "quantize_dynamic",
    # Safety gating and telemetry analysis (.safety, .telemetry)
    "demo_hil_safety",
    "detect_metric_drift",
    "hil_safe_inference",
    "safe_sample_with_retry",
    "TelemetrySynthesizer",
    # Configuration presets (.config)
    "DataConfig",
    "ExperimentConfig",
    "get_config_from_env",
    "get_large_config",
    "get_medium_config",
    "get_small_config",
    "ModelConfig",
    "SafetyConfig",
    "TrainingConfig",
    # Command-line interfaces (.cli, .cli_standards)
    "BitTransformerCLI",
    "dashboard_cli",
    "infer_cli",
    "train_cli",
    # Dashboard, checkpoints, and misc utilities (.dashboard*, .hf_checkpoint, .torch_utils, .utils)
    "cpu_autocast",
    "download_checkpoint",
    "hf_login",
    "load_model",
    "plot_telemetry",
    "run_dashboard",
    "save_checkpoint",
    "save_model",
    "set_dropout",
]
|
|