"""
Example usage of YLFF pipeline.
"""

import logging

from ylff.ba_validator import BAValidator
from ylff.data_pipeline import BADataPipeline
from ylff.models import load_da3_model

logging.basicConfig(level=logging.INFO)


def example_validate_sequence():
    """Example: Validate a single sequence with BA."""
    print("Example 1: Validating a sequence with BA")

    # Load model (bound to a name so the commented-out inference call below has
    # something to refer to; unused until those lines are uncommented)
    model = load_da3_model("depth-anything/DA3-LARGE")  # noqa: F841

    # Create validator
    validator = BAValidator(  # noqa: F841
        accept_threshold=2.0,
        reject_threshold=30.0,
    )

    # Load images (example - replace with your images; a hypothetical
    # load_images_from_directory sketch is defined after this function)
    # images = load_images_from_directory("path/to/sequence")

    # Run model
    # output = model.inference(images)

    # Validate
    # result = validator.validate(images, output.extrinsics)
    # print(f"Status: {result['status']}, Error: {result['error']:.2f}°")

    print("✓ Validation example complete")

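
# The comments in example_validate_sequence reference a load_images_from_directory
# helper that is not defined in this example. Below is a minimal, hypothetical
# sketch (not part of the ylff API), assuming the model accepts a list of RGB
# numpy arrays; adapt it to whatever input format your DA3 wrapper expects.
def load_images_from_directory(directory):
    """Load all images under `directory` (sorted by filename) as RGB numpy arrays."""
    # Imports are kept local so the sketch stays self-contained.
    from pathlib import Path

    import numpy as np
    from PIL import Image

    image_paths = sorted(
        p for p in Path(directory).iterdir()
        if p.suffix.lower() in {".jpg", ".jpeg", ".png"}
    )
    return [np.asarray(Image.open(p).convert("RGB")) for p in image_paths]
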

def example_build_training_set():
    """Example: Build training set from sequences."""
    print("Example 2: Building training set")

    # Load model
    model = load_da3_model("depth-anything/DA3-LARGE")

    # Create validator and pipeline (the pipeline is bound to a name so the
    # commented-out calls below refer to it; unused until they are uncommented)
    validator = BAValidator()
    pipeline = BADataPipeline(model, validator)  # noqa: F841

    # Find sequences (requires `from pathlib import Path` when uncommented, or use
    # the hypothetical find_sequence_dirs sketch defined after this function)
    # sequence_paths = [Path("data/raw/seq1"), Path("data/raw/seq2")]

    # Build training set
    # training_samples = pipeline.build_training_set(
    #     sequence_paths=sequence_paths,
    #     max_samples=100,
    # )

    # Save
    # pipeline.save_training_set(training_samples, Path("data/training/training_set.pkl"))

    print("✓ Training set building example complete")

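
# The sequence_paths list in example_build_training_set is hard-coded; in practice
# you would typically discover sequences on disk. A minimal sketch, assuming each
# subdirectory of a raw-data root is one image sequence (this helper and the
# directory layout are assumptions, not part of the ylff API):
#   sequence_paths = find_sequence_dirs("data/raw")
def find_sequence_dirs(root):
    """Return the sorted subdirectories of `root`, each treated as one sequence."""
    from pathlib import Path

    return sorted(p for p in Path(root).iterdir() if p.is_dir())
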

def example_fine_tune():
    """Example: Fine-tune model on BA-supervised samples."""
    print("Example 3: Fine-tuning model")

    # Load training set (requires `import pickle` when uncommented)
    # with open("data/training/training_set.pkl", "rb") as f:
    #     training_samples = pickle.load(f)

    # Load model
    # model = load_da3_model("depth-anything/DA3-LARGE")

    # Fine-tune (add the appropriate import for fine_tune_da3 when uncommented)
    # fine_tuned_model = fine_tune_da3(
    #     model=model,
    #     training_samples=training_samples,
    #     epochs=5,
    #     lr=1e-5,
    # )

    print("✓ Fine-tuning example complete")

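
# After fine-tuning you will usually want to persist the result. A minimal sketch,
# assuming the DA3 wrapper is a standard PyTorch nn.Module (an assumption; prefer
# whatever save/export utility ylff itself provides):
#   save_checkpoint(fine_tuned_model, "data/training/da3_finetuned.pt")
def save_checkpoint(model, path):
    """Save the model's parameters to `path` as a PyTorch state_dict checkpoint."""
    import torch

    torch.save(model.state_dict(), path)
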

if __name__ == "__main__":
    print("YLFF Example Usage")
    print("=" * 50)
    print()

    example_validate_sequence()
    print()
    example_build_training_set()
    print()
    example_fine_tune()
    print()
    print("All examples complete!")