tostido committed on
Commit
5395d02
·
1 Parent(s): c018528

Add HuggingFace dataset loading script with configs for each log type

Browse files
Files changed (1) hide show
  1. key-data.py +215 -0
key-data.py ADDED
@@ -0,0 +1,215 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Ouroboros-Key Dataset - Evolution telemetry from quine-conversion runs.

This loading script provides multiple configs for different log types:
- fitness: Population fitness evaluations per generation
- mutations: Individual mutation events with trait deltas
- crossovers: Crossover breeding events
- selection: Tournament selection outcomes
- speciation: Species clustering snapshots
- comm_events: Pod-to-pod communication exchanges
- errors: System errors and warnings
- performance: Runtime performance telemetry
"""

import json
import datasets


# Long-form description surfaced on the dataset card via DatasetInfo.
_DESCRIPTION = """
Ouroboros-Key Dataset contains structured JSONL logs from neuroevolution runs.
The KEY system converts pretrained models into quine-replicant capable models
using NEAT-style speciation and LoRA adapters.
"""

_HOMEPAGE = "https://huggingface.co/datasets/tostido/key-data"
_LICENSE = "MIT"

# Repo-relative path of the raw JSONL log file backing each builder config.
# Keys here must match the BUILDER_CONFIGS names declared in KeyData below.
_URLS = {
    "fitness": "logs/fitness.jsonl",
    "mutations": "logs/mutations.jsonl",
    "crossovers": "logs/crossovers.jsonl",
    "selection": "logs/selection.jsonl",
    "speciation": "logs/speciation.jsonl",
    "comm_events": "logs/comm_events.jsonl",
    "errors": "logs/errors.jsonl",
    "performance": "logs/performance.jsonl",
}
35
+
36
+
37
class KeyDataConfig(datasets.BuilderConfig):
    """BuilderConfig for Ouroboros-Key log-type configs.

    Adds no fields beyond ``datasets.BuilderConfig``; the named subclass
    exists so per-config options can be introduced later without touching
    ``BUILDER_CONFIGS``. The former pass-through ``__init__`` (which only
    called ``super().__init__(**kwargs)``) was removed as dead code — the
    inherited constructor behaves identically.
    """
42
+
43
+
44
class KeyData(datasets.GeneratorBasedBuilder):
    """Ouroboros-Key evolution telemetry dataset.

    Each builder config corresponds to one JSONL log stream from an
    evolution run (see ``_URLS``). Every column is a scalar ``Value``;
    nested dicts/lists in the raw logs are stored as JSON-encoded strings.
    """

    VERSION = datasets.Version("1.0.0")

    BUILDER_CONFIGS = [
        KeyDataConfig(name="fitness", version=VERSION,
                      description="Population fitness evaluations per generation"),
        KeyDataConfig(name="mutations", version=VERSION,
                      description="Individual mutation events with trait deltas"),
        KeyDataConfig(name="crossovers", version=VERSION,
                      description="Crossover breeding events between parents"),
        KeyDataConfig(name="selection", version=VERSION,
                      description="Tournament selection outcomes"),
        KeyDataConfig(name="speciation", version=VERSION,
                      description="Species clustering and dynamics"),
        KeyDataConfig(name="comm_events", version=VERSION,
                      description="Pod-to-pod communication exchanges"),
        KeyDataConfig(name="errors", version=VERSION,
                      description="System errors and warnings"),
        KeyDataConfig(name="performance", version=VERSION,
                      description="Runtime performance telemetry"),
    ]

    DEFAULT_CONFIG_NAME = "fitness"

    # Flat column schema (column name -> Arrow dtype string) per config.
    # Columns that hold nested JSON (trait maps, deltas, snapshots, ...)
    # are typed "string" and carry a JSON-encoded payload.
    _FEATURE_SCHEMAS = {
        "fitness": {
            "timestamp": "float64",
            "iso_time": "string",
            "event": "string",
            "generation": "int32",
            "population_size": "int32",
            "best_fitness": "float64",
            "avg_fitness": "float64",
            "min_fitness": "float64",
            "max_fitness": "float64",
            "std_fitness": "float64",
            "top_10": "string",      # JSON array
            "bottom_5": "string",    # JSON array
        },
        "mutations": {
            "timestamp": "float64",
            "iso_time": "string",
            "event": "string",
            "generation": "int32",
            "parent_id": "string",
            "child_id": "string",
            "parent_fitness": "float64",
            "mutation_rate": "float64",
            "num_mutations": "int32",
            "total_delta": "float64",
            "parent_traits": "string",   # JSON object
            "child_traits": "string",    # JSON object
            "deltas": "string",          # JSON object
            "mutated_traits": "string",  # JSON array
        },
        "crossovers": {
            "timestamp": "float64",
            "iso_time": "string",
            "event": "string",
            "generation": "int32",
            "parent1_id": "string",
            "parent2_id": "string",
            "child_id": "string",
            "parent1_fitness": "float64",
            "parent2_fitness": "float64",
            "parent1_traits": "string",
            "parent2_traits": "string",
            "child_traits": "string",
            "inheritance": "string",
            "contribution_p1": "float64",
            "contribution_p2": "float64",
        },
        "selection": {
            "timestamp": "float64",
            "iso_time": "string",
            "event": "string",
            "generation": "int32",
            "method": "string",
            "survivors": "int32",
            "eliminated": "int32",
            "elites_preserved": "int32",
            "survivor_fitnesses": "string",
            "eliminated_fitnesses": "string",
        },
        "speciation": {
            "timestamp": "float64",
            "iso_time": "string",
            "event": "string",
            "generation": "int32",
            "species_id": "string",
            "species_snapshot": "string",
        },
        "comm_events": {
            "timestamp": "float64",
            "event_type": "string",
            "from_pod": "string",
            "to_pods": "string",
            "embedding_hash": "string",
            "merkle_root": "string",
            "data": "string",
        },
        # Generic fallback used by the "errors" and "performance" configs.
        "_default": {
            "timestamp": "float64",
            "event": "string",
            "data": "string",
        },
    }

    def _info(self):
        """Return DatasetInfo with the flat feature schema for the active config."""
        schema = self._FEATURE_SCHEMAS.get(
            self.config.name, self._FEATURE_SCHEMAS["_default"]
        )
        features = datasets.Features(
            {name: datasets.Value(dtype) for name, dtype in schema.items()}
        )
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
        )

    def _split_generators(self, dl_manager):
        """Resolve the config's JSONL log and expose it as a single train split."""
        filepath = dl_manager.download_and_extract(_URLS[self.config.name])
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"filepath": filepath},
            ),
        ]

    def _generate_examples(self, filepath):
        """Yield ``(index, example)`` pairs from one JSONL file.

        Best-effort ingestion with schema alignment:
        - blank and malformed lines are skipped rather than aborting;
        - each example is projected onto the declared features — missing
          columns are filled with None and undeclared keys are dropped, so
          a stray extra field in a single log line cannot break the build;
        - nested dict/list values are JSON-encoded to match their declared
          "string" column type.
        """
        # Hoist the declared column names out of the per-line loop.
        feature_names = list(self.info.features)
        with open(filepath, encoding="utf-8") as f:
            for idx, line in enumerate(f):
                line = line.strip()
                if not line:
                    continue
                try:
                    row = json.loads(line)
                except json.JSONDecodeError:
                    # Corrupt/partial line: skip, keep the rest of the file.
                    continue
                if not isinstance(row, dict):
                    # e.g. a bare JSON array/number — cannot map to columns.
                    continue
                example = {}
                for name in feature_names:
                    value = row.get(name)
                    if isinstance(value, (dict, list)):
                        value = json.dumps(value)
                    example[name] = value
                yield idx, example