alexanderchemeris commited on
Commit
563f37c
·
verified ·
1 Parent(s): ba51a1a

LeNEPA trained on a custom CauKer-2M dataset with 5000 points per series

Browse files
.gitattributes CHANGED
@@ -1,35 +1,2 @@
1
- *.7z filter=lfs diff=lfs merge=lfs -text
2
- *.arrow filter=lfs diff=lfs merge=lfs -text
3
- *.bin filter=lfs diff=lfs merge=lfs -text
4
- *.bz2 filter=lfs diff=lfs merge=lfs -text
5
- *.ckpt filter=lfs diff=lfs merge=lfs -text
6
- *.ftz filter=lfs diff=lfs merge=lfs -text
7
- *.gz filter=lfs diff=lfs merge=lfs -text
8
- *.h5 filter=lfs diff=lfs merge=lfs -text
9
- *.joblib filter=lfs diff=lfs merge=lfs -text
10
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
- *.model filter=lfs diff=lfs merge=lfs -text
13
- *.msgpack filter=lfs diff=lfs merge=lfs -text
14
- *.npy filter=lfs diff=lfs merge=lfs -text
15
- *.npz filter=lfs diff=lfs merge=lfs -text
16
- *.onnx filter=lfs diff=lfs merge=lfs -text
17
- *.ot filter=lfs diff=lfs merge=lfs -text
18
- *.parquet filter=lfs diff=lfs merge=lfs -text
19
- *.pb filter=lfs diff=lfs merge=lfs -text
20
- *.pickle filter=lfs diff=lfs merge=lfs -text
21
- *.pkl filter=lfs diff=lfs merge=lfs -text
22
- *.pt filter=lfs diff=lfs merge=lfs -text
23
- *.pth filter=lfs diff=lfs merge=lfs -text
24
- *.rar filter=lfs diff=lfs merge=lfs -text
25
  *.safetensors filter=lfs diff=lfs merge=lfs -text
26
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
- *.tar.* filter=lfs diff=lfs merge=lfs -text
28
- *.tar filter=lfs diff=lfs merge=lfs -text
29
- *.tflite filter=lfs diff=lfs merge=lfs -text
30
- *.tgz filter=lfs diff=lfs merge=lfs -text
31
- *.wasm filter=lfs diff=lfs merge=lfs -text
32
- *.xz filter=lfs diff=lfs merge=lfs -text
33
- *.zip filter=lfs diff=lfs merge=lfs -text
34
- *.zst filter=lfs diff=lfs merge=lfs -text
35
- *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  *.safetensors filter=lfs diff=lfs merge=lfs -text
2
+
 
 
 
 
 
 
 
 
 
LICENSE ADDED
@@ -0,0 +1,399 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Attribution-NonCommercial 4.0 International
2
+
3
+ =======================================================================
4
+
5
+ Creative Commons Corporation ("Creative Commons") is not a law firm and
6
+ does not provide legal services or legal advice. Distribution of
7
+ Creative Commons public licenses does not create a lawyer-client or
8
+ other relationship. Creative Commons makes its licenses and related
9
+ information available on an "as-is" basis. Creative Commons gives no
10
+ warranties regarding its licenses, any material licensed under their
11
+ terms and conditions, or any related information. Creative Commons
12
+ disclaims all liability for damages resulting from their use to the
13
+ fullest extent possible.
14
+
15
+ Using Creative Commons Public Licenses
16
+
17
+ Creative Commons public licenses provide a standard set of terms and
18
+ conditions that creators and other rights holders may use to share
19
+ original works of authorship and other material subject to copyright
20
+ and certain other rights specified in the public license below. The
21
+ following considerations are for informational purposes only, are not
22
+ exhaustive, and do not form part of our licenses.
23
+
24
+ Considerations for licensors: Our public licenses are
25
+ intended for use by those authorized to give the public
26
+ permission to use material in ways otherwise restricted by
27
+ copyright and certain other rights. Our licenses are
28
+ irrevocable. Licensors should read and understand the terms
29
+ and conditions of the license they choose before applying it.
30
+ Licensors should also secure all rights necessary before
31
+ applying our licenses so that the public can reuse the
32
+ material as expected. Licensors should clearly mark any
33
+ material not subject to the license. This includes other CC-
34
+ licensed material, or material used under an exception or
35
+ limitation to copyright. More considerations for licensors:
36
+ wiki.creativecommons.org/Considerations_for_licensors
37
+
38
+ Considerations for the public: By using one of our public
39
+ licenses, a licensor grants the public permission to use the
40
+ licensed material under specified terms and conditions. If
41
+ the licensor's permission is not necessary for any reason--for
42
+ example, because of any applicable exception or limitation to
43
+ copyright--then that use is not regulated by the license. Our
44
+ licenses grant only permissions under copyright and certain
45
+ other rights that a licensor has authority to grant. Use of
46
+ the licensed material may still be restricted for other
47
+ reasons, including because others have copyright or other
48
+ rights in the material. A licensor may make special requests,
49
+ such as asking that all changes be marked or described.
50
+ Although not required by our licenses, you are encouraged to
51
+ respect those requests where reasonable. More_considerations
52
+ for the public:
53
+ wiki.creativecommons.org/Considerations_for_licensees
54
+
55
+ =======================================================================
56
+
57
+ Creative Commons Attribution-NonCommercial 4.0 International Public
58
+ License
59
+
60
+ By exercising the Licensed Rights (defined below), You accept and agree
61
+ to be bound by the terms and conditions of this Creative Commons
62
+ Attribution-NonCommercial 4.0 International Public License ("Public
63
+ License"). To the extent this Public License may be interpreted as a
64
+ contract, You are granted the Licensed Rights in consideration of Your
65
+ acceptance of these terms and conditions, and the Licensor grants You
66
+ such rights in consideration of benefits the Licensor receives from
67
+ making the Licensed Material available under these terms and
68
+ conditions.
69
+
70
+ Section 1 -- Definitions.
71
+
72
+ a. Adapted Material means material subject to Copyright and Similar
73
+ Rights that is derived from or based upon the Licensed Material
74
+ and in which the Licensed Material is translated, altered,
75
+ arranged, transformed, or otherwise modified in a manner requiring
76
+ permission under the Copyright and Similar Rights held by the
77
+ Licensor. For purposes of this Public License, where the Licensed
78
+ Material is a musical work, performance, or sound recording,
79
+ Adapted Material is always produced where the Licensed Material is
80
+ synched in timed relation with a moving image.
81
+
82
+ b. Adapter's License means the license You apply to Your Copyright
83
+ and Similar Rights in Your contributions to Adapted Material in
84
+ accordance with the terms and conditions of this Public License.
85
+
86
+ c. Copyright and Similar Rights means copyright and/or similar rights
87
+ closely related to copyright including, without limitation,
88
+ performance, broadcast, sound recording, and Sui Generis Database
89
+ Rights, without regard to how the rights are labeled or
90
+ categorized. For purposes of this Public License, the rights
91
+ specified in Section 2(b)(1)-(2) are not Copyright and Similar
92
+ Rights.
93
+ d. Effective Technological Measures means those measures that, in the
94
+ absence of proper authority, may not be circumvented under laws
95
+ fulfilling obligations under Article 11 of the WIPO Copyright
96
+ Treaty adopted on December 20, 1996, and/or similar international
97
+ agreements.
98
+
99
+ e. Exceptions and Limitations means fair use, fair dealing, and/or
100
+ any other exception or limitation to Copyright and Similar Rights
101
+ that applies to Your use of the Licensed Material.
102
+
103
+ f. Licensed Material means the artistic or literary work, database,
104
+ or other material to which the Licensor applied this Public
105
+ License.
106
+
107
+ g. Licensed Rights means the rights granted to You subject to the
108
+ terms and conditions of this Public License, which are limited to
109
+ all Copyright and Similar Rights that apply to Your use of the
110
+ Licensed Material and that the Licensor has authority to license.
111
+
112
+ h. Licensor means the individual(s) or entity(ies) granting rights
113
+ under this Public License.
114
+
115
+ i. NonCommercial means not primarily intended for or directed towards
116
+ commercial advantage or monetary compensation. For purposes of
117
+ this Public License, the exchange of the Licensed Material for
118
+ other material subject to Copyright and Similar Rights by digital
119
+ file-sharing or similar means is NonCommercial provided there is
120
+ no payment of monetary compensation in connection with the
121
+ exchange.
122
+
123
+ j. Share means to provide material to the public by any means or
124
+ process that requires permission under the Licensed Rights, such
125
+ as reproduction, public display, public performance, distribution,
126
+ dissemination, communication, or importation, and to make material
127
+ available to the public including in ways that members of the
128
+ public may access the material from a place and at a time
129
+ individually chosen by them.
130
+
131
+ k. Sui Generis Database Rights means rights other than copyright
132
+ resulting from Directive 96/9/EC of the European Parliament and of
133
+ the Council of 11 March 1996 on the legal protection of databases,
134
+ as amended and/or succeeded, as well as other essentially
135
+ equivalent rights anywhere in the world.
136
+
137
+ l. You means the individual or entity exercising the Licensed Rights
138
+ under this Public License. Your has a corresponding meaning.
139
+
140
+ Section 2 -- Scope.
141
+
142
+ a. License grant.
143
+
144
+ 1. Subject to the terms and conditions of this Public License,
145
+ the Licensor hereby grants You a worldwide, royalty-free,
146
+ non-sublicensable, non-exclusive, irrevocable license to
147
+ exercise the Licensed Rights in the Licensed Material to:
148
+
149
+ a. reproduce and Share the Licensed Material, in whole or
150
+ in part, for NonCommercial purposes only; and
151
+
152
+ b. produce, reproduce, and Share Adapted Material for
153
+ NonCommercial purposes only.
154
+
155
+ 2. Exceptions and Limitations. For the avoidance of doubt, where
156
+ Exceptions and Limitations apply to Your use, this Public
157
+ License does not apply, and You do not need to comply with
158
+ its terms and conditions.
159
+
160
+ 3. Term. The term of this Public License is specified in Section
161
+ 6(a).
162
+
163
+ 4. Media and formats; technical modifications allowed. The
164
+ Licensor authorizes You to exercise the Licensed Rights in
165
+ all media and formats whether now known or hereafter created,
166
+ and to make technical modifications necessary to do so. The
167
+ Licensor waives and/or agrees not to assert any right or
168
+ authority to forbid You from making technical modifications
169
+ necessary to exercise the Licensed Rights, including
170
+ technical modifications necessary to circumvent Effective
171
+ Technological Measures. For purposes of this Public License,
172
+ simply making modifications authorized by this Section 2(a)
173
+ (4) never produces Adapted Material.
174
+
175
+ 5. Downstream recipients.
176
+
177
+ a. Offer from the Licensor -- Licensed Material. Every
178
+ recipient of the Licensed Material automatically
179
+ receives an offer from the Licensor to exercise the
180
+ Licensed Rights under the terms and conditions of this
181
+ Public License.
182
+
183
+ b. No downstream restrictions. You may not offer or impose
184
+ any additional or different terms or conditions on, or
185
+ apply any Effective Technological Measures to, the
186
+ Licensed Material if doing so restricts exercise of the
187
+ Licensed Rights by any recipient of the Licensed
188
+ Material.
189
+
190
+ 6. No endorsement. Nothing in this Public License constitutes or
191
+ may be construed as permission to assert or imply that You
192
+ are, or that Your use of the Licensed Material is, connected
193
+ with, or sponsored, endorsed, or granted official status by,
194
+ the Licensor or others designated to receive attribution as
195
+ provided in Section 3(a)(1)(A)(i).
196
+
197
+ b. Other rights.
198
+
199
+ 1. Moral rights, such as the right of integrity, are not
200
+ licensed under this Public License, nor are publicity,
201
+ privacy, and/or other similar personality rights; however, to
202
+ the extent possible, the Licensor waives and/or agrees not to
203
+ assert any such rights held by the Licensor to the limited
204
+ extent necessary to allow You to exercise the Licensed
205
+ Rights, but not otherwise.
206
+
207
+ 2. Patent and trademark rights are not licensed under this
208
+ Public License.
209
+
210
+ 3. To the extent possible, the Licensor waives any right to
211
+ collect royalties from You for the exercise of the Licensed
212
+ Rights, whether directly or through a collecting society
213
+ under any voluntary or waivable statutory or compulsory
214
+ licensing scheme. In all other cases the Licensor expressly
215
+ reserves any right to collect such royalties, including when
216
+ the Licensed Material is used other than for NonCommercial
217
+ purposes.
218
+
219
+ Section 3 -- License Conditions.
220
+
221
+ Your exercise of the Licensed Rights is expressly made subject to the
222
+ following conditions.
223
+
224
+ a. Attribution.
225
+
226
+ 1. If You Share the Licensed Material (including in modified
227
+ form), You must:
228
+
229
+ a. retain the following if it is supplied by the Licensor
230
+ with the Licensed Material:
231
+
232
+ i. identification of the creator(s) of the Licensed
233
+ Material and any others designated to receive
234
+ attribution, in any reasonable manner requested by
235
+ the Licensor (including by pseudonym if
236
+ designated);
237
+
238
+ ii. a copyright notice;
239
+
240
+ iii. a notice that refers to this Public License;
241
+
242
+ iv. a notice that refers to the disclaimer of
243
+ warranties;
244
+
245
+ v. a URI or hyperlink to the Licensed Material to the
246
+ extent reasonably practicable;
247
+
248
+ b. indicate if You modified the Licensed Material and
249
+ retain an indication of any previous modifications; and
250
+
251
+ c. indicate the Licensed Material is licensed under this
252
+ Public License, and include the text of, or the URI or
253
+ hyperlink to, this Public License.
254
+
255
+ 2. You may satisfy the conditions in Section 3(a)(1) in any
256
+ reasonable manner based on the medium, means, and context in
257
+ which You Share the Licensed Material. For example, it may be
258
+ reasonable to satisfy the conditions by providing a URI or
259
+ hyperlink to a resource that includes the required
260
+ information.
261
+
262
+ 3. If requested by the Licensor, You must remove any of the
263
+ information required by Section 3(a)(1)(A) to the extent
264
+ reasonably practicable.
265
+
266
+ 4. If You Share Adapted Material You produce, the Adapter's
267
+ License You apply must not prevent recipients of the Adapted
268
+ Material from complying with this Public License.
269
+
270
+ Section 4 -- Sui Generis Database Rights.
271
+
272
+ Where the Licensed Rights include Sui Generis Database Rights that
273
+ apply to Your use of the Licensed Material:
274
+
275
+ a. for the avoidance of doubt, Section 2(a)(1) grants You the right
276
+ to extract, reuse, reproduce, and Share all or a substantial
277
+ portion of the contents of the database for NonCommercial purposes
278
+ only;
279
+
280
+ b. if You include all or a substantial portion of the database
281
+ contents in a database in which You have Sui Generis Database
282
+ Rights, then the database in which You have Sui Generis Database
283
+ Rights (but not its individual contents) is Adapted Material; and
284
+
285
+ c. You must comply with the conditions in Section 3(a) if You Share
286
+ all or a substantial portion of the contents of the database.
287
+
288
+ For the avoidance of doubt, this Section 4 supplements and does not
289
+ replace Your obligations under this Public License where the Licensed
290
+ Rights include other Copyright and Similar Rights.
291
+
292
+ Section 5 -- Disclaimer of Warranties and Limitation of Liability.
293
+
294
+ a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE
295
+ EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS
296
+ AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
297
+ ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,
298
+ IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,
299
+ WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
300
+ PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,
301
+ ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT
302
+ KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT
303
+ ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.
304
+
305
+ b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE
306
+ TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,
307
+ NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,
308
+ INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,
309
+ COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR
310
+ USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN
311
+ ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR
312
+ DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR
313
+ IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.
314
+
315
+ c. The disclaimer of warranties and limitation of liability provided
316
+ above shall be interpreted in a manner that, to the extent
317
+ possible, most closely approximates an absolute disclaimer and
318
+ waiver of all liability.
319
+
320
+ Section 6 -- Term and Termination.
321
+
322
+ a. This Public License applies for the term of the Copyright and
323
+ Similar Rights licensed here. However, if You fail to comply with
324
+ this Public License, then Your rights under this Public License
325
+ terminate automatically.
326
+
327
+ b. Where Your right to use the Licensed Material has terminated under
328
+ Section 6(a), it reinstates:
329
+
330
+ 1. automatically as of the date the violation is cured, provided
331
+ it is cured within 30 days of Your discovery of the
332
+ violation; or
333
+
334
+ 2. upon express reinstatement by the Licensor.
335
+
336
+ For the avoidance of doubt, this Section 6(b) does not affect any
337
+ right the Licensor may have to seek remedies for Your violations
338
+ of this Public License.
339
+
340
+ c. For the avoidance of doubt, the Licensor may also offer the
341
+ Licensed Material under separate terms or conditions or stop
342
+ distributing the Licensed Material at any time; however, doing so
343
+ will not terminate this Public License.
344
+
345
+ d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
346
+ License.
347
+
348
+ Section 7 -- Other Terms and Conditions.
349
+
350
+ a. The Licensor shall not be bound by any additional or different
351
+ terms or conditions communicated by You unless expressly agreed.
352
+
353
+ b. Any arrangements, understandings, or agreements regarding the
354
+ Licensed Material not stated herein are separate from and
355
+ independent of the terms and conditions of this Public License.
356
+
357
+ Section 8 -- Interpretation.
358
+
359
+ a. For the avoidance of doubt, this Public License does not, and
360
+ shall not be interpreted to, reduce, limit, restrict, or impose
361
+ conditions on any use of the Licensed Material that could lawfully
362
+ be made without permission under this Public License.
363
+
364
+ b. To the extent possible, if any provision of this Public License is
365
+ deemed unenforceable, it shall be automatically reformed to the
366
+ minimum extent necessary to make it enforceable. If the provision
367
+ cannot be reformed, it shall be severed from this Public License
368
+ without affecting the enforceability of the remaining terms and
369
+ conditions.
370
+
371
+ c. No term or condition of this Public License will be waived and no
372
+ failure to comply consented to unless expressly agreed to by the
373
+ Licensor.
374
+
375
+ d. Nothing in this Public License constitutes or may be interpreted
376
+ as a limitation upon, or waiver of, any privileges and immunities
377
+ that apply to the Licensor or You, including from the legal
378
+ processes of any jurisdiction or authority.
379
+
380
+ =======================================================================
381
+
382
+ Creative Commons is not a party to its public
383
+ licenses. Notwithstanding, Creative Commons may elect to apply one of
384
+ its public licenses to material it publishes and in those instances
385
+ will be considered the “Licensor.” The text of the Creative Commons
386
+ public licenses is dedicated to the public domain under the CC0 Public
387
+ Domain Dedication. Except for the limited purpose of indicating that
388
+ material is shared under a Creative Commons public license or as
389
+ otherwise permitted by the Creative Commons policies published at
390
+ creativecommons.org/policies, Creative Commons does not authorize the
391
+ use of the trademark "Creative Commons" or any other trademark or logo
392
+ of Creative Commons without its prior written consent including,
393
+ without limitation, in connection with any unauthorized modifications
394
+ to any of its public licenses or any other arrangements,
395
+ understandings, or agreements concerning use of licensed material. For
396
+ the avoidance of doubt, this paragraph does not form part of the
397
+ public licenses.
398
+
399
+ Creative Commons may be contacted at creativecommons.org.
README.md ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: cc-by-nc-4.0
3
+ library_name: pytorch
4
+ tags:
5
+ - time-series
6
+ - synthetic
7
+ - representation-learning
8
+ - nepa
9
+ ---
10
+
11
+ # LeNEPA encoder (CauKer2M, encoder-only)
12
+
13
+ This repository contains an **encoder-only** LeNEPA checkpoint exported to `safetensors` for minimal inference.
14
+
15
+ What is included:
16
+
17
+ - `lenepa_encoder.safetensors` - **encoder weights only** (no projector, no training/probe state)
18
+ - `inference.py` - minimal end-to-end inference (no Hydra, no W&B, no repo dependency)
19
+ - `lenepa_encoder_config.json` - fixed IO + architecture contract
20
+ - `provenance.json` - original `.pt` checkpoint path + W&B URL
21
+
22
+ ## IO contract
23
+
24
+ Inputs:
25
+
26
+ - `x_waveform`: `torch.float32` with shape `[B, 1, 5000]`
27
+ - sampling frequency: `1`
28
+ - channels: `["c0"]`
29
+
30
+ Tokenizer:
31
+
32
+ - causal `conv_patch_embed` with `patch_size=8`
33
+ - per-patch normalization inside the tokenizer
34
+ - MSSE scalar embeddings for patch mean and patch std
35
+
36
+ Outputs:
37
+
38
+ - `patch_tokens`: `[B, 625, 256]` (post-final-norm tokens)
39
+ - `embedding`: `[B, 256]` (mean pooled over tokens)
40
+
41
+ ## Preparing inputs
42
+
43
+ The exported `inference.py` expects `x_waveform.shape == [B, 1, 5000]` and does **not** resample internally.
44
+
45
+ Two common options:
46
+
47
+ 1. No resampling
48
+
49
+ Use this when your waveform is already of length `5000`, or if your use case simply works better without resampling:
50
+
51
+ ```python
52
+ from pathlib import Path
53
+
54
+ import torch
55
+
56
+ from inference import encode_lenepa, load_lenepa_encoder
57
+
58
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
59
+ model = load_lenepa_encoder(weights_path=Path("lenepa_encoder.safetensors"), device=device)
60
+
61
+ x = torch.randn(2, 1, 5000, device=device, dtype=torch.float32) # [B, C, L=5000]
62
+ out = encode_lenepa(model=model, x_waveform=x)
63
+ print(out.patch_tokens.shape)
64
+ ```
65
+
66
+ 2. Interpolate to `5000`
67
+
68
+ Validation on UCR shows that resampling series of a different length to `5000` yields noticeably better classification quality:
69
+
70
+ ```python
71
+ from pathlib import Path
72
+
73
+ import torch
74
+ from torch.nn import functional as F
75
+
76
+ from inference import encode_lenepa, load_lenepa_encoder
77
+
78
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
79
+ model = load_lenepa_encoder(weights_path=Path("lenepa_encoder.safetensors"), device=device)
80
+
81
+ x_raw = torch.randn(2, 1, 137, device=device, dtype=torch.float32) # [B, C, L_raw]
82
+ x = F.interpolate(x_raw, size=5000, mode="linear", align_corners=False) # [B, C, 5000]
83
+ out = encode_lenepa(model=model, x_waveform=x)
84
+ print(out.embedding.shape)
85
+ ```
86
+
87
+ ## Usage
88
+
89
+ Smoke test (loads `lenepa_encoder.safetensors` from the current directory and prints output shapes):
90
+
91
+ ```bash
92
+ python inference.py
93
+ ```
94
+
95
+ Programmatic usage:
96
+
97
+ ```python
98
+ from pathlib import Path
99
+
100
+ import torch
101
+ from torch.nn import functional as F
102
+
103
+ from inference import encode_lenepa, load_lenepa_encoder
104
+
105
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
106
+ model = load_lenepa_encoder(weights_path=Path("lenepa_encoder.safetensors"), device=device)
107
+ x_raw = torch.randn(2, 1, 137, device=device, dtype=torch.float32) # [B, C, L_raw]
108
+ x = F.interpolate(x_raw, size=5000, mode="linear", align_corners=False) # [B, C, 5000]
109
+ out = encode_lenepa(model=model, x_waveform=x)
110
+ print(out.embedding.shape)
111
+ ```
__pycache__/inference.cpython-312.pyc ADDED
Binary file (23.4 kB). View file
 
inference.py ADDED
@@ -0,0 +1,381 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Minimal inference for the published LeNEPA encoder checkpoint.
2
+
3
+ Published IO contract:
4
+ - x_waveform: torch.float32 [B, 1, 5000], channel order: ["c0"]
5
+ - outputs:
6
+ patch_tokens: [B, 625, 256]
7
+ embedding: [B, 256]
8
+
9
+ This code intentionally does NOT:
10
+ - resample / crop / pad inputs
11
+ - support other checkpoints or architectures
12
+ """
13
+
14
+ from __future__ import annotations
15
+
16
+ from dataclasses import dataclass
17
+ from pathlib import Path
18
+
19
+ import torch
20
+ from safetensors.torch import load_file as safetensors_load
21
+ from torch import nn
22
+ from torch.nn import functional as F
23
+
24
+ # -----------------------------
25
+ # Published constants (no knobs)
26
+ # -----------------------------
27
+
28
+ SAMPLING_FREQUENCY = 1
29
+ CHANNELS = ("c0",)
30
+
31
+ NUM_CHANNELS = 1
32
+ CHANNEL_SIZE = 5000
33
+ PATCH_SIZE = 8
34
+ NUM_PATCHES = 625 # 5000 / 8
35
+
36
+ DIM = 256
37
+ DEPTH = 8
38
+ NUM_HEADS = 4
39
+ MLP_RATIO = 4.0
40
+
41
+ PATCH_EMBED_CNN_DIM = 192
42
+ SCALAR_HIDDEN_DIM = 32
43
+ SCALAR_SCALES = (0.0001, 0.001, 0.01, 0.1, 1.0, 10.0, 100.0, 1000.0, 10000.0)
44
+ SCALAR_EPSILON = 1.1
45
+
46
+ QKV_BIAS = True
47
+ BIAS = True
48
+ NORM_EPS = 1e-6
49
+
50
+ ROPE_BASE = 10_000
51
+ QK_NORM_EPS = 1e-6
52
+
53
+
54
@dataclass(frozen=True)
class LeNEPAEncoderOutput:
    """Outputs of the published LeNEPA encoder."""

    # Post-final-norm token sequence, one token per input patch: [B, T=625, D=256].
    patch_tokens: torch.Tensor
    # Mean-pooled sequence embedding: [B, D=256].
    embedding: torch.Tensor
60
+
61
+
62
+ class ScalarEncoder(nn.Module):
63
+ """Affine + LayerNorm scalar encoder used inside the MSSE blocks."""
64
+
65
+ def __init__(self, *, k: float, hidden_dim: int, eps: float) -> None:
66
+ super().__init__()
67
+ if hidden_dim < 1:
68
+ raise ValueError(f"hidden_dim must be >= 1, got {hidden_dim}")
69
+ if eps <= 0:
70
+ raise ValueError(f"eps must be > 0, got {eps}")
71
+ self.k = float(k)
72
+ self.w = nn.Parameter(torch.rand((1, hidden_dim), dtype=torch.float32, requires_grad=True))
73
+ self.b = nn.Parameter(torch.rand((1, hidden_dim), dtype=torch.float32, requires_grad=True))
74
+ self.norm = nn.LayerNorm(hidden_dim, eps=eps)
75
+
76
+ def forward(self, x: torch.Tensor) -> torch.Tensor:
77
+ # x: [..., 1]
78
+ if x.size(-1) != 1:
79
+ raise ValueError(f"Expected x[..., 1], got x.shape={tuple(x.shape)}")
80
+ z = x * self.w + self.k * self.b # [..., H]
81
+ return self.norm(z) # [..., H]
82
+
83
+
84
class MultiScaledScalarEncoder(nn.Module):
    """Blend per-scale scalar encoders based on the scalar magnitude.

    One ``ScalarEncoder`` is built per scale; their outputs are mixed with
    weights ``|1 / log(|x| / scale + epsilon)|`` (normalized to sum to 1 over
    scales), so scales whose magnitude matches ``|x|`` dominate the blend.
    """

    def __init__(
        self,
        *,
        scales: tuple[float, ...],
        hidden_dim: int,
        epsilon: float,
        eps: float,
    ) -> None:
        super().__init__()
        if not scales:
            raise ValueError("scales must be non-empty")
        if any(scale <= 0 for scale in scales):
            raise ValueError(f"All scales must be > 0, got scales={scales}")
        if hidden_dim < 1:
            raise ValueError(f"hidden_dim must be >= 1, got {hidden_dim}")
        if epsilon <= 0:
            raise ValueError(f"epsilon must be > 0, got {epsilon}")
        if eps <= 0:
            raise ValueError(f"eps must be > 0, got {eps}")

        # Non-persistent buffer: follows .to(device) moves but is not
        # serialized with the state dict.
        self.register_buffer(
            "scales", torch.tensor(scales, dtype=torch.float32), persistent=False
        )  # [S]
        self.epsilon = float(epsilon)
        self.encoders = nn.ModuleList(
            ScalarEncoder(k=float(scale), hidden_dim=hidden_dim, eps=eps) for scale in scales
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Encode ``x`` of shape ``[..., 1]`` to ``[..., hidden_dim]``."""
        if x.size(-1) != 1:
            raise ValueError(f"Expected x[..., 1], got x.shape={tuple(x.shape)}")

        magnitude = x.abs().to(dtype=torch.float32)  # [..., 1]
        scales = self.scales.to(device=x.device)  # [S]
        ratios = torch.matmul(magnitude, (1.0 / scales).reshape(1, -1))  # [..., S]

        # Magnitude-dependent mixing weights, normalized over the scale axis.
        weights = (1.0 / torch.log(ratios + self.epsilon)).abs()  # [..., S]
        weights = weights / weights.sum(dim=-1, keepdim=True)  # [..., S]
        weights = weights.unsqueeze(-1)  # [..., S, 1]

        per_scale = torch.stack([encoder(x) for encoder in self.encoders], dim=-2)  # [..., S, H]
        blended = (per_scale.to(dtype=torch.float32) * weights).sum(dim=-2)  # [..., H]
        return blended.to(dtype=x.dtype)
131
+
132
+
133
class RotaryEmbedding(nn.Module):
    """Rotary positional embeddings (RoPE) applied to Q/K.

    Uses the interleaved pairing convention: channel pairs (2i, 2i+1) are
    rotated together by angle position * base^(-2i/dim).
    """

    def __init__(self, *, dim: int, base: int) -> None:
        super().__init__()
        if dim % 2 != 0:
            raise ValueError(f"RoPE requires even head_dim, got {dim}")
        # Standard RoPE frequency ladder: base^(-2i/dim) for i in [0, dim/2).
        exponents = torch.arange(0, dim, 2, dtype=torch.float32) / dim
        self.register_buffer("inv_freq", 1.0 / (base ** exponents), persistent=False)  # [Dh/2]
        # Lazily-built cos/sin tables, keyed on (seq_len, device, dtype).
        self._seq_len_cached: int | None = None
        self._cos_cached: torch.Tensor | None = None
        self._sin_cached: torch.Tensor | None = None
        self._device_cached: torch.device | None = None
        self._dtype_cached: torch.dtype | None = None

    def _build_cache(self, *, seq_len: int, device: torch.device, dtype: torch.dtype) -> None:
        """(Re)compute the cos/sin tables for one (length, device, dtype) key."""
        positions = torch.arange(seq_len, device=device, dtype=self.inv_freq.dtype)  # [T]
        angles = positions[:, None] * self.inv_freq[None, :]  # [T, Dh/2] outer product
        self._cos_cached = angles.cos().to(dtype)  # [T, Dh/2]
        self._sin_cached = angles.sin().to(dtype)  # [T, Dh/2]
        self._seq_len_cached = seq_len
        self._device_cached = device
        self._dtype_cached = dtype

    def _get_cos_sin(
        self, *, seq_len: int, device: torch.device, dtype: torch.dtype
    ) -> tuple[torch.Tensor, torch.Tensor]:
        """Return cached cos/sin tables, rebuilding on any key change."""
        stale = (
            self._cos_cached is None
            or self._sin_cached is None
            or self._seq_len_cached != seq_len
            or self._device_cached != device
            or self._dtype_cached != dtype
        )
        if stale:
            self._build_cache(seq_len=seq_len, device=device, dtype=dtype)
        if self._cos_cached is None or self._sin_cached is None:
            raise RuntimeError("RoPE cache was not built; this is a bug")
        return self._cos_cached, self._sin_cached

    def _apply_rotary(self, x: torch.Tensor, *, cos: torch.Tensor, sin: torch.Tensor) -> torch.Tensor:
        """Rotate interleaved (even, odd) channel pairs of x: [B, H, T, Dh]."""
        b, h, t, d = x.shape
        pairs = x.view(b, h, t, d // 2, 2)  # [B, H, T, Dh/2, 2]
        even = pairs[..., 0]  # [B, H, T, Dh/2]
        odd = pairs[..., 1]  # [B, H, T, Dh/2]
        c = cos[None, None]  # [1, 1, T, Dh/2]
        s = sin[None, None]  # [1, 1, T, Dh/2]
        rotated = torch.stack((even * c - odd * s, even * s + odd * c), dim=-1)
        return rotated.flatten(-2)  # [B, H, T, Dh]

    def apply(self, q: torch.Tensor, k: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor]:
        """Rotate q and k (each [B, H, T, Dh]) with a shared position table."""
        cos, sin = self._get_cos_sin(seq_len=q.size(-2), device=q.device, dtype=q.dtype)  # [T, Dh/2]
        return (
            self._apply_rotary(q, cos=cos, sin=sin),
            self._apply_rotary(k, cos=cos, sin=sin),
        )
187
+
188
+
189
class Attention(nn.Module):
    """Causal self-attention with RoPE + QK-Norm (no dropout)."""

    def __init__(self) -> None:
        super().__init__()
        if DIM % NUM_HEADS != 0:
            raise ValueError(f"DIM must be divisible by NUM_HEADS, got DIM={DIM} NUM_HEADS={NUM_HEADS}")
        head_dim = DIM // NUM_HEADS
        self.num_heads = NUM_HEADS
        self.rope = RotaryEmbedding(dim=head_dim, base=ROPE_BASE)
        # Parameter-free LayerNorm shared by Q and K (QK-Norm).
        self.qk_norm = nn.LayerNorm(head_dim, eps=QK_NORM_EPS, elementwise_affine=False)
        self.qkv = nn.Linear(DIM, DIM * 3, bias=QKV_BIAS)
        self.proj = nn.Linear(DIM, DIM, bias=BIAS)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Apply causal attention over the time axis; x: [B, T, D] -> [B, T, D]."""
        batch, seq, dim = x.shape
        head_dim = dim // self.num_heads
        # Single fused projection, then lay heads out: [B, T, 3*D] -> 3 x [B, H, T, Dh].
        projected = self.qkv(x).reshape(batch, seq, 3, self.num_heads, head_dim)
        q, k, v = projected.permute(2, 0, 3, 1, 4).unbind(0)
        # Checkpoint order: rotate positions first, then normalize Q/K.
        q, k = self.rope.apply(q, k)
        q = self.qk_norm(q)
        k = self.qk_norm(k)
        context = F.scaled_dot_product_attention(q, k, v, dropout_p=0.0, is_causal=True)  # [B, H, T, Dh]
        merged = context.transpose(1, 2).reshape(batch, seq, dim)  # [B, T, D]
        return self.proj(merged)  # [B, T, D]
218
+
219
+
220
class GatedMLP(nn.Module):
    """SwiGLU MLP used in this checkpoint."""

    def __init__(self) -> None:
        super().__init__()
        # SwiGLU convention: shrink the hidden width by 2/3 so the parameter
        # count stays comparable to a plain MLP with ratio MLP_RATIO.
        hidden_dim = int((2 / 3) * MLP_RATIO * DIM)
        if hidden_dim <= 0:
            raise ValueError(f"hidden_dim must be > 0, got {hidden_dim}")
        self.fc1 = nn.Linear(DIM, hidden_dim * 2, bias=BIAS)
        self.fc2 = nn.Linear(hidden_dim, DIM, bias=BIAS)
        self.act = nn.SiLU()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """x: [B, T, D] -> [B, T, D] via SiLU(gate) * value."""
        gate, value = self.fc1(x).chunk(2, dim=-1)  # each [B, T, H]
        return self.fc2(self.act(gate) * value)  # [B, T, D]
237
+
238
+
239
class Block(nn.Module):
    """Transformer block: LN -> Attn -> residual -> LN -> MLP -> residual."""

    def __init__(self) -> None:
        super().__init__()
        self.norm1 = nn.LayerNorm(DIM, eps=NORM_EPS)
        self.attn = Attention()
        self.norm2 = nn.LayerNorm(DIM, eps=NORM_EPS)
        self.mlp = GatedMLP()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Pre-norm residual block; x: [B, T, D] -> [B, T, D]."""
        attended = x + self.attn(self.norm1(x))  # [B, T, D]
        return attended + self.mlp(self.norm2(attended))  # [B, T, D]
254
+
255
+
256
class PatchEmbedding(nn.Module):
    """Conv patch embedding: Conv1d(C->D_cnn, kernel=stride=patch_size)."""

    def __init__(self) -> None:
        super().__init__()
        # kernel_size == stride == PATCH_SIZE gives non-overlapping patches.
        self.proj = nn.Conv1d(
            in_channels=NUM_CHANNELS,
            out_channels=PATCH_EMBED_CNN_DIM,
            kernel_size=PATCH_SIZE,
            stride=PATCH_SIZE,
            bias=BIAS,
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """x: [B, C, L] -> patch tokens [B, T, D_cnn] with T = L // PATCH_SIZE."""
        return self.proj(x).transpose(1, 2)  # [B, D_cnn, T] -> [B, T, D_cnn]
273
+
274
+
275
class LeNEPAEncoder(nn.Module):
    """LeNEPA encoder trunk for this exact checkpoint.

    Pipeline: per-patch normalization of the waveform, a conv patch embedding
    of the normalized signal, concatenated with multi-scale embeddings of each
    patch's mean and std, then DEPTH transformer blocks and a final LayerNorm.
    """

    def __init__(self) -> None:
        super().__init__()
        if CHANNEL_SIZE % PATCH_SIZE != 0:
            raise ValueError("CHANNEL_SIZE must be divisible by PATCH_SIZE")
        # Separate encoders for per-patch mean and std, so the trunk keeps the
        # scale information that the patch normalization in _tokenize removes.
        self.nepa_patch_embed_mean_encoder = MultiScaledScalarEncoder(
            scales=SCALAR_SCALES,
            hidden_dim=SCALAR_HIDDEN_DIM,
            epsilon=SCALAR_EPSILON,
            eps=NORM_EPS,
        )
        self.nepa_patch_embed_std_encoder = MultiScaledScalarEncoder(
            scales=SCALAR_SCALES,
            hidden_dim=SCALAR_HIDDEN_DIM,
            epsilon=SCALAR_EPSILON,
            eps=NORM_EPS,
        )
        self.patch_embed = PatchEmbedding()
        self.blocks = nn.ModuleList([Block() for _ in range(DEPTH)])
        self.norm = nn.LayerNorm(DIM, eps=NORM_EPS)

    def _tokenize(self, x: torch.Tensor) -> torch.Tensor:
        """Turn a waveform batch [B, C, L] into transformer tokens [B, T, D].

        Raises:
            ValueError: if the input shape does not match the published
                [B, NUM_CHANNELS, CHANNEL_SIZE] contract.
            RuntimeError: if the concatenated token width does not equal DIM.
        """
        # x: [B, C, L]
        B, C, L = x.shape
        if C != NUM_CHANNELS or L != CHANNEL_SIZE:
            raise ValueError(
                "Input must match the published contract: "
                f"expected [B, {NUM_CHANNELS}, {CHANNEL_SIZE}], got {tuple(x.shape)}"
            )
        if L % PATCH_SIZE != 0:
            raise ValueError(f"Expected L divisible by PATCH_SIZE, got L={L}, PATCH_SIZE={PATCH_SIZE}")

        T = L // PATCH_SIZE
        # Statistics are computed in float32 regardless of the input dtype;
        # the normalized signal is cast back to x.dtype before the conv embed.
        x_f32 = x.to(dtype=torch.float32)  # [B, C, L]
        x_patches = x_f32.reshape(B, C, T, PATCH_SIZE)  # [B, C, T, P]
        # Mean/std pooled over channels AND within-patch samples; std is the
        # population std (unbiased=False).
        mean_patch = x_patches.mean(dim=(1, 3))  # [B, T]
        std_patch = x_patches.std(dim=(1, 3), unbiased=False)  # [B, T]
        mean_broadcast = mean_patch[:, None, :, None]  # [B, 1, T, 1]
        std_broadcast = std_patch[:, None, :, None]  # [B, 1, T, 1]
        # NORM_EPS in the denominator guards against constant (zero-std) patches.
        x_norm_patches = (x_patches - mean_broadcast) / (std_broadcast + NORM_EPS)  # [B, C, T, P]
        x_norm = x_norm_patches.reshape(B, C, L).to(dtype=x.dtype)  # [B, C, L]

        # Token per patch = [CNN features | mean embedding | std embedding].
        z_cnn = self.patch_embed(x_norm)  # [B, T, D_cnn]
        e_mean = self.nepa_patch_embed_mean_encoder(mean_patch.unsqueeze(-1))  # [B, T, H]
        e_std = self.nepa_patch_embed_std_encoder(std_patch.unsqueeze(-1))  # [B, T, H]
        tokens = torch.cat([z_cnn, e_mean.to(dtype=z_cnn.dtype), e_std.to(dtype=z_cnn.dtype)], dim=-1)  # [B, T, D]
        if tokens.size(-1) != DIM:
            raise RuntimeError(
                "Tokenizer produced unexpected dim. "
                f"Got tokens.shape={tuple(tokens.shape)}, expected last dim={DIM}"
            )
        return tokens

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Encode x: [B, C, L] -> LayerNorm-ed patch tokens [B, T, D]."""
        z = self._tokenize(x)  # [B, T, D]
        for block in self.blocks:
            z = block(z)  # [B, T, D]
        return self.norm(z)  # [B, T, D]
336
+
337
+
338
@torch.inference_mode()
def encode_lenepa(*, model: LeNEPAEncoder, x_waveform: torch.Tensor) -> LeNEPAEncoderOutput:
    """Encode a batch of waveforms.

    Args:
        model: the loaded encoder trunk.
        x_waveform: float32 tensor of shape [B, C, L] on the model's device.

    Returns:
        LeNEPAEncoderOutput with per-patch tokens [B, T, D] and their mean [B, D].

    Raises:
        ValueError: on wrong dtype, wrong rank, or a device mismatch.
    """
    if x_waveform.dtype != torch.float32:
        raise ValueError(f"x_waveform must be float32, got {x_waveform.dtype}")
    if x_waveform.dim() != 3:
        raise ValueError(f"x_waveform must be [B, C, L], got {tuple(x_waveform.shape)}")
    model_device = next(model.parameters()).device
    if x_waveform.device != model_device:
        raise ValueError(
            "x_waveform must be on the same device as the model. "
            f"x_waveform.device={x_waveform.device} model.device={model_device}"
        )

    tokens = model(x_waveform)  # [B, T, D]
    pooled = tokens.mean(dim=1)  # [B, D] -- mean pooling over the patch axis
    return LeNEPAEncoderOutput(patch_tokens=tokens, embedding=pooled)
355
+
356
+
357
def load_lenepa_encoder(*, weights_path: Path, device: torch.device) -> "LeNEPAEncoder":
    """Load the published encoder weights from a safetensors file.

    Args:
        weights_path: path to the exported .safetensors checkpoint.
        device: device the frozen model is moved to.

    Returns:
        A LeNEPAEncoder in eval mode with gradients disabled.

    Raises:
        ValueError: if weights_path is missing or not a regular file.
    """
    # Path.is_file() is False both for a missing path and for a directory, so
    # the message must cover both cases (the old text claimed "does not exist"
    # even when the path existed but was a directory).
    if not weights_path.is_file():
        raise ValueError(f"weights_path does not exist or is not a file: {str(weights_path)!r}")
    state = safetensors_load(str(weights_path))
    model = LeNEPAEncoder()
    # strict=True guarantees the architecture defined above matches the
    # checkpoint exactly (no missing or unexpected keys).
    model.load_state_dict(state, strict=True)
    model.eval()
    model.requires_grad_(False)  # inference-only export
    return model.to(device)
367
+
368
+
369
def _smoke_test() -> None:
    """Small end-to-end smoke test (random input, prints output shapes)."""
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    weights = Path(__file__).resolve().parent / "lenepa_encoder.safetensors"
    model = load_lenepa_encoder(weights_path=weights, device=device)
    batch = torch.randn(2, 1, 5000, device=device, dtype=torch.float32)  # [B=2, C=1, L=5000]
    result = encode_lenepa(model=model, x_waveform=batch)
    print("patch_tokens", tuple(result.patch_tokens.shape))
    print("embedding", tuple(result.embedding.shape))


if __name__ == "__main__":
    _smoke_test()
lenepa_encoder.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:901d54caf39b637efba0b6c866380f33fa26e2cf7711bc6abe2fead27bbd84c1
3
+ size 25302224
lenepa_encoder_config.json ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bias": true,
3
+ "channel_size": 5000,
4
+ "channels": [
5
+ "c0"
6
+ ],
7
+ "depth": 8,
8
+ "dim": 256,
9
+ "format": "lenepa_encoder",
10
+ "format_version": 1,
11
+ "is_causal": true,
12
+ "mlp_ratio": 4.0,
13
+ "nepa_final_norm": "ln",
14
+ "nepa_patch_embed_cnn_dim": 192,
15
+ "nepa_patch_embed_scalar_epsilon": 1.1,
16
+ "nepa_patch_embed_scalar_hidden_dim": 32,
17
+ "nepa_patch_embed_scalar_scales": [
18
+ 0.0001,
19
+ 0.001,
20
+ 0.01,
21
+ 0.1,
22
+ 1.0,
23
+ 10.0,
24
+ 100.0,
25
+ 1000.0,
26
+ 10000.0
27
+ ],
28
+ "nepa_patch_embed_scalar_stats_mode": "patch_norm",
29
+ "nepa_rep_pooling": "mean",
30
+ "nepa_static_tokenizer": "conv_patch_embed",
31
+ "norm_eps": 1e-06,
32
+ "num_heads": 4,
33
+ "num_patches": 625,
34
+ "num_registers": 0,
35
+ "patch_size": 8,
36
+ "pos_embed_type": "none",
37
+ "qk_norm_eps": 1e-06,
38
+ "qkv_bias": true,
39
+ "rope_base": 10000,
40
+ "sampling_frequency": 1,
41
+ "use_nepa": true,
42
+ "use_qk_norm": true,
43
+ "use_rope": true,
44
+ "use_swiglu": true
45
+ }
provenance.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "checkpoint_step": 20000,
3
+ "exported_at_utc": "2026-03-10T15:37:13.781012+00:00",
4
+ "git_commit": "ab2a718ee2576d9e214c831eb405d36e235164c9",
5
+ "notes": "encoder-only export (proj.* and sigreg.* removed)",
6
+ "pretrain_wandb": {
7
+ "entity": "langotime",
8
+ "project": "ECG-LeJEPA",
9
+ "run_id": "6y3c5r13",
10
+ "run_name": "CAUKER2M_L5000_LENEPA_SIGREGT2p5_L0-8_PD0_PROJ_LR2x_MSSE_PATCHNORM_D256_OPTOYTSBCBAL_s0_UCRinterp5000",
11
+ "url": "https://wandb.ai/langotime/ECG-LeJEPA/runs/6y3c5r13"
12
+ },
13
+ "source_checkpoint_path": "pretrain/CAUKER2M_L5000_LENEPA_SIGREGT2p5_L0-8_PD0_PROJ_LR2x_MSSE_PATCHNORM_D256_OPTOYTSBCBAL_s0_UCRinterp5000/chkpt_20000.pt"
14
+ }
requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ torch
2
+ safetensors
3
+