SeaWolf-AI committed on
Commit
5f923cd
·
verified ·
1 Parent(s): 20954ab

Upload full LiteRT-LM codebase

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
.bazeliskrc ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2024 The AI Edge LiteRT Authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+
16
+ # Use GitHub as mirror for Bazel binary downloads
17
+ # This works around outages at releases.bazel.build
18
+ BAZELISK_BASE_URL=https://github.com/bazelbuild/bazel/releases/download
.bazelrc ADDED
@@ -0,0 +1,544 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # LiteRT Bazel configuration file.
2
+ # This file tries to group and simplify build options for LiteRT
3
+ #
4
+ # ----CONFIG OPTIONS----
5
+ # Android options:
6
+ # android:
7
+ # android_arm:
8
+ # android_arm64:
9
+ # android_x86:
10
+ # android_x86_64:
11
+ #
12
+ # iOS options:
13
+ # ios:
14
+ # ios_armv7:
15
+ # ios_arm64:
16
+ # ios_x86_64:
17
+ # ios_fat:
18
+ #
19
+ # Macosx options
20
+ # darwin_arm64:
21
+ #
22
+ # Compiler options:
23
+ # avx_linux: Build with avx instruction set on linux.
24
+ # avx_win: Build with avx instruction set on windows
25
+ #
26
+ # Other build options:
27
+ # short_logs: Only log errors during build, skip warnings.
28
+ # verbose_logs: Show all compiler warnings during build.
29
+ # monolithic: Build all TF C++ code into a single shared object.
30
+ # dynamic_kernels: Try to link all kernels dynamically (experimental).
31
+ # dbg: Build with debug info
32
+ #
33
+ # TF version options;
34
+ # v2: Build TF v2
35
+ #
36
+ # Embedded Linux options (experimental and only tested with TFLite build yet)
37
+ # elinux: General Embedded Linux options shared by all flavors.
38
+ # elinux_aarch64: Embedded Linux options for aarch64 (ARM64) CPU support.
39
+ # elinux_armhf: Embedded Linux options for armhf (ARMv7) CPU support.
40
+ #
41
+ # Default build options. These are applied first and unconditionally.
42
+
43
+ # Shlibs built from the source tree should resolve symbols in the executable.
44
+ build --define=resolve_symbols_in_exec=true
45
+
46
+ # Exclude OpenCL from Linux and Windows builds.
47
+ build:linux --copt=-DLITERT_DISABLE_OPENCL_SUPPORT=1
48
+ build:windows --copt=/DLITERT_DISABLE_OPENCL_SUPPORT=1
49
+
50
+ build --repo_env=USE_PYWRAP_RULES=True
51
+ build --copt=-DGRPC_BAZEL_BUILD
52
+ build --host_copt=-DGRPC_BAZEL_BUILD
53
+ build --action_env=GRPC_BAZEL_RUNTIME=1
54
+ build --repo_env=PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=upb
55
+ build --action_env=PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=upb
56
+
57
+ # Some targets have the same py source file, but use different
58
+ # configurations via `requires-` tags. This results in an action
59
+ # conflict when precompiling. Disable to avoid that problem.
60
+ # See https://github.com/bazel-contrib/rules_python/issues/2445
61
+ build --@rules_python//python/config_settings:precompile=force_disabled
62
+
63
+ # Do not do this. This is how gRPC builds itself by default, but we don't want
64
+ # that as it would link protobuf into its own set of dynamic libraries, which
65
+ # would conflict with our protobuf linkage.
66
+ #build --define=use_fast_cpp_protos=true
67
+
68
+ # For projects which use TensorFlow as part of a Bazel build process, putting
69
+ # nothing in a bazelrc will default to a monolithic build. The following line
70
+ # opts in to modular op registration support by default.
71
+ build --define framework_shared_object=true
72
+ build --define tsl_protobuf_header_only=true
73
+
74
+ build --define=allow_oversize_protos=true
75
+ # protobuf will stop supporting MSVC.
76
+ build:windows --define=protobuf_allow_msvc=true
77
+
78
+ build --spawn_strategy=standalone
79
+ build -c opt
80
+
81
+ # Make Bazel print out all options from rc files.
82
+ build --announce_rc
83
+
84
+ # TODO(mihaimaruseac): Document this option or remove if no longer needed
85
+ build --define=grpc_no_ares=true
86
+
87
+ # See https://github.com/bazelbuild/bazel/issues/7362 for information on what
88
+ # --incompatible_remove_legacy_whole_archive flag does.
89
+ # This flag is set to true in Bazel 1.0 and newer versions. We tried to migrate
90
+ # Tensorflow to the default, however test coverage wasn't enough to catch the
91
+ # errors.
92
+ # There is ongoing work on Bazel team's side to provide support for transitive
93
+ # shared libraries. As part of migrating to transitive shared libraries, we
94
+ # hope to provide a better mechanism for control over symbol exporting, and
95
+ # then tackle this issue again.
96
+ #
97
+ # TODO: Remove the following two lines once TF doesn't depend on Bazel wrapping
98
+ # all library archives in -whole_archive -no_whole_archive.
99
+ build --noincompatible_remove_legacy_whole_archive
100
+ build --features=-force_no_whole_archive
101
+ build --host_features=-force_no_whole_archive
102
+
103
+ # TODO(mihaimaruseac): Document this option or remove if no longer needed
104
+ build --enable_platform_specific_config
105
+
106
+ # Enable XLA support by default.
107
+ build --define=with_xla_support=true
108
+
109
+ # TODO(mihaimaruseac): Document this option or remove if no longer needed
110
+ build --config=short_logs
111
+
112
+ # TF now has `cc_shared_library` targets, so it needs the experimental flag
113
+ # TODO(rostam): Remove when `cc_shared_library` is enabled by default
114
+ build --experimental_cc_shared_library
115
+
116
+ # cc_shared_library ensures no library is linked statically more than once.
117
+ build --experimental_link_static_libraries_once=false
118
+
119
+ # Prevent regressions on those two incompatible changes
120
+ # TODO: remove those flags when they are flipped in the default Bazel version TF uses.
121
+ build --incompatible_enforce_config_setting_visibility
122
+ # TODO: also enable this flag after fixing the visibility violations
123
+ # build --incompatible_config_setting_private_default_visibility
124
+
125
+ # Disable Hermetic CC toolchains
126
+ build --@rules_ml_toolchain//common:enable_hermetic_cc=False
127
+ build --repo_env USE_HERMETIC_CC_TOOLCHAIN=0
128
+ # Hermetic python version
129
+ build --repo_env=HERMETIC_PYTHON_VERSION=3.13
130
+
131
+ # Default options should come above this line.
132
+
133
+ # Enforce clang as C/C++ compiler on Linux.
134
+ build:linux --action_env=CC=clang
135
+ build:linux --action_env=CXX=clang++
136
+
137
+ # Android configs. Bazel needs to have --cpu and --fat_apk_cpu both set to the
138
+ # target CPU to build transient dependencies correctly. See
139
+ # https://docs.bazel.build/versions/master/user-manual.html#flag--fat_apk_cpu
140
+ build:android --crosstool_top=//external:android/crosstool
141
+ build:android --host_crosstool_top=@bazel_tools//tools/cpp:toolchain
142
+ build:android_arm --config=android
143
+ build:android_arm --cpu=armeabi-v7a
144
+ build:android_arm --fat_apk_cpu=armeabi-v7a
145
+ build:android_arm --platforms=@org_tensorflow//tensorflow/tools/toolchains/android:armeabi-v7a
146
+ build:android_arm64 --config=android
147
+ build:android_arm64 --cpu=arm64-v8a
148
+ build:android_arm64 --fat_apk_cpu=arm64-v8a
149
+ build:android_arm64 --platforms=@org_tensorflow//tensorflow/tools/toolchains/android:arm64-v8a
150
+ build:android_x86 --config=android
151
+ build:android_x86 --cpu=x86
152
+ build:android_x86 --fat_apk_cpu=x86
153
+ build:android_x86 --platforms=@org_tensorflow//tensorflow/tools/toolchains/android:x86
154
+ build:android_x86_64 --config=android
155
+ build:android_x86_64 --cpu=x86_64
156
+ build:android_x86_64 --fat_apk_cpu=x86_64
157
+ build:android_x86_64 --platforms=@org_tensorflow//tensorflow/tools/toolchains/android:x86_64
158
+
159
+ # Build everything statically for Android since all static libs are later
160
+ # bundled together into a single .so for deployment.
161
+ build:android --dynamic_mode=off
162
+ # TODO(belitskiy): Remove once on Clang 20.
163
+ build:android --define=xnn_enable_avxvnniint8=false
164
+
165
+ # Sets the default Apple platform to macOS.
166
+ build:macos --apple_platform_type=macos
167
+
168
+ # gRPC on MacOS requires this #define
169
+ build:macos --copt=-DGRPC_BAZEL_BUILD
170
+
171
+ # Avoid hitting command line argument limit
172
+ build:macos --features=archive_param_file
173
+
174
+ # Bazel 7.0.0 no longer supports dynamic symbol lookup on macOS. To resolve
175
+ # undefined symbol errors in macOS arm64 builds, explicitly add the necessary
176
+ # linker flags until dependencies are well defined. See
177
+ # https://github.com/bazelbuild/bazel/issues/19730.
178
+ build:macos --linkopt=-Wl,-undefined,dynamic_lookup
179
+ build:macos --host_linkopt=-Wl,-undefined,dynamic_lookup
180
+
181
+ # Settings for MacOS on ARM CPUs.
182
+ build:macos_arm64 --cpu=darwin_arm64
183
+ build:macos_arm64 --macos_minimum_os=11.0
184
+ build:macos_arm64 --platforms=@build_bazel_apple_support//platforms:darwin_arm64
185
+
186
+ # iOS configs for each architecture and the fat binary builds.
187
+ build:ios --apple_platform_type=ios
188
+ build:ios --copt=-fembed-bitcode
189
+ build:ios --copt=-Wno-c++11-narrowing
190
+ build:ios --ios_minimum_os=13.0
191
+ build:ios_armv7 --config=ios
192
+ build:ios_armv7 --cpu=ios_armv7
193
+ build:ios_armv7 --platforms=@org_tensorflow//tensorflow/tools/toolchains/ios:ios_armv7
194
+ build:ios_arm64 --config=ios
195
+ build:ios_arm64 --cpu=ios_arm64
196
+ build:ios_arm64 --platforms=@build_bazel_apple_support//platforms:ios_arm64
197
+ build:ios_arm64e --config=ios
198
+ build:ios_arm64e --cpu=ios_arm64e
199
+ build:ios_arm64e --platforms=@build_bazel_apple_support//platforms:ios_arm64e
200
+ build:ios_sim_arm64 --config=ios
201
+ build:ios_sim_arm64 --cpu=ios_sim_arm64
202
+ build:ios_sim_arm64 --platforms=@build_bazel_apple_support//platforms:ios_sim_arm64
203
+ build:ios_x86_64 --config=ios
204
+ build:ios_x86_64 --cpu=ios_x86_64
205
+ build:ios_x86_64 --platforms=@build_bazel_apple_support//platforms:ios_x86_64
206
+ build:ios_fat --config=ios
207
+ build:ios_fat --ios_multi_cpus=armv7,arm64,i386,x86_64
208
+
209
+ # Config to use a mostly-static build and disable modular op registration
210
+ # support (this will revert to loading TensorFlow with RTLD_GLOBAL in Python).
211
+ # By default, TensorFlow will build with a dependence on
212
+ # //tensorflow:libtensorflow_framework.so.
213
+ build:monolithic --define framework_shared_object=false
214
+ build:monolithic --define tsl_protobuf_header_only=false
215
+ build:monolithic --experimental_link_static_libraries_once=false # b/229868128
216
+
217
+ # Debug config
218
+ build:dbg -c dbg
219
+ # Only include debug info for files under tensorflow/, excluding kernels, to
220
+ # reduce the size of the debug info in the binary. This is because if the debug
221
+ # sections in the ELF binary are too large, errors can occur. See
222
+ # https://github.com/tensorflow/tensorflow/issues/48919.
223
+ # Users can still include debug info for a specific kernel, e.g. with:
224
+ # --config=dbg --per_file_copt=+tensorflow/core/kernels/identity_op.*@-g
225
+ # Since this .bazelrc file is synced between the tensorflow/tensorflow repo and
226
+ # the openxla/xla repo, also include debug info for files under xla/.
227
+ build:dbg --per_file_copt=+.*,-tensorflow.*,-xla.*@-g0
228
+ build:dbg --per_file_copt=+tensorflow/core/kernels.*@-g0
229
+ # for now, disable arm_neon. see: https://github.com/tensorflow/tensorflow/issues/33360
230
+ build:dbg --cxxopt -DTF_LITE_DISABLE_X86_NEON
231
+ # AWS SDK must be compiled in release mode. see: https://github.com/tensorflow/tensorflow/issues/37498
232
+ build:dbg --copt -DDEBUG_BUILD
233
+
234
+ # Options to disable default on features
235
+ build:nogcp --define=no_gcp_support=true
236
+ build:nonccl --define=no_nccl_support=true
237
+
238
+ # Modular TF build options
239
+ build:dynamic_kernels --define=dynamic_loaded_kernels=true
240
+ build:dynamic_kernels --copt=-DAUTOLOAD_DYNAMIC_KERNELS
241
+
242
+ # Don't trigger --config=<host platform> when cross-compiling.
243
+ build:android --noenable_platform_specific_config
244
+ build:ios --noenable_platform_specific_config
245
+
246
+ # Suppress all C++ compiler warnings, otherwise build logs become 10s of MBs.
247
+ build:android --copt=-w
248
+ build:ios --copt=-w
249
+ build:linux --host_copt=-w
250
+ build:macos --copt=-w
251
+ build:windows --copt=/W0
252
+ build:windows --host_copt=/W0
253
+
254
+ # Suppress most C++ compiler warnings to reduce log size but allow
255
+ # for specific warnings to still be present.
256
+ build:linux --copt="-Wno-all"
257
+ build:linux --copt="-Wno-extra"
258
+ build:linux --copt="-Wno-deprecated"
259
+ build:linux --copt="-Wno-deprecated-declarations"
260
+ build:linux --copt="-Wno-ignored-attributes"
261
+ build:linux --copt="-Wno-array-bounds"
262
+
263
+ # Add unused-result as an error on Linux.
264
+ build:linux --copt="-Wunused-result"
265
+ build:linux --copt="-Werror=unused-result"
266
+ # Add switch as an error on Linux.
267
+ build:linux --copt="-Wswitch"
268
+ build:linux --copt="-Werror=switch"
269
+ # This was added in clang-16 by https://reviews.llvm.org/D133574.
270
+ # Can be removed once upb is updated, since a type definition is used within
271
+ # offsetof in the current version of upb.
272
+ # See https://github.com/protocolbuffers/upb/blob/9effcbcb27f0a665f9f345030188c0b291e32482/upb/upb.c#L183.
273
+ build:linux --copt=-Wno-gnu-offsetof-extensions
274
+
275
+ # Toolchain and CUDA options for Linux CPU builds
276
+ build:release_cpu_linux --crosstool_top="@local_config_cuda//crosstool:toolchain"
277
+ build:release_cpu_linux --repo_env=TF_SYSROOT="/dt9"
278
+
279
+ # Linux ARM64 specific options
280
+ build:linux_arm64 --copt="-mtune=generic" --copt="-march=armv8-a" --copt="-O3"
281
+
282
+ # On Windows, `__cplusplus` is wrongly defined without this switch
283
+ # See https://devblogs.microsoft.com/cppblog/msvc-now-correctly-reports-__cplusplus/
284
+ build:windows --copt=/Zc:__cplusplus
285
+ build:windows --host_copt=/Zc:__cplusplus
286
+
287
+ # Tensorflow uses M_* math constants that only get defined by MSVC headers if
288
+ # _USE_MATH_DEFINES is defined.
289
+ build:windows --copt=/D_USE_MATH_DEFINES
290
+ build:windows --host_copt=/D_USE_MATH_DEFINES
291
+
292
+ # Windows has a relatively short command line limit, which TF has begun to hit.
293
+ # See https://docs.bazel.build/versions/main/windows.html
294
+ build:windows --features=compiler_param_file
295
+ build:windows --features=archive_param_file
296
+
297
+ # Speed Windows compile times. Available in VS 16.4 (we are on 16.11). See
298
+ # https://groups.google.com/a/tensorflow.org/d/topic/build/SsW98Eo7l3o/discussion
299
+ build:windows --copt=/d2ReducedOptimizeHugeFunctions
300
+ build:windows --host_copt=/d2ReducedOptimizeHugeFunctions
301
+
302
+ # Before VS 2017 15.8, the member "type" would non-conformingly have an
303
+ # alignment of only alignof(max_align_t). VS 2017 15.8 was fixed to handle this
304
+ # correctly, but the fix inherently changes layout and breaks binary
305
+ # compatibility (*only* for uses of aligned_storage with extended alignments).
306
+ build:windows --copt=-D_ENABLE_EXTENDED_ALIGNED_STORAGE
307
+ build:windows --host_copt=-D_ENABLE_EXTENDED_ALIGNED_STORAGE
308
+
309
+ # Enable the runfiles symlink tree on Windows. This makes it possible to build
310
+ # the pip package on Windows without an intermediate data-file archive, as the
311
+ # build_pip_package script in its current form (as of Aug 2023) uses the
312
+ # runfiles symlink tree to decide what to put into the Python wheel.
313
+ startup --windows_enable_symlinks
314
+ build:windows --enable_runfiles
315
+
316
+ # Default paths for TF_SYSTEM_LIBS
317
+ build:linux --define=PREFIX=/usr
318
+ build:linux --define=LIBDIR=$(PREFIX)/lib
319
+ build:linux --define=INCLUDEDIR=$(PREFIX)/include
320
+ build:linux --define=PROTOBUF_INCLUDE_PATH=$(PREFIX)/include
321
+ build:macos --define=PREFIX=/usr
322
+ build:macos --define=LIBDIR=$(PREFIX)/lib
323
+ build:macos --define=INCLUDEDIR=$(PREFIX)/include
324
+ build:macos --define=PROTOBUF_INCLUDE_PATH=$(PREFIX)/include
325
+ # TF_SYSTEM_LIBS do not work on windows.
326
+
327
+ # By default, build TF in C++ 20 mode.
328
+ build:android --cxxopt=-std=c++20
329
+ build:android --host_cxxopt=-std=c++20
330
+ build:ios --cxxopt=-std=c++20
331
+ build:ios --host_cxxopt=-std=c++20
332
+ build:linux --cxxopt=-std=c++20
333
+ build:linux --host_cxxopt=-std=c++20
334
+ build:macos --cxxopt=-std=c++20
335
+ build:macos --host_cxxopt=-std=c++20
336
+ build:windows --cxxopt=/std:c++20
337
+ build:windows --host_cxxopt=/std:c++20
338
+
339
+ # On Windows, set /std:c11 and /experimental:c11atomics for pthreadpool.
340
+ build:windows --conlyopt=/std:c11
341
+ build:windows --host_conlyopt=/std:c11
342
+ build:windows --conlyopt=/experimental:c11atomics
343
+ build:windows --host_conlyopt=/experimental:c11atomics
344
+ build:windows --per_file_copt=external/pthreadpool/.*@-std=c11
345
+ build:windows --per_file_copt=external/pthreadpool/_*@/D_CRT_SECURE_NO_WARNINGS
346
+
347
+ # On windows, we still link everything into a single DLL.
348
+ build:windows --config=monolithic
349
+
350
+ # On linux, we dynamically link small amount of kernels
351
+ build:linux --config=dynamic_kernels
352
+
353
+ # Make sure to include as little of windows.h as possible
354
+ build:windows --copt=-DWIN32_LEAN_AND_MEAN
355
+ build:windows --host_copt=-DWIN32_LEAN_AND_MEAN
356
+ build:windows --copt=-DNOGDI
357
+ build:windows --host_copt=-DNOGDI
358
+
359
+ # MSVC (Windows): Standards-conformant preprocessor mode
360
+ # See https://docs.microsoft.com/en-us/cpp/preprocessor/preprocessor-experimental-overview
361
+ build:windows --copt=/Zc:preprocessor
362
+ build:windows --host_copt=/Zc:preprocessor
363
+
364
+ # Misc build options we need for windows.
365
+ build:windows --linkopt=/DEBUG
366
+ build:windows --host_linkopt=/DEBUG
367
+ build:windows --linkopt=/OPT:REF
368
+ build:windows --host_linkopt=/OPT:REF
369
+ build:windows --linkopt=/OPT:ICF
370
+ build:windows --host_linkopt=/OPT:ICF
371
+
372
+ # Force local execution for genrules on Windows
373
+ build:windows --strategy=Genrule=local
374
+ build:windows --genrule_strategy=local
375
+ build:windows --spawn_strategy=local
376
+
377
+ # protobuf can't find its headers on windows when built locally with bazelisk.
378
+ build:windows --host_copt=/Iexternal/com_google_protobuf/src
379
+
380
+ # Use Git Bash for genrules
381
+ build:windows --shell_executable="C:/Program Files/Git/bin/bash.exe"
382
+
383
+ # Verbose failure logs when something goes wrong
384
+ build:windows --verbose_failures
385
+
386
+ # Work around potential issues with large command lines on windows.
387
+ # See: https://github.com/bazelbuild/bazel/issues/5163
388
+ build:windows --features=compiler_param_file
389
+
390
+ # Do not risk cache corruption. See:
391
+ # https://github.com/bazelbuild/bazel/issues/3360
392
+ build:linux --experimental_guard_against_concurrent_changes
393
+
394
+ # Configure short or long logs
395
+ build:short_logs --output_filter=DONT_MATCH_ANYTHING
396
+ build:verbose_logs --output_filter=
397
+
398
+ # Instruction set optimizations
399
+ # TODO(gunan): Create a feature in toolchains for avx/avx2 to
400
+ # avoid having to define linux/win separately.
401
+ build:avx_linux --copt=-mavx
402
+ build:avx_linux --host_copt=-mavx
403
+ build:avx_win --copt=/arch:AVX
404
+
405
+ # Assume AVX2 x86_64 platforms for linux and windows.
406
+ build:linux_x86_64 --copt=-mavx2
407
+ build:windows --copt=/arch:AVX2
408
+
409
+ # Disable whole archive on Windows to avoid symbol conflicts. See b/469455895.
410
+ build:windows --legacy_whole_archive=0
411
+
412
+ # Enable all targets in XLA
413
+ build:cpu_cross --define=with_cross_compiler_support=true
414
+
415
+ # Disable XLA on mobile.
416
+ build:xla --define=with_xla_support=true # TODO: remove, it's on by default.
417
+ build:android --define=with_xla_support=false
418
+ build:ios --define=with_xla_support=false
419
+
420
+ # Enable string absl flags on mobile which is disabled by default.
421
+ build:android --copt=-DABSL_FLAGS_STRIP_NAMES=0
422
+ build:ios --copt=-DABSL_FLAGS_STRIP_NAMES=0
423
+
424
+ # Flag to enable remote config
425
+ common --experimental_repo_remote_exec
426
+
427
+ # Use GitHub mirror for BCR to work around bcr.bazel.build outages
428
+ common --registry=https://raw.githubusercontent.com/bazelbuild/bazel-central-registry/main/
429
+
430
+ # TFLite build configs for generic embedded Linux
431
+ build:elinux --crosstool_top=@local_config_embedded_arm//:toolchain
432
+ build:elinux --host_crosstool_top=@bazel_tools//tools/cpp:toolchain
433
+ build:elinux_aarch64 --config=elinux
434
+ build:elinux_aarch64 --cpu=aarch64
435
+ build:elinux_armhf --config=elinux
436
+ build:elinux_armhf --cpu=armhf
437
+ build:elinux_armhf --copt -mfp16-format=ieee
438
+
439
+ # Config-specific options should come above this line.
440
+
441
+ # Load rc file written by ./configure.
442
+ try-import %workspace%/.tf_configure.bazelrc
443
+ try-import %workspace%/xla_configure.bazelrc
444
+
445
+ # Load rc file with user-specific options.
446
+ try-import %workspace%/.bazelrc.user
447
+
448
+ # Try to load the XLA warnings config if available
449
+ try-import %workspace%/warnings.bazelrc
450
+
451
+ # Options to build TensorFlow 1.x or 2.x.
452
+ build:v2 --define=tf_api_version=2 --action_env=TF2_BEHAVIOR=1
453
+ build --config=v2
454
+
455
+ # Options to filter filepath prefixes when generating python api's
456
+ build:litert_prefixes --define=litert_prefixes="external/org_tensorflow/"
457
+ build --config=litert_prefixes
458
+
459
+ # Options to disable generating api v2 under tensorflow/lite/python.
460
+ # This is not on by default and don't use this flag during wheel generation.
461
+ # Remove the flag once tf/lite is fully separated from tf.
462
+ build:disable_tf_lite_py --define=disable_tf_lite_py=true
463
+
464
+ # Config to use a gcs bucket as remote cache.
465
+ build:public_cache --remote_cache="https://storage.googleapis.com/litert-bazel-artifacts" --remote_upload_local_results=false
466
+
467
+ # Cache pushes are limited to CI system.
468
+ # WARNING: THIS OPTION WON'T WORK IF YOU DO NOT HAVE PROPER AUTHENTICATION AND PERMISSIONS
469
+ build:public_cache_push --config=public_cache --remote_upload_local_results=true --google_default_credentials
470
+
471
+ # BEGIN LITERT REMOTE BUILD EXECUTION OPTIONS
472
+ # WARNING: THESE OPTIONS WON'T WORK IF YOU DO NOT HAVE PROPER AUTHENTICATION AND PERMISSIONS
473
+
474
+ # Allow creation of resultstore URLs for any bazel invocation
475
+ build:resultstore --google_default_credentials
476
+ build:resultstore --bes_backend=buildeventservice.googleapis.com
477
+ build:resultstore --bes_instance_name="tensorflow-testing"
478
+ build:resultstore --bes_results_url="https://source.cloud.google.com/results/invocations"
479
+ build:resultstore --bes_timeout=600s
480
+
481
+ # Flag to enable remote config
482
+ common --experimental_repo_remote_exec
483
+
484
+ # Make Bazel not try to probe the host system for a C++ toolchain.
485
+ build:rbe_base --config=resultstore
486
+ build:rbe_base --repo_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
487
+ build:rbe_base --define=EXECUTOR=remote
488
+ build:rbe_base --jobs=200
489
+ build:rbe_base --remote_executor=grpcs://remotebuildexecution.googleapis.com
490
+ build:rbe_base --remote_timeout=3600
491
+ build:rbe_base --spawn_strategy=remote,worker,standalone,local
492
+
493
+ # Attempt to minimize the amount of data transfer between bazel and the remote
494
+ # workers:
495
+ build:rbe_base --remote_download_toplevel
496
+ test:rbe_base --test_env=USER=anon
497
+
498
+ build:rbe_linux --config=rbe_base
499
+ build:rbe_linux --action_env=PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/go/bin"
500
+ # Non-rbe settings we should include because we do not run configure
501
+ build:rbe_linux --config=avx_linux
502
+ build:rbe_linux --linkopt=-lrt
503
+ build:rbe_linux --host_linkopt=-lrt
504
+ build:rbe_linux --linkopt=-lm
505
+ build:rbe_linux --host_linkopt=-lm
506
+
507
+ build:rbe_linux_cpu --config=rbe_linux
508
+ # Linux cpu and cuda builds share the same toolchain now.
509
+ build:rbe_linux_cpu --host_crosstool_top="@local_config_cuda//crosstool:toolchain"
510
+ build:rbe_linux_cpu --crosstool_top="@local_config_cuda//crosstool:toolchain"
511
+ build:rbe_linux_cpu --extra_toolchains="@local_config_cuda//crosstool:toolchain-linux-x86_64"
512
+ build:rbe_linux_cpu --repo_env=CC="/usr/lib/llvm-18/bin/clang"
513
+ build:rbe_linux_cpu --extra_execution_platforms="@ml_build_config_platform//:platform"
514
+ build:rbe_linux_cpu --host_platform="@ml_build_config_platform//:platform"
515
+ build:rbe_linux_cpu --platforms="@ml_build_config_platform//:platform"
516
+ # This is needed for all Clang17 builds but must not be present in GCC builds.
517
+ build:rbe_linux_cpu --copt=-Wno-error=unused-command-line-argument
518
+ # These you may need to change for your own GCP project.
519
+ common:rbe_linux_cpu --remote_instance_name=projects/tensorflow-testing/instances/default_instance
520
+ # END LITERT REMOTE BUILD EXECUTION OPTIONS
521
+
522
+ # Filters used when building in bulk in cpu.
523
+ build:bulk_build_cpu --build_tag_filters=-no_oss,-oss_serial,-gpu,-tpu,-v1only
524
+ build:bulk_test_cpu --show_timestamps
525
+ build:bulk_test_cpu --experimental_ui_max_stdouterr_bytes=3145728
526
+
527
+ # Filters used when testing in bulk in cpu.
528
+ test:bulk_test_cpu --config=bulk_build_cpu
529
+ test:bulk_test_cpu --test_tag_filters=-no_oss,-oss_serial,-gpu,-tpu,-v1only,-benchmark-test
530
+ test:bulk_test_cpu --test_summary=short
531
+ test:bulk_test_cpu --test_output=errors
532
+ test:bulk_test_cpu --verbose_failures=true
533
+ test:bulk_test_cpu --flaky_test_attempts=3
534
+ test:bulk_test_cpu --build_tests_only
535
+
536
+ # TODO: b/397625618 Enable bzlmod
537
+ common --noenable_bzlmod
538
+
539
+ # --- Fix for Clang 19+ and Strict Layering Checks ---
540
+ build --features=-layering_check
541
+ build --host_features=-layering_check
542
+
543
+ build --features=-header_modules
544
+ build --host_features=-header_modules
.bazelversion ADDED
@@ -0,0 +1 @@
 
 
1
+ 7.6.1
.gitattributes CHANGED
@@ -1,35 +1,45 @@
1
- *.7z filter=lfs diff=lfs merge=lfs -text
2
- *.arrow filter=lfs diff=lfs merge=lfs -text
3
- *.bin filter=lfs diff=lfs merge=lfs -text
4
- *.bz2 filter=lfs diff=lfs merge=lfs -text
5
- *.ckpt filter=lfs diff=lfs merge=lfs -text
6
- *.ftz filter=lfs diff=lfs merge=lfs -text
7
- *.gz filter=lfs diff=lfs merge=lfs -text
8
- *.h5 filter=lfs diff=lfs merge=lfs -text
9
- *.joblib filter=lfs diff=lfs merge=lfs -text
10
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
- *.model filter=lfs diff=lfs merge=lfs -text
13
- *.msgpack filter=lfs diff=lfs merge=lfs -text
14
- *.npy filter=lfs diff=lfs merge=lfs -text
15
- *.npz filter=lfs diff=lfs merge=lfs -text
16
- *.onnx filter=lfs diff=lfs merge=lfs -text
17
- *.ot filter=lfs diff=lfs merge=lfs -text
18
- *.parquet filter=lfs diff=lfs merge=lfs -text
19
- *.pb filter=lfs diff=lfs merge=lfs -text
20
- *.pickle filter=lfs diff=lfs merge=lfs -text
21
- *.pkl filter=lfs diff=lfs merge=lfs -text
22
- *.pt filter=lfs diff=lfs merge=lfs -text
23
- *.pth filter=lfs diff=lfs merge=lfs -text
24
- *.rar filter=lfs diff=lfs merge=lfs -text
25
- *.safetensors filter=lfs diff=lfs merge=lfs -text
26
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
- *.tar.* filter=lfs diff=lfs merge=lfs -text
28
- *.tar filter=lfs diff=lfs merge=lfs -text
29
- *.tflite filter=lfs diff=lfs merge=lfs -text
30
- *.tgz filter=lfs diff=lfs merge=lfs -text
31
- *.wasm filter=lfs diff=lfs merge=lfs -text
32
- *.xz filter=lfs diff=lfs merge=lfs -text
33
- *.zip filter=lfs diff=lfs merge=lfs -text
34
- *.zst filter=lfs diff=lfs merge=lfs -text
35
- *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
1
+ BUILD.llguidance text eol=lf
2
+ prebuilt/*/*.so filter=lfs diff=lfs merge=lfs -text
3
+ prebuilt/*/*.dylib filter=lfs diff=lfs merge=lfs -text
4
+ prebuilt/*/*.dll filter=lfs diff=lfs merge=lfs -text
5
+ prebuilt/*/*.lib filter=lfs diff=lfs merge=lfs -text
6
+ prebuilt/*/lit filter=lfs diff=lfs merge=lfs -text
7
+ prebuilt/*/lit.exe filter=lfs diff=lfs merge=lfs -text
8
+ cmake/patches/converter.zip filter=lfs diff=lfs merge=lfs -text
9
+ docs/images/tool-call-flow.png filter=lfs diff=lfs merge=lfs -text
10
+ docs/images/tool-format-and-parse.png filter=lfs diff=lfs merge=lfs -text
11
+ runtime/components/preprocessor/testdata/apple.png filter=lfs diff=lfs merge=lfs -text
12
+ runtime/components/testdata/audio_sample.wav filter=lfs diff=lfs merge=lfs -text
13
+ runtime/components/testdata/decoded_audio_samples.bin filter=lfs diff=lfs merge=lfs -text
14
+ runtime/components/testdata/dummy_embedding_cpu_model.tflite filter=lfs diff=lfs merge=lfs -text
15
+ runtime/components/testdata/dummy_end_of_multi_modal_model.tflite filter=lfs diff=lfs merge=lfs -text
16
+ runtime/components/testdata/frontend.tflite filter=lfs diff=lfs merge=lfs -text
17
+ runtime/components/testdata/frontend_sl_v1.tflite filter=lfs diff=lfs merge=lfs -text
18
+ runtime/components/testdata/function_gemma_sentencepiece.model filter=lfs diff=lfs merge=lfs -text
19
+ runtime/components/testdata/gemma3_sentencepiece.model filter=lfs diff=lfs merge=lfs -text
20
+ runtime/components/testdata/sentencepiece.model filter=lfs diff=lfs merge=lfs -text
21
+ runtime/testdata/have_a_wonderful_day.wav filter=lfs diff=lfs merge=lfs -text
22
+ runtime/testdata/litert_dummy_lora32_f16_model.tflite filter=lfs diff=lfs merge=lfs -text
23
+ runtime/testdata/magic_test_both.tflite filter=lfs diff=lfs merge=lfs -text
24
+ runtime/testdata/magic_test_context_length.tflite filter=lfs diff=lfs merge=lfs -text
25
+ runtime/testdata/magic_test_decode_batch.tflite filter=lfs diff=lfs merge=lfs -text
26
+ runtime/testdata/magic_test_multi.tflite filter=lfs diff=lfs merge=lfs -text
27
+ runtime/testdata/magic_test_none.tflite filter=lfs diff=lfs merge=lfs -text
28
+ runtime/testdata/test_gpu_lora_rank32_f16_all_ones.tflite filter=lfs diff=lfs merge=lfs -text
29
+ runtime/testdata/test_hf_tokenizer.litertlm filter=lfs diff=lfs merge=lfs -text
30
+ runtime/testdata/test_lm.litertlm filter=lfs diff=lfs merge=lfs -text
31
+ runtime/testdata/test_lm.task filter=lfs diff=lfs merge=lfs -text
32
+ runtime/testdata/test_lm_deepseek_metadata_tokenizer.litertlm filter=lfs diff=lfs merge=lfs -text
33
+ runtime/testdata/test_lm_dynamic.litertlm filter=lfs diff=lfs merge=lfs -text
34
+ runtime/testdata/test_lm_external_weights.litertlm filter=lfs diff=lfs merge=lfs -text
35
+ runtime/testdata/test_lm_new_metadata.task filter=lfs diff=lfs merge=lfs -text
36
+ runtime/testdata/test_lm_no_model_type.litertlm filter=lfs diff=lfs merge=lfs -text
37
+ runtime/testdata/test_lora_rank32_f16_all_ones.tflite filter=lfs diff=lfs merge=lfs -text
38
+ runtime/testdata/test_lora_rank32_f16_all_twos.tflite filter=lfs diff=lfs merge=lfs -text
39
+ runtime/testdata/tflite_external_kv_test_fixture.tflite filter=lfs diff=lfs merge=lfs -text
40
+ schema/testdata/attention.tflite filter=lfs diff=lfs merge=lfs -text
41
+ schema/testdata/data.bin filter=lfs diff=lfs merge=lfs -text
42
+ schema/testdata/gemma3_tokenizer.spiece filter=lfs diff=lfs merge=lfs -text
43
+ schema/testdata/llm_metadata.pb filter=lfs diff=lfs merge=lfs -text
44
+ schema/testdata/test_tok_tfl_llm.litertlm filter=lfs diff=lfs merge=lfs -text
45
+ schema/testdata/test_tokenizer_tflite.litertlm filter=lfs diff=lfs merge=lfs -text
.github/workflows/auto-assignment.js ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /**
2
+ * Copyright 2025 Google LLC
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * https://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ /**
18
+ * @param {{github: !Object, context: !Object}} params
19
+ * @returns {!Promise<void>}
20
+ */
21
+ module.exports = async ({github, context}) => {
22
+ let issueNumber;
23
+ let assigneesList;
24
+
25
+ if (context.payload.issue) {
26
+ assigneesList = ['gaikwadrahul8']; // for issues
27
+ issueNumber = context.payload.issue.number;
28
+ } else if (context.payload.pull_request) {
29
+ assigneesList = ['gaikwadrahul8']; // for PRs
30
+ issueNumber = context.payload.pull_request.number;
31
+ } else {
32
+ console.log('Not an issue or PR');
33
+ return;
34
+ }
35
+
36
+ console.log('Assignee list:', assigneesList);
37
+ console.log('Entered auto assignment for this issue/PR:', issueNumber);
38
+
39
+ if (!assigneesList.length) {
40
+ console.log('No assignees found for this repo.');
41
+ return;
42
+ }
43
+
44
+ const noOfAssignees = assigneesList.length;
45
+ const selection = issueNumber % noOfAssignees;
46
+ const assigneeForIssue = assigneesList[selection];
47
+
48
+ console.log(
49
+ `Issue/PR Number = ${issueNumber}, assigning to: ${assigneeForIssue}`);
50
+
51
+ return github.rest.issues.addAssignees({
52
+ issue_number: issueNumber,
53
+ owner: context.repo.owner,
54
+ repo: context.repo.repo,
55
+ assignees: [assigneeForIssue],
56
+ });
57
+ };
.github/workflows/auto-assignment.yml ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: auto-assignment
2
+ on:
3
+ issues:
4
+ types:
5
+ - opened
6
+
7
+ permissions:
8
+ contents: read
9
+ issues: write
10
+ pull-requests: write
11
+
12
+ jobs:
13
+ welcome:
14
+ runs-on: ubuntu-latest
15
+ steps:
16
+ - uses: actions/checkout@v4
17
+ - uses: actions/github-script@v7
18
+ with:
19
+ script: |
20
+ const script = require('.github/workflows/auto-assignment.js')
21
+ script({github, context})
.github/workflows/ci-build-cmake.yml ADDED
@@ -0,0 +1,137 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "CI-CMake"
2
+ on:
3
+ push:
4
+ branches: [ "main" ]
5
+ pull_request:
6
+ branches: [ "main" ]
7
+ workflow_dispatch:
8
+ inputs:
9
+ REFRESH_CACHE:
10
+ description: 'Refresh build cache'
11
+ type: boolean
12
+ default: false
13
+
14
+ permissions:
15
+ contents: read
16
+ actions: write
17
+
18
+ env:
19
+ MODEL_KEY: gemma-3-1b-it-v1
20
+ MODEL_PATH: ./models/gemma3-1b-it-int4.litertlm
21
+ MODEL_URL: https://huggingface.co/litert-community/Gemma3-1B-IT/resolve/main/gemma3-1b-it-int4.litertlm
22
+ CCACHE_DIR: "${{ github.workspace }}/.ccache"
23
+ CCACHE_SLOPPINESS: "time_macros"
24
+
25
+ jobs:
26
+ presubmit:
27
+ name: "Presubmit-CMake-Linux"
28
+ runs-on: LiteRT_Linux_x64
29
+ container:
30
+ image: ubuntu:24.04
31
+
32
+ steps:
33
+ - name: Install System Dependencies
34
+ run: |
35
+ apt-get update && apt-get install -y \
36
+ build-essential cmake make gdb pkg-config \
37
+ openjdk-17-jre-headless git wget curl unzip \
38
+ tar python3 python3-dev python3-pip \
39
+ python3-numpy ccache zlib1g-dev libssl-dev \
40
+ libpng-dev libcurl4-openssl-dev libfftw3-dev \
41
+ apt-file && apt-file update
42
+
43
+ - name: Install Rust
44
+ run: |
45
+ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
46
+ echo "$HOME/.cargo/bin" >> $GITHUB_PATH
47
+
48
+ - name: Checkout repository
49
+ uses: actions/checkout@v4
50
+ with:
51
+ submodules: recursive
52
+
53
+ - name: Download Model
54
+ env:
55
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
56
+ run: |
57
+ mkdir -p ./models
58
+ echo "Downloading model from Hugging Face..."
59
+ curl -L --retry 5 -f \
60
+ -H "Authorization: Bearer $HF_TOKEN" \
61
+ -o ${{ env.MODEL_PATH }} \
62
+ "${{ env.MODEL_URL }}"
63
+ ls -lh ${{ env.MODEL_PATH }}
64
+
65
+ - name: Cache FetchContent Deps
66
+ uses: actions/cache@v4
67
+ with:
68
+ path: |
69
+ cmake/build/_deps
70
+ cmake/build/third_party
71
+ key: ${{ runner.os }}-fc-deps-${{ hashFiles('cmake/modules/fetch_content.cmake') }}
72
+
73
+ - name: Cache ExternalProject Deps
74
+ uses: actions/cache@v4
75
+ with:
76
+ path: |
77
+ cmake/build/external/abseil-cpp
78
+ cmake/build/external/protobuf
79
+ cmake/build/external/re2
80
+ cmake/build/external/sentencepiece
81
+ cmake/build/external/tokenizers-cpp
82
+ cmake/build/external/opencl_headers
83
+ key: ${{ runner.os }}-ext-deps-${{ hashFiles('cmake/packages/absl/**', 'cmake/packages/protobuf/**', 'cmake/packages/re2/**', 'cmake/packages/sentencepiece/**', 'cmake/packages/tokenizers/**', 'cmake/packages/opencl/**') }}
84
+
85
+ - name: Cache TensorFlow
86
+ uses: actions/cache@v4
87
+ with:
88
+ path: |
89
+ cmake/build/external/tensorflow
90
+ key: ${{ runner.os }}-tflite-${{ hashFiles('cmake/packages/tflite/**') }}
91
+ restore-keys: |
92
+ ${{ runner.os }}-tflite-
93
+
94
+ - name: Cache LiteRT
95
+ uses: actions/cache@v4
96
+ with:
97
+ path: |
98
+ cmake/build/external/litert
99
+ key: ${{ runner.os }}-litert-${{ hashFiles('cmake/packages/litert/**') }}
100
+ restore-keys: |
101
+ ${{ runner.os }}-litert-
102
+
103
+ - name: Cache CCache
104
+ uses: actions/cache@v4
105
+ with:
106
+ path: .ccache
107
+ key: ${{ runner.os }}-ccache-${{ hashFiles('**/CMakeLists.txt') }}
108
+ restore-keys: |
109
+ ${{ runner.os }}-ccache-
110
+
111
+ - name: Cache Generated Sourcetree & Flatbuffers
112
+ id: cache-generated
113
+ uses: actions/cache@v4
114
+ with:
115
+ path: |
116
+ cmake/build/generated
117
+ cmake/build/external/flatbuffers
118
+ key: ${{ runner.os }}-generated-src-v1-${{ hashFiles('schema/**/*.fbs', 'cmake/modules/generators.cmake', 'cmake/packages/flatbuffers/**', 'c/**/*.{cc,h}', 'schema/**/*.{cc,h}', 'runtime/**/*.{cc,h}') }}
119
+
120
+ - name: Configure CMake
121
+ run: |
122
+ # The restored caches will populate external/ and _deps/ before this runs.
123
+ cmake -B cmake/build -G "Unix Makefiles" -S . \
124
+ -DCMAKE_BUILD_TYPE=Release \
125
+ -DCMAKE_CXX_COMPILER=g++ \
126
+ -DCMAKE_C_COMPILER=gcc \
127
+ -DENABLE_CCACHE=ON
128
+
129
+ - name: Build
130
+ run: cmake --build cmake/build --config Release --parallel $(nproc)
131
+
132
+ - name: Install pytest
133
+ run: python3 -m pip install --break-system-packages pytest==8.3.4
134
+
135
+ - name: Run pytest
136
+ run: pytest tools/test/ --model-path=${{ env.MODEL_PATH }} --build-system=cmake
137
+
.github/workflows/ci-build-mac.yml ADDED
@@ -0,0 +1,166 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "CI-Mac"
2
+ on:
3
+ push:
4
+ tags:
5
+ - v*.*.*
6
+ pull_request:
7
+ branches:
8
+ - main
9
+ schedule:
10
+ - cron: "0 10 * * *" # Run at 2am PST (10am UTC) every day to refresh the cache.
11
+ workflow_dispatch: # Manual trigger
12
+ inputs:
13
+ REFRESH_CACHE:
14
+ description: 'Refresh cache to remove unused files'
15
+ type: boolean
16
+ default: true
17
+
18
+
19
+ concurrency:
20
+ group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
21
+ cancel-in-progress: true
22
+
23
+ jobs:
24
+ presubmit:
25
+ name: "Presubmit-Mac"
26
+ runs-on: macos-latest
27
+ permissions:
28
+ actions: write # For gh cache delete.
29
+ contents: write # For gh release upload.
30
+ env:
31
+ MODEL_KEY: gemma-3-1b-it-v1
32
+ MODEL_PATH: ./models/gemma3-1b-it-int4.litertlm
33
+ MODEL_URL: https://huggingface.co/litert-community/Gemma3-1B-IT/resolve/main/gemma3-1b-it-int4.litertlm
34
+
35
+ GH_TOKEN: ${{ github.token }} # For gh release upload.
36
+ REFRESH_CACHE: ${{ github.event_name == 'schedule' ||
37
+ (github.event_name == 'workflow_dispatch' && inputs.REFRESH_CACHE) }}
38
+ steps:
39
+ - name: Checkout code.
40
+ uses: actions/checkout@v4
41
+ with:
42
+ lfs: true
43
+
44
+ - name : Set up cache keys.
45
+ id: cache-keys
46
+ run: |
47
+ CACHE_RESTORE_KEY_2="${GITHUB_WORKFLOW}"
48
+ CACHE_RESTORE_KEY_1="$CACHE_RESTORE_KEY_2-${{ hashFiles('**/WORKSPACE', '**/.bazelrc') }}"
49
+ CACHE_RESTORE_KEY_0="$CACHE_RESTORE_KEY_1-${{ hashFiles('**/BUILD*') }}"
50
+ # If it's not a pull request, then it will be the same as $CACHE_RESTORE_KEY_1-.
51
+ CACHE_RESTORE_KEY_HEAD="$CACHE_RESTORE_KEY_0-${{ github.event.pull_request.base.sha }}"
52
+ CACHE_KEY="$CACHE_RESTORE_KEY_0-${{ github.sha }}"
53
+ echo "CACHE_RESTORE_KEY_2=$CACHE_RESTORE_KEY_2" >> "$GITHUB_OUTPUT"
54
+ echo "CACHE_RESTORE_KEY_1=$CACHE_RESTORE_KEY_1" >> "$GITHUB_OUTPUT"
55
+ echo "CACHE_RESTORE_KEY_0=$CACHE_RESTORE_KEY_0" >> "$GITHUB_OUTPUT"
56
+ echo "CACHE_RESTORE_KEY_HEAD=$CACHE_RESTORE_KEY_HEAD" >> "$GITHUB_OUTPUT"
57
+ echo "CACHE_KEY=$CACHE_KEY" >> "$GITHUB_OUTPUT"
58
+
59
+ - name: Clean build outputs if cache is being refreshed.
60
+ if: env.REFRESH_CACHE == 'true'
61
+ run: bazel clean --expunge
62
+
63
+ - name: Restore bazel cache if cache is not being refreshed.
64
+ id: bazel-cache
65
+ if: env.REFRESH_CACHE != 'true'
66
+ uses: actions/cache/restore@v4
67
+ with:
68
+ path: |
69
+ ~/.cache/bazel-macos
70
+ ~/.cache/bazel-macos-dylib
71
+ ~/.cache/bazel-ios
72
+ key: ${{ steps.cache-keys.outputs.CACHE_KEY }}
73
+ restore-keys: |
74
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_HEAD }}
75
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_0 }}-
76
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_1 }}-
77
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_2 }}-
78
+
79
+ - name: Check cache hit.
80
+ run: |
81
+ echo "Cache Hit: ${STEPS_BAZEL_CACHE_OUTPUTS_CACHE_HIT}"
82
+ echo "Cache Primary Key: ${STEPS_BAZEL_CACHE_OUTPUTS_CACHE_PRIMARY_KEY}"
83
+ echo "Cache Matched Key: ${STEPS_BAZEL_CACHE_OUTPUTS_CACHE_MATCHED_KEY}"
84
+ env:
85
+ STEPS_BAZEL_CACHE_OUTPUTS_CACHE_HIT: ${{ steps.bazel-cache.outputs.cache-hit }}
86
+ STEPS_BAZEL_CACHE_OUTPUTS_CACHE_PRIMARY_KEY: ${{ steps.bazel-cache.outputs.cache-primary-key }}
87
+ STEPS_BAZEL_CACHE_OUTPUTS_CACHE_MATCHED_KEY: ${{ steps.bazel-cache.outputs.cache-matched-key }}
88
+
89
+ - name: Download Model
90
+ env:
91
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
92
+ run: |
93
+ mkdir -p ./models
94
+ echo "Downloading model from Hugging Face..."
95
+ curl -L --retry 5 -f \
96
+ -H "Authorization: Bearer $HF_TOKEN" \
97
+ -o ${{ env.MODEL_PATH }} \
98
+ "${{ env.MODEL_URL }}"
99
+ ls -lh ${{ env.MODEL_PATH }}
100
+
101
+ - name: Run bazel build on MacOS.
102
+ run: |
103
+ bazel build --disk_cache=~/.cache/bazel-macos \
104
+ //... \
105
+ //runtime/engine:litert_lm_main
106
+
107
+ - name: Update litert_lm_main prebuilt for MacOS if new version tag is pushed.
108
+ if: github.ref_type == 'tag'
109
+ run: |
110
+ cp bazel-bin/runtime/engine/litert_lm_main litert_lm_main.macos_arm64
111
+ gh release upload ${GITHUB_REF_NAME} litert_lm_main.macos_arm64 --clobber
112
+
113
+ - name: Run bazel test on MacOS.
114
+ run: |
115
+ bazel test --disk_cache=~/.cache/bazel-macos --test_output=errors //...
116
+
117
+ - name: Install pytest
118
+ run: python3 -m pip install --break-system-packages pytest==8.3.4
119
+
120
+ - name: Run pytest
121
+ run: pytest tools/test/ --model-path=${{ env.MODEL_PATH }} --build-system=bazel
122
+
123
+ - name: Run bazel build on MacOS with dynamic linking.
124
+ run: |
125
+ bazel build --disk_cache=~/.cache/bazel-macos-dylib \
126
+ --define=litert_link_capi_so=true \
127
+ --define=resolve_symbols_in_exec=false \
128
+ //runtime/engine:litert_lm_main
129
+
130
+ - name: Run bazel build for iOS Simulator.
131
+ run: |
132
+ # The `ios_sim_arm64` config causes bazel to build some mac-only
133
+ # targets that are not compatible with the iOS simulator, which
134
+ # leads to build failures. These flags filter out those
135
+ # incompatible builds.
136
+ bazel build --disk_cache=~/.cache/bazel-ios --config=ios_sim_arm64 \
137
+ --build_tag_filters=-requires-mac-inputs:hard,-no_mac \
138
+ //... \
139
+ //runtime/engine:litert_lm_main \
140
+ -- \
141
+ -//python/... \
142
+ -//schema/py:* \
143
+ -//kotlin/...
144
+
145
+ - name: Update litert_lm_main prebuilt for iOS Simulator if new version tag is pushed.
146
+ if: github.ref_type == 'tag'
147
+ run: |
148
+ cp bazel-bin/runtime/engine/litert_lm_main litert_lm_main.ios_sim_arm64
149
+ gh release upload ${GITHUB_REF_NAME} litert_lm_main.ios_sim_arm64 --clobber
150
+
151
+ - name: Remove cache if cache is being refreshed.
152
+ if: env.REFRESH_CACHE == 'true'
153
+ continue-on-error: true # Ignore errors when cache is not found.
154
+ run: gh cache delete ${STEPS_CACHE_KEYS_OUTPUTS_CACHE_KEY}
155
+ env:
156
+ STEPS_CACHE_KEYS_OUTPUTS_CACHE_KEY: ${{ steps.cache-keys.outputs.CACHE_KEY }}
157
+
158
+ - name: Save bazel cache if it's new or being refreshed.
159
+ uses: actions/cache/save@v4
160
+ if: env.REFRESH_CACHE == 'true' || steps.bazel-cache.outputs.cache-hit != 'true'
161
+ with:
162
+ path: |
163
+ ~/.cache/bazel-macos
164
+ ~/.cache/bazel-macos-dylib
165
+ ~/.cache/bazel-ios
166
+ key: ${{ steps.cache-keys.outputs.CACHE_KEY }}
.github/workflows/ci-build-win.yml ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "CI-Win"
2
+ on:
3
+ push:
4
+ tags:
5
+ - v*.*.*
6
+ pull_request:
7
+ branches:
8
+ - main
9
+ schedule:
10
+ - cron: "0 10 * * *" # Run at 2am PST (10am UTC) every day to refresh the cache.
11
+ workflow_dispatch: # Manual trigger
12
+ inputs:
13
+ REFRESH_CACHE:
14
+ description: 'Refresh cache to remove unused files'
15
+ type: boolean
16
+ default: true
17
+
18
+ concurrency:
19
+ group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
20
+ cancel-in-progress: true
21
+
22
+ jobs:
23
+ presubmit:
24
+ name: "Presubmit-Win"
25
+ runs-on: Windows_x64
26
+ defaults:
27
+ run:
28
+ shell: pwsh
29
+ permissions:
30
+ actions: write # For gh cache delete.
31
+ contents: write # For gh release upload.
32
+ env:
33
+ MODEL_KEY: gemma-3-1b-it-v1
34
+ MODEL_PATH: .\models\gemma3-1b-it-int4.litertlm
35
+ MODEL_URL: https://huggingface.co/litert-community/Gemma3-1B-IT/resolve/main/gemma3-1b-it-int4.litertlm
36
+
37
+ # Clear ANDROID_NDK_HOME as Windows_x64 has NDK where jobs don't have permission for
38
+ # androidndk rulesto create symlinks.
39
+ ANDROID_NDK_HOME:
40
+ GH_TOKEN: ${{ github.token }} # For gh release upload.
41
+ REFRESH_CACHE: ${{ github.event_name == 'schedule' ||
42
+ (github.event_name == 'workflow_dispatch' && inputs.REFRESH_CACHE) }}
43
+ steps:
44
+ - name: Checkout code.
45
+ uses: actions/checkout@v4
46
+ with:
47
+ lfs: true
48
+
49
+ - name : Set up cache keys and bazel output base.
50
+ id: cache-keys
51
+ run: |
52
+ $CACHE_RESTORE_KEY_2="${{ github.workflow }}"
53
+ $CACHE_RESTORE_KEY_1="$CACHE_RESTORE_KEY_2-${{ hashFiles('**/WORKSPACE', '**/.bazelrc') }}"
54
+ $CACHE_RESTORE_KEY_0="$CACHE_RESTORE_KEY_1-${{ hashFiles('**/BUILD*') }}"
55
+ # If it's not a pull request, then it will be the same as $CACHE_RESTORE_KEY_1-.
56
+ $CACHE_RESTORE_KEY_HEAD="$CACHE_RESTORE_KEY_0-${{ github.event.pull_request.base.sha }}"
57
+ $CACHE_KEY="$CACHE_RESTORE_KEY_0-${{ github.sha }}"
58
+ echo "CACHE_RESTORE_KEY_2=$CACHE_RESTORE_KEY_2" >> "$env:GITHUB_OUTPUT"
59
+ echo "CACHE_RESTORE_KEY_1=$CACHE_RESTORE_KEY_1" >> "$env:GITHUB_OUTPUT"
60
+ echo "CACHE_RESTORE_KEY_0=$CACHE_RESTORE_KEY_0" >> "$env:GITHUB_OUTPUT"
61
+ echo "CACHE_RESTORE_KEY_HEAD=$CACHE_RESTORE_KEY_HEAD" >> "$env:GITHUB_OUTPUT"
62
+ echo "CACHE_KEY=$CACHE_KEY" >> "$env:GITHUB_OUTPUT"
63
+ # D: is faster than C: for I/O.
64
+ $SHORT_SHA=$("${{ github.sha }}".SubString(0, 8))
65
+ echo "BAZEL_OUTPUT_BASE=D:/w-$SHORT_SHA" >> "$env:GITHUB_ENV"
66
+
67
+ - name: Clean build outputs if cache is being refreshed.
68
+ if: env.REFRESH_CACHE == 'true'
69
+ run: bazel --output_base="$env:BAZEL_OUTPUT_BASE" clean --expunge
70
+
71
+ - name: Restore bazel cache if cache is not being refreshed.
72
+ id: bazel-cache
73
+ if: env.REFRESH_CACHE != 'true'
74
+ uses: actions/cache/restore@v4
75
+ with:
76
+ path: |
77
+ ~/.cache/bazel-windows
78
+ ~/.cache/bazel-windows-dll
79
+ key: ${{ steps.cache-keys.outputs.CACHE_KEY }}
80
+ restore-keys: |
81
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_HEAD }}
82
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_0 }}-
83
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_1 }}-
84
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_2 }}-
85
+
86
+ - name: Check cache hit.
87
+ run: |
88
+ echo "Cache Hit: ${{ steps.bazel-cache.outputs.cache-hit }}"
89
+ echo "Cache Primary Key: ${{ steps.bazel-cache.outputs.cache-primary-key }}"
90
+ echo "Cache Matched Key: ${{ steps.bazel-cache.outputs.cache-matched-key }}"
91
+
92
+ - name: Download Model
93
+ env:
94
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
95
+ run: |
96
+ if (-not (Test-Path "./models")) {
97
+ New-Item -ItemType Directory -Path "./models" -Force
98
+ }
99
+
100
+ $headers = @{
101
+ "Authorization" = "Bearer $($env:HF_TOKEN)"
102
+ }
103
+
104
+ Write-Host "Downloading model from Hugging Face: ${{ env.MODEL_URL }}"
105
+
106
+ Invoke-WebRequest -Uri "${{ env.MODEL_URL }}" `
107
+ -Headers $headers `
108
+ -OutFile "${{ env.MODEL_PATH }}" `
109
+ -MaximumRetryCount 5
110
+
111
+ $file = Get-Item "${{ env.MODEL_PATH }}"
112
+ $sizeMB = [math]::Round($file.Length / 1MB, 2)
113
+ Write-Host "Verification Success: $($file.Name) ($sizeMB MB) is ready."
114
+ Get-ChildItem "${{ env.MODEL_PATH }}" | Select-Object Name, @{Name="Size(MB)";Expression={$_.Length / 1MB}}
115
+
116
+ - name: Run bazel build on Windows.
117
+ run: |
118
+ bazel --output_base="$env:BAZEL_OUTPUT_BASE" `
119
+ build --disk_cache=~/.cache/bazel-windows `
120
+ --build_tag_filters='-nowindows' `
121
+ //... `
122
+ //runtime/engine:litert_lm_main
123
+
124
+ - name: Update litert_lm_main prebuilt for Windows if new version tag is pushed.
125
+ if: github.ref_type == 'tag'
126
+ run: |
127
+ cp bazel-bin/runtime/engine/litert_lm_main.exe litert_lm_main.windows_x86_64.exe
128
+ gh release upload ${{ github.ref_name }} litert_lm_main.windows_x86_64.exe --clobber
129
+
130
+ - name: Run bazel test on Windows.
131
+ run: |
132
+ bazel --output_base="$env:BAZEL_OUTPUT_BASE" `
133
+ test --disk_cache=~/.cache/bazel-windows --test_output=errors `
134
+ --test_tag_filters='-requires-mac-inputs:hard,-nowindows' //...
135
+
136
+ - name: Install pytest
137
+ run: python3 -m pip install --break-system-packages pytest==8.3.4
138
+
139
+ - name: Run pytest
140
+ run: pytest tools/test/ --model-path=${{ env.MODEL_PATH }} --build-system=bazel
141
+
142
+ - name: Run bazel build on Windows with dynamic linking.
143
+ run: |
144
+ bazel --output_base="$env:BAZEL_OUTPUT_BASE" `
145
+ build --disk_cache=~/.cache/bazel-windows-dll `
146
+ --define=litert_link_capi_so=true `
147
+ --define=resolve_symbols_in_exec=false `
148
+ //runtime/engine:litert_lm_main
149
+
150
+ - name: Remove cache if cache is being refreshed.
151
+ if: env.REFRESH_CACHE == 'true'
152
+ continue-on-error: true # Ignore errors when cache is not found.
153
+ run: gh cache delete ${{ steps.cache-keys.outputs.CACHE_KEY }}
154
+
155
+ - name: Save bazel cache if it's new or being refreshed.
156
+ uses: actions/cache/save@v4
157
+ if: env.REFRESH_CACHE == 'true' || steps.bazel-cache.outputs.cache-hit != 'true'
158
+ with:
159
+ path: |
160
+ ~/.cache/bazel-windows
161
+ ~/.cache/bazel-windows-dll
162
+ key: ${{ steps.cache-keys.outputs.CACHE_KEY }}
.github/workflows/ci-build.yml ADDED
@@ -0,0 +1,186 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "CI"
2
+ on:
3
+ push:
4
+ tags:
5
+ - v*.*.*
6
+ pull_request:
7
+ branches:
8
+ - main
9
+ schedule:
10
+ - cron: "0 10 * * *" # Run at 2am PST (10am UTC) every day to refresh the cache.
11
+ workflow_dispatch: # Manual trigger
12
+ inputs:
13
+ REFRESH_CACHE:
14
+ description: 'Refresh cache to remove unused files'
15
+ type: boolean
16
+ default: true
17
+
18
+ concurrency:
19
+ group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
20
+ cancel-in-progress: true
21
+
22
+ jobs:
23
+ presubmit:
24
+ name: "Presubmit"
25
+ runs-on: LiteRT_Linux_x64
26
+ permissions:
27
+ actions: write # For gh cache delete.
28
+ contents: write # For gh release upload.
29
+ env:
30
+ MODEL_KEY: gemma-3-1b-it-v1
31
+ MODEL_PATH: ./models/gemma3-1b-it-int4.litertlm
32
+ MODEL_URL: https://huggingface.co/litert-community/Gemma3-1B-IT/resolve/main/gemma3-1b-it-int4.litertlm
33
+
34
+ GH_TOKEN: ${{ github.token }} # For gh release upload.
35
+ REFRESH_CACHE: ${{ github.event_name == 'schedule' ||
36
+ (github.event_name == 'workflow_dispatch' && inputs.REFRESH_CACHE) }}
37
+ steps:
38
+ - name: Checkout code.
39
+ uses: actions/checkout@v4
40
+ with:
41
+ lfs: true
42
+
43
+ - name : Set up cache keys.
44
+ id: cache-keys
45
+ run: |
46
+ CACHE_RESTORE_KEY_2="${{ github.workflow }}"
47
+ CACHE_RESTORE_KEY_1="$CACHE_RESTORE_KEY_2-${{ hashFiles('**/WORKSPACE', '**/.bazelrc') }}"
48
+ CACHE_RESTORE_KEY_0="$CACHE_RESTORE_KEY_1-${{ hashFiles('**/BUILD*') }}"
49
+ # If it's not a pull request, then it will be the same as $CACHE_RESTORE_KEY_1-.
50
+ CACHE_RESTORE_KEY_HEAD="$CACHE_RESTORE_KEY_0-${{ github.event.pull_request.base.sha }}"
51
+ CACHE_KEY="$CACHE_RESTORE_KEY_0-${{ github.sha }}"
52
+ echo "CACHE_RESTORE_KEY_2=$CACHE_RESTORE_KEY_2" >> "$GITHUB_OUTPUT"
53
+ echo "CACHE_RESTORE_KEY_1=$CACHE_RESTORE_KEY_1" >> "$GITHUB_OUTPUT"
54
+ echo "CACHE_RESTORE_KEY_0=$CACHE_RESTORE_KEY_0" >> "$GITHUB_OUTPUT"
55
+ echo "CACHE_RESTORE_KEY_HEAD=$CACHE_RESTORE_KEY_HEAD" >> "$GITHUB_OUTPUT"
56
+ echo "CACHE_KEY=$CACHE_KEY" >> "$GITHUB_OUTPUT"
57
+
58
+ - name: Clean build outputs if cache is being refreshed.
59
+ if: env.REFRESH_CACHE == 'true'
60
+ run: bazel clean --expunge
61
+
62
+ - name: Restore bazel cache if cache is not being refreshed.
63
+ id: bazel-cache
64
+ if: env.REFRESH_CACHE != 'true'
65
+ uses: actions/cache/restore@v4
66
+ with:
67
+ path: |
68
+ ~/.cache/bazel-linux
69
+ ~/.cache/bazel-android
70
+ key: ${{ steps.cache-keys.outputs.CACHE_KEY }}
71
+ restore-keys: |
72
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_HEAD }}
73
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_0 }}-
74
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_1 }}-
75
+ ${{ steps.cache-keys.outputs.CACHE_RESTORE_KEY_2 }}-
76
+
77
+ - name: Check cache hit.
78
+ run: |
79
+ echo "Cache Hit: ${{ steps.bazel-cache.outputs.cache-hit }}"
80
+ echo "Cache Primary Key: ${{ steps.bazel-cache.outputs.cache-primary-key }}"
81
+ echo "Cache Matched Key: ${{ steps.bazel-cache.outputs.cache-matched-key }}"
82
+
83
+ - name: Download Model
84
+ env:
85
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
86
+ run: |
87
+ mkdir -p ./models
88
+ echo "Downloading model from Hugging Face..."
89
+ curl -L --retry 5 -f \
90
+ -H "Authorization: Bearer $HF_TOKEN" \
91
+ -o ${{ env.MODEL_PATH }} \
92
+ "${{ env.MODEL_URL }}"
93
+ ls -lh ${{ env.MODEL_PATH }}
94
+
95
+ - name: Run bazel build on Linux.
96
+ run: |
97
+ bazel build --disk_cache=~/.cache/bazel-linux --config=linux_x86_64 \
98
+ //... \
99
+ //runtime/engine:litert_lm_main
100
+
101
+ - name: Check if litert_lm_main doesn't link libLiteRt.so.
102
+ # Return exit code 1 if libLiteRt.so is required.
103
+ run: |
104
+ ! readelf -d bazel-bin/runtime/engine/litert_lm_main | grep libLiteRt.so
105
+
106
+ - name: Update litert_lm_main prebuilt for Linux if new version tag is pushed.
107
+ if: github.ref_type == 'tag'
108
+ run: |
109
+ cp bazel-bin/runtime/engine/litert_lm_main litert_lm_main.linux_x86_64
110
+ gh release upload ${{ github.ref_name }} litert_lm_main.linux_x86_64 --clobber
111
+
112
+ - name: Run bazel test on Linux.
113
+ run: |
114
+ bazel test --disk_cache=~/.cache/bazel-linux --config=linux_x86_64 \
115
+ --test_output=errors \
116
+ //...
117
+
118
+ - name: Install pytest
119
+ run: python3 -m pip install --break-system-packages pytest==8.3.4
120
+
121
+ - name: Run pytest
122
+ run: pytest tools/test/ --model-path=${{ env.MODEL_PATH }} --build-system=bazel
123
+
124
+ - name: Run bazel build on Linux with dynamic linking.
125
+ run: |
126
+ bazel build --config=linux_x86_64 \
127
+ --define=litert_link_capi_so=true \
128
+ --define=resolve_symbols_in_exec=false \
129
+ //runtime/engine:litert_lm_main
130
+
131
+ - name: Check if litert_lm_main has only LiteRt symbols undefined.
132
+ # Return exit code 1 if libLiteRt.so has LiteRt symbols except for LiteRtTopK
133
+ # and some exceptions listed explictly here.
134
+ # TODO b/453859132: Remove OpaqueOptions.
135
+ run: |
136
+ ! readelf -sW bazel-bin/runtime/engine/litert_lm_main \
137
+ | grep " LiteRt" | grep -v " UND LiteRt" | grep -v " LiteRtTopK" \
138
+ | grep -v -e LiteRtIsSameLayout -e LiteRtGetNumLayoutElements \
139
+ -e "LiteRt.*Logger" -e "LiteRt.*Metric" -e "LiteRt.*OpaqueOptions" \
140
+ -e "LiteRt.*EnvironmentOptions" -e LiteRtGetLogSeverityName \
141
+ -e LiteRtCompareApiVersion -e LiteRtGetStatusString \
142
+ -e LiteRtGetNumModelSignatures -e LiteRtGetModelSignature \
143
+ -e LiteRtGetSignatureKey -e LiteRtGetSignatureOutputTensor \
144
+ -e LiteRtGetQuantizationTypeId -e LiteRtGetPerTensorQuantization \
145
+ -e TensorBufferRequirements
146
+
147
+ - name: Setup Android NDK.
148
+ uses: nttld/setup-ndk@v1
149
+ id: setup-ndk
150
+ with:
151
+ ndk-version: r28b
152
+ add-to-path: false
153
+
154
+ - name: Run bazel build for Android.
155
+ run: |
156
+ bazel build --disk_cache=~/.cache/bazel-android --config=android_arm64 \
157
+ //... \
158
+ //runtime/engine:litert_lm_main \
159
+ @litert//litert/vendors/mediatek/dispatch:dispatch_api_so \
160
+ @litert//litert/vendors/qualcomm/dispatch:dispatch_api_so \
161
+ -- \
162
+ -//python/... \
163
+ -//schema/py:* \
164
+ -//kotlin/java/com/google/ai/edge/litertlm/example/...
165
+ env:
166
+ ANDROID_NDK_HOME: ${{ steps.setup-ndk.outputs.ndk-path }}
167
+
168
+ - name: Update litert_lm_main prebuilt for Android if new version tag is pushed.
169
+ if: github.ref_type == 'tag'
170
+ run: |
171
+ cp bazel-bin/runtime/engine/litert_lm_main litert_lm_main.android_arm64
172
+ gh release upload ${{ github.ref_name }} litert_lm_main.android_arm64 --clobber
173
+
174
+ - name: Remove cache if cache is being refreshed.
175
+ if: env.REFRESH_CACHE == 'true'
176
+ continue-on-error: true # Ignore errors when cache is not found.
177
+ run: gh cache delete ${{ steps.cache-keys.outputs.CACHE_KEY }}
178
+
179
+ - name: Save bazel cache if it's new or being refreshed.
180
+ uses: actions/cache/save@v4
181
+ if: env.REFRESH_CACHE == 'true' || steps.bazel-cache.outputs.cache-hit != 'true'
182
+ with:
183
+ path: |
184
+ ~/.cache/bazel-linux
185
+ ~/.cache/bazel-android
186
+ key: ${{ steps.cache-keys.outputs.CACHE_KEY }}
.github/workflows/mark_stale.yml ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
2
+ #
3
+ # You can adjust the behavior by modifying this file.
4
+ # For more information, see:
5
+ # https://github.com/actions/stale
6
+ name: Mark stale issues and pull requests
7
+
8
+ on:
9
+ schedule:
10
+ # Scheduled to run at 1.30 UTC everyday
11
+ - cron: '30 1 * * *'
12
+
13
+ jobs:
14
+ stale:
15
+ if: |
16
+ github.event_name == 'workflow_dispatch' ||
17
+ (github.event_name == 'schedule' && github.repository == 'google-ai-edge/LiteRT-LM')
18
+ runs-on: ubuntu-latest
19
+ permissions:
20
+ issues: write
21
+ pull-requests: write
22
+ actions: write
23
+
24
+ steps:
25
+ - uses: actions/stale@v9
26
+ with:
27
+ days-before-issue-stale: 7
28
+ days-before-issue-close: 7
29
+ stale-issue-label: "status:stale"
30
+ close-issue-reason: completed
31
+ any-of-labels: "status:awaiting user response,status:more data needed"
32
+ # List of labels to remove when issues/PRs unstale.
33
+ labels-to-remove-when-unstale: 'status:awaiting user response,status:stale'
34
+ stale-issue-message: >
35
+ Marking this issue as stale since it has been open for 7 days with no activity.
36
+ This issue will be closed if no further activity occurs.
37
+ close-issue-message: >
38
+ This issue was closed because it has been inactive for 14 days.
39
+ Please post a new issue if you need further assistance. Thanks!
40
+ days-before-pr-stale: 14
41
+ days-before-pr-close: 14
42
+ stale-pr-label: "status:stale"
43
+ stale-pr-message: >
44
+ Marking this pull request as stale since it has been open for 14 days with no activity.
45
+ This PR will be closed if no further activity occurs.
46
+ close-pr-message: >
47
+ This pull request was closed because it has been inactive for 28 days.
48
+ Please open a new pull request if you need further assistance. Thanks!
49
+ # Label that can be assigned to issues to exclude them from being marked as stale
50
+ exempt-issue-labels: 'override-stale'
51
+ # Label that can be assigned to PRs to exclude them from being marked as stale
52
+ exempt-pr-labels: "override-stale"
.github/workflows/nightly-linux-arm64.yml ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "Nightly-Linux-Arm64"
2
+
3
+ on:
4
+ workflow_dispatch:
5
+ inputs:
6
+ is_nightly:
7
+ description: 'Whether to build the nightly package (sets DEV_BUILD=1)'
8
+ required: true
9
+ default: true
10
+ type: boolean
11
+ publish_to_pypi:
12
+ description: 'Publish to PyPI'
13
+ required: true
14
+ default: false
15
+ type: boolean
16
+ schedule:
17
+ - cron: '0 21 * * *'
18
+ timezone: 'America/Los_Angeles'
19
+
20
+ jobs:
21
+ build-linux-wheel:
22
+ name: "Build Python Wheel ${{ matrix.python-version }}"
23
+ runs-on: ubuntu-22.04_ARM64-8core
24
+ env:
25
+ DEV_BUILD: ${{ (github.event_name == 'schedule' || github.event.inputs.is_nightly == 'true') && '1' || '0' }}
26
+ PUBLISH_TO_PYPI: ${{ (github.event_name == 'schedule' || github.event.inputs.publish_to_pypi == 'true') && 'true' || 'false' }}
27
+ strategy:
28
+ matrix:
29
+ python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
30
+ steps:
31
+ - name: Install required packages
32
+ # git-lfs is not installed in the image
33
+ run: sudo apt-get update && sudo apt-get install -y git-lfs
34
+
35
+ - name: Checkout code.
36
+ uses: actions/checkout@v4
37
+ with:
38
+ lfs: true
39
+
40
+ - name: Build Python Wheel
41
+ run: |
42
+ PYTHON_VERSION=${{ matrix.python-version }}
43
+ DATE=$(TZ=America/Los_Angeles date +'%Y%m%d')
44
+ bazel build \
45
+ --repo_env=HERMETIC_PYTHON_VERSION=${PYTHON_VERSION} \
46
+ --@rules_python//python/config_settings:python_version=${PYTHON_VERSION} \
47
+ --define=DEV_BUILD=${{ env.DEV_BUILD }} \
48
+ --define=DEV_VERSION=${DATE} \
49
+ --define=litert_link_capi_so=true \
50
+ --define=resolve_symbols_in_exec=false \
51
+ --config=linux_arm64 \
52
+ -c opt //python/litert_lm:wheel
53
+
54
+ - name: Install uv
55
+ uses: astral-sh/setup-uv@v5
56
+
57
+ - name: Test Python Wheel
58
+ run: |
59
+ PYTHON_VERSION=${{ matrix.python-version }}
60
+ uv venv --python=${PYTHON_VERSION}
61
+
62
+ # Install the built wheel
63
+ WHEEL_PATH=$(find bazel-bin/python/litert_lm -name "*.whl" | head -n 1)
64
+ uv pip install $WHEEL_PATH
65
+
66
+ # Run the verification script
67
+ uv run python python/litert_lm/examples/simple_main.py
68
+
69
+ - name: Publish to PyPI
70
+ if: env.PUBLISH_TO_PYPI == 'true'
71
+ env:
72
+ UV_PUBLISH_TOKEN: ${{ secrets.PYPI_UPLOAD_TOKEN }}
73
+ run: |
74
+ WHEEL_PATH=$(find bazel-bin/python/litert_lm -name "*.whl" | head -n 1)
75
+ uv publish $WHEEL_PATH
.github/workflows/nightly-linux-x64.yml ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "Nightly-Linux-x64"
2
+
3
+ on:
4
+ workflow_dispatch:
5
+ inputs:
6
+ is_nightly:
7
+ description: 'Whether to build the nightly package (sets DEV_BUILD=1)'
8
+ required: true
9
+ default: true
10
+ type: boolean
11
+ publish_to_pypi:
12
+ description: 'Publish to PyPI'
13
+ required: true
14
+ default: false
15
+ type: boolean
16
+ schedule:
17
+ - cron: '0 21 * * *'
18
+ timezone: 'America/Los_Angeles'
19
+
20
+ jobs:
21
+ build-linux-wheel:
22
+ name: "Build Python Wheel ${{ matrix.python-version }}"
23
+ runs-on: ubuntu-22.04-8core
24
+ env:
25
+ DEV_BUILD: ${{ (github.event_name == 'schedule' || github.event.inputs.is_nightly == 'true') && '1' || '0' }}
26
+ PUBLISH_TO_PYPI: ${{ (github.event_name == 'schedule' || github.event.inputs.publish_to_pypi == 'true') && 'true' || 'false' }}
27
+ strategy:
28
+ matrix:
29
+ python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
30
+ steps:
31
+ - name: Checkout code.
32
+ uses: actions/checkout@v4
33
+ with:
34
+ lfs: true
35
+
36
+ - name: Build Python Wheel
37
+ # Building with --define=xnn_enable_avxvnniint8=false because the Clang
38
+ # version on Ubuntu 22.04 does not support -mavxvnniint8. Ubuntu 22.04
39
+ # is used to maintain compatibility with Google Colab environments.
40
+ run: |
41
+ PYTHON_VERSION=${{ matrix.python-version }}
42
+ DATE=$(TZ=America/Los_Angeles date +'%Y%m%d')
43
+ bazel build \
44
+ --repo_env=HERMETIC_PYTHON_VERSION=${PYTHON_VERSION} \
45
+ --@rules_python//python/config_settings:python_version=${PYTHON_VERSION} \
46
+ --define=DEV_BUILD=${{ env.DEV_BUILD }} \
47
+ --define=DEV_VERSION=${DATE} \
48
+ --define=litert_link_capi_so=true \
49
+ --define=resolve_symbols_in_exec=false \
50
+ --define=xnn_enable_avxvnniint8=false \
51
+ -c opt //python/litert_lm:wheel
52
+
53
+ - name: Install uv
54
+ uses: astral-sh/setup-uv@v5
55
+
56
+ - name: Test Python Wheel
57
+ run: |
58
+ PYTHON_VERSION=${{ matrix.python-version }}
59
+ uv venv --python=${PYTHON_VERSION}
60
+
61
+ # Install the built wheel
62
+ WHEEL_PATH=$(find bazel-bin/python/litert_lm -name "*.whl" | head -n 1)
63
+ uv pip install $WHEEL_PATH
64
+
65
+ # Run the verification script
66
+ uv run python python/litert_lm/examples/simple_main.py
67
+
68
+ - name: Publish to PyPI
69
+ if: env.PUBLISH_TO_PYPI == 'true'
70
+ env:
71
+ UV_PUBLISH_TOKEN: ${{ secrets.PYPI_UPLOAD_TOKEN }}
72
+ run: |
73
+ WHEEL_PATH=$(find bazel-bin/python/litert_lm -name "*.whl" | head -n 1)
74
+ uv publish $WHEEL_PATH
75
+
76
+ # We just need to build the CLI once and it can works on all Python
77
+ # versions and different OSs. We pick Linux 3.13.
78
+ - name: Build CLI Python Wheel
79
+ if: matrix.python-version == '3.13'
80
+ run: |
81
+ DATE=$(TZ=America/Los_Angeles date +'%Y%m%d')
82
+ bazel build \
83
+ --define=DEV_BUILD=${{ env.DEV_BUILD }} \
84
+ --define=DEV_VERSION=${DATE} \
85
+ --define=xnn_enable_avxvnniint8=false \
86
+ -c opt //python/litert_lm_cli:wheel
87
+
88
+ - name: Test CLI Python Wheel
89
+ if: matrix.python-version == '3.13'
90
+ run: |
91
+ WHEEL_PATH=$(find bazel-bin/python/litert_lm_cli -name "*.whl" | head -n 1)
92
+ uv pip install $WHEEL_PATH
93
+ uv run litert-lm --help
94
+
95
+ - name: Publish CLI to PyPI
96
+ if: matrix.python-version == '3.13' && env.PUBLISH_TO_PYPI == 'true'
97
+ env:
98
+ UV_PUBLISH_TOKEN: ${{ secrets.PYPI_UPLOAD_TOKEN }}
99
+ run: |
100
+ WHEEL_PATH=$(find bazel-bin/python/litert_lm_cli -name "*.whl" | head -n 1)
101
+ uv publish $WHEEL_PATH
.github/workflows/nightly-mac-arm64.yml ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "Nightly-Mac-Arm64"
2
+
3
+ on:
4
+ workflow_dispatch:
5
+ inputs:
6
+ is_nightly:
7
+ description: 'Whether to build the nightly package (sets DEV_BUILD=1)'
8
+ required: true
9
+ default: true
10
+ type: boolean
11
+ publish_to_pypi:
12
+ description: 'Publish to PyPI'
13
+ required: true
14
+ default: false
15
+ type: boolean
16
+ schedule:
17
+ - cron: '0 21 * * *'
18
+ timezone: 'America/Los_Angeles'
19
+
20
+ jobs:
21
+ build-mac-wheel:
22
+ name: "Build Python Wheel ${{ matrix.python-version }}"
23
+ runs-on: macos-latest
24
+ env:
25
+ DEV_BUILD: ${{ (github.event_name == 'schedule' || github.event.inputs.is_nightly == 'true') && '1' || '0' }}
26
+ PUBLISH_TO_PYPI: ${{ (github.event_name == 'schedule' || github.event.inputs.publish_to_pypi == 'true') && 'true' || 'false' }}
27
+ strategy:
28
+ matrix:
29
+ python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
30
+ steps:
31
+ - name: Checkout code.
32
+ uses: actions/checkout@v4
33
+ with:
34
+ lfs: true
35
+
36
+ - name: Build Python Wheel
37
+ run: |
38
+ PYTHON_VERSION=${{ matrix.python-version }}
39
+ DATE=$(TZ=America/Los_Angeles date +'%Y%m%d')
40
+ bazel build --config=macos_arm64 \
41
+ --repo_env=HERMETIC_PYTHON_VERSION=${PYTHON_VERSION} \
42
+ --@rules_python//python/config_settings:python_version=${PYTHON_VERSION} \
43
+ --define=DEV_BUILD=${{ env.DEV_BUILD }} \
44
+ --define=DEV_VERSION=${DATE} \
45
+ -c opt //python/litert_lm:wheel
46
+
47
+ - name: Install uv
48
+ uses: astral-sh/setup-uv@v5
49
+
50
+ - name: Test Python Wheel
51
+ run: |
52
+ PYTHON_VERSION=${{ matrix.python-version }}
53
+ uv venv --python=${PYTHON_VERSION}
54
+
55
+ # Install the built wheel
56
+ WHEEL_PATH=$(find bazel-bin/python/litert_lm -name "*.whl" | head -n 1)
57
+ uv pip install $WHEEL_PATH
58
+
59
+ # Run the verification script
60
+ uv run python python/litert_lm/examples/simple_main.py
61
+
62
+ - name: Publish to PyPI
63
+ if: env.PUBLISH_TO_PYPI == 'true'
64
+ env:
65
+ UV_PUBLISH_TOKEN: ${{ secrets.PYPI_UPLOAD_TOKEN }}
66
+ run: |
67
+ WHEEL_PATH=$(find bazel-bin/python/litert_lm -name "*.whl" | head -n 1)
68
+ uv publish $WHEEL_PATH
.gitignore ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ /MODULE.bazel.lock
2
+ /bazel-*
BUILD ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2025 The ODML Authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
BUILD.antlr4 ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package(
2
+ default_visibility = ["//visibility:public"],
3
+ )
4
+
5
+ cc_library(
6
+ name = "cpp",
7
+ srcs = glob(["runtime/Cpp/runtime/src/**/*.cpp"]),
8
+ hdrs = glob(["runtime/Cpp/runtime/src/**/*.h"]),
9
+ includes = ["runtime/Cpp/runtime/src"],
10
+ visibility = ["//visibility:public"],
11
+ defines = ["ANTLR4CPP_STATIC"],
12
+ )
BUILD.llguidance ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ cc_library(
2
+ name = "llguidance_cc",
3
+ hdrs = ["llguidance.h"],
4
+ deps = [":llguidance"],
5
+ visibility = ["//visibility:public"],
6
+ )
BUILD.miniaudio ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ load("@rules_cc//cc:objc_library.bzl", "objc_library")
2
+ package(default_visibility = ["//visibility:public"])
3
+
4
+ cc_library(
5
+ name = "miniaudio",
6
+ srcs = ["miniaudio.c"],
7
+ hdrs = ["miniaudio.h"],
8
+ )
9
+
10
+ genrule(
11
+ name = "generate_miniaudio_mm",
12
+ srcs = ["miniaudio.c"],
13
+ outs = ["miniaudio.mm"],
14
+ cmd = "cp $(location miniaudio.c) $(location miniaudio.mm)",
15
+ )
16
+
17
+ objc_library(
18
+ name = "miniaudio_objc",
19
+ srcs = [":miniaudio.mm"],
20
+ hdrs = ["miniaudio.h"],
21
+ sdk_frameworks = [
22
+ "AudioToolbox",
23
+ "AVFoundation",
24
+ ],
25
+ )
BUILD.minizip ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package(default_visibility = ["//visibility:public"])
2
+
3
+ cc_library(
4
+ name = "zlib_minizip",
5
+ srcs = [
6
+ "minizip/ioapi.c",
7
+ "minizip/unzip.c",
8
+ "minizip/zip.c",
9
+ ] + select({
10
+ "@platforms//os:windows": ["minizip/iowin32.c"],
11
+ "//conditions:default": [],
12
+ }),
13
+ hdrs = [
14
+ "minizip/crypt.h",
15
+ "minizip/ioapi.h",
16
+ "minizip/mztools.h",
17
+ "minizip/unzip.h",
18
+ "minizip/zip.h",
19
+ ] + select({
20
+ "@platforms//os:windows": ["minizip/iowin32.h"],
21
+ "//conditions:default": [],
22
+ }),
23
+ copts = [
24
+ "-DZLIB_MINIZIP_LIB",
25
+ "-std=gnu17", # Does not compile in C23; uses non-prototype function definitions.
26
+ ] + select({
27
+ "@platforms//os:windows": [
28
+ "-D_UNICODE",
29
+ "-DUNICODE",
30
+ ],
31
+ "//conditions:default": [
32
+ "-Wno-dangling-else",
33
+ "-Wno-format",
34
+ "-Wno-incompatible-pointer-types",
35
+ "-Wno-incompatible-pointer-types-discards-qualifiers",
36
+ "-Wno-parentheses",
37
+ ],
38
+ }) + select({
39
+ "@platforms//os:android": ["-DIOAPI_NO_64"],
40
+ "//conditions:default": [],
41
+ }),
42
+ deps = ["@zlib//:zlib"],
43
+ )
BUILD.minja ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package(
2
+ default_visibility = ["//visibility:public"],
3
+ )
4
+
5
+ cc_library(
6
+ name = "minja_google",
7
+ hdrs = [
8
+ "include/minja/chat-template.hpp",
9
+ "include/minja/minja.hpp",
10
+ ],
11
+ srcs = [
12
+ "include/minja/chat-template.hpp",
13
+ "include/minja/minja.hpp",
14
+ ],
15
+ includes = ["include"],
16
+ )
BUILD.nanobind_json ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ load("@rules_cc//cc:defs.bzl", "cc_library")
2
+
3
+ cc_library(
4
+ name = "nanobind_json",
5
+ hdrs = ["include/nanobind_json/nanobind_json.hpp"],
6
+ includes = ["include"],
7
+ visibility = ["//visibility:public"],
8
+ deps = [
9
+ "@nlohmann_json//:json",
10
+ "@nanobind",
11
+ ],
12
+ )
BUILD.sentencepiece ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package(default_visibility = ["//visibility:public"])
2
+
3
+ proto_library(
4
+ name = "sentencepiece_proto",
5
+ srcs = ["sentencepiece.proto"],
6
+ )
7
+
8
+ cc_proto_library(
9
+ name = "sentencepiece_cc_proto",
10
+ deps = [":sentencepiece_proto"],
11
+ )
12
+
13
+ proto_library(
14
+ name = "sentencepiece_model_proto",
15
+ srcs = ["sentencepiece_model.proto"],
16
+ )
17
+
18
+ cc_proto_library(
19
+ name = "sentencepiece_model_cc_proto",
20
+ deps = [":sentencepiece_model_proto"],
21
+ )
22
+
23
+ cc_library(
24
+ name = "sentencepiece_processor",
25
+ srcs = [
26
+ "bpe_model.cc",
27
+ "char_model.cc",
28
+ "filesystem.cc",
29
+ "model_factory.cc",
30
+ "model_interface.cc",
31
+ "normalizer.cc",
32
+ "sentencepiece_processor.cc",
33
+ "unigram_model.cc",
34
+ "util.cc",
35
+ "word_model.cc",
36
+ ],
37
+ hdrs = [
38
+ "bpe_model.h",
39
+ "char_model.h",
40
+ "common.h",
41
+ "config.h",
42
+ "filesystem.h",
43
+ "freelist.h",
44
+ "model_factory.h",
45
+ "model_interface.h",
46
+ "normalizer.h",
47
+ "sentencepiece_processor.h",
48
+ "sentencepiece_trainer.h",
49
+ "trainer_interface.h",
50
+ "unigram_model.h",
51
+ "util.h",
52
+ "word_model.h",
53
+ ],
54
+ copts = [
55
+ "-DENABLE_NFKC_COMPILE",
56
+ "-DSENTENCEPIECE_PG3_BUILD",
57
+ ],
58
+ deps = [
59
+ ":sentencepiece_cc_proto",
60
+ ":sentencepiece_model_cc_proto",
61
+ "@com_google_absl//absl/base:core_headers",
62
+ "@com_google_absl//absl/cleanup",
63
+ "@com_google_absl//absl/log",
64
+ "@com_google_absl//absl/log:check",
65
+ "@com_google_absl//absl/memory",
66
+ "@com_google_absl//absl/status",
67
+ "@com_google_absl//absl/status:statusor",
68
+ "@com_google_absl//absl/strings",
69
+ "@com_google_absl//absl/container:flat_hash_map",
70
+ "@com_google_absl//absl/container:flat_hash_set",
71
+ "@com_google_absl//absl/strings:str_format",
72
+ "@darts_clone//:darts_clone",
73
+ ],
74
+ )
BUILD.stb ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ package(default_visibility = ["//visibility:public"])
2
+
3
+ cc_library(
4
+ name = "stb_image",
5
+ hdrs = ["stb_image.h"],
6
+ )
7
+
8
+ cc_library(
9
+ name = "stblib",
10
+ hdrs = [
11
+ "stb_dxt.h",
12
+ "stb_image.h",
13
+ "stb_image_resize2.h",
14
+ "stb_image_write.h",
15
+ ],
16
+ )
BUILD.tokenizers_cpp ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ load("@rules_rust//rust:defs.bzl", "rust_library")
2
+
3
+ package(
4
+ default_visibility = ["//visibility:public"],
5
+ )
6
+
7
+ rust_library(
8
+ name = "huggingface_tokenizer_impl",
9
+ srcs = ["rust/src/lib.rs"],
10
+ edition = "2018",
11
+ proc_macro_deps = [],
12
+ deps = [
13
+ "@crate_index//:serde_json",
14
+ "@crate_index//:tokenizers",
15
+ ],
16
+ )
17
+
18
+ cc_library(
19
+ name = "huggingface_tokenizer",
20
+ srcs = ["src/huggingface_tokenizer.cc"],
21
+ hdrs = [
22
+ "include/tokenizers_c.h",
23
+ "include/tokenizers_cpp.h",
24
+ ],
25
+ includes = ["include"],
26
+ deps = [
27
+ ":huggingface_tokenizer_impl",
28
+ ],
29
+ linkopts = select({
30
+ "@platforms//os:windows": ["kernel32.lib", "ntdll.lib"],
31
+ "//conditions:default": [],
32
+ }),
33
+ )
CMakeLists.txt ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2026 Google LLC.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+
16
+ # CMakeLists.txt for LiteRT-LM-Orchestrator.
17
+ cmake_minimum_required(VERSION 3.25 FATAL_ERROR)
18
+ project(LiteRT-LM-Orchestrator LANGUAGES NONE)
19
+
20
+ if(NOT DEFINED LITERTLM_HOST_PROTOC)
21
+ set(LITERTLM_HOST_PROTOC "${CMAKE_CURRENT_BINARY_DIR}/prebuild/build/external/protobuf/install/bin/protoc")
22
+ set(LITERTLM_HOST_PROTOC_BIN_DIR "${CMAKE_CURRENT_BINARY_DIR}/prebuild/build/external/protobuf/install/bin")
23
+ endif()
24
+
25
+ if(NOT DEFINED LITERTLM_HOST_FLATC)
26
+ set(LITERTLM_HOST_FLATC "${CMAKE_CURRENT_BINARY_DIR}/prebuild/build/external/flatbuffers/install/bin/flatc")
27
+ set(LITERTLM_HOST_FLATC_BIN_DIR "${CMAKE_CURRENT_BINARY_DIR}/prebuild/build/external/flatbuffers/install/bin")
28
+ endif()
29
+
30
+ include(ExternalProject)
31
+
32
+ set(LITERTLM_PROJECT_ROOT "${CMAKE_CURRENT_SOURCE_DIR}" CACHE PATH "LiteRT-LM: Absolute path to the repository root")
33
+ set(LITERTLM_TOOLCHAIN_ARGS "" CACHE STRING "String used to define parameterized CMAKE_ARGS")
34
+
35
+ set(_PREBUILD_DEPENDENCY "")
36
+
37
+ if(LITERTLM_TOOLCHAIN_ARGS)
38
+ message(STATUS "[LiteRTLM] Toolchain args detected. Generating Host Prebuild phase...")
39
+ ExternalProject_Add(
40
+ litert_lm_prebuild
41
+
42
+ SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/cmake/packages/litert_lm"
43
+
44
+ PREFIX "${CMAKE_CURRENT_BINARY_DIR}/prebuild"
45
+ BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}/prebuild/build"
46
+ STAMP_DIR "${CMAKE_CURRENT_BINARY_DIR}/prebuild/stamps"
47
+ TMP_DIR "${CMAKE_CURRENT_BINARY_DIR}/prebuild/tmp"
48
+ DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}/prebuild/download"
49
+ INSTALL_DIR "${CMAKE_CURRENT_BINARY_DIR}/prebuild/install"
50
+
51
+ CMAKE_ARGS
52
+ "-DLITERTLM_PROJECT_ROOT=${LITERTLM_PROJECT_ROOT}"
53
+
54
+ INSTALL_COMMAND ""
55
+ )
56
+ set(_PREBUILD_DEPENDENCY "litert_lm_prebuild")
57
+ else()
58
+ message(STATUS "[LiteRTLM] Native build detected. Skipping Host Prebuild phase.")
59
+ endif()
60
+
61
+ if(DEFINED CMAKE_TOOLCHAIN_FILE)
62
+ set(LITERTLM_TOOLCHAIN_FILE "-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}")
63
+ endif()
64
+
65
+ if(DEFINED LITERTLM_TOOLCHAIN_SCRIPT)
66
+ message(STATUS "[LiteRTLM] Executing toolchain script -> ${LITERTLM_TOOLCHAIN_SCRIPT}")
67
+ include("${LITERTLM_TOOLCHAIN_SCRIPT}")
68
+ endif()
69
+
70
+ ExternalProject_Add(
71
+ litert_lm
72
+
73
+ DEPENDS ${_PREBUILD_DEPENDENCY}
74
+
75
+ SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/cmake/packages/litert_lm"
76
+
77
+ PREFIX "${CMAKE_CURRENT_BINARY_DIR}/litert_lm"
78
+ BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}/litert_lm/build"
79
+ STAMP_DIR "${CMAKE_CURRENT_BINARY_DIR}/litert_lm/stamps"
80
+ TMP_DIR "${CMAKE_CURRENT_BINARY_DIR}/litert_lm/tmp"
81
+ DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}/litert_lm/download"
82
+ INSTALL_DIR "${CMAKE_CURRENT_BINARY_DIR}/litert_lm/install"
83
+
84
+ CMAKE_ARGS
85
+ ${LITERTLM_TOOLCHAIN_FILE}
86
+ "-DLITERTLM_TOOLCHAIN_FILE=${LITERTLM_TOOLCHAIN_FILE}"
87
+
88
+ ${LITERTLM_TOOLCHAIN_ARGS}
89
+ "-DLITERTLM_TOOLCHAIN_ARGS=${LITERTLM_TOOLCHAIN_ARGS}"
90
+
91
+ "-DLITERTLM_PROJECT_ROOT=${LITERTLM_PROJECT_ROOT}"
92
+ "-DLITERTLM_HOST_PROTOC=${LITERTLM_HOST_PROTOC}"
93
+ "-DLITERTLM_HOST_PROTOC_BIN_DIR=${LITERTLM_HOST_PROTOC_BIN_DIR}"
94
+ "-DLITERTLM_HOST_FLATC=${LITERTLM_HOST_FLATC}"
95
+ "-DLITERTLM_HOST_FLATC_BIN_DIR=${LITERTLM_HOST_FLATC_BIN_DIR}"
96
+
97
+ INSTALL_COMMAND ""
98
+ BUILD_ALWAYS TRUE
99
+ )
CMakePresets.json ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "version": 3,
3
+ "cmakeMinimumRequired": {
4
+ "major": 3,
5
+ "minor": 25,
6
+ "patch": 0
7
+ },
8
+ "configurePresets": [
9
+ {
10
+ "name": "make",
11
+ "hidden": true,
12
+ "generator": "Unix Makefiles",
13
+ "binaryDir": "${sourceDir}/cmake/build/${presetName}"
14
+ },
15
+ {
16
+ "name": "android-arm64",
17
+ "displayName": "Android arm64-v8a (API 28)",
18
+ "description": "Cross-compiles LiteRT-LM for Android arm64. Auto-builds host tools first.",
19
+ "inherits": "make",
20
+ "environment": {
21
+ "ANDROID_NDK_ROOT": "$env{HOME}/android-ndk/android-ndk-r26d"
22
+ },
23
+ "cacheVariables": {
24
+ "CMAKE_TOOLCHAIN_FILE": "${sourceDir}/cmake/toolchains/litertlm_android.toolchain.cmake",
25
+ "LITERTLM_TOOLCHAIN_SCRIPT": "${sourceDir}/cmake/toolchains/litertlm_android.script.cmake",
26
+ "CMAKE_SYSTEM_NAME": "Android",
27
+ "CMAKE_SYSTEM_VERSION": "28",
28
+ "CMAKE_ANDROID_ARCH_ABI": "arm64-v8a",
29
+ "CMAKE_ANDROID_NDK": "$env{HOME}/android-ndk/android-ndk-r26d",
30
+ "CMAKE_ANDROID_STL_TYPE": "c++_shared",
31
+ "ANDROID_ABI": "arm64-v8a",
32
+ "ANDROID_PLATFORM": "android-28",
33
+ "ANDROID_STL": "c++_shared",
34
+ "ANDROID_NDK_ROOT": "$env{HOME}/android-ndk/android-ndk-r26d"
35
+ }
36
+ }
37
+ ]
38
+ }
CONTRIBUTING.md ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ The LiteRT LM repository is not currently ready for code contributions. We will
2
+ make a separate announcement when we are ready for OSS users to build from this
3
+ repository and make contributions to it. In the meanwhile, if you have any
4
+ suggestions, or issues, please feel free to create a
5
+ [GitHub Issues](https://github.com/google-ai-edge/LiteRT-LM/issues/new)
6
+ to us. Thanks!
Cargo.lock ADDED
@@ -0,0 +1,1344 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is automatically @generated by Cargo.
2
+ # It is not intended for manual editing.
3
+ version = 4
4
+
5
+ [[package]]
6
+ name = "ahash"
7
+ version = "0.8.12"
8
+ source = "registry+https://github.com/rust-lang/crates.io-index"
9
+ checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
10
+ dependencies = [
11
+ "cfg-if",
12
+ "getrandom 0.3.4",
13
+ "once_cell",
14
+ "version_check",
15
+ "zerocopy",
16
+ ]
17
+
18
+ [[package]]
19
+ name = "aho-corasick"
20
+ version = "1.1.4"
21
+ source = "registry+https://github.com/rust-lang/crates.io-index"
22
+ checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
23
+ dependencies = [
24
+ "memchr",
25
+ ]
26
+
27
+ [[package]]
28
+ name = "android_system_properties"
29
+ version = "0.1.5"
30
+ source = "registry+https://github.com/rust-lang/crates.io-index"
31
+ checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
32
+ dependencies = [
33
+ "libc",
34
+ ]
35
+
36
+ [[package]]
37
+ name = "anstyle"
38
+ version = "1.0.13"
39
+ source = "registry+https://github.com/rust-lang/crates.io-index"
40
+ checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
41
+
42
+ [[package]]
43
+ name = "antlr4rust"
44
+ version = "0.5.2"
45
+ source = "registry+https://github.com/rust-lang/crates.io-index"
46
+ checksum = "093d520274bfff7278d776f7ea12981a0a0a6f96db90964658e0f38fc6e9a6a6"
47
+ dependencies = [
48
+ "better_any",
49
+ "bit-set",
50
+ "byteorder",
51
+ "lazy_static",
52
+ "murmur3",
53
+ "once_cell",
54
+ "parking_lot",
55
+ "typed-arena",
56
+ "uuid",
57
+ ]
58
+
59
+ [[package]]
60
+ name = "antlr_fc_tool_call_parser"
61
+ version = "0.1.0"
62
+ dependencies = [
63
+ "antlr4rust",
64
+ ]
65
+
66
+ [[package]]
67
+ name = "antlr_python_tool_call_parser"
68
+ version = "0.1.0"
69
+ dependencies = [
70
+ "antlr4rust",
71
+ ]
72
+
73
+ [[package]]
74
+ name = "anyhow"
75
+ version = "1.0.101"
76
+ source = "registry+https://github.com/rust-lang/crates.io-index"
77
+ checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea"
78
+
79
+ [[package]]
80
+ name = "autocfg"
81
+ version = "1.5.0"
82
+ source = "registry+https://github.com/rust-lang/crates.io-index"
83
+ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
84
+
85
+ [[package]]
86
+ name = "base64"
87
+ version = "0.13.1"
88
+ source = "registry+https://github.com/rust-lang/crates.io-index"
89
+ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
90
+
91
+ [[package]]
92
+ name = "better_any"
93
+ version = "0.2.1"
94
+ source = "registry+https://github.com/rust-lang/crates.io-index"
95
+ checksum = "4372b9543397a4b86050cc5e7ee36953edf4bac9518e8a774c2da694977fb6e4"
96
+
97
+ [[package]]
98
+ name = "bit-set"
99
+ version = "0.8.0"
100
+ source = "registry+https://github.com/rust-lang/crates.io-index"
101
+ checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
102
+ dependencies = [
103
+ "bit-vec",
104
+ ]
105
+
106
+ [[package]]
107
+ name = "bit-vec"
108
+ version = "0.8.0"
109
+ source = "registry+https://github.com/rust-lang/crates.io-index"
110
+ checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
111
+
112
+ [[package]]
113
+ name = "bitflags"
114
+ version = "2.10.0"
115
+ source = "registry+https://github.com/rust-lang/crates.io-index"
116
+ checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"
117
+
118
+ [[package]]
119
+ name = "bumpalo"
120
+ version = "3.19.1"
121
+ source = "registry+https://github.com/rust-lang/crates.io-index"
122
+ checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510"
123
+
124
+ [[package]]
125
+ name = "bytemuck"
126
+ version = "1.25.0"
127
+ source = "registry+https://github.com/rust-lang/crates.io-index"
128
+ checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec"
129
+
130
+ [[package]]
131
+ name = "bytemuck_derive"
132
+ version = "1.10.2"
133
+ source = "registry+https://github.com/rust-lang/crates.io-index"
134
+ checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff"
135
+ dependencies = [
136
+ "proc-macro2",
137
+ "quote",
138
+ "syn",
139
+ ]
140
+
141
+ [[package]]
142
+ name = "byteorder"
143
+ version = "1.5.0"
144
+ source = "registry+https://github.com/rust-lang/crates.io-index"
145
+ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
146
+
147
+ [[package]]
148
+ name = "cc"
149
+ version = "1.2.55"
150
+ source = "registry+https://github.com/rust-lang/crates.io-index"
151
+ checksum = "47b26a0954ae34af09b50f0de26458fa95369a0d478d8236d3f93082b219bd29"
152
+ dependencies = [
153
+ "find-msvc-tools",
154
+ "shlex",
155
+ ]
156
+
157
+ [[package]]
158
+ name = "cfg-if"
159
+ version = "1.0.4"
160
+ source = "registry+https://github.com/rust-lang/crates.io-index"
161
+ checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
162
+
163
+ [[package]]
164
+ name = "chrono"
165
+ version = "0.4.43"
166
+ source = "registry+https://github.com/rust-lang/crates.io-index"
167
+ checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118"
168
+ dependencies = [
169
+ "iana-time-zone",
170
+ "js-sys",
171
+ "num-traits",
172
+ "wasm-bindgen",
173
+ "windows-link",
174
+ ]
175
+
176
+ [[package]]
177
+ name = "clap"
178
+ version = "4.5.57"
179
+ source = "registry+https://github.com/rust-lang/crates.io-index"
180
+ checksum = "6899ea499e3fb9305a65d5ebf6e3d2248c5fab291f300ad0a704fbe142eae31a"
181
+ dependencies = [
182
+ "clap_builder",
183
+ ]
184
+
185
+ [[package]]
186
+ name = "clap_builder"
187
+ version = "4.5.57"
188
+ source = "registry+https://github.com/rust-lang/crates.io-index"
189
+ checksum = "7b12c8b680195a62a8364d16b8447b01b6c2c8f9aaf68bee653be34d4245e238"
190
+ dependencies = [
191
+ "anstyle",
192
+ "clap_lex",
193
+ "strsim",
194
+ ]
195
+
196
+ [[package]]
197
+ name = "clap_lex"
198
+ version = "0.7.7"
199
+ source = "registry+https://github.com/rust-lang/crates.io-index"
200
+ checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32"
201
+
202
+ [[package]]
203
+ name = "codespan-reporting"
204
+ version = "0.11.1"
205
+ source = "registry+https://github.com/rust-lang/crates.io-index"
206
+ checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
207
+ dependencies = [
208
+ "termcolor",
209
+ "unicode-width",
210
+ ]
211
+
212
+ [[package]]
213
+ name = "core-foundation-sys"
214
+ version = "0.8.7"
215
+ source = "registry+https://github.com/rust-lang/crates.io-index"
216
+ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
217
+
218
+ [[package]]
219
+ name = "crossbeam-deque"
220
+ version = "0.8.6"
221
+ source = "registry+https://github.com/rust-lang/crates.io-index"
222
+ checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
223
+ dependencies = [
224
+ "crossbeam-epoch",
225
+ "crossbeam-utils",
226
+ ]
227
+
228
+ [[package]]
229
+ name = "crossbeam-epoch"
230
+ version = "0.9.18"
231
+ source = "registry+https://github.com/rust-lang/crates.io-index"
232
+ checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
233
+ dependencies = [
234
+ "crossbeam-utils",
235
+ ]
236
+
237
+ [[package]]
238
+ name = "crossbeam-utils"
239
+ version = "0.8.21"
240
+ source = "registry+https://github.com/rust-lang/crates.io-index"
241
+ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
242
+
243
+ [[package]]
244
+ name = "cxx"
245
+ version = "1.0.149"
246
+ source = "registry+https://github.com/rust-lang/crates.io-index"
247
+ checksum = "478c837c611bc2a9fdeec08f85a5b198bb4e0bbdb3069f02443d2291383a7b42"
248
+ dependencies = [
249
+ "cc",
250
+ "cxxbridge-cmd",
251
+ "cxxbridge-flags",
252
+ "cxxbridge-macro",
253
+ "foldhash",
254
+ "link-cplusplus",
255
+ ]
256
+
257
+ [[package]]
258
+ name = "cxxbridge-cmd"
259
+ version = "1.0.149"
260
+ source = "registry+https://github.com/rust-lang/crates.io-index"
261
+ checksum = "a5fff7916bbde05c2db99469f09dcfaf203bf25b096ccbf4e761a04792412e10"
262
+ dependencies = [
263
+ "clap",
264
+ "codespan-reporting",
265
+ "proc-macro2",
266
+ "quote",
267
+ "syn",
268
+ ]
269
+
270
+ [[package]]
271
+ name = "cxxbridge-flags"
272
+ version = "1.0.149"
273
+ source = "registry+https://github.com/rust-lang/crates.io-index"
274
+ checksum = "4336c994ee47479f439b61a9723ed894ab4551d91e0f217c1e84515d57ea3d4f"
275
+
276
+ [[package]]
277
+ name = "cxxbridge-macro"
278
+ version = "1.0.149"
279
+ source = "registry+https://github.com/rust-lang/crates.io-index"
280
+ checksum = "4212f144792e9bc9d6891e369f87cc3adb7387a552993df8767d352482b3f88a"
281
+ dependencies = [
282
+ "proc-macro2",
283
+ "quote",
284
+ "rustversion",
285
+ "syn",
286
+ ]
287
+
288
+ [[package]]
289
+ name = "darling"
290
+ version = "0.20.11"
291
+ source = "registry+https://github.com/rust-lang/crates.io-index"
292
+ checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee"
293
+ dependencies = [
294
+ "darling_core",
295
+ "darling_macro",
296
+ ]
297
+
298
+ [[package]]
299
+ name = "darling_core"
300
+ version = "0.20.11"
301
+ source = "registry+https://github.com/rust-lang/crates.io-index"
302
+ checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e"
303
+ dependencies = [
304
+ "fnv",
305
+ "ident_case",
306
+ "proc-macro2",
307
+ "quote",
308
+ "strsim",
309
+ "syn",
310
+ ]
311
+
312
+ [[package]]
313
+ name = "darling_macro"
314
+ version = "0.20.11"
315
+ source = "registry+https://github.com/rust-lang/crates.io-index"
316
+ checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead"
317
+ dependencies = [
318
+ "darling_core",
319
+ "quote",
320
+ "syn",
321
+ ]
322
+
323
+ [[package]]
324
+ name = "derive_builder"
325
+ version = "0.20.2"
326
+ source = "registry+https://github.com/rust-lang/crates.io-index"
327
+ checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947"
328
+ dependencies = [
329
+ "derive_builder_macro",
330
+ ]
331
+
332
+ [[package]]
333
+ name = "derive_builder_core"
334
+ version = "0.20.2"
335
+ source = "registry+https://github.com/rust-lang/crates.io-index"
336
+ checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8"
337
+ dependencies = [
338
+ "darling",
339
+ "proc-macro2",
340
+ "quote",
341
+ "syn",
342
+ ]
343
+
344
+ [[package]]
345
+ name = "derive_builder_macro"
346
+ version = "0.20.2"
347
+ source = "registry+https://github.com/rust-lang/crates.io-index"
348
+ checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c"
349
+ dependencies = [
350
+ "derive_builder_core",
351
+ "syn",
352
+ ]
353
+
354
+ [[package]]
355
+ name = "derivre"
356
+ version = "0.3.8"
357
+ source = "registry+https://github.com/rust-lang/crates.io-index"
358
+ checksum = "786c7c65c4ef0c7deb05de3005e01991612a8f09fe0844fc0969c68b90468ba8"
359
+ dependencies = [
360
+ "ahash",
361
+ "anyhow",
362
+ "bytemuck",
363
+ "bytemuck_derive",
364
+ "hashbrown 0.15.5",
365
+ "regex-syntax",
366
+ "strum",
367
+ ]
368
+
369
+ [[package]]
370
+ name = "either"
371
+ version = "1.15.0"
372
+ source = "registry+https://github.com/rust-lang/crates.io-index"
373
+ checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
374
+
375
+ [[package]]
376
+ name = "equivalent"
377
+ version = "1.0.2"
378
+ source = "registry+https://github.com/rust-lang/crates.io-index"
379
+ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
380
+
381
+ [[package]]
382
+ name = "esaxx-rs"
383
+ version = "0.1.10"
384
+ source = "registry+https://github.com/rust-lang/crates.io-index"
385
+ checksum = "d817e038c30374a4bcb22f94d0a8a0e216958d4c3dcde369b1439fec4bdda6e6"
386
+
387
+ [[package]]
388
+ name = "fc_parser"
389
+ version = "0.1.0"
390
+ dependencies = [
391
+ "antlr4rust",
392
+ "antlr_fc_tool_call_parser",
393
+ "serde",
394
+ "serde_json",
395
+ ]
396
+
397
+ [[package]]
398
+ name = "find-msvc-tools"
399
+ version = "0.1.9"
400
+ source = "registry+https://github.com/rust-lang/crates.io-index"
401
+ checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582"
402
+
403
+ [[package]]
404
+ name = "fnv"
405
+ version = "1.0.7"
406
+ source = "registry+https://github.com/rust-lang/crates.io-index"
407
+ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
408
+
409
+ [[package]]
410
+ name = "foldhash"
411
+ version = "0.1.5"
412
+ source = "registry+https://github.com/rust-lang/crates.io-index"
413
+ checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
414
+
415
+ [[package]]
416
+ name = "getrandom"
417
+ version = "0.2.17"
418
+ source = "registry+https://github.com/rust-lang/crates.io-index"
419
+ checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0"
420
+ dependencies = [
421
+ "cfg-if",
422
+ "libc",
423
+ "wasi",
424
+ ]
425
+
426
+ [[package]]
427
+ name = "getrandom"
428
+ version = "0.3.4"
429
+ source = "registry+https://github.com/rust-lang/crates.io-index"
430
+ checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
431
+ dependencies = [
432
+ "cfg-if",
433
+ "libc",
434
+ "r-efi",
435
+ "wasip2",
436
+ ]
437
+
438
+ [[package]]
439
+ name = "googletest"
440
+ version = "0.14.2"
441
+ source = "registry+https://github.com/rust-lang/crates.io-index"
442
+ checksum = "06597b7d02ee58b9a37f522785ac15b9e18c6b178747c4439a6c03fbb35ea753"
443
+ dependencies = [
444
+ "googletest_macro",
445
+ "num-traits",
446
+ "regex",
447
+ "rustversion",
448
+ ]
449
+
450
+ [[package]]
451
+ name = "googletest_macro"
452
+ version = "0.14.2"
453
+ source = "registry+https://github.com/rust-lang/crates.io-index"
454
+ checksum = "c31d9f07c9c19b855faebf71637be3b43f8e13a518aece5d61a3beee7710b4ef"
455
+ dependencies = [
456
+ "proc-macro2",
457
+ "quote",
458
+ "syn",
459
+ ]
460
+
461
+ [[package]]
462
+ name = "hashbrown"
463
+ version = "0.15.5"
464
+ source = "registry+https://github.com/rust-lang/crates.io-index"
465
+ checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
466
+
467
+ [[package]]
468
+ name = "hashbrown"
469
+ version = "0.16.1"
470
+ source = "registry+https://github.com/rust-lang/crates.io-index"
471
+ checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
472
+
473
+ [[package]]
474
+ name = "heck"
475
+ version = "0.5.0"
476
+ source = "registry+https://github.com/rust-lang/crates.io-index"
477
+ checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
478
+
479
+ [[package]]
480
+ name = "iana-time-zone"
481
+ version = "0.1.65"
482
+ source = "registry+https://github.com/rust-lang/crates.io-index"
483
+ checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470"
484
+ dependencies = [
485
+ "android_system_properties",
486
+ "core-foundation-sys",
487
+ "iana-time-zone-haiku",
488
+ "js-sys",
489
+ "log",
490
+ "wasm-bindgen",
491
+ "windows-core",
492
+ ]
493
+
494
+ [[package]]
495
+ name = "iana-time-zone-haiku"
496
+ version = "0.1.2"
497
+ source = "registry+https://github.com/rust-lang/crates.io-index"
498
+ checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
499
+ dependencies = [
500
+ "cc",
501
+ ]
502
+
503
+ [[package]]
504
+ name = "ident_case"
505
+ version = "1.0.1"
506
+ source = "registry+https://github.com/rust-lang/crates.io-index"
507
+ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
508
+
509
+ [[package]]
510
+ name = "indexmap"
511
+ version = "2.13.0"
512
+ source = "registry+https://github.com/rust-lang/crates.io-index"
513
+ checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017"
514
+ dependencies = [
515
+ "equivalent",
516
+ "hashbrown 0.16.1",
517
+ ]
518
+
519
+ [[package]]
520
+ name = "itertools"
521
+ version = "0.11.0"
522
+ source = "registry+https://github.com/rust-lang/crates.io-index"
523
+ checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
524
+ dependencies = [
525
+ "either",
526
+ ]
527
+
528
+ [[package]]
529
+ name = "itertools"
530
+ version = "0.12.1"
531
+ source = "registry+https://github.com/rust-lang/crates.io-index"
532
+ checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569"
533
+ dependencies = [
534
+ "either",
535
+ ]
536
+
537
+ [[package]]
538
+ name = "itoa"
539
+ version = "1.0.17"
540
+ source = "registry+https://github.com/rust-lang/crates.io-index"
541
+ checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"
542
+
543
+ [[package]]
544
+ name = "js-sys"
545
+ version = "0.3.85"
546
+ source = "registry+https://github.com/rust-lang/crates.io-index"
547
+ checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3"
548
+ dependencies = [
549
+ "once_cell",
550
+ "wasm-bindgen",
551
+ ]
552
+
553
+ [[package]]
554
+ name = "json_parser"
555
+ version = "0.1.0"
556
+ dependencies = [
557
+ "serde",
558
+ "serde_json",
559
+ ]
560
+
561
+ [[package]]
562
+ name = "lazy_static"
563
+ version = "1.5.0"
564
+ source = "registry+https://github.com/rust-lang/crates.io-index"
565
+ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
566
+
567
+ [[package]]
568
+ name = "libc"
569
+ version = "0.2.180"
570
+ source = "registry+https://github.com/rust-lang/crates.io-index"
571
+ checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc"
572
+
573
+ [[package]]
574
+ name = "link-cplusplus"
575
+ version = "1.0.12"
576
+ source = "registry+https://github.com/rust-lang/crates.io-index"
577
+ checksum = "7f78c730aaa7d0b9336a299029ea49f9ee53b0ed06e9202e8cb7db9bae7b8c82"
578
+ dependencies = [
579
+ "cc",
580
+ ]
581
+
582
+ [[package]]
583
+ name = "litert_lm_deps"
584
+ version = "0.1.0"
585
+ dependencies = [
586
+ "antlr4rust",
587
+ "antlr_fc_tool_call_parser",
588
+ "antlr_python_tool_call_parser",
589
+ "chrono",
590
+ "cxx",
591
+ "fc_parser",
592
+ "googletest",
593
+ "indexmap",
594
+ "json_parser",
595
+ "llguidance",
596
+ "minijinja",
597
+ "paste",
598
+ "python_parser",
599
+ "quote",
600
+ "serde",
601
+ "serde_json",
602
+ "syn",
603
+ "tokenizers",
604
+ ]
605
+
606
+ [[package]]
607
+ name = "llguidance"
608
+ version = "1.3.0"
609
+ source = "registry+https://github.com/rust-lang/crates.io-index"
610
+ checksum = "614b6ece5bc57641b9b727f8a79d338a46584efb943017d0a7454442c0b947fb"
611
+ dependencies = [
612
+ "anyhow",
613
+ "derivre",
614
+ "indexmap",
615
+ "rayon",
616
+ "regex-syntax",
617
+ "serde",
618
+ "serde_json",
619
+ "toktrie",
620
+ ]
621
+
622
+ [[package]]
623
+ name = "lock_api"
624
+ version = "0.4.14"
625
+ source = "registry+https://github.com/rust-lang/crates.io-index"
626
+ checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965"
627
+ dependencies = [
628
+ "scopeguard",
629
+ ]
630
+
631
+ [[package]]
632
+ name = "log"
633
+ version = "0.4.29"
634
+ source = "registry+https://github.com/rust-lang/crates.io-index"
635
+ checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
636
+
637
+ [[package]]
638
+ name = "macro_rules_attribute"
639
+ version = "0.2.2"
640
+ source = "registry+https://github.com/rust-lang/crates.io-index"
641
+ checksum = "65049d7923698040cd0b1ddcced9b0eb14dd22c5f86ae59c3740eab64a676520"
642
+ dependencies = [
643
+ "macro_rules_attribute-proc_macro",
644
+ "paste",
645
+ ]
646
+
647
+ [[package]]
648
+ name = "macro_rules_attribute-proc_macro"
649
+ version = "0.2.2"
650
+ source = "registry+https://github.com/rust-lang/crates.io-index"
651
+ checksum = "670fdfda89751bc4a84ac13eaa63e205cf0fd22b4c9a5fbfa085b63c1f1d3a30"
652
+
653
+ [[package]]
654
+ name = "memchr"
655
+ version = "2.8.0"
656
+ source = "registry+https://github.com/rust-lang/crates.io-index"
657
+ checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"
658
+
659
+ [[package]]
660
+ name = "minijinja"
661
+ version = "2.14.0"
662
+ source = "registry+https://github.com/rust-lang/crates.io-index"
663
+ checksum = "12ea9ac0a51fb5112607099560fdf0f90366ab088a2a9e6e8ae176794e9806aa"
664
+ dependencies = [
665
+ "indexmap",
666
+ "serde",
667
+ "serde_json",
668
+ ]
669
+
670
+ [[package]]
671
+ name = "minimal-lexical"
672
+ version = "0.2.1"
673
+ source = "registry+https://github.com/rust-lang/crates.io-index"
674
+ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
675
+
676
+ [[package]]
677
+ name = "monostate"
678
+ version = "0.1.18"
679
+ source = "registry+https://github.com/rust-lang/crates.io-index"
680
+ checksum = "3341a273f6c9d5bef1908f17b7267bbab0e95c9bf69a0d4dcf8e9e1b2c76ef67"
681
+ dependencies = [
682
+ "monostate-impl",
683
+ "serde",
684
+ "serde_core",
685
+ ]
686
+
687
+ [[package]]
688
+ name = "monostate-impl"
689
+ version = "0.1.18"
690
+ source = "registry+https://github.com/rust-lang/crates.io-index"
691
+ checksum = "e4db6d5580af57bf992f59068d4ea26fd518574ff48d7639b255a36f9de6e7e9"
692
+ dependencies = [
693
+ "proc-macro2",
694
+ "quote",
695
+ "syn",
696
+ ]
697
+
698
+ [[package]]
699
+ name = "murmur3"
700
+ version = "0.4.1"
701
+ source = "registry+https://github.com/rust-lang/crates.io-index"
702
+ checksum = "a198f9589efc03f544388dfc4a19fe8af4323662b62f598b8dcfdac62c14771c"
703
+ dependencies = [
704
+ "byteorder",
705
+ ]
706
+
707
+ [[package]]
708
+ name = "nom"
709
+ version = "7.1.3"
710
+ source = "registry+https://github.com/rust-lang/crates.io-index"
711
+ checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
712
+ dependencies = [
713
+ "memchr",
714
+ "minimal-lexical",
715
+ ]
716
+
717
+ [[package]]
718
+ name = "num-traits"
719
+ version = "0.2.19"
720
+ source = "registry+https://github.com/rust-lang/crates.io-index"
721
+ checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
722
+ dependencies = [
723
+ "autocfg",
724
+ ]
725
+
726
+ [[package]]
727
+ name = "once_cell"
728
+ version = "1.21.3"
729
+ source = "registry+https://github.com/rust-lang/crates.io-index"
730
+ checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
731
+
732
+ [[package]]
733
+ name = "onig"
734
+ version = "6.5.1"
735
+ source = "registry+https://github.com/rust-lang/crates.io-index"
736
+ checksum = "336b9c63443aceef14bea841b899035ae3abe89b7c486aaf4c5bd8aafedac3f0"
737
+ dependencies = [
738
+ "bitflags",
739
+ "libc",
740
+ "once_cell",
741
+ "onig_sys",
742
+ ]
743
+
744
+ [[package]]
745
+ name = "onig_sys"
746
+ version = "69.9.1"
747
+ source = "registry+https://github.com/rust-lang/crates.io-index"
748
+ checksum = "c7f86c6eef3d6df15f23bcfb6af487cbd2fed4e5581d58d5bf1f5f8b7f6727dc"
749
+ dependencies = [
750
+ "cc",
751
+ "pkg-config",
752
+ ]
753
+
754
+ [[package]]
755
+ name = "parking_lot"
756
+ version = "0.12.5"
757
+ source = "registry+https://github.com/rust-lang/crates.io-index"
758
+ checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a"
759
+ dependencies = [
760
+ "lock_api",
761
+ "parking_lot_core",
762
+ ]
763
+
764
+ [[package]]
765
+ name = "parking_lot_core"
766
+ version = "0.9.12"
767
+ source = "registry+https://github.com/rust-lang/crates.io-index"
768
+ checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
769
+ dependencies = [
770
+ "cfg-if",
771
+ "libc",
772
+ "redox_syscall",
773
+ "smallvec",
774
+ "windows-link",
775
+ ]
776
+
777
+ [[package]]
778
+ name = "paste"
779
+ version = "1.0.15"
780
+ source = "registry+https://github.com/rust-lang/crates.io-index"
781
+ checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
782
+
783
+ [[package]]
784
+ name = "pkg-config"
785
+ version = "0.3.32"
786
+ source = "registry+https://github.com/rust-lang/crates.io-index"
787
+ checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
788
+
789
+ [[package]]
790
+ name = "ppv-lite86"
791
+ version = "0.2.21"
792
+ source = "registry+https://github.com/rust-lang/crates.io-index"
793
+ checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
794
+ dependencies = [
795
+ "zerocopy",
796
+ ]
797
+
798
+ [[package]]
799
+ name = "proc-macro2"
800
+ version = "1.0.106"
801
+ source = "registry+https://github.com/rust-lang/crates.io-index"
802
+ checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934"
803
+ dependencies = [
804
+ "unicode-ident",
805
+ ]
806
+
807
+ [[package]]
808
+ name = "python_parser"
809
+ version = "0.1.0"
810
+ dependencies = [
811
+ "antlr4rust",
812
+ "antlr_python_tool_call_parser",
813
+ "serde",
814
+ "serde_json",
815
+ ]
816
+
817
+ [[package]]
818
+ name = "quote"
819
+ version = "1.0.44"
820
+ source = "registry+https://github.com/rust-lang/crates.io-index"
821
+ checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4"
822
+ dependencies = [
823
+ "proc-macro2",
824
+ ]
825
+
826
+ [[package]]
827
+ name = "r-efi"
828
+ version = "5.3.0"
829
+ source = "registry+https://github.com/rust-lang/crates.io-index"
830
+ checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
831
+
832
+ [[package]]
833
+ name = "rand"
834
+ version = "0.8.5"
835
+ source = "registry+https://github.com/rust-lang/crates.io-index"
836
+ checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
837
+ dependencies = [
838
+ "libc",
839
+ "rand_chacha",
840
+ "rand_core",
841
+ ]
842
+
843
+ [[package]]
844
+ name = "rand_chacha"
845
+ version = "0.3.1"
846
+ source = "registry+https://github.com/rust-lang/crates.io-index"
847
+ checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
848
+ dependencies = [
849
+ "ppv-lite86",
850
+ "rand_core",
851
+ ]
852
+
853
+ [[package]]
854
+ name = "rand_core"
855
+ version = "0.6.4"
856
+ source = "registry+https://github.com/rust-lang/crates.io-index"
857
+ checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
858
+ dependencies = [
859
+ "getrandom 0.2.17",
860
+ ]
861
+
862
+ [[package]]
863
+ name = "rayon"
864
+ version = "1.11.0"
865
+ source = "registry+https://github.com/rust-lang/crates.io-index"
866
+ checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f"
867
+ dependencies = [
868
+ "either",
869
+ "rayon-core",
870
+ ]
871
+
872
+ [[package]]
873
+ name = "rayon-cond"
874
+ version = "0.3.0"
875
+ source = "registry+https://github.com/rust-lang/crates.io-index"
876
+ checksum = "059f538b55efd2309c9794130bc149c6a553db90e9d99c2030785c82f0bd7df9"
877
+ dependencies = [
878
+ "either",
879
+ "itertools 0.11.0",
880
+ "rayon",
881
+ ]
882
+
883
+ [[package]]
884
+ name = "rayon-core"
885
+ version = "1.13.0"
886
+ source = "registry+https://github.com/rust-lang/crates.io-index"
887
+ checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91"
888
+ dependencies = [
889
+ "crossbeam-deque",
890
+ "crossbeam-utils",
891
+ ]
892
+
893
+ [[package]]
894
+ name = "redox_syscall"
895
+ version = "0.5.18"
896
+ source = "registry+https://github.com/rust-lang/crates.io-index"
897
+ checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
898
+ dependencies = [
899
+ "bitflags",
900
+ ]
901
+
902
+ [[package]]
903
+ name = "regex"
904
+ version = "1.12.3"
905
+ source = "registry+https://github.com/rust-lang/crates.io-index"
906
+ checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276"
907
+ dependencies = [
908
+ "aho-corasick",
909
+ "memchr",
910
+ "regex-automata",
911
+ "regex-syntax",
912
+ ]
913
+
914
+ [[package]]
915
+ name = "regex-automata"
916
+ version = "0.4.14"
917
+ source = "registry+https://github.com/rust-lang/crates.io-index"
918
+ checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f"
919
+ dependencies = [
920
+ "aho-corasick",
921
+ "memchr",
922
+ "regex-syntax",
923
+ ]
924
+
925
+ [[package]]
926
+ name = "regex-syntax"
927
+ version = "0.8.9"
928
+ source = "registry+https://github.com/rust-lang/crates.io-index"
929
+ checksum = "a96887878f22d7bad8a3b6dc5b7440e0ada9a245242924394987b21cf2210a4c"
930
+
931
+ [[package]]
932
+ name = "rustversion"
933
+ version = "1.0.22"
934
+ source = "registry+https://github.com/rust-lang/crates.io-index"
935
+ checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
936
+
937
+ [[package]]
938
+ name = "scopeguard"
939
+ version = "1.2.0"
940
+ source = "registry+https://github.com/rust-lang/crates.io-index"
941
+ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
942
+
943
+ [[package]]
944
+ name = "serde"
945
+ version = "1.0.228"
946
+ source = "registry+https://github.com/rust-lang/crates.io-index"
947
+ checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
948
+ dependencies = [
949
+ "serde_core",
950
+ "serde_derive",
951
+ ]
952
+
953
+ [[package]]
954
+ name = "serde_core"
955
+ version = "1.0.228"
956
+ source = "registry+https://github.com/rust-lang/crates.io-index"
957
+ checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
958
+ dependencies = [
959
+ "serde_derive",
960
+ ]
961
+
962
+ [[package]]
963
+ name = "serde_derive"
964
+ version = "1.0.228"
965
+ source = "registry+https://github.com/rust-lang/crates.io-index"
966
+ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
967
+ dependencies = [
968
+ "proc-macro2",
969
+ "quote",
970
+ "syn",
971
+ ]
972
+
973
+ [[package]]
974
+ name = "serde_json"
975
+ version = "1.0.149"
976
+ source = "registry+https://github.com/rust-lang/crates.io-index"
977
+ checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86"
978
+ dependencies = [
979
+ "indexmap",
980
+ "itoa",
981
+ "memchr",
982
+ "serde",
983
+ "serde_core",
984
+ "zmij",
985
+ ]
986
+
987
+ [[package]]
988
+ name = "shlex"
989
+ version = "1.3.0"
990
+ source = "registry+https://github.com/rust-lang/crates.io-index"
991
+ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
992
+
993
+ [[package]]
994
+ name = "smallvec"
995
+ version = "1.15.1"
996
+ source = "registry+https://github.com/rust-lang/crates.io-index"
997
+ checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
998
+
999
+ [[package]]
1000
+ name = "spm_precompiled"
1001
+ version = "0.1.4"
1002
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1003
+ checksum = "5851699c4033c63636f7ea4cf7b7c1f1bf06d0cc03cfb42e711de5a5c46cf326"
1004
+ dependencies = [
1005
+ "base64",
1006
+ "nom",
1007
+ "serde",
1008
+ "unicode-segmentation",
1009
+ ]
1010
+
1011
+ [[package]]
1012
+ name = "strsim"
1013
+ version = "0.11.1"
1014
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1015
+ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
1016
+
1017
+ [[package]]
1018
+ name = "strum"
1019
+ version = "0.27.2"
1020
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1021
+ checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
1022
+ dependencies = [
1023
+ "strum_macros",
1024
+ ]
1025
+
1026
+ [[package]]
1027
+ name = "strum_macros"
1028
+ version = "0.27.2"
1029
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1030
+ checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7"
1031
+ dependencies = [
1032
+ "heck",
1033
+ "proc-macro2",
1034
+ "quote",
1035
+ "syn",
1036
+ ]
1037
+
1038
+ [[package]]
1039
+ name = "syn"
1040
+ version = "2.0.114"
1041
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1042
+ checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a"
1043
+ dependencies = [
1044
+ "proc-macro2",
1045
+ "quote",
1046
+ "unicode-ident",
1047
+ ]
1048
+
1049
+ [[package]]
1050
+ name = "termcolor"
1051
+ version = "1.4.1"
1052
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1053
+ checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
1054
+ dependencies = [
1055
+ "winapi-util",
1056
+ ]
1057
+
1058
+ [[package]]
1059
+ name = "thiserror"
1060
+ version = "1.0.69"
1061
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1062
+ checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
1063
+ dependencies = [
1064
+ "thiserror-impl",
1065
+ ]
1066
+
1067
+ [[package]]
1068
+ name = "thiserror-impl"
1069
+ version = "1.0.69"
1070
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1071
+ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
1072
+ dependencies = [
1073
+ "proc-macro2",
1074
+ "quote",
1075
+ "syn",
1076
+ ]
1077
+
1078
+ [[package]]
1079
+ name = "tokenizers"
1080
+ version = "0.21.0"
1081
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1082
+ checksum = "9ecededfed68a69bc657e486510089e255e53c3d38cc7d4d59c8742668ca2cae"
1083
+ dependencies = [
1084
+ "aho-corasick",
1085
+ "derive_builder",
1086
+ "esaxx-rs",
1087
+ "getrandom 0.2.17",
1088
+ "itertools 0.12.1",
1089
+ "lazy_static",
1090
+ "log",
1091
+ "macro_rules_attribute",
1092
+ "monostate",
1093
+ "onig",
1094
+ "paste",
1095
+ "rand",
1096
+ "rayon",
1097
+ "rayon-cond",
1098
+ "regex",
1099
+ "regex-syntax",
1100
+ "serde",
1101
+ "serde_json",
1102
+ "spm_precompiled",
1103
+ "thiserror",
1104
+ "unicode-normalization-alignments",
1105
+ "unicode-segmentation",
1106
+ "unicode_categories",
1107
+ ]
1108
+
1109
+ [[package]]
1110
+ name = "toktrie"
1111
+ version = "1.5.0"
1112
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1113
+ checksum = "de81c795b1f2e5b7e531fbb587e541e124b47f434af2c427a4ae73ea0d4eca6c"
1114
+ dependencies = [
1115
+ "anyhow",
1116
+ "bytemuck",
1117
+ "bytemuck_derive",
1118
+ "serde",
1119
+ "serde_json",
1120
+ ]
1121
+
1122
+ [[package]]
1123
+ name = "typed-arena"
1124
+ version = "2.0.2"
1125
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1126
+ checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
1127
+
1128
+ [[package]]
1129
+ name = "unicode-ident"
1130
+ version = "1.0.23"
1131
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1132
+ checksum = "537dd038a89878be9b64dd4bd1b260315c1bb94f4d784956b81e27a088d9a09e"
1133
+
1134
+ [[package]]
1135
+ name = "unicode-normalization-alignments"
1136
+ version = "0.1.12"
1137
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1138
+ checksum = "43f613e4fa046e69818dd287fdc4bc78175ff20331479dab6e1b0f98d57062de"
1139
+ dependencies = [
1140
+ "smallvec",
1141
+ ]
1142
+
1143
+ [[package]]
1144
+ name = "unicode-segmentation"
1145
+ version = "1.12.0"
1146
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1147
+ checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
1148
+
1149
+ [[package]]
1150
+ name = "unicode-width"
1151
+ version = "0.1.14"
1152
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1153
+ checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
1154
+
1155
+ [[package]]
1156
+ name = "unicode_categories"
1157
+ version = "0.1.1"
1158
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1159
+ checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e"
1160
+
1161
+ [[package]]
1162
+ name = "uuid"
1163
+ version = "1.20.0"
1164
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1165
+ checksum = "ee48d38b119b0cd71fe4141b30f5ba9c7c5d9f4e7a3a8b4a674e4b6ef789976f"
1166
+ dependencies = [
1167
+ "js-sys",
1168
+ "wasm-bindgen",
1169
+ ]
1170
+
1171
+ [[package]]
1172
+ name = "version_check"
1173
+ version = "0.9.5"
1174
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1175
+ checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
1176
+
1177
+ [[package]]
1178
+ name = "wasi"
1179
+ version = "0.11.1+wasi-snapshot-preview1"
1180
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1181
+ checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
1182
+
1183
+ [[package]]
1184
+ name = "wasip2"
1185
+ version = "1.0.2+wasi-0.2.9"
1186
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1187
+ checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5"
1188
+ dependencies = [
1189
+ "wit-bindgen",
1190
+ ]
1191
+
1192
+ [[package]]
1193
+ name = "wasm-bindgen"
1194
+ version = "0.2.108"
1195
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1196
+ checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566"
1197
+ dependencies = [
1198
+ "cfg-if",
1199
+ "once_cell",
1200
+ "rustversion",
1201
+ "wasm-bindgen-macro",
1202
+ "wasm-bindgen-shared",
1203
+ ]
1204
+
1205
+ [[package]]
1206
+ name = "wasm-bindgen-macro"
1207
+ version = "0.2.108"
1208
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1209
+ checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608"
1210
+ dependencies = [
1211
+ "quote",
1212
+ "wasm-bindgen-macro-support",
1213
+ ]
1214
+
1215
+ [[package]]
1216
+ name = "wasm-bindgen-macro-support"
1217
+ version = "0.2.108"
1218
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1219
+ checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55"
1220
+ dependencies = [
1221
+ "bumpalo",
1222
+ "proc-macro2",
1223
+ "quote",
1224
+ "syn",
1225
+ "wasm-bindgen-shared",
1226
+ ]
1227
+
1228
+ [[package]]
1229
+ name = "wasm-bindgen-shared"
1230
+ version = "0.2.108"
1231
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1232
+ checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12"
1233
+ dependencies = [
1234
+ "unicode-ident",
1235
+ ]
1236
+
1237
+ [[package]]
1238
+ name = "winapi-util"
1239
+ version = "0.1.11"
1240
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1241
+ checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
1242
+ dependencies = [
1243
+ "windows-sys",
1244
+ ]
1245
+
1246
+ [[package]]
1247
+ name = "windows-core"
1248
+ version = "0.62.2"
1249
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1250
+ checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb"
1251
+ dependencies = [
1252
+ "windows-implement",
1253
+ "windows-interface",
1254
+ "windows-link",
1255
+ "windows-result",
1256
+ "windows-strings",
1257
+ ]
1258
+
1259
+ [[package]]
1260
+ name = "windows-implement"
1261
+ version = "0.60.2"
1262
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1263
+ checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
1264
+ dependencies = [
1265
+ "proc-macro2",
1266
+ "quote",
1267
+ "syn",
1268
+ ]
1269
+
1270
+ [[package]]
1271
+ name = "windows-interface"
1272
+ version = "0.59.3"
1273
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1274
+ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
1275
+ dependencies = [
1276
+ "proc-macro2",
1277
+ "quote",
1278
+ "syn",
1279
+ ]
1280
+
1281
+ [[package]]
1282
+ name = "windows-link"
1283
+ version = "0.2.1"
1284
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1285
+ checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
1286
+
1287
+ [[package]]
1288
+ name = "windows-result"
1289
+ version = "0.4.1"
1290
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1291
+ checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5"
1292
+ dependencies = [
1293
+ "windows-link",
1294
+ ]
1295
+
1296
+ [[package]]
1297
+ name = "windows-strings"
1298
+ version = "0.5.1"
1299
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1300
+ checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091"
1301
+ dependencies = [
1302
+ "windows-link",
1303
+ ]
1304
+
1305
+ [[package]]
1306
+ name = "windows-sys"
1307
+ version = "0.61.2"
1308
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1309
+ checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
1310
+ dependencies = [
1311
+ "windows-link",
1312
+ ]
1313
+
1314
+ [[package]]
1315
+ name = "wit-bindgen"
1316
+ version = "0.51.0"
1317
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1318
+ checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5"
1319
+
1320
+ [[package]]
1321
+ name = "zerocopy"
1322
+ version = "0.8.39"
1323
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1324
+ checksum = "db6d35d663eadb6c932438e763b262fe1a70987f9ae936e60158176d710cae4a"
1325
+ dependencies = [
1326
+ "zerocopy-derive",
1327
+ ]
1328
+
1329
+ [[package]]
1330
+ name = "zerocopy-derive"
1331
+ version = "0.8.39"
1332
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1333
+ checksum = "4122cd3169e94605190e77839c9a40d40ed048d305bfdc146e7df40ab0f3e517"
1334
+ dependencies = [
1335
+ "proc-macro2",
1336
+ "quote",
1337
+ "syn",
1338
+ ]
1339
+
1340
+ [[package]]
1341
+ name = "zmij"
1342
+ version = "1.0.20"
1343
+ source = "registry+https://github.com/rust-lang/crates.io-index"
1344
+ checksum = "4de98dfa5d5b7fef4ee834d0073d560c9ca7b6c46a71d058c48db7960f8cfaf7"
Cargo.toml ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # To update the Cargo.lock file, run:
2
+ # touch src/lib.rs
3
+ # CARGO_BAZEL_REPIN=1 bazel sync --only=crate_index
4
+ # rm src/lib.rs
5
+
6
+ [package]
7
+ name = "litert_lm_deps"
8
+ version = "0.1.0"
9
+ edition = "2021"
10
+ publish = false
11
+
12
+ [dependencies]
13
+ antlr4rust = "0.5"
14
+ chrono = "0.4"
15
+ cxx = "=1.0.149"
16
+ googletest = "0.14"
17
+ indexmap = "2.12"
18
+ llguidance = { version = "=1.3.0", default-features = false, features = ["lark", "rayon", "ahash"] }
19
+ minijinja = { version = "=2.14.0", features = ["json", "preserve_order"] }
20
+ paste = "1.0"
21
+ quote = "1.0"
22
+ tokenizers = { version = "=0.21.0", default-features = false, features = ["onig"] }
23
+ serde = { version = "1.0", features = [ "derive" ] }
24
+ serde_json = { version = "1.0", features = [ "preserve_order" ] }
25
+ syn = "2.0"
26
+
27
+ antlr_fc_tool_call_parser = { path = "cmake/rust/antlr_fc" }
28
+ antlr_python_tool_call_parser = { path = "cmake/rust/antlr_python" }
29
+ json_parser = { path = "cmake/rust/json_parser" }
30
+ python_parser = { path = "cmake/rust/python_parser" }
31
+ fc_parser = { path = "cmake/rust/fc_parser" }
32
+
33
+
34
+ [lib]
35
+ crate-type = ["staticlib"]
LICENSE ADDED
@@ -0,0 +1,201 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright [yyyy] [name of copyright owner]
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
PATCH.llguidance ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ --- src/earley/regexvec.rs
2
+ +++ src/earley/regexvec.rs
3
+ @@ -675,6 +675,6 @@
4
+ // return StateID::DEAD;
5
+ // }
6
+ - assert!(lst.len().is_multiple_of(2));
7
+ + assert!(lst.len() % 2 == 0);
8
+ let id = StateID::new(self.rx_sets.insert(&lst));
9
+ if id.as_usize() >= self.state_descs.len() {
10
+ let state_desc = self.compute_state_desc(id);
11
+ --- src/json/numeric.rs
12
+ +++ src/json/numeric.rs
13
+ @@ -20,6 +20,6 @@
14
+ let mut coef = coef;
15
+ let mut exp = exp;
16
+ - while exp > 0 && coef.is_multiple_of(10) {
17
+ + while exp > 0 && coef % 10 == 0 {
18
+ coef /= 10;
19
+ exp -= 1;
20
+ }
PATCH.llguidance_grammar ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ --- src/earley/grammar.rs
2
+ +++ src/earley/grammar.rs
3
+ @@ -1325,4 +1325,4 @@
4
+ outp.rhs_params.push(ParamExpr::Null);
5
+ }
6
+ - while !outp.rhs_elements.len().is_multiple_of(1 << RULE_SHIFT) {
7
+ + while outp.rhs_elements.len() % (1 << RULE_SHIFT) != 0 {
8
+ outp.rhs_elements.push(CSymIdx::NULL);
9
+ outp.rhs_params.push(ParamExpr::Null);
PATCH.llguidance_numeric ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ --- src/json/numeric.rs
2
+ +++ src/json/numeric.rs
3
+ @@ -20,6 +20,6 @@
4
+ let mut coef = coef;
5
+ let mut exp = exp;
6
+ - while exp > 0 && coef.is_multiple_of(10) {
7
+ + while exp > 0 && coef % 10 == 0 {
8
+ coef /= 10;
9
+ exp -= 1;
10
+ }
PATCH.llguidance_parser ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ --- src/earley/parser.rs
2
+ +++ src/earley/parser.rs
3
+ @@ -135,3 +135,3 @@
4
+ pub fn one_in(&mut self, n: u32) -> bool {
5
+ - self.next().is_multiple_of(n)
6
+ + self.next() % n == 0
7
+ }
8
+
PATCH.llguidance_perf ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ --- src/earley/perf.rs
2
+ +++ src/earley/perf.rs
3
+ @@ -143,3 +143,3 @@
4
+ for (i, c) in s.chars().enumerate() {
5
+ // Insert a comma once we've passed 'offset' and every 3 digits after that.
6
+ - if i != 0 && i >= offset && (i - offset).is_multiple_of(3) {
7
+ + if i != 0 && i >= offset && (i - offset) % 3 == 0 {
8
+ result.push(',');
9
+ }
PATCH.llguidance_regexvec ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ --- src/earley/regexvec.rs
2
+ +++ src/earley/regexvec.rs
3
+ @@ -675,6 +675,6 @@
4
+ // return StateID::DEAD;
5
+ // }
6
+ - assert!(lst.len().is_multiple_of(2));
7
+ + assert!(lst.len() % 2 == 0);
8
+ let id = StateID::new(self.rx_sets.insert(&lst));
9
+ if id.as_usize() >= self.state_descs.len() {
10
+ let state_desc = self.compute_state_desc(id);
PATCH.minja ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ --- include/minja/chat-template.hpp
2
+ +++ include/minja/chat-template.hpp
3
+ @@ -282,13 +282,13 @@ class chat_template {
4
+ }
5
+ auto example = full.substr(common_prefix_length);
6
+ if (example.find("tool_name") == std::string::npos && example.find("some_value") == std::string::npos) {
7
+ - fprintf(stderr, "Failed to infer a tool call example (possible template bug)\n");
8
+ + // fprintf(stderr, "Failed to infer a tool call example (possible template bug)\n");
9
+ } else {
10
+ tool_call_example_ = example;
11
+ }
12
+ }
13
+ } catch (const std::exception & e) {
14
+ - fprintf(stderr, "Failed to generate tool call example: %s\n", e.what());
15
+ + // fprintf(stderr, "Failed to generate tool call example: %s\n", e.what());
16
+ }
17
+ }
18
+
19
+ @@ -305,7 +305,7 @@ class chat_template {
20
+ const nlohmann::ordered_json & extra_context = nlohmann::ordered_json(),
21
+ bool apply_polyfills = true)
22
+ {
23
+ - fprintf(stderr, "[%s] Deprecated!\n", __func__);
24
+ + // fprintf(stderr, "[%s] Deprecated!\n", __func__);
25
+ chat_template_inputs inputs;
26
+ inputs.messages = messages;
27
+ inputs.tools = tools;
28
+ @@ -412,7 +412,7 @@ class chat_template {
29
+ try {
30
+ arguments = json::parse(arguments.get<std::string>());
31
+ } catch (const std::exception & ecvt) {
32
+ - fprintf(stderr, "Failed to parse arguments: %s\n", ecvt.what());
33
+ + // fprintf(stderr, "Failed to parse arguments: %s\n", ecvt.what());
34
+ }
35
+ }
36
+ }
PATCH.nanobind_json ADDED
@@ -0,0 +1,177 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ --- include/nanobind_json/nanobind_json.hpp
2
+ +++ include/nanobind_json/nanobind_json.hpp
3
+ @@ -13,13 +13,15 @@
4
+
5
+ #include "nlohmann/json.hpp"
6
+
7
+ -#include "nanobind/nanobind.hpp"
8
+ +#include <nanobind/nanobind.h>
9
+ +#include <nanobind/stl/string.h>
10
+
11
+ namespace nb = nanobind;
12
+ namespace nl = nlohmann;
13
+
14
+ namespace pyjson
15
+ {
16
+ +
17
+ inline nb::object from_json(const nl::json& j)
18
+ {
19
+ if (j.is_null())
20
+ @@ -40,7 +42,7 @@
21
+ }
22
+ else if (j.is_string())
23
+ {
24
+ - return nb::str(j.get<std::string>());
25
+ + return nb::str(j.get<std::string>().c_str());
26
+ }
27
+ else if (j.is_array())
28
+ {
29
+ @@ -56,7 +58,7 @@
30
+ nb::dict obj;
31
+ for (nl::json::const_iterator it = j.cbegin(); it != j.cend(); ++it)
32
+ {
33
+ - obj[nb::str(it.key())] = from_json(it.value());
34
+ + obj[nb::str(it.key().c_str())] = from_json(it.value());
35
+ }
36
+ return std::move(obj);
37
+ }
38
+ @@ -70,24 +72,24 @@
39
+ }
40
+ if (nb::isinstance<nb::bool_>(obj))
41
+ {
42
+ - return obj.cast<bool>();
43
+ + return nb::cast<bool>(obj);
44
+ }
45
+ if (nb::isinstance<nb::int_>(obj))
46
+ {
47
+ - return obj.cast<long>();
48
+ + return nb::cast<long>(obj);
49
+ }
50
+ if (nb::isinstance<nb::float_>(obj))
51
+ {
52
+ - return obj.cast<double>();
53
+ + return nb::cast<double>(obj);
54
+ }
55
+ if (nb::isinstance<nb::bytes>(obj))
56
+ {
57
+ - nb::module base64 = nb::module::import("base64");
58
+ - return base64.attr("b64encode")(obj).attr("decode")("utf-8").cast<std::string>();
59
+ + nb::module_ base64 = nb::module_::import_("base64");
60
+ + return nb::cast<std::string>(base64.attr("b64encode")(obj).attr("decode")("utf-8"));
61
+ }
62
+ if (nb::isinstance<nb::str>(obj))
63
+ {
64
+ - return obj.cast<std::string>();
65
+ + return nb::cast<std::string>(obj);
66
+ }
67
+ if (nb::isinstance<nb::tuple>(obj) || nb::isinstance<nb::list>(obj))
68
+ {
69
+ @@ -103,11 +105,11 @@
70
+ auto out = nl::json::object();
71
+ for (const nb::handle key : obj)
72
+ {
73
+ - out[nb::str(key).cast<std::string>()] = to_json(obj[key]);
74
+ + out[nb::cast<std::string>(nb::str(key))] = to_json(obj[key]);
75
+ }
76
+ return out;
77
+ }
78
+ - throw std::runtime_error("to_json not implemented for this type of object: " + nb::repr(obj).cast<std::string>());
79
+ + throw std::runtime_error("to_json not implemented for this type of object: " + nb::cast<std::string>(nb::repr(obj)));
80
+ }
81
+ }
82
+
83
+ @@ -123,7 +125,7 @@
84
+ j = pyjson::to_json(obj); \
85
+ } \
86
+ \
87
+ - inline static T from_json(const json& j) \
88
+ + inline static nb::object from_json(const json& j) \
89
+ { \
90
+ return pyjson::from_json(j); \
91
+ } \
92
+ @@ -151,12 +153,6 @@
93
+ MAKE_NLJSON_SERIALIZER_DESERIALIZER(nb::dict);
94
+
95
+ MAKE_NLJSON_SERIALIZER_ONLY(nb::handle);
96
+ - MAKE_NLJSON_SERIALIZER_ONLY(nb::detail::item_accessor);
97
+ - MAKE_NLJSON_SERIALIZER_ONLY(nb::detail::list_accessor);
98
+ - MAKE_NLJSON_SERIALIZER_ONLY(nb::detail::tuple_accessor);
99
+ - MAKE_NLJSON_SERIALIZER_ONLY(nb::detail::sequence_accessor);
100
+ - MAKE_NLJSON_SERIALIZER_ONLY(nb::detail::str_attr_accessor);
101
+ - MAKE_NLJSON_SERIALIZER_ONLY(nb::detail::obj_attr_accessor);
102
+
103
+ #undef MAKE_NLJSON_SERIALIZER
104
+ #undef MAKE_NLJSON_SERIALIZER_ONLY
105
+ @@ -167,28 +163,53 @@
106
+ {
107
+ namespace detail
108
+ {
109
+ - template <> struct type_caster<nl::json>
110
+ - {
111
+ - public:
112
+ - NANOBIND_TYPE_CASTER(nl::json, _("json"));
113
+ +
114
+ + template <> struct type_caster<nl::json> {
115
+ + using Value = nl::json;
116
+
117
+ - bool load(handle src, bool)
118
+ - {
119
+ - try {
120
+ - value = pyjson::to_json(src);
121
+ - return true;
122
+ - }
123
+ - catch (...)
124
+ - {
125
+ - return false;
126
+ - }
127
+ - }
128
+ + template <typename T> using Cast = Value;
129
+ +
130
+ + // Value name for docstring generation
131
+ + static constexpr auto Name = const_name("Json");
132
+
133
+ - static handle cast(nl::json src, return_value_policy /* policy */, handle /* parent */)
134
+ + /// Python -> C++ caster, populates `caster1` and `caster2` upon success
135
+ + bool from_python(handle src, uint8_t flags,
136
+ + cleanup_list *cleanup) noexcept {
137
+ + try {
138
+ + value = pyjson::to_json(src);
139
+ + return true;
140
+ + }
141
+ + catch (...)
142
+ {
143
+ - object obj = pyjson::from_json(src);
144
+ - return obj.release();
145
+ + return false;
146
+ }
147
+ - };
148
+ + }
149
+ +
150
+ + template <typename T>
151
+ + static handle from_cpp(T *value, rv_policy policy, cleanup_list *cleanup) {
152
+ + if (!value)
153
+ + return none().release();
154
+ + return from_cpp(*value, policy, cleanup);
155
+ + }
156
+ +
157
+ + template<typename T>
158
+ + static handle from_cpp(T &&value, rv_policy policy,
159
+ + cleanup_list *cleanup) noexcept {
160
+ + object obj = pyjson::from_json(value);
161
+ + return obj.release();
162
+ + }
163
+ +
164
+ + template <typename T>
165
+ + bool can_cast() const noexcept {
166
+ + return true;
167
+ + }
168
+ +
169
+ + /// Return the constructed tuple by copying from the sub-casters
170
+ + explicit operator Value() {
171
+ + return value;
172
+ + }
173
+ +
174
+ + Value value;
175
+ + };
176
+ }
177
+ }
PATCH.rules_rust ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ --- crate_universe/private/crates_repository.bzl 2025-05-01 16:41:19.000000000 -0700
2
+ +++ crate_universe/private/crates_repository.bzl 2025-06-19 10:31:33.225901444 -0700
3
+ @@ -28,6 +28,9 @@
4
+ # complexity for each platform triple added.
5
+ SUPPORTED_PLATFORM_TRIPLES = [
6
+ "aarch64-apple-darwin",
7
+ + "aarch64-apple-ios",
8
+ + "aarch64-apple-ios-sim",
9
+ + "aarch64-linux-android",
10
+ "aarch64-unknown-linux-gnu",
11
+ "wasm32-unknown-unknown",
12
+ "wasm32-wasip1",
13
+
PATCH.sentencepiece ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ --- sentencepiece_processor.h 2025-04-27 15:47:21.057722872 -0700
2
+ +++ sentencepiece_processor.h 2025-04-27 15:50:12.601047364 -0700
3
+ @@ -22,6 +22,8 @@
4
+ #include <utility>
5
+ #include <vector>
6
+
7
+ +#include "absl/status/status.h"
8
+ +
9
+ #ifndef SWIG
10
+ namespace absl {
11
+ using std::string_view;
12
+ @@ -30,50 +32,8 @@
13
+
14
+ namespace sentencepiece {
15
+ namespace util {
16
+ -
17
+ -enum class StatusCode : int {
18
+ - kOk = 0,
19
+ - kCancelled = 1,
20
+ - kUnknown = 2,
21
+ - kInvalidArgument = 3,
22
+ - kDeadlineExceeded = 4,
23
+ - kNotFound = 5,
24
+ - kAlreadyExists = 6,
25
+ - kPermissionDenied = 7,
26
+ - kResourceExhausted = 8,
27
+ - kFailedPrecondition = 9,
28
+ - kAborted = 10,
29
+ - kOutOfRange = 11,
30
+ - kUnimplemented = 12,
31
+ - kInternal = 13,
32
+ - kUnavailable = 14,
33
+ - kDataLoss = 15,
34
+ - kUnauthenticated = 16,
35
+ -};
36
+ -
37
+ -class Status {
38
+ - public:
39
+ - Status();
40
+ - ~Status();
41
+ - Status(StatusCode code, absl::string_view error_message);
42
+ - Status(const Status &s);
43
+ - void operator=(const Status &s);
44
+ - bool operator==(const Status &s) const;
45
+ - bool operator!=(const Status &s) const;
46
+ - inline bool ok() const { return rep_ == nullptr; }
47
+ -
48
+ - void set_error_message(const char *str);
49
+ - const char *error_message() const;
50
+ - const char *message() const { return error_message(); }
51
+ - StatusCode code() const;
52
+ - std::string ToString() const;
53
+ -
54
+ - void IgnoreError();
55
+ -
56
+ - private:
57
+ - struct Rep;
58
+ - std::unique_ptr<Rep> rep_;
59
+ -};
60
+ +using StatusCode = absl::StatusCode;
61
+ +using Status = absl::Status;
62
+ } // namespace util
63
+
64
+ // SentencePieceProcessor:
65
+
66
+ --- common.h 2025-04-27 15:55:22.642953123 -0700
67
+ +++ common.h 2025-04-27 16:02:22.377904469 -0700
68
+ @@ -94,7 +94,7 @@
69
+ ~Die() {
70
+ std::cerr << std::endl;
71
+ if (die_) {
72
+ - Abort();
73
+ + exit(-1);
74
+ }
75
+ }
76
+ int operator&(std::ostream &) { return 0; }
77
+
PATCH.tensorflow ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ --- third_party/xla/third_party/protobuf/protobuf.patch 2025-08-18 10:33:05
2
+ +++ third_party/xla/third_party/protobuf/protobuf.patch 2025-08-18 15:34:33
3
+ @@ -1,3 +1,36 @@
4
+ +diff --git a/src/google/protobuf/compiler/java/BUILD.bazel b/src/google/protobuf/compiler/java/BUILD.bazel
5
+ +--- a/src/google/protobuf/compiler/java/BUILD.bazel 2025-05-28 09:52:58
6
+ ++++ b/src/google/protobuf/compiler/java/BUILD.bazel 2025-08-18 17:40:44
7
+ +@@ -48,6 +48,7 @@
8
+ + "context.h",
9
+ + "doc_comment.h",
10
+ + "generator.h",
11
+ ++ "field_common.h",
12
+ + "helpers.h",
13
+ + "name_resolver.h",
14
+ + "names.h",
15
+ +diff --git a/src/google/protobuf/compiler/java/context.h b/src/google/protobuf/compiler/java/context.h
16
+ +--- a/src/google/protobuf/compiler/java/context.h
17
+ ++++ b/src/google/protobuf/compiler/java/context.h
18
+ +@@ -12,6 +12,7 @@
19
+ + #include <vector>
20
+ +
21
+ + #include "absl/container/flat_hash_map.h"
22
+ ++#include "google/protobuf/compiler/java/field_common.h"
23
+ + #include "google/protobuf/compiler/java/helpers.h"
24
+ + #include "google/protobuf/compiler/java/options.h"
25
+ + #include "google/protobuf/port.h"
26
+ +diff --git a/src/google/protobuf/compiler/java/message_serialization.h b/src/google/protobuf/compiler/java/message_serialization.h
27
+ +--- a/src/google/protobuf/compiler/java/message_serialization.h
28
+ ++++ b/src/google/protobuf/compiler/java/message_serialization.h
29
+ +@@ -13,6 +13,7 @@
30
+ + #include <vector>
31
+ +
32
+ + #include "absl/types/span.h"
33
+ ++#include "google/protobuf/compiler/java/field_common.h"
34
+ + #include "google/protobuf/compiler/java/generator_common.h"
35
+ + #include "google/protobuf/descriptor.h"
36
+ + #include "google/protobuf/io/printer.h"
37
+ diff --git a/BUILD.bazel b/BUILD.bazel
38
+ --- a/BUILD.bazel
39
+ +++ b/BUILD.bazel
40
+ +diff --git a/third_party/kissfft/kissfft.BUILD b/third_party/kissfft/kissfft.BUILD
41
+ --- third_party/kissfft/kissfft.BUILD
42
+ +++ third_party/kissfft/kissfft.BUILD
43
+ @@ -28,3 +28,22 @@ cc_library(
44
+ "-DFIXED_POINT=16",
45
+ ],
46
+ )
47
+ +cc_library(
48
+ + name = "kissfftr",
49
+ + srcs = [
50
+ + "kfc.c",
51
+ + "kiss_fft.c",
52
+ + "kiss_fftnd.c",
53
+ + "kiss_fftndr.c",
54
+ + "kiss_fftr.c",
55
+ + ],
56
+ + hdrs = [
57
+ + "_kiss_fft_guts.h",
58
+ + "kfc.h",
59
+ + "kiss_fft.h",
60
+ + "kiss_fft_log.h",
61
+ + "kiss_fftnd.h",
62
+ + "kiss_fftndr.h",
63
+ + "kiss_fftr.h",
64
+ + ],
65
+ +)
66
+ diff --git a/third_party/darts_clone.BUILD b/third_party/darts_clone.BUILD
67
+ --- third_party/darts_clone.BUILD 2026-03-24 15:10:28.541592816 -0700
68
+ +++ third_party/darts_clone.BUILD 2026-03-24 15:18:31.434002329 -0700
69
+ @@ -1,5 +1,3 @@
70
+ -load("//third_party/bazel_rules/rules_cc/cc:cc_library.bzl", "cc_library")
71
+ -
72
+ package(default_visibility = ["//visibility:public"])
73
+
74
+ cc_library(
PATCH.toktrie ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ --- src/bytes.rs
2
+ +++ src/bytes.rs
3
+ @@ -16,5 +16,5 @@
4
+ pub fn vec_from_bytes<T: PodTrait>(bytes: &[u8]) -> Vec<T> {
5
+ - if !bytes.len().is_multiple_of(size_of::<T>()) {
6
+ + if bytes.len() % size_of::<T>() != 0 {
7
+ panic!(
8
+ "vecT: got {} bytes, needed multiple of {}",
9
+ bytes.len(),
WORKSPACE ADDED
@@ -0,0 +1,498 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # buildifier: disable=load-on-top
2
+
3
+ workspace(name = "litert_lm")
4
+
5
+ # UPDATED = 2026-04-01
6
+ LITERT_REF = "5c42d07c5e457ea535d1a2144a376c95e79a06d5"
7
+
8
+ LITERT_SHA256 = "9e7f8f3e9152007a71766332147f6d2deecb3f9178749bc9ed9d76e69178a1c4"
9
+
10
+ TENSORFLOW_REF = "e6f36bd7a12f87aac6f21a5068719dfc91bed3eb"
11
+
12
+ TENSORFLOW_SHA256 = "643b8e3f23283fb0e6cacc4612e77b285a1d3cfd295aa10b3386fd8fcf6d651f"
13
+
14
+ # buildifier: disable=load-on-top
15
+
16
+ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive", "http_jar")
17
+
18
+ http_archive(
19
+ name = "rules_shell",
20
+ sha256 = "bc61ef94facc78e20a645726f64756e5e285a045037c7a61f65af2941f4c25e1",
21
+ strip_prefix = "rules_shell-0.4.1",
22
+ url = "https://github.com/bazelbuild/rules_shell/releases/download/v0.4.1/rules_shell-v0.4.1.tar.gz",
23
+ )
24
+
25
+ load("@rules_shell//shell:repositories.bzl", "rules_shell_dependencies", "rules_shell_toolchains")
26
+
27
+ rules_shell_dependencies()
28
+
29
+ rules_shell_toolchains()
30
+
31
+ http_archive(
32
+ name = "rules_platform",
33
+ sha256 = "0aadd1bd350091aa1f9b6f2fbcac8cd98201476289454e475b28801ecf85d3fd",
34
+ url = "https://github.com/bazelbuild/rules_platform/releases/download/0.1.0/rules_platform-0.1.0.tar.gz",
35
+ )
36
+
37
+ # Use a recent platforms version to support the uefi platform.
38
+ http_archive(
39
+ name = "platforms",
40
+ sha256 = "3384eb1c30762704fbe38e440204e114154086c8fc8a8c2e3e28441028c019a8",
41
+ urls = [
42
+ "https://mirror.bazel.build/github.com/bazelbuild/platforms/releases/download/1.0.0/platforms-1.0.0.tar.gz",
43
+ "https://github.com/bazelbuild/platforms/releases/download/1.0.0/platforms-1.0.0.tar.gz",
44
+ ],
45
+ )
46
+
47
+ # Use 3.22.0 (from 3.5.1 of tensorflow) to fix binary signing issue on MacOS Tahoe.
48
+ http_archive(
49
+ name = "build_bazel_rules_apple",
50
+ sha256 = "a78f26c22ac8d6e3f3fcaad50eace4d9c767688bd7254b75bdf4a6735b299f6a",
51
+ url = "https://github.com/bazelbuild/rules_apple/releases/download/3.22.0/rules_apple.3.22.0.tar.gz",
52
+ )
53
+
54
+ load(
55
+ "@build_bazel_rules_apple//apple:repositories.bzl",
56
+ "apple_rules_dependencies",
57
+ )
58
+
59
+ apple_rules_dependencies()
60
+
61
+ http_archive(
62
+ name = "build_bazel_rules_swift",
63
+ sha256 = "f7a67197cd8a79debfe70b8cef4dc19d03039af02cc561e31e0718e98cad83ac",
64
+ url = "https://github.com/bazelbuild/rules_swift/releases/download/2.9.0/rules_swift.2.9.0.tar.gz",
65
+ )
66
+
67
+ # Lower the version from 1.24.5 that tensorflow uses to 1.23.1, the highest version that doesn't have
68
+ # issues with missing LC_UUID, DEVELOPER_DIR or SDKROOT on MacOS Tahoe.
69
+ http_archive(
70
+ name = "build_bazel_apple_support",
71
+ sha256 = "ee20cc5c0bab47065473c8033d462374dd38d172406ecc8de5c8f08487943f2f",
72
+ url = "https://github.com/bazelbuild/apple_support/releases/download/1.23.1/apple_support.1.23.1.tar.gz",
73
+ )
74
+
75
+ http_archive(
76
+ name = "bazel_features",
77
+ sha256 = "c26b4e69cf02fea24511a108d158188b9d8174426311aac59ce803a78d107648",
78
+ strip_prefix = "bazel_features-1.43.0",
79
+ url = "https://github.com/bazel-contrib/bazel_features/releases/download/v1.43.0/bazel_features-v1.43.0.tar.gz",
80
+ )
81
+
82
+ # TensorFlow
83
+ http_archive(
84
+ name = "org_tensorflow",
85
+ patches = ["@//:PATCH.tensorflow"],
86
+ sha256 = TENSORFLOW_SHA256,
87
+ strip_prefix = "tensorflow-" + TENSORFLOW_REF,
88
+ url = "https://github.com/tensorflow/tensorflow/archive/" + TENSORFLOW_REF + ".tar.gz",
89
+ )
90
+
91
+ # Initialize the TensorFlow repository and all dependencies.
92
+ #
93
+ # The cascade of load() statements and tf_workspace?() calls works around the
94
+ # restriction that load() statements need to be at the top of .bzl files.
95
+ # E.g. we cannot retrieve a new repository with http_archive and then load()
96
+ # a macro from that repository in the same file.
97
+ load("@org_tensorflow//tensorflow:workspace3.bzl", "tf_workspace3")
98
+
99
+ tf_workspace3()
100
+
101
+ # Toolchains for ML projects
102
+ # Details: https://github.com/google-ml-infra/rules_ml_toolchain
103
+ http_archive(
104
+ name = "rules_ml_toolchain",
105
+ sha256 = "9dbee8f24cc1b430bf9c2a6661ab70cbca89979322ddc7742305a05ff637ab6b",
106
+ strip_prefix = "rules_ml_toolchain-545c80f1026d526ea9c7aaa410bf0b52c9a82e74",
107
+ url = "https://github.com/google-ml-infra/rules_ml_toolchain/archive/545c80f1026d526ea9c7aaa410bf0b52c9a82e74.tar.gz",
108
+ )
109
+
110
+ load(
111
+ "@rules_ml_toolchain//cc/deps:cc_toolchain_deps.bzl",
112
+ "cc_toolchain_deps",
113
+ )
114
+
115
+ cc_toolchain_deps()
116
+
117
+ # Initialize hermetic Python
118
+ load("@xla//third_party/py:python_init_rules.bzl", "python_init_rules")
119
+
120
+ python_init_rules()
121
+
122
+ load("@xla//third_party/py:python_init_repositories.bzl", "python_init_repositories")
123
+
124
+ python_init_repositories(
125
+ default_python_version = "system",
126
+ local_wheel_dist_folder = "dist",
127
+ local_wheel_inclusion_list = [
128
+ "tensorflow*",
129
+ "tf_nightly*",
130
+ ],
131
+ local_wheel_workspaces = ["@org_tensorflow//:WORKSPACE"],
132
+ requirements = {
133
+ "3.10": "@org_tensorflow//:requirements_lock_3_10.txt",
134
+ "3.11": "@org_tensorflow//:requirements_lock_3_11.txt",
135
+ "3.12": "@org_tensorflow//:requirements_lock_3_12.txt",
136
+ "3.13": "@org_tensorflow//:requirements_lock_3_13.txt",
137
+ "3.14": "@org_tensorflow//:requirements_lock_3_14.txt",
138
+ },
139
+ )
140
+
141
+ load("@xla//third_party/py:python_init_toolchains.bzl", "python_init_toolchains")
142
+
143
+ python_init_toolchains()
144
+
145
+ load("@xla//third_party/py:python_init_pip.bzl", "python_init_pip")
146
+
147
+ python_init_pip()
148
+
149
+ load("@pypi//:requirements.bzl", "install_deps")
150
+
151
+ install_deps()
152
+ # End hermetic Python initialization
153
+
154
+ RULES_JVM_EXTERNAL_TAG = "6.8"
155
+
156
+ RULES_JVM_EXTERNAL_SHA = "704a0197e4e966f96993260418f2542568198490456c21814f647ae7091f56f2"
157
+
158
+ http_archive(
159
+ name = "rules_jvm_external",
160
+ sha256 = RULES_JVM_EXTERNAL_SHA,
161
+ strip_prefix = "rules_jvm_external-%s" % RULES_JVM_EXTERNAL_TAG,
162
+ url = "https://github.com/bazelbuild/rules_jvm_external/releases/download/%s/rules_jvm_external-%s.tar.gz" % (RULES_JVM_EXTERNAL_TAG, RULES_JVM_EXTERNAL_TAG),
163
+ )
164
+
165
+ load("@rules_jvm_external//:defs.bzl", "maven_install")
166
+
167
+ maven_install(
168
+ name = "maven",
169
+ artifacts = [
170
+ "com.google.code.gson:gson:2.13.2",
171
+ "org.jetbrains.kotlinx:kotlinx-coroutines-core-jvm:1.9.0",
172
+ "org.jetbrains.kotlinx:kotlinx-coroutines-android:1.9.0",
173
+ ],
174
+ repositories = [
175
+ "https://maven.google.com",
176
+ "https://repo1.maven.org/maven2",
177
+ ],
178
+ )
179
+
180
+ load("@org_tensorflow//tensorflow:workspace2.bzl", "tf_workspace2")
181
+
182
+ tf_workspace2()
183
+
184
+ load("@org_tensorflow//tensorflow:workspace1.bzl", "tf_workspace1")
185
+
186
+ tf_workspace1()
187
+
188
+ load("@org_tensorflow//tensorflow:workspace0.bzl", "tf_workspace0")
189
+
190
+ tf_workspace0()
191
+
192
+ load(
193
+ "@xla//third_party/py:python_wheel.bzl",
194
+ "python_wheel_version_suffix_repository",
195
+ )
196
+
197
+ python_wheel_version_suffix_repository(name = "tf_wheel_version_suffix")
198
+
199
+ load(
200
+ "@rules_ml_toolchain//gpu/cuda:cuda_json_init_repository.bzl",
201
+ "cuda_json_init_repository",
202
+ )
203
+
204
+ cuda_json_init_repository()
205
+
206
+ load(
207
+ "@cuda_redist_json//:distributions.bzl",
208
+ "CUDA_REDISTRIBUTIONS",
209
+ "CUDNN_REDISTRIBUTIONS",
210
+ )
211
+ load(
212
+ "@rules_ml_toolchain//gpu/cuda:cuda_redist_init_repositories.bzl",
213
+ "cuda_redist_init_repositories",
214
+ "cudnn_redist_init_repository",
215
+ )
216
+
217
+ cuda_redist_init_repositories(
218
+ cuda_redistributions = CUDA_REDISTRIBUTIONS,
219
+ )
220
+
221
+ cudnn_redist_init_repository(
222
+ cudnn_redistributions = CUDNN_REDISTRIBUTIONS,
223
+ )
224
+
225
+ load(
226
+ "@rules_ml_toolchain//gpu/cuda:cuda_configure.bzl",
227
+ "cuda_configure",
228
+ )
229
+
230
+ cuda_configure(name = "local_config_cuda")
231
+
232
+ load(
233
+ "@rules_ml_toolchain//gpu/nccl:nccl_redist_init_repository.bzl",
234
+ "nccl_redist_init_repository",
235
+ )
236
+
237
+ nccl_redist_init_repository()
238
+
239
+ load(
240
+ "@rules_ml_toolchain//gpu/nccl:nccl_configure.bzl",
241
+ "nccl_configure",
242
+ )
243
+
244
+ nccl_configure(name = "local_config_nccl")
245
+
246
+ # Kotlin rules
247
+ http_archive(
248
+ name = "rules_kotlin",
249
+ sha256 = "e1448a56b2462407b2688dea86df5c375b36a0991bd478c2ddd94c97168125e2",
250
+ url = "https://github.com/bazelbuild/rules_kotlin/releases/download/v2.1.3/rules_kotlin-v2.1.3.tar.gz",
251
+ )
252
+
253
+ load("@rules_kotlin//kotlin:repositories.bzl", "kotlin_repositories")
254
+
255
+ kotlin_repositories() # if you want the default. Otherwise see custom kotlinc distribution below
256
+
257
+ load("@rules_kotlin//kotlin:core.bzl", "kt_register_toolchains")
258
+
259
+ kt_register_toolchains() # to use the default toolchain, otherwise see toolchains below
260
+
261
+ # Rust (for HuggingFace Tokenizers)
262
+ http_archive(
263
+ name = "rules_rust",
264
+ patches = ["@//:PATCH.rules_rust"],
265
+ sha256 = "53c1bac7ec48f7ce48c4c1c6aa006f27515add2aeb05725937224e6e00ec7cea",
266
+ url = "https://github.com/bazelbuild/rules_rust/releases/download/0.61.0/rules_rust-0.61.0.tar.gz",
267
+ )
268
+
269
+ load("@rules_rust//rust:repositories.bzl", "rules_rust_dependencies", "rust_register_toolchains")
270
+
271
+ rules_rust_dependencies()
272
+
273
+ rust_register_toolchains(
274
+ edition = "2021",
275
+ extra_target_triples = [
276
+ # Explicitly add toolchains for mobile. Desktop platforms are supported by default.
277
+ "aarch64-linux-android",
278
+ "aarch64-apple-ios",
279
+ "aarch64-apple-ios-sim",
280
+ "x86_64-linux-android",
281
+ ],
282
+ )
283
+
284
+ load("@rules_rust//crate_universe:repositories.bzl", "crate_universe_dependencies")
285
+
286
+ crate_universe_dependencies()
287
+
288
+ load("@rules_rust//crate_universe:defs.bzl", "crate", "crates_repository")
289
+ load("@rules_rust//rust/platform:triple_mappings.bzl", "SUPPORTED_PLATFORM_TRIPLES")
290
+
291
+ crates_repository(
292
+ name = "crate_index",
293
+ annotations = {
294
+ "llguidance": [
295
+ crate.annotation(
296
+ additive_build_file = "@//:BUILD.llguidance",
297
+ gen_build_script = False,
298
+ patches = [
299
+ "@//:PATCH.llguidance_regexvec",
300
+ "@//:PATCH.llguidance_numeric",
301
+ "@//:PATCH.llguidance_grammar",
302
+ "@//:PATCH.llguidance_parser",
303
+ "@//:PATCH.llguidance_perf",
304
+ ],
305
+ ),
306
+ ],
307
+ "toktrie": [
308
+ crate.annotation(
309
+ patches = ["@//:PATCH.toktrie"],
310
+ ),
311
+ ],
312
+ },
313
+ cargo_lockfile = "//:Cargo.lock",
314
+ lockfile = "//:cargo-bazel-lock.json",
315
+ manifests = [
316
+ "//:Cargo.toml",
317
+ ],
318
+ supported_platform_triples = SUPPORTED_PLATFORM_TRIPLES + [
319
+ "x86_64-linux-android",
320
+ ],
321
+ )
322
+
323
+ load("@crate_index//:defs.bzl", "crate_repositories")
324
+
325
+ crate_repositories()
326
+
327
+ # cxxbridge-cmd is a binary-only package so we follow the steps in
328
+ # https://bazelbuild.github.io/rules_rust/crate_universe_workspace.html#binary-dependencies.
329
+ http_archive(
330
+ name = "cxxbridge_cmd",
331
+ build_file = "//cxxbridge_cmd:BUILD.cxxbridge_cmd.bazel",
332
+ integrity = "sha256-pf/3kWu94FwtuZRp8J3PryA78lsJbMv052GgR5JBLhA=",
333
+ strip_prefix = "cxxbridge-cmd-1.0.149",
334
+ type = "tar.gz",
335
+ url = "https://static.crates.io/crates/cxxbridge-cmd/cxxbridge-cmd-1.0.149.crate",
336
+ )
337
+
338
+ crates_repository(
339
+ name = "cxxbridge_cmd_deps",
340
+ cargo_lockfile = "//cxxbridge_cmd:Cargo.lock",
341
+ manifests = ["@cxxbridge_cmd//:Cargo.toml"],
342
+ )
343
+
344
+ load("@cxxbridge_cmd_deps//:defs.bzl", cxxbridge_cmd_deps = "crate_repositories")
345
+
346
+ cxxbridge_cmd_deps()
347
+
348
+ # Same one downloaded by tensorflow, but refers to contrib/minizip.
349
+ http_archive(
350
+ name = "minizip",
351
+ add_prefix = "minizip",
352
+ build_file = "@//:BUILD.minizip",
353
+ sha256 = "9a93b2b7dfdac77ceba5a558a580e74667dd6fede4585b91eefb60f03b72df23",
354
+ strip_prefix = "zlib-1.3.1/contrib/minizip",
355
+ url = "https://zlib.net/fossils/zlib-1.3.1.tar.gz",
356
+ )
357
+
358
+ http_archive(
359
+ name = "sentencepiece",
360
+ build_file = "@//:BUILD.sentencepiece",
361
+ patch_cmds = [
362
+ # Empty config.h seems enough.
363
+ "touch config.h",
364
+ # Replace third_party/absl/ with absl/ in *.h and *.cc files.
365
+ "sed -i -e 's|#include \"third_party/absl/|#include \"absl/|g' *.h *.cc",
366
+ # Replace third_party/darts_clone/ with include/ in *.h and *.cc files.
367
+ "sed -i -e 's|#include \"third_party/darts_clone/|#include \"include/|g' *.h *.cc",
368
+ ],
369
+ patches = ["@//:PATCH.sentencepiece"],
370
+ sha256 = "9970f0a0afee1648890293321665e5b2efa04eaec9f1671fcf8048f456f5bb86",
371
+ strip_prefix = "sentencepiece-0.2.0/src",
372
+ url = "https://github.com/google/sentencepiece/archive/refs/tags/v0.2.0.tar.gz",
373
+ )
374
+
375
+ http_archive(
376
+ name = "litert",
377
+ patch_cmds = [
378
+ # Replace @//third_party with @litert//third_party in files under third_party/.
379
+ "sed -i -e 's|\"@//third_party/|\"@litert//third_party/|g' third_party/*/*",
380
+ ],
381
+ sha256 = LITERT_SHA256,
382
+ strip_prefix = "LiteRT-" + LITERT_REF,
383
+ url = "https://github.com/google-ai-edge/LiteRT/archive/" + LITERT_REF + ".tar.gz",
384
+ )
385
+
386
+ http_archive(
387
+ name = "tokenizers_cpp",
388
+ build_file = "@//:BUILD.tokenizers_cpp",
389
+ sha256 = "3e0b9ec325a326b0a2cef5cf164ee94a74ac372c5881ae5af634036db0441823",
390
+ strip_prefix = "tokenizers-cpp-0.1.1",
391
+ url = "https://github.com/mlc-ai/tokenizers-cpp/archive/refs/tags/v0.1.1.tar.gz",
392
+ )
393
+
394
+ http_archive(
395
+ name = "absl_py",
396
+ sha256 = "8a3d0830e4eb4f66c4fa907c06edf6ce1c719ced811a12e26d9d3162f8471758",
397
+ strip_prefix = "abseil-py-2.1.0",
398
+ url = "https://github.com/abseil/abseil-py/archive/refs/tags/v2.1.0.tar.gz",
399
+ )
400
+
401
+ http_archive(
402
+ name = "nlohmann_json",
403
+ sha256 = "34660b5e9a407195d55e8da705ed26cc6d175ce5a6b1fb957e701fb4d5b04022",
404
+ strip_prefix = "json-3.12.0",
405
+ url = "https://github.com/nlohmann/json/archive/refs/tags/v3.12.0.zip",
406
+ )
407
+
408
+ http_archive(
409
+ name = "minja",
410
+ build_file = "@//:BUILD.minja",
411
+ patches = ["@//:PATCH.minja"],
412
+ sha256 = "752f47dd2a2f4920a66f497c952785073c1983f12f084b99e5c12bf89f96acfe",
413
+ strip_prefix = "minja-58568621432715b0ed38efd16238b0e7ff36c3ba",
414
+ url = "https://github.com/google/minja/archive/58568621432715b0ed38efd16238b0e7ff36c3ba.zip",
415
+ )
416
+
417
+ http_archive(
418
+ name = "miniaudio",
419
+ build_file = "@//:BUILD.miniaudio",
420
+ sha256 = "bcb07bfb27e6fa94d34da73ba2d5642d4940b208ec2a660dbf4e52e6b7cd492f",
421
+ strip_prefix = "miniaudio-0.11.22",
422
+ url = "https://github.com/mackron/miniaudio/archive/refs/tags/0.11.22.tar.gz",
423
+ )
424
+
425
+ http_archive(
426
+ name = "stb",
427
+ build_file = "@//:BUILD.stb",
428
+ sha256 = "119b9f3cca3e50225dc946ed1acd1b7a160943bc8bf549760109cea4e4e7c836",
429
+ strip_prefix = "stb-f58f558c120e9b32c217290b80bad1a0729fbb2c",
430
+ url = "https://github.com/nothings/stb/archive/f58f558c120e9b32c217290b80bad1a0729fbb2c.zip",
431
+ )
432
+
433
+ http_jar(
434
+ name = "javax_json",
435
+ sha256 = "0e1dec40a1ede965941251eda968aeee052cc4f50378bc316cc48e8159bdbeb4",
436
+ url = "https://jcenter.bintray.com/org/glassfish/javax.json/1.0.4/javax.json-1.0.4.jar",
437
+ )
438
+
439
+ # Android rules. Need latest rules_android_ndk to use NDK 26+.
440
+ load("@rules_android_ndk//:rules.bzl", "android_ndk_repository")
441
+
442
+ android_ndk_repository(name = "androidndk")
443
+
444
+ android_sdk_repository(name = "androidsdk")
445
+
446
+ # Configure Android NDK only when ANDROID_NDK_HOME is set.
447
+ # Creates current_android_ndk_env.bzl as a workaround since shell environment is available only
448
+ # through repository rule's context.
449
+ load("//:android_ndk_env.bzl", "check_android_ndk_env")
450
+
451
+ check_android_ndk_env(name = "android_ndk_env")
452
+
453
+ load("@android_ndk_env//:current_android_ndk_env.bzl", "ANDROID_NDK_HOME_IS_SET")
454
+
455
+ # Use "@android_ndk_env//:all" as a dummy toolchain target as register_toolchains() does not take
456
+ # an empty string.
457
+ register_toolchains("@androidndk//:all" if ANDROID_NDK_HOME_IS_SET else "@android_ndk_env//:all")
458
+
459
+ # VENDOR SDKS ######################################################################################
460
+
461
+ # QUALCOMM ---------------------------------------------------------------------------------------
462
+
463
+ # The actual macro call will be set during configure for now.
464
+ load("@litert//third_party/qairt:workspace.bzl", "qairt")
465
+
466
+ qairt()
467
+
468
+ # MEDIATEK ---------------------------------------------------------------------------------------
469
+
470
+ # Currently only works with local sdk
471
+ load("@litert//third_party/neuro_pilot:workspace.bzl", "neuro_pilot")
472
+
473
+ neuro_pilot()
474
+
475
+ # GOOGLE TENSOR ----------------------------------------------------------------------------------
476
+ load("@litert//third_party/google_tensor:workspace.bzl", "google_tensor")
477
+
478
+ google_tensor()
479
+
480
+ http_archive(
481
+ name = "nanobind_json",
482
+ build_file = "@//:BUILD.nanobind_json",
483
+ patches = ["@//:PATCH.nanobind_json"],
484
+ sha256 = "72cb4cdbf8108c7dd2dc669347669f2cc1acf4f943588f96661701f27f778912",
485
+ strip_prefix = "nanobind_json-e1953530697f61cbca9dc9b4f51561ea785cb09d",
486
+ urls = ["https://github.com/ianhbell/nanobind_json/archive/e1953530697f61cbca9dc9b4f51561ea785cb09d.zip"],
487
+ )
488
+
489
+ load("@rules_python//python:pip.bzl", "pip_parse")
490
+
491
+ pip_parse(
492
+ name = "custom_pip_deps",
493
+ requirements_lock = "//:requirements.txt",
494
+ )
495
+
496
+ load("@custom_pip_deps//:requirements.bzl", install_custom_deps = "install_deps")
497
+
498
+ install_custom_deps()
__init__.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2025 The ODML Authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """LiteRT LM is a library for running GenAI models on devices."""
android_ndk_env.bzl ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """A workaround to check if ANDROID_NDK_HOME is set."""
2
+
3
+ def _check_android_ndk_env_impl(ctx):
4
+ ndk_home = ctx.getenv("ANDROID_NDK_HOME")
5
+ ndk_home_is_set = bool(ndk_home) and len(ndk_home.strip()) > 0
6
+
7
+ # .bzl file contains ANDROID_NDK_HOME_IS_SET = True if ANDROID_NDK_HOME is set.
8
+ content = "# Generated by check_android_ndk_env.bzl\n"
9
+ content += "ANDROID_NDK_HOME_IS_SET = "
10
+ content += "True" if ndk_home_is_set else "False"
11
+ content += "\n"
12
+ ctx.file("current_android_ndk_env.bzl", content = content)
13
+
14
+ # Dummy BUILD file to make the repository valid.
15
+ ctx.file("BUILD", content = "")
16
+
17
+ check_android_ndk_env = repository_rule(
18
+ implementation = _check_android_ndk_env_impl,
19
+ local = True,
20
+ environ = ["ANDROID_NDK_HOME"],
21
+ )
build_config/BUILD ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2025 The ODML Authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ package(
16
+ default_visibility = [
17
+ "//:__subpackages__",
18
+ ],
19
+ )
20
+
21
+ config_setting(
22
+ name = "android_arm64",
23
+ constraint_values = [
24
+ "@platforms//os:android",
25
+ "@platforms//cpu:arm64",
26
+ ],
27
+ )
28
+
29
+ config_setting(
30
+ name = "android_x86_64",
31
+ constraint_values = [
32
+ "@platforms//os:android",
33
+ "@platforms//cpu:x86_64",
34
+ ],
35
+ )
36
+
37
+ config_setting(
38
+ name = "linux_x86_64",
39
+ constraint_values = [
40
+ "@platforms//os:linux",
41
+ "@platforms//cpu:x86_64",
42
+ ],
43
+ )
44
+
45
+ config_setting(
46
+ name = "linux_arm64",
47
+ constraint_values = [
48
+ "@platforms//cpu:arm64",
49
+ "@platforms//os:linux",
50
+ ],
51
+ )
52
+
53
+ config_setting(
54
+ name = "ios_arm64",
55
+ values = { "apple_platform_type": "ios", "cpu": "ios_arm64",},
56
+ )
57
+
58
+ config_setting(
59
+ name = "ios_sim_arm64",
60
+ values = { "apple_platform_type": "ios", "cpu": "ios_sim_arm64",},
61
+ )
c/BUILD ADDED
@@ -0,0 +1,113 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2026 The ODML Authors.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ # [Google-internal load of `cc_library`]
16
+ # [Google-internal load of `cc_test`]
17
+
18
+ package(
19
+ default_hdrs_check = "strict",
20
+ default_visibility = [
21
+ "//visibility:public",
22
+ ],
23
+ )
24
+
25
+ licenses(["notice"])
26
+
27
+ ENGINE_COMMON_DEPS = [
28
+ "@com_google_absl//absl/base:core_headers",
29
+ "@com_google_absl//absl/base:log_severity",
30
+ "@com_google_absl//absl/functional:any_invocable",
31
+ "@com_google_absl//absl/log:absl_log",
32
+ "@com_google_absl//absl/log:globals",
33
+ "@com_google_absl//absl/log:initialize",
34
+ "@com_google_absl//absl/log:log_sink_registry",
35
+ "@com_google_absl//absl/status",
36
+ "@com_google_absl//absl/status:statusor",
37
+ "@com_google_absl//absl/strings:str_format",
38
+ "@com_google_absl//absl/strings:string_view",
39
+ "@com_google_absl//absl/time",
40
+ "@nlohmann_json//:json",
41
+ "@litert//litert/c/internal:litert_logging",
42
+ "//runtime/conversation",
43
+ "//runtime/conversation:io_types",
44
+ "//runtime/engine:engine_factory",
45
+ "//runtime/engine:engine_interface",
46
+ "//runtime/engine:engine_settings",
47
+ "//runtime/engine:io_types",
48
+ "//runtime/executor:executor_settings_base",
49
+ "//runtime/executor:llm_executor_settings",
50
+ "//runtime/proto:engine_cc_proto",
51
+ "//runtime/proto:sampler_params_cc_proto",
52
+ ]
53
+
54
+ cc_library(
55
+ name = "engine",
56
+ srcs = [
57
+ "engine.cc",
58
+ "litert_lm_logging.cc",
59
+ ],
60
+ hdrs = [
61
+ "engine.h",
62
+ "litert_lm_logging.h",
63
+ ],
64
+ visibility = ["//visibility:public"],
65
+ deps = ENGINE_COMMON_DEPS + [
66
+ "//runtime/core:engine_impl",
67
+ ],
68
+ )
69
+
70
+ cc_library(
71
+ name = "engine_cpu",
72
+ srcs = [
73
+ "engine.cc",
74
+ "litert_lm_logging.cc",
75
+ ],
76
+ hdrs = [
77
+ "engine.h",
78
+ "litert_lm_logging.h",
79
+ ],
80
+ visibility = ["//visibility:public"],
81
+ deps = ENGINE_COMMON_DEPS + [
82
+ "//runtime/core:engine_impl_cpu_only",
83
+ ],
84
+ )
85
+
86
+ cc_test(
87
+ name = "engine_test",
88
+ srcs = ["engine_test.cc"],
89
+ data = [
90
+ "//runtime/testdata",
91
+ ],
92
+ tags = ["requires-mac-inputs:hard"],
93
+ target_compatible_with = select({
94
+ "@platforms//os:ios": [],
95
+ "@platforms//os:linux": [],
96
+ "@platforms//os:osx": [],
97
+ "@platforms//os:windows": [],
98
+ "//conditions:default": ["@platforms//:incompatible"],
99
+ }),
100
+ deps = [
101
+ ":engine",
102
+ "@com_google_googletest//:gtest_main",
103
+ "@com_google_absl//absl/status",
104
+ "@com_google_absl//absl/status:status_matchers",
105
+ "@com_google_absl//absl/synchronization",
106
+ "@nlohmann_json//:json",
107
+ "//runtime/conversation",
108
+ "//runtime/conversation:io_types",
109
+ "//runtime/engine:engine_settings",
110
+ "//runtime/executor:executor_settings_base",
111
+ "//runtime/executor:llm_executor_settings",
112
+ ],
113
+ )
c/CMakeLists.txt ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2026 Google LLC.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ set(PKG_ROOT ${CMAKE_CURRENT_SOURCE_DIR})
16
+
17
+ # ==============================================================================
18
+ # 1. Engine Library
19
+ # Bazel: cc_library(name = "engine", ...)
20
+ # ==============================================================================
21
+ add_litertlm_library(c_engine STATIC
22
+ engine.cc
23
+ litert_lm_logging.cc
24
+ )
25
+ add_library(LiteRTLM::C::Engine ALIAS c_engine)
26
+
27
+ target_include_directories(c_engine
28
+ PRIVATE
29
+ ${LITERTLM_INCLUDE_PATHS} # Project Root & Generated Headers
30
+ ${LITERT_INCLUDE_PATHS} # LiteRT External Headers
31
+ ${THIRD_PARTY_DIR}/json/include # @nlohmann_json//:json
32
+ )
33
+
34
+ target_link_libraries(c_engine
35
+ PUBLIC
36
+ LITERTLM_DEPS
37
+
38
+ LiteRTLM::Runtime::Conversation
39
+ LiteRTLM::Runtime::Conversation::IoTypes
40
+ LiteRTLM::Runtime::Core::EngineImpl
41
+ LiteRTLM::Runtime::Engine::Interface
42
+ LiteRTLM::Runtime::Engine::Settings
43
+ LiteRTLM::Runtime::Engine::IoTypes
44
+ LiteRTLM::Runtime::Executor::ExecutorSettingsBase
45
+ )
46
+
47
+ # ==============================================================================
48
+ # 2. Folder Facade
49
+ # ==============================================================================
50
+ add_library(c_libs INTERFACE)
51
+ add_library(LiteRTLM::C ALIAS c_libs)
52
+
53
+ target_link_libraries(c_libs INTERFACE
54
+ LiteRTLM::C::Engine
55
+ )
c/engine.cc ADDED
@@ -0,0 +1,786 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Copyright 2025 The ODML Authors.
2
+ //
3
+ // Licensed under the Apache License, Version 2.0 (the "License");
4
+ // you may not use this file except in compliance with the License.
5
+ // You may obtain a copy of the License at
6
+ //
7
+ // http://www.apache.org/licenses/LICENSE-2.0
8
+ //
9
+ // Unless required by applicable law or agreed to in writing, software
10
+ // distributed under the License is distributed on an "AS IS" BASIS,
11
+ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ // See the License for the specific language governing permissions and
13
+ // limitations under the License.
14
+
15
+ #include "c/engine.h"
16
+
17
+ #include <cstddef>
18
+ #include <cstring>
19
+ #include <memory>
20
+ #include <optional>
21
+ #include <string>
22
+ #include <utility>
23
+ #include <variant>
24
+ #include <vector>
25
+
26
+ #include "absl/functional/any_invocable.h" // from @com_google_absl
27
+ #include "absl/log/absl_log.h" // from @com_google_absl
28
+ #include "absl/status/status.h" // from @com_google_absl
29
+ #include "absl/status/statusor.h" // from @com_google_absl
30
+ #include "absl/strings/string_view.h" // from @com_google_absl
31
+ #include "absl/time/time.h" // from @com_google_absl
32
+ #include "nlohmann/json.hpp" // from @nlohmann_json
33
+ #include "runtime/conversation/conversation.h"
34
+ #include "runtime/conversation/io_types.h"
35
+ #include "runtime/engine/engine.h"
36
+ #include "runtime/engine/engine_factory.h"
37
+ #include "runtime/engine/engine_settings.h"
38
+ #include "runtime/engine/io_types.h"
39
+ #include "runtime/executor/executor_settings_base.h"
40
+ #include "runtime/executor/llm_executor_settings.h"
41
+ #include "runtime/proto/sampler_params.pb.h"
42
+
43
+ namespace {
44
+
45
+ absl::AnyInvocable<void(absl::StatusOr<litert::lm::Responses>)> CreateCallback(
46
+ LiteRtLmStreamCallback callback, void* callback_data) {
47
+ return [callback,
48
+ callback_data](absl::StatusOr<litert::lm::Responses> responses) {
49
+ if (!responses.ok()) {
50
+ callback(callback_data, /*text=*/nullptr, /*is_final=*/true,
51
+ responses.status().ToString().c_str());
52
+ return;
53
+ }
54
+ if (responses->GetTaskState() == litert::lm::TaskState::kDone) {
55
+ callback(callback_data, /*text=*/nullptr, /*is_final=*/true,
56
+ /*error_message=*/nullptr);
57
+ } else if (responses->GetTaskState() ==
58
+ litert::lm::TaskState::kMaxNumTokensReached) {
59
+ callback(callback_data, /*text=*/nullptr, /*is_final=*/true,
60
+ "Max number of tokens reached.");
61
+ } else {
62
+ for (const auto& text : responses->GetTexts()) {
63
+ callback(callback_data, text.data(), /*is_final=*/false,
64
+ /*error_message=*/nullptr);
65
+ }
66
+ }
67
+ };
68
+ }
69
+
70
+ absl::AnyInvocable<void(absl::StatusOr<litert::lm::Message>)>
71
+ CreateConversationCallback(LiteRtLmStreamCallback callback, void* user_data) {
72
+ return [callback, user_data](absl::StatusOr<litert::lm::Message> message) {
73
+ if (!message.ok()) {
74
+ std::string error_str = message.status().ToString();
75
+ callback(user_data, nullptr, true, const_cast<char*>(error_str.c_str()));
76
+ return;
77
+ }
78
+ if (auto* json_msg = std::get_if<litert::lm::JsonMessage>(&*message)) {
79
+ if (json_msg->is_null()) { // End of stream marker
80
+ callback(user_data, nullptr, true, nullptr);
81
+ } else {
82
+ std::string json_str = json_msg->dump();
83
+ callback(user_data, const_cast<char*>(json_str.c_str()), false,
84
+ nullptr);
85
+ }
86
+ } else {
87
+ std::string error_str = "Unsupported message type";
88
+ callback(user_data, nullptr, true, const_cast<char*>(error_str.c_str()));
89
+ }
90
+ };
91
+ }
92
+
93
+ litert::lm::OptionalArgs CreateOptionalArgs(const char* extra_context) {
94
+ litert::lm::OptionalArgs optional_args;
95
+ if (extra_context) {
96
+ auto extra_context_json =
97
+ nlohmann::ordered_json::parse(extra_context, nullptr, false);
98
+ if (!extra_context_json.is_null() && !extra_context_json.empty()) {
99
+ optional_args.extra_context = extra_context_json;
100
+ }
101
+ }
102
+ return optional_args;
103
+ }
104
+
105
+ } // namespace
106
+
107
+ using ::litert::lm::Conversation;
108
+ using ::litert::lm::ConversationConfig;
109
+ using ::litert::lm::Engine;
110
+ using ::litert::lm::EngineFactory;
111
+ using ::litert::lm::EngineSettings;
112
+ using ::litert::lm::InputText;
113
+ using ::litert::lm::JsonMessage;
114
+ using ::litert::lm::Message;
115
+ using ::litert::lm::ModelAssets;
116
+ using ::litert::lm::Responses;
117
+ using ::litert::lm::SessionConfig;
118
+ using ::litert::lm::proto::SamplerParameters;
119
+
120
// Opaque C handle owning the C++ engine settings.
struct LiteRtLmEngineSettings {
  std::unique_ptr<EngineSettings> settings;
};
123
+
124
// Opaque C handle owning the C++ inference engine.
struct LiteRtLmEngine {
  std::unique_ptr<Engine> engine;
};
127
+
128
// Opaque C handle owning a single engine session.
struct LiteRtLmSession {
  std::unique_ptr<Engine::Session> session;
};
131
+
132
// Opaque C handle holding the responses of one generation call.
struct LiteRtLmResponses {
  Responses responses;
};
135
+
136
// Opaque C handle holding a snapshot of benchmark statistics.
struct LiteRtLmBenchmarkInfo {
  litert::lm::BenchmarkInfo benchmark_info;
};
139
+
140
// Opaque C handle owning a multi-turn conversation.
struct LiteRtLmConversation {
  std::unique_ptr<Conversation> conversation;
};
143
+
144
// Opaque C handle owning a serialized JSON response string; the pointer
// returned by litert_lm_json_response_get_string points into this storage.
struct LiteRtLmJsonResponse {
  std::string json_string;
};
147
+
148
// Opaque C handle owning a session configuration.
struct LiteRtLmSessionConfig {
  std::unique_ptr<SessionConfig> config;
};
151
+
152
// Opaque C handle owning a conversation configuration.
struct LiteRtLmConversationConfig {
  std::unique_ptr<ConversationConfig> config;
};
155
+
156
+ extern "C" {
157
+
158
+ SamplerParameters::Type ToSamplerParametersType(Type type) {
159
+ switch (type) {
160
+ case kTypeUnspecified:
161
+ return SamplerParameters::TYPE_UNSPECIFIED;
162
+ case kTopK:
163
+ return SamplerParameters::TOP_K;
164
+ case kTopP:
165
+ return SamplerParameters::TOP_P;
166
+ case kGreedy:
167
+ return SamplerParameters::GREEDY;
168
+ }
169
+ return SamplerParameters::TYPE_UNSPECIFIED;
170
+ }
171
+
172
+ LiteRtLmSessionConfig* litert_lm_session_config_create() {
173
+ auto* c_config = new LiteRtLmSessionConfig;
174
+ c_config->config =
175
+ std::make_unique<SessionConfig>(SessionConfig::CreateDefault());
176
+ return c_config;
177
+ }
178
+
179
+ void litert_lm_session_config_set_max_output_tokens(
180
+ LiteRtLmSessionConfig* config, int max_output_tokens) {
181
+ if (config && config->config) {
182
+ config->config->SetMaxOutputTokens(max_output_tokens);
183
+ }
184
+ }
185
+
186
+ void litert_lm_session_config_set_sampler_params(
187
+ LiteRtLmSessionConfig* config,
188
+ const LiteRtLmSamplerParams* sampler_params) {
189
+ if (config && config->config && sampler_params) {
190
+ SamplerParameters& params = config->config->GetMutableSamplerParams();
191
+
192
+ params.set_type(ToSamplerParametersType(sampler_params->type));
193
+
194
+ params.set_k(sampler_params->top_k);
195
+ params.set_p(sampler_params->top_p);
196
+ params.set_temperature(sampler_params->temperature);
197
+ params.set_seed(sampler_params->seed);
198
+ }
199
+ }
200
+
201
// Destroys a session config handle; deleting null is a safe no-op.
void litert_lm_session_config_delete(LiteRtLmSessionConfig* config) {
  delete config;
}
204
+
205
// Builds a ConversationConfig for `engine` from optional C-string JSON inputs:
// - `system_message_json`: JSON content value or plain string; becomes a
//   leading {"role": "system", ...} preface message.
// - `messages_json`: JSON array of prior messages appended after the system
//   message (logged and skipped if malformed or not an array).
// - `tools_json`: JSON array of tool declarations (logged and skipped if
//   malformed or not an array).
// Falls back to a default SessionConfig when `session_config` is null.
// Returns a heap-allocated handle the caller frees with
// litert_lm_conversation_config_delete(), or nullptr on failure.
LiteRtLmConversationConfig* litert_lm_conversation_config_create(
    LiteRtLmEngine* engine, const LiteRtLmSessionConfig* session_config,
    const char* system_message_json, const char* tools_json,
    const char* messages_json, bool enable_constrained_decoding) {
  if (!engine || !engine->engine) {
    return nullptr;
  }

  litert::lm::JsonPreface json_preface;
  if (system_message_json) {
    nlohmann::ordered_json system_message;
    system_message["role"] = "system";
    auto content =
        nlohmann::ordered_json::parse(system_message_json, nullptr, false);
    if (content.is_discarded()) {
      // If JSON parsing fails, assume it's a plain string.
      system_message["content"] = system_message_json;
    } else {
      system_message["content"] = content;
    }
    json_preface.messages = nlohmann::ordered_json::array({system_message});
  }

  if (messages_json) {
    auto messages =
        nlohmann::ordered_json::parse(messages_json, nullptr, false);
    if (messages.is_discarded()) {
      ABSL_LOG(ERROR) << "Failed to parse messages JSON.";
    } else if (!messages.is_array()) {
      ABSL_LOG(ERROR) << "Messages JSON is not an array.";
    } else {
      // Append after the system message when one was built above; otherwise
      // the provided messages become the whole preface.
      if (json_preface.messages.is_array()) {
        json_preface.messages.insert(json_preface.messages.end(),
                                     messages.begin(), messages.end());
      } else {
        json_preface.messages = std::move(messages);
      }
    }
  }

  // `default_session_config` keeps the fallback config alive until Build()
  // below has consumed `config_to_use`.
  std::unique_ptr<SessionConfig> default_session_config;
  const SessionConfig* config_to_use;

  if (session_config && session_config->config) {
    config_to_use = session_config->config.get();
  } else {
    default_session_config =
        std::make_unique<SessionConfig>(SessionConfig::CreateDefault());
    config_to_use = default_session_config.get();
  }

  if (tools_json) {
    auto tool_json_parsed =
        nlohmann::ordered_json::parse(tools_json, nullptr, false);
    if (!tool_json_parsed.is_discarded() && tool_json_parsed.is_array()) {
      json_preface.tools = tool_json_parsed;
    } else {
      ABSL_LOG(ERROR) << "Failed to parse tools JSON or not an array: "
                      << tools_json;
    }
  }

  auto conversation_config =
      litert::lm::ConversationConfig::Builder()
          .SetSessionConfig(*config_to_use)
          .SetPreface(json_preface)
          .SetEnableConstrainedDecoding(enable_constrained_decoding)
          .Build(*engine->engine);

  if (!conversation_config.ok()) {
    ABSL_LOG(ERROR) << "Failed to create conversation config: "
                    << conversation_config.status();
    return nullptr;
  }

  auto* c_config = new LiteRtLmConversationConfig;
  c_config->config =
      std::make_unique<ConversationConfig>(*std::move(conversation_config));
  return c_config;
}
285
+
286
// Destroys a conversation config handle; deleting null is a safe no-op.
void litert_lm_conversation_config_delete(LiteRtLmConversationConfig* config) {
  delete config;
}
289
+
290
// Creates engine settings for the model at `model_path` on the backend named
// by `backend_str` (with optional vision/audio backends). Each parsing or
// creation step logs and returns nullptr on failure. On GPU the activation
// data type is forced to FLOAT32. The caller owns the returned handle and
// frees it with litert_lm_engine_settings_delete().
LiteRtLmEngineSettings* litert_lm_engine_settings_create(
    const char* model_path, const char* backend_str,
    const char* vision_backend_str, const char* audio_backend_str) {
  auto model_assets = ModelAssets::Create(model_path);
  if (!model_assets.ok()) {
    ABSL_LOG(ERROR) << "Failed to create model assets: "
                    << model_assets.status();
    return nullptr;
  }
  auto backend = litert::lm::GetBackendFromString(backend_str);
  if (!backend.ok()) {
    ABSL_LOG(ERROR) << "Failed to parse backend: " << backend.status();
    return nullptr;
  }

  // Vision and audio backends are optional; null means "not configured".
  std::optional<litert::lm::Backend> vision_backend;
  if (vision_backend_str) {
    auto backend = litert::lm::GetBackendFromString(vision_backend_str);
    if (!backend.ok()) {
      ABSL_LOG(ERROR) << "Failed to parse vision backend: " << backend.status();
      return nullptr;
    }
    vision_backend = *backend;
  }

  std::optional<litert::lm::Backend> audio_backend;
  if (audio_backend_str) {
    auto backend = litert::lm::GetBackendFromString(audio_backend_str);
    if (!backend.ok()) {
      ABSL_LOG(ERROR) << "Failed to parse audio backend: " << backend.status();
      return nullptr;
    }
    audio_backend = *backend;
  }

  auto engine_settings = EngineSettings::CreateDefault(
      *std::move(model_assets), *backend, vision_backend, audio_backend);
  if (!engine_settings.ok()) {
    ABSL_LOG(ERROR) << "Failed to create engine settings: "
                    << engine_settings.status();
    return nullptr;
  }

  if (*backend == litert::lm::Backend::GPU) {
    // Enforce floating point precision for better quality.
    auto& executor_settings = engine_settings->GetMutableMainExecutorSettings();
    executor_settings.SetActivationDataType(
        litert::lm::ActivationDataType::FLOAT32);
  }

  auto* c_settings = new LiteRtLmEngineSettings;
  c_settings->settings =
      std::make_unique<EngineSettings>(*std::move(engine_settings));
  return c_settings;
}
345
+
346
// Destroys an engine settings handle; deleting null is a safe no-op.
void litert_lm_engine_settings_delete(LiteRtLmEngineSettings* settings) {
  delete settings;
}
349
+
350
+ void litert_lm_engine_settings_set_max_num_tokens(
351
+ LiteRtLmEngineSettings* settings, int max_num_tokens) {
352
+ if (settings && settings->settings) {
353
+ settings->settings->GetMutableMainExecutorSettings().SetMaxNumTokens(
354
+ max_num_tokens);
355
+ }
356
+ }
357
+
358
+ void litert_lm_engine_settings_set_cache_dir(LiteRtLmEngineSettings* settings,
359
+ const char* cache_dir) {
360
+ if (settings && settings->settings) {
361
+ settings->settings->GetMutableMainExecutorSettings().SetCacheDir(cache_dir);
362
+ }
363
+ }
364
+
365
// Enables benchmarking by materializing the mutable benchmark params.
// NOTE(review): this relies on GetMutableBenchmarkParams() creating the
// params as a side effect, their presence presumably acting as the enable
// flag — confirm against EngineSettings' documentation.
void litert_lm_engine_settings_enable_benchmark(
    LiteRtLmEngineSettings* settings) {
  if (settings && settings->settings) {
    settings->settings->GetMutableBenchmarkParams();
  }
}
371
+
372
+ void litert_lm_engine_settings_set_num_prefill_tokens(
373
+ LiteRtLmEngineSettings* settings, int num_prefill_tokens) {
374
+ if (settings && settings->settings) {
375
+ settings->settings->GetMutableBenchmarkParams().set_num_prefill_tokens(
376
+ num_prefill_tokens);
377
+ }
378
+ }
379
+
380
+ void litert_lm_engine_settings_set_num_decode_tokens(
381
+ LiteRtLmEngineSettings* settings, int num_decode_tokens) {
382
+ if (settings && settings->settings) {
383
+ settings->settings->GetMutableBenchmarkParams().set_num_decode_tokens(
384
+ num_decode_tokens);
385
+ }
386
+ }
387
+
388
// Sets the activation data type on the main executor settings; no-op on null
// handles.
// NOTE(review): `activation_data_type_int` is cast unchecked to
// litert::lm::ActivationDataType — callers must pass a valid enumerator value.
void litert_lm_engine_settings_set_activation_data_type(
    LiteRtLmEngineSettings* settings, int activation_data_type_int) {
  if (settings && settings->settings) {
    settings->settings->GetMutableMainExecutorSettings().SetActivationDataType(
        static_cast<litert::lm::ActivationDataType>(activation_data_type_int));
  }
}
395
+
396
// Sets the prefill chunk size on the CPU backend config; no-op on null
// handles, and logs a warning (leaving settings unchanged) when the main
// executor is not configured with a CpuConfig.
void litert_lm_engine_settings_set_prefill_chunk_size(
    LiteRtLmEngineSettings* settings, int prefill_chunk_size) {
  if (settings && settings->settings) {
    auto& main_settings = settings->settings->GetMutableMainExecutorSettings();
    // MutableBackendConfig returns a copy; write the field and store it back.
    auto config = main_settings.MutableBackendConfig<litert::lm::CpuConfig>();
    if (!config.ok()) {
      ABSL_LOG(WARNING) << "Failed to get CpuConfig to set prefill chunk size: "
                        << config.status();
      return;
    }
    config->prefill_chunk_size = prefill_chunk_size;
    main_settings.SetBackendConfig(*config);
  }
}
410
+
411
+ LiteRtLmEngine* litert_lm_engine_create(
412
+ const LiteRtLmEngineSettings* settings) {
413
+ if (!settings || !settings->settings) {
414
+ return nullptr;
415
+ }
416
+
417
+ absl::StatusOr<std::unique_ptr<Engine>> engine;
418
+ engine = EngineFactory::CreateDefault(*settings->settings);
419
+
420
+ if (!engine.ok()) {
421
+ ABSL_LOG(ERROR) << "Failed to create engine: " << engine.status();
422
+ return nullptr;
423
+ }
424
+
425
+ auto* c_engine = new LiteRtLmEngine;
426
+ c_engine->engine = *std::move(engine);
427
+ return c_engine;
428
+ }
429
+
430
// Destroys an engine handle; deleting null is a safe no-op.
void litert_lm_engine_delete(LiteRtLmEngine* engine) { delete engine; }
431
+
432
+ LiteRtLmSession* litert_lm_engine_create_session(
433
+ LiteRtLmEngine* engine, LiteRtLmSessionConfig* config) {
434
+ if (!engine || !engine->engine) {
435
+ return nullptr;
436
+ }
437
+ absl::StatusOr<std::unique_ptr<Engine::Session>> session;
438
+ if (config && config->config) {
439
+ session = engine->engine->CreateSession(*config->config);
440
+ } else {
441
+ session = engine->engine->CreateSession(SessionConfig::CreateDefault());
442
+ }
443
+ if (!session.ok()) {
444
+ ABSL_LOG(ERROR) << "Failed to create session: " << session.status();
445
+ return nullptr;
446
+ }
447
+
448
+ auto* c_session = new LiteRtLmSession;
449
+ c_session->session = *std::move(session);
450
+ return c_session;
451
+ }
452
+
453
// Destroys a session handle; deleting null is a safe no-op.
void litert_lm_session_delete(LiteRtLmSession* session) { delete session; }
454
+
455
+ LiteRtLmResponses* litert_lm_session_generate_content(LiteRtLmSession* session,
456
+ const InputData* inputs,
457
+ size_t num_inputs) {
458
+ if (!session || !session->session) {
459
+ return nullptr;
460
+ }
461
+ std::vector<litert::lm::InputData> engine_inputs;
462
+ engine_inputs.reserve(num_inputs);
463
+ for (size_t i = 0; i < num_inputs; ++i) {
464
+ switch (inputs[i].type) {
465
+ case kInputText:
466
+ engine_inputs.emplace_back(InputText(std::string(
467
+ static_cast<const char*>(inputs[i].data), inputs[i].size)));
468
+ break;
469
+ case kInputImage:
470
+ engine_inputs.emplace_back(litert::lm::InputImage(std::string(
471
+ static_cast<const char*>(inputs[i].data), inputs[i].size)));
472
+ break;
473
+ case kInputImageEnd:
474
+ engine_inputs.emplace_back(litert::lm::InputImageEnd());
475
+ break;
476
+ case kInputAudio:
477
+ engine_inputs.emplace_back(litert::lm::InputAudio(std::string(
478
+ static_cast<const char*>(inputs[i].data), inputs[i].size)));
479
+ break;
480
+ case kInputAudioEnd:
481
+ engine_inputs.emplace_back(litert::lm::InputAudioEnd());
482
+ break;
483
+ }
484
+ }
485
+ auto responses = session->session->GenerateContent(std::move(engine_inputs));
486
+ if (!responses.ok()) {
487
+ ABSL_LOG(ERROR) << "Failed to generate content: " << responses.status();
488
+ return nullptr;
489
+ }
490
+
491
+ auto* c_responses = new LiteRtLmResponses{std::move(*responses)};
492
+ return c_responses;
493
+ }
494
+
495
// Starts a non-blocking streamed generation over the given multimodal inputs.
// `callback` receives each chunk plus a final done/error notification.
// Returns 0 when the stream was started, -1 when `session` is invalid, or the
// absl::StatusCode value when the engine rejected the request.
int litert_lm_session_generate_content_stream(LiteRtLmSession* session,
                                              const InputData* inputs,
                                              size_t num_inputs,
                                              LiteRtLmStreamCallback callback,
                                              void* callback_data) {
  if (!session || !session->session) {
    return -1;
  }
  // Translate the C input descriptors into the engine's variant inputs.
  std::vector<litert::lm::InputData> engine_inputs;
  engine_inputs.reserve(num_inputs);
  for (size_t i = 0; i < num_inputs; ++i) {
    switch (inputs[i].type) {
      case kInputText:
        engine_inputs.emplace_back(litert::lm::InputText(std::string(
            static_cast<const char*>(inputs[i].data), inputs[i].size)));
        break;
      case kInputImage:
        engine_inputs.emplace_back(litert::lm::InputImage(std::string(
            static_cast<const char*>(inputs[i].data), inputs[i].size)));
        break;
      case kInputImageEnd:
        engine_inputs.emplace_back(litert::lm::InputImageEnd());
        break;
      case kInputAudio:
        engine_inputs.emplace_back(litert::lm::InputAudio(std::string(
            static_cast<const char*>(inputs[i].data), inputs[i].size)));
        break;
      case kInputAudioEnd:
        engine_inputs.emplace_back(litert::lm::InputAudioEnd());
        break;
    }
  }

  absl::Status status = session->session->GenerateContentStream(
      std::move(engine_inputs), CreateCallback(callback, callback_data));

  if (!status.ok()) {
    ABSL_LOG(ERROR) << "Failed to start content stream: " << status;
    // The wrapper callback built above is destroyed automatically on failure.
    return static_cast<int>(status.code());
  }
  return 0;  // The call is non-blocking and returns immediately.
}
538
+
539
// Destroys a responses handle; deleting null is a safe no-op.
void litert_lm_responses_delete(LiteRtLmResponses* responses) {
  delete responses;
}
542
+
543
+ int litert_lm_responses_get_num_candidates(const LiteRtLmResponses* responses) {
544
+ if (!responses) {
545
+ return 0;
546
+ }
547
+ return responses->responses.GetTexts().size();
548
+ }
549
+
550
// Returns the candidate text at `index`, or nullptr when `responses` is null
// or `index` is out of range. The pointer is valid only while the responses
// handle is alive.
const char* litert_lm_responses_get_response_text_at(
    const LiteRtLmResponses* responses, int index) {
  if (!responses) {
    return nullptr;
  }
  if (index < 0 || index >= responses->responses.GetTexts().size()) {
    return nullptr;
  }

  // The string_view's data is valid as long as the responses object is alive.
  // NOTE(review): string_view::data() is not guaranteed NUL-terminated in
  // general — confirm GetTexts() views full std::string buffers before
  // treating the result as a C string.
  return responses->responses.GetTexts()[index].data();
}
562
+
563
+ LiteRtLmBenchmarkInfo* litert_lm_session_get_benchmark_info(
564
+ LiteRtLmSession* session) {
565
+ if (!session || !session->session) {
566
+ return nullptr;
567
+ }
568
+ auto benchmark_info = session->session->GetBenchmarkInfo();
569
+ if (!benchmark_info.ok()) {
570
+ ABSL_LOG(ERROR) << "Failed to get benchmark info: "
571
+ << benchmark_info.status();
572
+ return nullptr;
573
+ }
574
+ return new LiteRtLmBenchmarkInfo{std::move(*benchmark_info)};
575
+ }
576
+
577
// Destroys a benchmark info handle; deleting null is a safe no-op.
void litert_lm_benchmark_info_delete(LiteRtLmBenchmarkInfo* benchmark_info) {
  delete benchmark_info;
}
580
+
581
+ double litert_lm_benchmark_info_get_time_to_first_token(
582
+ const LiteRtLmBenchmarkInfo* benchmark_info) {
583
+ if (!benchmark_info) {
584
+ return 0.0;
585
+ }
586
+ return benchmark_info->benchmark_info.GetTimeToFirstToken();
587
+ }
588
+
589
+ double litert_lm_benchmark_info_get_total_init_time_in_second(
590
+ const LiteRtLmBenchmarkInfo* benchmark_info) {
591
+ if (!benchmark_info) {
592
+ return 0.0;
593
+ }
594
+ double total_init_time_ms = 0.0;
595
+ for (const auto& phase : benchmark_info->benchmark_info.GetInitPhases()) {
596
+ total_init_time_ms += absl::ToDoubleMilliseconds(phase.second);
597
+ }
598
+ return total_init_time_ms / 1000.0;
599
+ }
600
+
601
// Returns the number of prefill turns recorded, or 0 for a null handle.
int litert_lm_benchmark_info_get_num_prefill_turns(
    const LiteRtLmBenchmarkInfo* benchmark_info) {
  if (!benchmark_info) {
    return 0;
  }
  return benchmark_info->benchmark_info.GetTotalPrefillTurns();
}
608
+
609
// Returns the number of decode turns recorded, or 0 for a null handle.
int litert_lm_benchmark_info_get_num_decode_turns(
    const LiteRtLmBenchmarkInfo* benchmark_info) {
  if (!benchmark_info) {
    return 0;
  }
  return benchmark_info->benchmark_info.GetTotalDecodeTurns();
}
616
+
617
+ int litert_lm_benchmark_info_get_prefill_token_count_at(
618
+ const LiteRtLmBenchmarkInfo* benchmark_info, int index) {
619
+ if (!benchmark_info) {
620
+ return 0;
621
+ }
622
+ auto turn = benchmark_info->benchmark_info.GetPrefillTurn(index);
623
+ if (!turn.ok()) {
624
+ return 0;
625
+ }
626
+ return static_cast<int>(turn->num_tokens);
627
+ }
628
+
629
+ int litert_lm_benchmark_info_get_decode_token_count_at(
630
+ const LiteRtLmBenchmarkInfo* benchmark_info, int index) {
631
+ if (!benchmark_info) {
632
+ return 0;
633
+ }
634
+ auto turn = benchmark_info->benchmark_info.GetDecodeTurn(index);
635
+ if (!turn.ok()) {
636
+ return 0;
637
+ }
638
+ return static_cast<int>(turn->num_tokens);
639
+ }
640
+
641
// Returns the prefill tokens/second for turn `index`, or 0.0 for a null
// handle.
double litert_lm_benchmark_info_get_prefill_tokens_per_sec_at(
    const LiteRtLmBenchmarkInfo* benchmark_info, int index) {
  if (!benchmark_info) {
    return 0.0;
  }
  return benchmark_info->benchmark_info.GetPrefillTokensPerSec(index);
}
648
+
649
// Returns the decode tokens/second for turn `index`, or 0.0 for a null
// handle.
double litert_lm_benchmark_info_get_decode_tokens_per_sec_at(
    const LiteRtLmBenchmarkInfo* benchmark_info, int index) {
  if (!benchmark_info) {
    return 0.0;
  }
  return benchmark_info->benchmark_info.GetDecodeTokensPerSec(index);
}
656
+
657
// Creates a conversation on `engine`, falling back to a default
// ConversationConfig when `conversation_config` is null. Returns nullptr on
// failure; the caller owns the result and frees it with
// litert_lm_conversation_delete().
LiteRtLmConversation* litert_lm_conversation_create(
    LiteRtLmEngine* engine, LiteRtLmConversationConfig* conversation_config) {
  if (!engine || !engine->engine) {
    return nullptr;
  }

  absl::StatusOr<std::unique_ptr<Conversation>> conversation;
  if (conversation_config && conversation_config->config) {
    conversation =
        Conversation::Create(*engine->engine, *conversation_config->config);
  } else {
    // Build a default config; this can itself fail (e.g. unsupported model).
    auto default_conversation_config =
        ConversationConfig::CreateDefault(*engine->engine);
    if (!default_conversation_config.ok()) {
      ABSL_LOG(ERROR) << "Failed to create default conversation config: "
                      << default_conversation_config.status();
      return nullptr;
    }
    conversation =
        Conversation::Create(*engine->engine, *default_conversation_config);
  }

  if (!conversation.ok()) {
    ABSL_LOG(ERROR) << "Failed to create conversation: "
                    << conversation.status();
    return nullptr;
  }
  auto* c_conversation = new LiteRtLmConversation;
  c_conversation->conversation = *std::move(conversation);
  return c_conversation;
}
688
+
689
// Destroys a conversation handle; deleting null is a safe no-op.
void litert_lm_conversation_delete(LiteRtLmConversation* conversation) {
  delete conversation;
}
692
+
693
// Sends one message (a JSON document in `message_json`) and blocks for the
// model's reply. `extra_context` is an optional JSON document forwarded as
// OptionalArgs. Returns a heap-allocated JSON response the caller frees with
// litert_lm_json_response_delete(), or nullptr on parse/send failure or when
// the reply is not a JSON message.
LiteRtLmJsonResponse* litert_lm_conversation_send_message(
    LiteRtLmConversation* conversation, const char* message_json,
    const char* extra_context) {
  if (!conversation || !conversation->conversation) {
    return nullptr;
  }
  nlohmann::json json_message =
      nlohmann::json::parse(message_json, /*cb=*/nullptr,
                            /*allow_exceptions=*/false);
  if (json_message.is_discarded()) {
    ABSL_LOG(ERROR) << "Failed to parse message JSON.";
    return nullptr;
  }

  litert::lm::OptionalArgs optional_args = CreateOptionalArgs(extra_context);

  auto response = conversation->conversation->SendMessage(
      json_message, std::move(optional_args));
  if (!response.ok()) {
    ABSL_LOG(ERROR) << "Failed to send message: " << response.status();
    return nullptr;
  }
  // Only JSON replies can be returned over the C API.
  auto* json_response = std::get_if<JsonMessage>(&*response);
  if (!json_response) {
    ABSL_LOG(ERROR) << "Response is not a JSON message.";
    return nullptr;
  }
  auto* c_response = new LiteRtLmJsonResponse;
  c_response->json_string = json_response->dump();
  return c_response;
}
724
+
725
// Destroys a JSON response handle; deleting null is a safe no-op.
void litert_lm_json_response_delete(LiteRtLmJsonResponse* response) {
  delete response;
}
728
+
729
+ const char* litert_lm_json_response_get_string(
730
+ const LiteRtLmJsonResponse* response) {
731
+ if (!response) {
732
+ return nullptr;
733
+ }
734
+ return response->json_string.c_str();
735
+ }
736
+
737
// Sends one message and streams the reply through `callback` (invoked with
// each chunk and a final done/error notification). Returns 0 when the stream
// was started, -1 on an invalid conversation or unparsable `message_json`,
// or the absl::StatusCode value when the engine rejected the request.
int litert_lm_conversation_send_message_stream(
    LiteRtLmConversation* conversation, const char* message_json,
    const char* extra_context, LiteRtLmStreamCallback callback,
    void* callback_data) {
  if (!conversation || !conversation->conversation) {
    return -1;
  }
  nlohmann::json json_message =
      nlohmann::json::parse(message_json, /*cb=*/nullptr,
                            /*allow_exceptions=*/false);
  if (json_message.is_discarded()) {
    ABSL_LOG(ERROR) << "Failed to parse message JSON.";
    return -1;
  }

  litert::lm::OptionalArgs optional_args = CreateOptionalArgs(extra_context);

  absl::Status status = conversation->conversation->SendMessageAsync(
      json_message, CreateConversationCallback(callback, callback_data),
      std::move(optional_args));

  if (!status.ok()) {
    ABSL_LOG(ERROR) << "Failed to start message stream: " << status;
    return static_cast<int>(status.code());
  }
  return 0;
}
764
+
765
+ void litert_lm_conversation_cancel_process(LiteRtLmConversation* conversation) {
766
+ if (!conversation || !conversation->conversation) {
767
+ return;
768
+ }
769
+ conversation->conversation->CancelProcess();
770
+ }
771
+
772
+ LiteRtLmBenchmarkInfo* litert_lm_conversation_get_benchmark_info(
773
+ LiteRtLmConversation* conversation) {
774
+ if (!conversation || !conversation->conversation) {
775
+ return nullptr;
776
+ }
777
+ auto benchmark_info = conversation->conversation->GetBenchmarkInfo();
778
+ if (!benchmark_info.ok()) {
779
+ ABSL_LOG(ERROR) << "Failed to get benchmark info: "
780
+ << benchmark_info.status();
781
+ return nullptr;
782
+ }
783
+ return new LiteRtLmBenchmarkInfo{std::move(*benchmark_info)};
784
+ }
785
+
786
+ } // extern "C"