repo stringlengths 6 47 | file_url stringlengths 77 269 | file_path stringlengths 5 186 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-07 08:35:43 2026-01-07 08:55:24 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/utils/base64_test.go | pkg/utils/base64_test.go | package utils_test
import (
. "github.com/mudler/LocalAI/pkg/utils"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("utils/base64 tests", func() {
	// NOTE: the spec names previously referenced "GetImageURLAsBase64",
	// a function that does not exist; they now match the function under
	// test, GetContentURIAsBase64.
	It("GetContentURIAsBase64 can strip jpeg data url prefixes", func() {
		// This one doesn't actually _care_ that it's base64, so feed "bad" data in this test in order to catch a change in that behavior for informational purposes.
		input := "data:image/jpeg;base64,FOO"
		b64, err := GetContentURIAsBase64(input)
		Expect(err).To(BeNil())
		Expect(b64).To(Equal("FOO"))
	})
	It("GetContentURIAsBase64 can strip png data url prefixes", func() {
		// This one doesn't actually _care_ that it's base64, so feed "bad" data in this test in order to catch a change in that behavior for informational purposes.
		input := "data:image/png;base64,BAR"
		b64, err := GetContentURIAsBase64(input)
		Expect(err).To(BeNil())
		Expect(b64).To(Equal("BAR"))
	})
	It("GetContentURIAsBase64 returns an error for bogus data", func() {
		input := "FOO"
		b64, err := GetContentURIAsBase64(input)
		Expect(b64).To(Equal(""))
		Expect(err).ToNot(BeNil())
		Expect(err).To(MatchError("not valid base64 data type string"))
	})
	// NOTE(review): this spec performs a live network download, which
	// makes the suite flaky offline — consider an httptest server.
	It("GetContentURIAsBase64 can actually download images and calculates something", func() {
		// This test doesn't actually _check_ the results at this time, which is bad, but there wasn't a test at all before...
		input := "https://upload.wikimedia.org/wikipedia/en/2/29/Wargames.jpg"
		b64, err := GetContentURIAsBase64(input)
		Expect(err).To(BeNil())
		Expect(b64).ToNot(BeNil())
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/utils/hash.go | pkg/utils/hash.go | package utils
import (
"crypto/md5"
"fmt"
)
// MD5 returns the lowercase hex encoding of the MD5 digest of s.
// NOTE: MD5 is cryptographically broken; this is intended only for
// non-security uses such as cache keys or content fingerprints.
func MD5(s string) string {
	digest := md5.Sum([]byte(s))
	return fmt.Sprintf("%x", digest)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/utils/untar.go | pkg/utils/untar.go | package utils
import (
"fmt"
"os"
"github.com/mholt/archiver/v3"
)
// IsArchive reports whether the file name's extension maps to a format
// the archiver library can unarchive (e.g. .tar.gz, .zip). The file's
// contents are not inspected, only its name.
func IsArchive(file string) bool {
	iface, err := archiver.ByExtension(file)
	if err != nil {
		return false
	}
	_, supported := iface.(archiver.Unarchiver)
	return supported
}
// ExtractArchive unpacks archive into dst, supporting any tar variant
// (plain, gz, bz2, xz, zstd, lz4, sz, brotli) or other formats known to
// the archiver library. Tar-based formats are configured to overwrite
// existing files, create missing directories, skip the implicit
// top-level folder, and continue past per-entry errors.
//
// For safety, any archive containing a symlink is rejected before
// extraction begins.
func ExtractArchive(archive, dst string) error {
	uaIface, err := archiver.ByExtension(archive)
	if err != nil {
		return err
	}
	if _, ok := uaIface.(archiver.Unarchiver); !ok {
		return fmt.Errorf("format specified by source filename is not an archive format: %s (%T)", archive, uaIface)
	}
	mytar := &archiver.Tar{
		OverwriteExisting:      true,
		MkdirAll:               true,
		ImplicitTopLevelFolder: false,
		ContinueOnError:        true,
	}
	// Inject the configured Tar into whichever tar-based format was
	// detected. For compressed variants the wrapper object is mutated
	// in place; for plain tar the interface value itself is replaced.
	switch v := uaIface.(type) {
	case *archiver.Tar:
		uaIface = mytar
	case *archiver.TarBrotli:
		v.Tar = mytar
	case *archiver.TarBz2:
		v.Tar = mytar
	case *archiver.TarGz:
		v.Tar = mytar
	case *archiver.TarLz4:
		v.Tar = mytar
	case *archiver.TarSz:
		v.Tar = mytar
	case *archiver.TarXz:
		v.Tar = mytar
	case *archiver.TarZstd:
		v.Tar = mytar
	}
	// BUGFIX: the Unarchiver must be extracted AFTER the switch above.
	// Previously it was captured before, so for plain .tar archives the
	// "uaIface = mytar" assignment had no effect and the customized
	// settings (overwrite, mkdir, no top-level folder) were ignored.
	// The assertion is safe: it was checked above, and mytar is also an
	// Unarchiver.
	un := uaIface.(archiver.Unarchiver)
	// Reject archives containing symlinks before extracting anything.
	err = archiver.Walk(archive, func(f archiver.File) error {
		if f.FileInfo.Mode()&os.ModeSymlink != 0 {
			return fmt.Errorf("archive contains a symlink")
		}
		return nil
	})
	if err != nil {
		return err
	}
	return un.Unarchive(archive, dst)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/utils/strings.go | pkg/utils/strings.go | package utils
import (
"math/rand"
"time"
)
// letterRunes is the alphabet RandString draws from: ASCII letters only.
var letterRunes = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")

// init seeds the global math/rand source so RandString output differs
// between runs.
// NOTE(review): rand.Seed is deprecated since Go 1.20, where the global
// source is auto-seeded; this call can be removed once the module
// targets Go >= 1.20.
func init() {
	rand.Seed(time.Now().UnixNano())
}

// RandString returns a random string of n ASCII letters [a-zA-Z].
// It uses math/rand and therefore must not be used for secrets.
func RandString(n int) string {
	runes := make([]rune, n)
	for pos := range runes {
		runes[pos] = letterRunes[rand.Intn(len(letterRunes))]
	}
	return string(runes)
}
// Unique returns a new slice with duplicate strings removed, keeping
// the first occurrence of each element in its original order.
func Unique(arr []string) []string {
	seen := make(map[string]bool, len(arr))
	var out []string
	for _, s := range arr {
		if seen[s] {
			continue
		}
		seen[s] = true
		out = append(out, s)
	}
	return out
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/utils/utils_suite_test.go | pkg/utils/utils_suite_test.go | package utils_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestUtils wires Ginkgo into the standard go test runner: it registers
// Gomega's failure handler and runs every spec defined in this package.
func TestUtils(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Utils test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/utils/base64.go | pkg/utils/base64.go | package utils
import (
"encoding/base64"
"fmt"
"io"
"net/http"
"regexp"
"strings"
"time"
"github.com/mudler/xlog"
)
var base64DownloadClient http.Client = http.Client{
Timeout: 30 * time.Second,
}
var dataURIPattern = regexp.MustCompile(`^data:([^;]+);base64,`)
// GetContentURIAsBase64 resolves s to a base64 payload:
//   - If s is an http:// or https:// URL, the content is downloaded in
//     memory and returned base64-encoded.
//   - If s is a data URI ("data:<mime>;base64,<payload>"), the prefix is
//     stripped and the payload returned as-is (it is NOT validated as
//     base64 — see the unit tests).
//   - Anything else yields an error.
func GetContentURIAsBase64(s string) (string, error) {
	// Require a full URL scheme. The previous check for the bare
	// prefixes "http"/"https" was redundant (anything starting with
	// "https" also starts with "http") and would try to download
	// arbitrary strings such as "httpfoo".
	if strings.HasPrefix(s, "http://") || strings.HasPrefix(s, "https://") {
		resp, err := base64DownloadClient.Get(s)
		if err != nil {
			return "", err
		}
		defer resp.Body.Close()
		// Reject error responses instead of base64-encoding an error
		// page as if it were the requested content.
		if resp.StatusCode != http.StatusOK {
			return "", fmt.Errorf("downloading %q: unexpected status %s", s, resp.Status)
		}
		// Read the whole body in memory; payloads are expected to be
		// media files of moderate size.
		data, err := io.ReadAll(resp.Body)
		if err != nil {
			return "", err
		}
		return base64.StdEncoding.EncodeToString(data), nil
	}
	// Strip a "data:<mime>;base64," header (anchored at the start by
	// dataURIPattern) and return the remaining payload unchanged.
	if match := dataURIPattern.FindString(s); match != "" {
		xlog.Debug("Found data URI prefix", "prefix", match)
		return strings.TrimPrefix(s, match), nil
	}
	return "", fmt.Errorf("not valid base64 data type string")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/audio/audio.go | pkg/audio/audio.go | package audio
// Copied from VoxInput
import (
"encoding/binary"
"io"
)
// WAVHeader is the canonical 44-byte PCM WAV file header.
// Field order, types, and sizes mirror the on-disk layout exactly:
// Write serializes the struct as-is via binary.Write, so fields must
// not be reordered or resized.
type WAVHeader struct {
	// RIFF chunk (12 bytes)
	ChunkID   [4]byte
	ChunkSize uint32
	Format    [4]byte
	// fmt subchunk (8-byte id/size + 16 bytes of PCM parameters)
	Subchunk1ID   [4]byte
	Subchunk1Size uint32
	AudioFormat   uint16
	NumChannels   uint16
	SampleRate    uint32
	ByteRate      uint32
	BlockAlign    uint16
	BitsPerSample uint16
	// data subchunk header (8 bytes)
	Subchunk2ID   [4]byte
	Subchunk2Size uint32
}

// NewWAVHeader builds a header describing 16-bit mono PCM audio at
// 16 kHz, wrapping pcmLen bytes of sample data.
func NewWAVHeader(pcmLen uint32) WAVHeader {
	h := WAVHeader{
		ChunkID:       [4]byte{'R', 'I', 'F', 'F'},
		Format:        [4]byte{'W', 'A', 'V', 'E'},
		Subchunk1ID:   [4]byte{'f', 'm', 't', ' '},
		Subchunk1Size: 16, // PCM fmt chunk payload is 16 bytes
		AudioFormat:   1,  // 1 = uncompressed PCM
		NumChannels:   1,  // mono
		SampleRate:    16000,
		BlockAlign:    2, // 16-bit mono: 2 bytes per frame
		BitsPerSample: 16,
		Subchunk2ID:   [4]byte{'d', 'a', 't', 'a'},
		Subchunk2Size: pcmLen,
	}
	// Derived fields: bytes per second, and RIFF chunk size =
	// 4 ("WAVE") + (8 + fmt size) + (8 + data size) = 36 + data size.
	h.ByteRate = h.SampleRate * uint32(h.BlockAlign)
	h.ChunkSize = 36 + h.Subchunk2Size
	return h
}

// Write serializes the header to writer in little-endian byte order.
func (h *WAVHeader) Write(writer io.Writer) error {
	return binary.Write(writer, binary.LittleEndian, h)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/langchain/langchain.go | pkg/langchain/langchain.go | package langchain
// PredictOptions collects the tunable parameters for a prediction call.
type PredictOptions struct {
	Model string `json:"model"`
	// MaxTokens is the maximum number of tokens to generate.
	MaxTokens int `json:"max_tokens"`
	// Temperature is the temperature for sampling, between 0 and 1.
	Temperature float64 `json:"temperature"`
	// StopWords is a list of words to stop on.
	StopWords []string `json:"stop_words"`
}

// PredictOption mutates a PredictOptions value; combine with
// NewPredictOptions to build a configuration.
type PredictOption func(p *PredictOptions)

// DefaultOptions are the settings used when no option overrides them.
var DefaultOptions = PredictOptions{
	Model:       "gpt2",
	MaxTokens:   200,
	Temperature: 0.96,
	StopWords:   nil,
}

// Predict holds the completion text returned by a backend.
type Predict struct {
	Completion string
}

// SetModel overrides the model name.
func SetModel(model string) PredictOption {
	return func(o *PredictOptions) { o.Model = model }
}

// SetTemperature overrides the sampling temperature.
func SetTemperature(temperature float64) PredictOption {
	return func(o *PredictOptions) { o.Temperature = temperature }
}

// SetMaxTokens overrides the generation token budget.
func SetMaxTokens(maxTokens int) PredictOption {
	return func(o *PredictOptions) { o.MaxTokens = maxTokens }
}

// SetStopWords overrides the stop-word list.
func SetStopWords(stopWords []string) PredictOption {
	return func(o *PredictOptions) { o.StopWords = stopWords }
}

// NewPredictOptions returns DefaultOptions with the given overrides
// applied in order.
func NewPredictOptions(opts ...PredictOption) PredictOptions {
	merged := DefaultOptions
	for _, apply := range opts {
		apply(&merged)
	}
	return merged
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/langchain/huggingface.go | pkg/langchain/huggingface.go | package langchain
import (
"context"
"fmt"
"github.com/tmc/langchaingo/llms"
"github.com/tmc/langchaingo/llms/huggingface"
)
// HuggingFace is a thin client wrapper for the HuggingFace Inference
// API, holding the target repository id and the API token.
type HuggingFace struct {
	modelPath string
	token     string
}

// NewHuggingFace returns a client for the given repository. It fails
// when token is empty, since the Inference API requires authentication.
func NewHuggingFace(repoId, token string) (*HuggingFace, error) {
	if token == "" {
		return nil, fmt.Errorf("no huggingface token provided")
	}
	client := &HuggingFace{
		modelPath: repoId,
		token:     token,
	}
	return client, nil
}
// PredictHuggingFace runs a single completion request against the
// HuggingFace Inference API using the client's token, translating
// LocalAI predict options into langchaingo call options.
//
// NOTE(review): the request uses context.Background(), so callers
// cannot cancel it — consider threading a ctx parameter through.
// NOTE(review): s.modelPath is not used here; the model comes from the
// options (default or SetModel) — verify this is intentional.
func (s *HuggingFace) PredictHuggingFace(text string, opts ...PredictOption) (*Predict, error) {
	po := NewPredictOptions(opts...)
	// Init client
	llm, err := huggingface.New(huggingface.WithToken(s.token))
	if err != nil {
		return nil, err
	}
	// Convert from LocalAI to LangChainGo format of options
	co := []llms.CallOption{
		llms.WithModel(po.Model),
		llms.WithMaxTokens(po.MaxTokens),
		llms.WithTemperature(po.Temperature),
		llms.WithStopWords(po.StopWords),
	}
	// Call Inference API
	ctx := context.Background()
	completion, err := llm.Call(ctx, text, co...)
	if err != nil {
		return nil, err
	}
	return &Predict{
		Completion: completion,
	}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/signals/handler.go | pkg/signals/handler.go | package signals
import (
"os"
"os/signal"
"sync"
"syscall"
)
var (
	// signalHandlers are callbacks invoked in registration order when
	// the process receives SIGINT or SIGTERM.
	signalHandlers []func()
	// signalHandlersMutex guards signalHandlers against concurrent
	// registration and invocation.
	signalHandlersMutex sync.Mutex
)

// RegisterGracefulTerminationHandler registers fn to be called before
// the process exits on SIGINT/SIGTERM. Handlers run while the mutex is
// held and must not block indefinitely, or shutdown will hang.
func RegisterGracefulTerminationHandler(fn func()) {
	signalHandlersMutex.Lock()
	defer signalHandlersMutex.Unlock()
	signalHandlers = append(signalHandlers, fn)
}

// init installs the signal listener for the whole process.
// Note: Go runs a package's init exactly once, so the sync.Once that
// previously wrapped this body was redundant and has been removed.
func init() {
	c := make(chan os.Signal, 1)
	signal.Notify(c, syscall.SIGINT, syscall.SIGTERM)
	go signalHandler(c)
}

// signalHandler blocks until the first termination signal arrives, runs
// every registered handler, then exits the process with status 0.
func signalHandler(c chan os.Signal) {
	<-c
	signalHandlersMutex.Lock()
	defer signalHandlersMutex.Unlock()
	for _, fn := range signalHandlers {
		fn()
	}
	os.Exit(0)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/grpc/embed.go | pkg/grpc/embed.go | package grpc
import (
"context"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
"google.golang.org/grpc"
"google.golang.org/grpc/metadata"
)
// Compile-time checks that embedBackend satisfies Backend and that
// embedBackendServerStream satisfies the generated stream interface.
var _ Backend = new(embedBackend)
var _ pb.Backend_PredictStreamServer = new(embedBackendServerStream)

// embedBackend adapts an in-process *server to the Backend interface,
// so an embedded model can be used through the same API as a remote
// gRPC backend. The grpc.CallOption arguments accepted by the methods
// below are ignored: there is no network call to apply them to.
type embedBackend struct {
	s *server
}

func (e *embedBackend) IsBusy() bool {
	return e.s.llm.Busy()
}

// HealthCheck always succeeds: the backend lives in-process.
func (e *embedBackend) HealthCheck(ctx context.Context) (bool, error) {
	return true, nil
}

func (e *embedBackend) Embeddings(ctx context.Context, in *pb.PredictOptions, opts ...grpc.CallOption) (*pb.EmbeddingResult, error) {
	return e.s.Embedding(ctx, in)
}

func (e *embedBackend) Predict(ctx context.Context, in *pb.PredictOptions, opts ...grpc.CallOption) (*pb.Reply, error) {
	return e.s.Predict(ctx, in)
}

func (e *embedBackend) LoadModel(ctx context.Context, in *pb.ModelOptions, opts ...grpc.CallOption) (*pb.Result, error) {
	return e.s.LoadModel(ctx, in)
}

// PredictStream bridges the callback-style API to the server's
// stream-based API through an in-memory stream that forwards each reply
// to f.
func (e *embedBackend) PredictStream(ctx context.Context, in *pb.PredictOptions, f func(reply *pb.Reply), opts ...grpc.CallOption) error {
	bs := &embedBackendServerStream{
		ctx: ctx,
		fn:  f,
	}
	return e.s.PredictStream(in, bs)
}

func (e *embedBackend) GenerateImage(ctx context.Context, in *pb.GenerateImageRequest, opts ...grpc.CallOption) (*pb.Result, error) {
	return e.s.GenerateImage(ctx, in)
}

func (e *embedBackend) GenerateVideo(ctx context.Context, in *pb.GenerateVideoRequest, opts ...grpc.CallOption) (*pb.Result, error) {
	return e.s.GenerateVideo(ctx, in)
}

func (e *embedBackend) TTS(ctx context.Context, in *pb.TTSRequest, opts ...grpc.CallOption) (*pb.Result, error) {
	return e.s.TTS(ctx, in)
}

func (e *embedBackend) SoundGeneration(ctx context.Context, in *pb.SoundGenerationRequest, opts ...grpc.CallOption) (*pb.Result, error) {
	return e.s.SoundGeneration(ctx, in)
}

func (e *embedBackend) Detect(ctx context.Context, in *pb.DetectOptions, opts ...grpc.CallOption) (*pb.DetectResponse, error) {
	return e.s.Detect(ctx, in)
}

func (e *embedBackend) AudioTranscription(ctx context.Context, in *pb.TranscriptRequest, opts ...grpc.CallOption) (*pb.TranscriptResult, error) {
	return e.s.AudioTranscription(ctx, in)
}

func (e *embedBackend) TokenizeString(ctx context.Context, in *pb.PredictOptions, opts ...grpc.CallOption) (*pb.TokenizationResponse, error) {
	return e.s.TokenizeString(ctx, in)
}

func (e *embedBackend) Status(ctx context.Context) (*pb.StatusResponse, error) {
	return e.s.Status(ctx, &pb.HealthMessage{})
}

func (e *embedBackend) StoresSet(ctx context.Context, in *pb.StoresSetOptions, opts ...grpc.CallOption) (*pb.Result, error) {
	return e.s.StoresSet(ctx, in)
}

func (e *embedBackend) StoresDelete(ctx context.Context, in *pb.StoresDeleteOptions, opts ...grpc.CallOption) (*pb.Result, error) {
	return e.s.StoresDelete(ctx, in)
}

func (e *embedBackend) StoresGet(ctx context.Context, in *pb.StoresGetOptions, opts ...grpc.CallOption) (*pb.StoresGetResult, error) {
	return e.s.StoresGet(ctx, in)
}

func (e *embedBackend) StoresFind(ctx context.Context, in *pb.StoresFindOptions, opts ...grpc.CallOption) (*pb.StoresFindResult, error) {
	return e.s.StoresFind(ctx, in)
}

func (e *embedBackend) Rerank(ctx context.Context, in *pb.RerankRequest, opts ...grpc.CallOption) (*pb.RerankResult, error) {
	return e.s.Rerank(ctx, in)
}

func (e *embedBackend) VAD(ctx context.Context, in *pb.VADRequest, opts ...grpc.CallOption) (*pb.VADResponse, error) {
	return e.s.VAD(ctx, in)
}

func (e *embedBackend) GetTokenMetrics(ctx context.Context, in *pb.MetricsRequest, opts ...grpc.CallOption) (*pb.MetricsResponse, error) {
	return e.s.GetMetrics(ctx, in)
}
// embedBackendServerStream is a minimal in-memory implementation of the
// generated PredictStream server-stream interface: Send invokes the
// user callback, and all metadata operations are no-ops since there is
// no transport underneath.
type embedBackendServerStream struct {
	ctx context.Context
	fn  func(reply *pb.Reply)
}

// Send forwards the reply to the registered callback; it never fails.
func (e *embedBackendServerStream) Send(reply *pb.Reply) error {
	e.fn(reply)
	return nil
}

// SetHeader is a no-op: headers have no meaning in-process.
func (e *embedBackendServerStream) SetHeader(md metadata.MD) error {
	return nil
}

// SendHeader is a no-op: headers have no meaning in-process.
func (e *embedBackendServerStream) SendHeader(md metadata.MD) error {
	return nil
}

// SetTrailer is a no-op: trailers have no meaning in-process.
func (e *embedBackendServerStream) SetTrailer(md metadata.MD) {
}

func (e *embedBackendServerStream) Context() context.Context {
	return e.ctx
}

// SendMsg forwards only *pb.Reply messages to the callback; any other
// message type is silently dropped.
func (e *embedBackendServerStream) SendMsg(m any) error {
	if x, ok := m.(*pb.Reply); ok {
		return e.Send(x)
	}
	return nil
}

// RecvMsg is a no-op: the embedded stream is send-only.
func (e *embedBackendServerStream) RecvMsg(m any) error {
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/grpc/client.go | pkg/grpc/client.go | package grpc
import (
"context"
"fmt"
"io"
"sync"
"time"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials/insecure"
)
type Client struct {
address string
busy bool
parallel bool
sync.Mutex
opMutex sync.Mutex
wd WatchDog
}
type WatchDog interface {
Mark(address string)
UnMark(address string)
}
func (c *Client) IsBusy() bool {
c.Lock()
defer c.Unlock()
return c.busy
}
func (c *Client) setBusy(v bool) {
c.Lock()
c.busy = v
c.Unlock()
}
func (c *Client) wdMark() {
if c.wd != nil {
c.wd.Mark(c.address)
}
}
func (c *Client) wdUnMark() {
if c.wd != nil {
c.wd.UnMark(c.address)
}
}
func (c *Client) HealthCheck(ctx context.Context) (bool, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return false, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
// The healthcheck call shouldn't take long time
ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
defer cancel()
res, err := client.Health(ctx, &pb.HealthMessage{})
if err != nil {
return false, err
}
if string(res.Message) == "OK" {
return true, nil
}
return false, fmt.Errorf("health check failed: %s", res.Message)
}
func (c *Client) Embeddings(ctx context.Context, in *pb.PredictOptions, opts ...grpc.CallOption) (*pb.EmbeddingResult, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.Embedding(ctx, in, opts...)
}
func (c *Client) Predict(ctx context.Context, in *pb.PredictOptions, opts ...grpc.CallOption) (*pb.Reply, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.Predict(ctx, in, opts...)
}
func (c *Client) LoadModel(ctx context.Context, in *pb.ModelOptions, opts ...grpc.CallOption) (*pb.Result, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.LoadModel(ctx, in, opts...)
}
func (c *Client) PredictStream(ctx context.Context, in *pb.PredictOptions, f func(reply *pb.Reply), opts ...grpc.CallOption) error {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
stream, err := client.PredictStream(ctx, in, opts...)
if err != nil {
return err
}
for {
// Check if context is cancelled before receiving
select {
case <-ctx.Done():
return ctx.Err()
default:
}
reply, err := stream.Recv()
if err == io.EOF {
break
}
if err != nil {
// Check if error is due to context cancellation
if ctx.Err() != nil {
return ctx.Err()
}
fmt.Println("Error", err)
return err
}
f(reply)
}
return nil
}
func (c *Client) GenerateImage(ctx context.Context, in *pb.GenerateImageRequest, opts ...grpc.CallOption) (*pb.Result, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.GenerateImage(ctx, in, opts...)
}
func (c *Client) GenerateVideo(ctx context.Context, in *pb.GenerateVideoRequest, opts ...grpc.CallOption) (*pb.Result, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.GenerateVideo(ctx, in, opts...)
}
func (c *Client) TTS(ctx context.Context, in *pb.TTSRequest, opts ...grpc.CallOption) (*pb.Result, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.TTS(ctx, in, opts...)
}
func (c *Client) SoundGeneration(ctx context.Context, in *pb.SoundGenerationRequest, opts ...grpc.CallOption) (*pb.Result, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.SoundGeneration(ctx, in, opts...)
}
func (c *Client) AudioTranscription(ctx context.Context, in *pb.TranscriptRequest, opts ...grpc.CallOption) (*pb.TranscriptResult, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.AudioTranscription(ctx, in, opts...)
}
func (c *Client) TokenizeString(ctx context.Context, in *pb.PredictOptions, opts ...grpc.CallOption) (*pb.TokenizationResponse, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
res, err := client.TokenizeString(ctx, in, opts...)
if err != nil {
return nil, err
}
return res, nil
}
func (c *Client) Status(ctx context.Context) (*pb.StatusResponse, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.Status(ctx, &pb.HealthMessage{})
}
func (c *Client) StoresSet(ctx context.Context, in *pb.StoresSetOptions, opts ...grpc.CallOption) (*pb.Result, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.StoresSet(ctx, in, opts...)
}
func (c *Client) StoresDelete(ctx context.Context, in *pb.StoresDeleteOptions, opts ...grpc.CallOption) (*pb.Result, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.wdMark()
defer c.wdUnMark()
c.setBusy(true)
defer c.setBusy(false)
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.StoresDelete(ctx, in, opts...)
}
func (c *Client) StoresGet(ctx context.Context, in *pb.StoresGetOptions, opts ...grpc.CallOption) (*pb.StoresGetResult, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.StoresGet(ctx, in, opts...)
}
func (c *Client) StoresFind(ctx context.Context, in *pb.StoresFindOptions, opts ...grpc.CallOption) (*pb.StoresFindResult, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.StoresFind(ctx, in, opts...)
}
func (c *Client) Rerank(ctx context.Context, in *pb.RerankRequest, opts ...grpc.CallOption) (*pb.RerankResult, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.Rerank(ctx, in, opts...)
}
func (c *Client) GetTokenMetrics(ctx context.Context, in *pb.MetricsRequest, opts ...grpc.CallOption) (*pb.MetricsResponse, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.GetMetrics(ctx, in, opts...)
}
func (c *Client) VAD(ctx context.Context, in *pb.VADRequest, opts ...grpc.CallOption) (*pb.VADResponse, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.VAD(ctx, in, opts...)
}
func (c *Client) Detect(ctx context.Context, in *pb.DetectOptions, opts ...grpc.CallOption) (*pb.DetectResponse, error) {
if !c.parallel {
c.opMutex.Lock()
defer c.opMutex.Unlock()
}
c.setBusy(true)
defer c.setBusy(false)
c.wdMark()
defer c.wdUnMark()
conn, err := grpc.Dial(c.address, grpc.WithTransportCredentials(insecure.NewCredentials()),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(50*1024*1024), // 50MB
grpc.MaxCallSendMsgSize(50*1024*1024), // 50MB
))
if err != nil {
return nil, err
}
defer conn.Close()
client := pb.NewBackendClient(conn)
return client.Detect(ctx, in, opts...)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/grpc/interface.go | pkg/grpc/interface.go | package grpc
import (
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
)
// AIModel is the contract a backend implementation exposes to the gRPC
// server. Methods mirror the proto Backend service; implementations
// that do not support a capability return an error from it.
type AIModel interface {
	// Busy reports whether the model is currently serving a request.
	Busy() bool
	Lock()
	Unlock()
	// Locking reports whether calls must be serialized via Lock/Unlock.
	Locking() bool
	Predict(*pb.PredictOptions) (string, error)
	PredictStream(*pb.PredictOptions, chan string) error
	Load(*pb.ModelOptions) error
	Embeddings(*pb.PredictOptions) ([]float32, error)
	GenerateImage(*pb.GenerateImageRequest) error
	GenerateVideo(*pb.GenerateVideoRequest) error
	Detect(*pb.DetectOptions) (pb.DetectResponse, error)
	AudioTranscription(*pb.TranscriptRequest) (pb.TranscriptResult, error)
	TTS(*pb.TTSRequest) error
	SoundGeneration(*pb.SoundGenerationRequest) error
	TokenizeString(*pb.PredictOptions) (pb.TokenizationResponse, error)
	Status() (pb.StatusResponse, error)
	StoresSet(*pb.StoresSetOptions) error
	StoresDelete(*pb.StoresDeleteOptions) error
	StoresGet(*pb.StoresGetOptions) (pb.StoresGetResult, error)
	StoresFind(*pb.StoresFindOptions) (pb.StoresFindResult, error)
	VAD(*pb.VADRequest) (pb.VADResponse, error)
}

// newReply wraps a plain string into the proto Reply message.
func newReply(s string) *pb.Reply {
	return &pb.Reply{Message: []byte(s)}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/grpc/server.go | pkg/grpc/server.go | package grpc
import (
"context"
"fmt"
"log"
"net"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
"google.golang.org/grpc"
)
// A GRPC Server that allows to run LLM inference.
// It is used by the LLMServices to expose the LLM functionalities that are called by the client.
// The GRPC Service is general, trying to encompass all the possible LLM options models.
// It depends on the real implementer then what can be done or not.
//
// The server is implemented as a GRPC service, with the following methods:
// - Predict: to run the inference with options
// - PredictStream: to run the inference with options and stream the results
// server is used to implement helloworld.GreeterServer.
type server struct {
pb.UnimplementedBackendServer
llm AIModel
}
func (s *server) Health(ctx context.Context, in *pb.HealthMessage) (*pb.Reply, error) {
return newReply("OK"), nil
}
// Embedding computes embeddings for the given predict options.
// When the backend reports Locking() (single-threaded backends), the call
// is serialized behind the backend's lock for its whole duration.
func (s *server) Embedding(ctx context.Context, in *pb.PredictOptions) (*pb.EmbeddingResult, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	embeds, err := s.llm.Embeddings(in)
	if err != nil {
		return nil, err
	}
	return &pb.EmbeddingResult{Embeddings: embeds}, nil
}
// LoadModel asks the backend to load the model described by the options and
// reports the outcome as a pb.Result. On failure the error is returned both
// wrapped in the Result message and as the gRPC error. Serialized behind the
// backend lock when the backend requires locking.
func (s *server) LoadModel(ctx context.Context, in *pb.ModelOptions) (*pb.Result, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	if err := s.llm.Load(in); err != nil {
		return &pb.Result{Message: fmt.Sprintf("Error loading model: %s", err.Error()), Success: false}, err
	}
	return &pb.Result{Message: "Loading succeeded", Success: true}, nil
}
// Predict runs a single (non-streaming) inference with the given options.
// Note: the reply is always built from the result string, even when err is
// non-nil, so callers may receive a (possibly empty) reply alongside the error.
func (s *server) Predict(ctx context.Context, in *pb.PredictOptions) (*pb.Reply, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	result, err := s.llm.Predict(in)
	return newReply(result), err
}
// GenerateImage delegates image generation to the backend, serializing behind
// the backend lock when required, and reports the outcome as a pb.Result.
func (s *server) GenerateImage(ctx context.Context, in *pb.GenerateImageRequest) (*pb.Result, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	err := s.llm.GenerateImage(in)
	if err != nil {
		return &pb.Result{Message: fmt.Sprintf("Error generating image: %s", err.Error()), Success: false}, err
	}
	return &pb.Result{Message: "Image generated", Success: true}, nil
}

// GenerateVideo delegates video generation to the backend; same locking and
// result-reporting convention as GenerateImage.
func (s *server) GenerateVideo(ctx context.Context, in *pb.GenerateVideoRequest) (*pb.Result, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	err := s.llm.GenerateVideo(in)
	if err != nil {
		return &pb.Result{Message: fmt.Sprintf("Error generating video: %s", err.Error()), Success: false}, err
	}
	return &pb.Result{Message: "Video generated", Success: true}, nil
}
// TTS delegates text-to-speech synthesis to the backend, serializing behind
// the backend lock when required, and reports the outcome as a pb.Result.
func (s *server) TTS(ctx context.Context, in *pb.TTSRequest) (*pb.Result, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	err := s.llm.TTS(in)
	if err != nil {
		return &pb.Result{Message: fmt.Sprintf("Error generating audio: %s", err.Error()), Success: false}, err
	}
	return &pb.Result{Message: "TTS audio generated", Success: true}, nil
}

// SoundGeneration delegates generic sound generation to the backend; same
// locking and result-reporting convention as TTS.
func (s *server) SoundGeneration(ctx context.Context, in *pb.SoundGenerationRequest) (*pb.Result, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	err := s.llm.SoundGeneration(in)
	if err != nil {
		return &pb.Result{Message: fmt.Sprintf("Error generating audio: %s", err.Error()), Success: false}, err
	}
	return &pb.Result{Message: "Sound Generation audio generated", Success: true}, nil
}
// Detect runs object/feature detection via the backend and returns the
// backend's response by address. Serialized behind the backend lock when the
// backend requires locking.
func (s *server) Detect(ctx context.Context, in *pb.DetectOptions) (*pb.DetectResponse, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	res, err := s.llm.Detect(in)
	if err != nil {
		return nil, err
	}
	return &res, nil
}
// AudioTranscription runs speech-to-text via the backend and converts the
// backend's transcription result (segments with int tokens and timestamps)
// into its protobuf representation.
//
// Fix: the segment loop variable used to be named "s", shadowing the *server
// receiver inside the loop body; it is renamed to seg for clarity. The token
// slice is also pre-sized to avoid repeated growth.
func (s *server) AudioTranscription(ctx context.Context, in *pb.TranscriptRequest) (*pb.TranscriptResult, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	result, err := s.llm.AudioTranscription(in)
	if err != nil {
		return nil, err
	}
	tresult := &pb.TranscriptResult{}
	for _, seg := range result.Segments {
		// Tokens are plain ints on the backend side; the proto uses int32.
		tks := make([]int32, 0, len(seg.Tokens))
		for _, t := range seg.Tokens {
			tks = append(tks, int32(t))
		}
		tresult.Segments = append(tresult.Segments,
			&pb.TranscriptSegment{
				Text:   seg.Text,
				Id:     int32(seg.Id),
				Start:  int64(seg.Start),
				End:    int64(seg.End),
				Tokens: tks,
			})
	}
	tresult.Text = result.Text
	return tresult, nil
}
// PredictStream runs a streaming inference: the backend pushes partial
// results on resultChan and each one is forwarded to the gRPC stream.
//
// Fix: the error returned by stream.Send used to be silently discarded, so a
// broken client connection went unnoticed and the server kept serializing
// replies to a dead stream. The first send error is now remembered and
// surfaced to the caller once the backend finishes. The goroutine keeps
// draining resultChan even after a send failure so the producer
// (s.llm.PredictStream) is never blocked on an unread channel.
func (s *server) PredictStream(in *pb.PredictOptions, stream pb.Backend_PredictStreamServer) error {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}

	resultChan := make(chan string)

	// done carries the first send error (or nil) once resultChan is closed.
	done := make(chan error, 1)
	go func() {
		var sendErr error
		for result := range resultChan {
			if sendErr != nil {
				// Stream already broken: keep draining so the producer
				// can finish, but stop sending.
				continue
			}
			sendErr = stream.Send(newReply(result))
		}
		done <- sendErr
	}()

	err := s.llm.PredictStream(in, resultChan)
	sendErr := <-done
	if err != nil {
		// Backend errors take precedence, matching the previous behavior.
		return err
	}
	return sendErr
}
// TokenizeString tokenizes the prompt in the predict options and returns the
// token ids (widened from int to int32 for the proto) plus the token count.
func (s *server) TokenizeString(ctx context.Context, in *pb.PredictOptions) (*pb.TokenizationResponse, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	res, err := s.llm.TokenizeString(in)
	if err != nil {
		return nil, err
	}
	castTokens := make([]int32, len(res.Tokens))
	for i, v := range res.Tokens {
		castTokens[i] = int32(v)
	}
	// err is known to be nil at this point; it is returned for symmetry.
	return &pb.TokenizationResponse{
		Length: int32(res.Length),
		Tokens: castTokens,
	}, err
}
// Status reports the backend's state and memory usage. Deliberately not
// guarded by the backend lock so it can answer while an inference is running.
func (s *server) Status(ctx context.Context, in *pb.HealthMessage) (*pb.StatusResponse, error) {
	res, err := s.llm.Status()
	if err != nil {
		return nil, err
	}

	return &res, nil
}
// StoresSet stores key/value entries in the backend's vector store.
func (s *server) StoresSet(ctx context.Context, in *pb.StoresSetOptions) (*pb.Result, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	err := s.llm.StoresSet(in)
	if err != nil {
		return &pb.Result{Message: fmt.Sprintf("Error setting entry: %s", err.Error()), Success: false}, err
	}
	return &pb.Result{Message: "Set key", Success: true}, nil
}

// StoresDelete removes entries from the backend's vector store.
func (s *server) StoresDelete(ctx context.Context, in *pb.StoresDeleteOptions) (*pb.Result, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	err := s.llm.StoresDelete(in)
	if err != nil {
		return &pb.Result{Message: fmt.Sprintf("Error deleting entry: %s", err.Error()), Success: false}, err
	}
	return &pb.Result{Message: "Deleted key", Success: true}, nil
}

// StoresGet fetches entries by key from the backend's vector store.
func (s *server) StoresGet(ctx context.Context, in *pb.StoresGetOptions) (*pb.StoresGetResult, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	res, err := s.llm.StoresGet(in)
	if err != nil {
		return nil, err
	}
	return &res, nil
}

// StoresFind performs a similarity search in the backend's vector store.
func (s *server) StoresFind(ctx context.Context, in *pb.StoresFindOptions) (*pb.StoresFindResult, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	res, err := s.llm.StoresFind(in)
	if err != nil {
		return nil, err
	}
	return &res, nil
}
// VAD runs voice-activity detection via the backend, serialized behind the
// backend lock when the backend requires locking.
func (s *server) VAD(ctx context.Context, in *pb.VADRequest) (*pb.VADResponse, error) {
	if s.llm.Locking() {
		s.llm.Lock()
		defer s.llm.Unlock()
	}
	res, err := s.llm.VAD(in)
	if err != nil {
		return nil, err
	}
	return &res, nil
}
// StartServer listens on the given TCP address, registers the backend
// implementation, and blocks serving gRPC requests until Serve returns.
// Message size limits are raised to 50MB in both directions to accommodate
// large payloads (e.g. images, audio).
func StartServer(address string, model AIModel) error {
	lis, err := net.Listen("tcp", address)
	if err != nil {
		return err
	}
	grpcServer := grpc.NewServer(
		grpc.MaxRecvMsgSize(50*1024*1024), // 50MB
		grpc.MaxSendMsgSize(50*1024*1024), // 50MB
	)
	pb.RegisterBackendServer(grpcServer, &server{llm: model})
	log.Printf("gRPC Server listening at %v", lis.Addr())
	return grpcServer.Serve(lis)
}
// RunServer listens on the given TCP address and serves gRPC requests,
// blocking until Serve returns. On success it returns a shutdown function
// that gracefully stops the server; if Serve itself fails, it returns a
// function that closes the listener together with the Serve error.
func RunServer(address string, model AIModel) (func() error, error) {
	lis, err := net.Listen("tcp", address)
	if err != nil {
		return nil, err
	}
	s := grpc.NewServer(
		grpc.MaxRecvMsgSize(50*1024*1024), // 50MB
		grpc.MaxSendMsgSize(50*1024*1024), // 50MB
	)
	pb.RegisterBackendServer(s, &server{llm: model})
	log.Printf("gRPC Server listening at %v", lis.Addr())
	if err = s.Serve(lis); err != nil {
		return func() error {
			return lis.Close()
		}, err
	}

	return func() error {
		s.GracefulStop()
		return nil
	}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/grpc/backend.go | pkg/grpc/backend.go | package grpc
import (
"context"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
"google.golang.org/grpc"
)
// embeds maps an address to an in-process (embedded) backend registered via
// Provide, allowing NewClient to bypass the network for local backends.
var embeds = map[string]*embedBackend{}

// Provide registers an in-process backend implementation under the given
// address so that NewClient returns it directly instead of dialing out.
func Provide(addr string, llm AIModel) {
	embeds[addr] = &embedBackend{s: &server{llm: llm}}
}

// NewClient returns a Backend for the given address: the embedded backend if
// one was registered via Provide, otherwise a gRPC client.
func NewClient(address string, parallel bool, wd WatchDog, enableWatchDog bool) Backend {
	if bc, ok := embeds[address]; ok {
		return bc
	}
	return buildClient(address, parallel, wd, enableWatchDog)
}
// buildClient constructs a gRPC-backed Backend client for the given address.
// The watchdog is attached only when enableWatchDog is true; otherwise it is
// dropped so the client runs unsupervised.
func buildClient(address string, parallel bool, wd WatchDog, enableWatchDog bool) Backend {
	watchdog := wd
	if !enableWatchDog {
		watchdog = nil
	}
	return &Client{
		address:  address,
		parallel: parallel,
		wd:       watchdog,
	}
}
// Backend is the client-side view of a LocalAI gRPC backend: every RPC the
// server exposes, plus IsBusy for scheduling. Implemented both by the
// network Client and by the in-process embedBackend.
type Backend interface {
	IsBusy() bool
	HealthCheck(ctx context.Context) (bool, error)
	Embeddings(ctx context.Context, in *pb.PredictOptions, opts ...grpc.CallOption) (*pb.EmbeddingResult, error)
	LoadModel(ctx context.Context, in *pb.ModelOptions, opts ...grpc.CallOption) (*pb.Result, error)
	PredictStream(ctx context.Context, in *pb.PredictOptions, f func(reply *pb.Reply), opts ...grpc.CallOption) error
	Predict(ctx context.Context, in *pb.PredictOptions, opts ...grpc.CallOption) (*pb.Reply, error)
	GenerateImage(ctx context.Context, in *pb.GenerateImageRequest, opts ...grpc.CallOption) (*pb.Result, error)
	GenerateVideo(ctx context.Context, in *pb.GenerateVideoRequest, opts ...grpc.CallOption) (*pb.Result, error)
	TTS(ctx context.Context, in *pb.TTSRequest, opts ...grpc.CallOption) (*pb.Result, error)
	SoundGeneration(ctx context.Context, in *pb.SoundGenerationRequest, opts ...grpc.CallOption) (*pb.Result, error)
	Detect(ctx context.Context, in *pb.DetectOptions, opts ...grpc.CallOption) (*pb.DetectResponse, error)
	AudioTranscription(ctx context.Context, in *pb.TranscriptRequest, opts ...grpc.CallOption) (*pb.TranscriptResult, error)
	TokenizeString(ctx context.Context, in *pb.PredictOptions, opts ...grpc.CallOption) (*pb.TokenizationResponse, error)
	Status(ctx context.Context) (*pb.StatusResponse, error)

	StoresSet(ctx context.Context, in *pb.StoresSetOptions, opts ...grpc.CallOption) (*pb.Result, error)
	StoresDelete(ctx context.Context, in *pb.StoresDeleteOptions, opts ...grpc.CallOption) (*pb.Result, error)
	StoresGet(ctx context.Context, in *pb.StoresGetOptions, opts ...grpc.CallOption) (*pb.StoresGetResult, error)
	StoresFind(ctx context.Context, in *pb.StoresFindOptions, opts ...grpc.CallOption) (*pb.StoresFindResult, error)

	Rerank(ctx context.Context, in *pb.RerankRequest, opts ...grpc.CallOption) (*pb.RerankResult, error)

	GetTokenMetrics(ctx context.Context, in *pb.MetricsRequest, opts ...grpc.CallOption) (*pb.MetricsResponse, error)
	VAD(ctx context.Context, in *pb.VADRequest, opts ...grpc.CallOption) (*pb.VADResponse, error)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/grpc/base/base.go | pkg/grpc/base/base.go | package base
// This is a wrapper to satisfy the GRPC service interface
// It is meant to be used by the main executable that is the server for the specific backend type (falcon, gpt3, etc)
import (
"fmt"
"os"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
gopsutil "github.com/shirou/gopsutil/v3/process"
)
// Base is a base class for all backends to implement
// Note: the backends that does not support multiple requests
// should use SingleThread instead
// Base is a no-op backend scaffold: backends embed it and override only the
// RPCs they implement. Its locking methods declare the backend as
// multi-request capable (Locking() == false), so Lock/Unlock must never be
// called on it — they panic to surface a programming error.
type Base struct {
}

// Locking reports that this backend does not require request serialization.
func (llm *Base) Locking() bool {
	return false
}

// Lock must never be called when Locking() is false.
func (llm *Base) Lock() {
	panic("not implemented")
}

// Unlock must never be called when Locking() is false.
func (llm *Base) Unlock() {
	panic("not implemented")
}

// Busy always reports idle for non-locking backends.
func (llm *Base) Busy() bool {
	return false
}
// The methods below are default "unimplemented" stubs; concrete backends
// override the ones they support.

func (llm *Base) Load(opts *pb.ModelOptions) error {
	return fmt.Errorf("unimplemented")
}

func (llm *Base) Predict(opts *pb.PredictOptions) (string, error) {
	return "", fmt.Errorf("unimplemented")
}

// PredictStream closes the results channel before returning so the server's
// draining goroutine terminates even though streaming is unsupported.
func (llm *Base) PredictStream(opts *pb.PredictOptions, results chan string) error {
	close(results)
	return fmt.Errorf("unimplemented")
}

func (llm *Base) Embeddings(opts *pb.PredictOptions) ([]float32, error) {
	return []float32{}, fmt.Errorf("unimplemented")
}

func (llm *Base) GenerateImage(*pb.GenerateImageRequest) error {
	return fmt.Errorf("unimplemented")
}

func (llm *Base) GenerateVideo(*pb.GenerateVideoRequest) error {
	return fmt.Errorf("unimplemented")
}

func (llm *Base) AudioTranscription(*pb.TranscriptRequest) (pb.TranscriptResult, error) {
	return pb.TranscriptResult{}, fmt.Errorf("unimplemented")
}

func (llm *Base) TTS(*pb.TTSRequest) error {
	return fmt.Errorf("unimplemented")
}

func (llm *Base) SoundGeneration(*pb.SoundGenerationRequest) error {
	return fmt.Errorf("unimplemented")
}

func (llm *Base) Detect(*pb.DetectOptions) (pb.DetectResponse, error) {
	return pb.DetectResponse{}, fmt.Errorf("unimplemented")
}

func (llm *Base) TokenizeString(opts *pb.PredictOptions) (pb.TokenizationResponse, error) {
	return pb.TokenizationResponse{}, fmt.Errorf("unimplemented")
}
// Status returns the process memory usage as captured by gopsutil; backends
// may call this and enhance the response with additional details (e.g. model
// state or GPU memory).
func (llm *Base) Status() (pb.StatusResponse, error) {
	return pb.StatusResponse{
		Memory: memoryUsage(),
	}, nil
}
// Default "unimplemented" stubs for the vector-store and VAD RPCs; concrete
// backends override the ones they support.

func (llm *Base) StoresSet(*pb.StoresSetOptions) error {
	return fmt.Errorf("unimplemented")
}

func (llm *Base) StoresGet(*pb.StoresGetOptions) (pb.StoresGetResult, error) {
	return pb.StoresGetResult{}, fmt.Errorf("unimplemented")
}

func (llm *Base) StoresDelete(*pb.StoresDeleteOptions) error {
	return fmt.Errorf("unimplemented")
}

func (llm *Base) StoresFind(*pb.StoresFindOptions) (pb.StoresFindResult, error) {
	return pb.StoresFindResult{}, fmt.Errorf("unimplemented")
}

func (llm *Base) VAD(*pb.VADRequest) (pb.VADResponse, error) {
	return pb.VADResponse{}, fmt.Errorf("unimplemented")
}
// memoryUsage samples this process's memory via gopsutil and packages it for
// the Status RPC. Errors are deliberately swallowed: on failure the returned
// struct simply has a zero Total and an empty breakdown.
func memoryUsage() *pb.MemoryUsageData {
	mud := pb.MemoryUsageData{
		Breakdown: make(map[string]uint64),
	}

	pid := int32(os.Getpid())

	backendProcess, err := gopsutil.NewProcess(pid)

	if err == nil {
		memInfo, err := backendProcess.MemoryInfo()
		if err == nil {
			// NOTE(review): Total is set from VMS (virtual size) although the
			// original comment talks about RSS — confirm which is intended.
			mud.Total = memInfo.VMS // TEST, but rss seems reasonable first guess. Does include swap, but we might care about that.
			mud.Breakdown["gopsutil-RSS"] = memInfo.RSS
		}
	}

	return &mud
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/grpc/base/singlethread.go | pkg/grpc/base/singlethread.go | package base
import (
"sync"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
)
// SingleThread are backends that does not support multiple requests.
// There will be only one request being served at the time.
// This is useful for models that are not thread safe and cannot run
// multiple requests at the same time.
// SingleThread wraps Base for backends that can serve only one request at a
// time: Locking() returns true, so the gRPC server takes backendBusy around
// every RPC that touches the model.
type SingleThread struct {
	Base
	backendBusy sync.Mutex
}

// Locking returns true if the backend needs to lock resources
func (llm *SingleThread) Locking() bool {
	return true
}

// Lock acquires the single-request mutex (called by the gRPC server).
func (llm *SingleThread) Lock() {
	llm.backendBusy.Lock()
}

// Unlock releases the single-request mutex.
func (llm *SingleThread) Unlock() {
	llm.backendBusy.Unlock()
}

// Busy reports whether a request currently holds the mutex; it probes with
// TryLock and immediately releases on success, so it never blocks.
func (llm *SingleThread) Busy() bool {
	r := llm.backendBusy.TryLock()
	if r {
		llm.backendBusy.Unlock()
	}
	return r
}
// Status reports READY/BUSY based on whether the single-request mutex is
// held, plus the process memory usage from gopsutil; backends may call this
// and enhance the response with additional details.
func (llm *SingleThread) Status() (pb.StatusResponse, error) {
	mud := memoryUsage()

	state := pb.StatusResponse_READY
	if llm.Busy() {
		state = pb.StatusResponse_BUSY
	}

	return pb.StatusResponse{
		State:  state,
		Memory: mud,
	}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/downloader/downloader_suite_test.go | pkg/downloader/downloader_suite_test.go | package downloader
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestDownloader is the Ginkgo entry point for the downloader package's
// spec suites.
func TestDownloader(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Downloader test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/downloader/progress.go | pkg/downloader/progress.go | package downloader
import (
"context"
"hash"
)
// progressWriter is an io.Writer that feeds downloaded bytes into a hash and
// reports progress via the downloadStatus callback. It is meant to sit in an
// io.MultiWriter next to the destination file.
type progressWriter struct {
	fileName       string // name shown in progress callbacks
	total          int64  // expected size of this file; <= 0 when unknown
	fileNo         int    // 0-based index of this file in a multi-file download
	totalFiles     int    // number of files in the whole download
	written        int64  // bytes seen so far
	downloadStatus func(string, string, string, float64)
	hash           hash.Hash
	ctx            context.Context // optional; enables cancellation checks
}

// Write hashes the chunk, accumulates the byte count, and emits a progress
// update. For multi-file downloads the percentage is scaled so that it
// reflects overall progress, assuming all previous files completed. Returns
// the context error when ctx is cancelled.
func (pw *progressWriter) Write(p []byte) (n int, err error) {
	// Check for cancellation before writing
	if pw.ctx != nil {
		select {
		case <-pw.ctx.Done():
			return 0, pw.ctx.Err()
		default:
		}
	}

	// hash.Hash.Write never returns an error and always reports len(p),
	// so n is the full chunk size on the success path.
	n, err = pw.hash.Write(p)
	if err != nil {
		return n, err
	}
	pw.written += int64(n)

	// Check for cancellation after writing chunk
	if pw.ctx != nil {
		select {
		case <-pw.ctx.Done():
			return n, pw.ctx.Err()
		default:
		}
	}

	if pw.total > 0 {
		percentage := float64(pw.written) / float64(pw.total) * 100
		if pw.totalFiles > 1 {
			// This is a multi-file download
			// so we need to adjust the percentage
			// to reflect the progress of the whole download
			// This is the file pw.fileNo (0-indexed) of pw.totalFiles files. We assume that
			// the files before successfully downloaded.
			percentage = percentage / float64(pw.totalFiles)
			if pw.fileNo > 0 {
				percentage += float64(pw.fileNo) * 100 / float64(pw.totalFiles)
			}
		}
		//log.Debug().Msgf("Downloading %s: %s/%s (%.2f%%)", pw.fileName, formatBytes(pw.written), formatBytes(pw.total), percentage)
		pw.downloadStatus(pw.fileName, formatBytes(pw.written), formatBytes(pw.total), percentage)
	} else {
		pw.downloadStatus(pw.fileName, formatBytes(pw.written), "", 0)
	}

	return
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/downloader/uri.go | pkg/downloader/uri.go | package downloader
import (
"context"
"crypto/sha256"
"errors"
"fmt"
"hash"
"io"
"net/http"
"net/url"
"os"
"path/filepath"
"strconv"
"strings"
"github.com/google/go-containerregistry/pkg/v1/tarball"
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/mudler/LocalAI/pkg/oci"
"github.com/mudler/LocalAI/pkg/utils"
"github.com/mudler/LocalAI/pkg/xio"
"github.com/mudler/xlog"
)
// URI scheme prefixes recognized by the downloader. Hugging Face has three
// spellings, GitHub two; the rest map one-to-one onto a fetch strategy.
const (
	HuggingFacePrefix  = "huggingface://"
	HuggingFacePrefix1 = "hf://"
	HuggingFacePrefix2 = "hf.co/"
	OCIPrefix          = "oci://"
	OCIFilePrefix      = "ocifile://"
	OllamaPrefix       = "ollama://"
	HTTPPrefix         = "http://"
	HTTPSPrefix        = "https://"
	GithubURI          = "github:"
	GithubURI2         = "github://"
	LocalPrefix        = "file://"
)

// URI is a downloadable resource locator in any of the schemes above (or a
// plain path/URL).
type URI string

// HF_ENDPOINT is the HuggingFace endpoint, can be overridden by setting the HF_ENDPOINT environment variable.
var HF_ENDPOINT string = loadConfig()
// loadConfig resolves the Hugging Face endpoint: the HF_ENDPOINT environment
// variable when set, otherwise the public https://huggingface.co host.
func loadConfig() string {
	if endpoint := os.Getenv("HF_ENDPOINT"); endpoint != "" {
		return endpoint
	}
	return "https://huggingface.co"
}
// ReadWithCallback is a convenience wrapper around
// ReadWithAuthorizationAndCallback using a background context and no
// Authorization header.
func (uri URI) ReadWithCallback(basePath string, f func(url string, i []byte) error) error {
	return uri.ReadWithAuthorizationAndCallback(context.Background(), basePath, "", f)
}
// ReadWithAuthorizationAndCallback reads the content behind the URI and
// passes the resolved URL plus the raw bytes to f. file:// URIs are resolved
// through symlinks and must land inside basePath (path-traversal guard);
// anything else is fetched over HTTP with the optional Authorization header.
func (uri URI) ReadWithAuthorizationAndCallback(ctx context.Context, basePath string, authorization string, f func(url string, i []byte) error) error {
	url := uri.ResolveURL()
	if strings.HasPrefix(string(uri), LocalPrefix) {

		// checks if the file is symbolic, and resolve if so - otherwise, this function returns the path unmodified.
		resolvedFile, err := filepath.EvalSymlinks(url)
		if err != nil {
			return err
		}
		resolvedBasePath, err := filepath.EvalSymlinks(basePath)
		if err != nil {
			return err
		}
		// Check if the local file is rooted in basePath
		err = utils.InTrustedRoot(resolvedFile, resolvedBasePath)
		if err != nil {
			xlog.Debug("downloader.GetURI blocked an attempt to ready a file url outside of basePath", "resolvedFile", resolvedFile, "basePath", basePath)
			return err
		}
		// Read the response body
		body, err := os.ReadFile(resolvedFile)
		if err != nil {
			return err
		}

		// Hand the raw bytes to the caller (typically a YAML unmarshaller)
		return f(url, body)
	}

	// Send a GET request to the URL
	req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
	if err != nil {
		return err
	}

	if authorization != "" {
		req.Header.Add("Authorization", authorization)
	}

	response, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer response.Body.Close()

	// Read the response body
	body, err := io.ReadAll(response.Body)
	if err != nil {
		return err
	}

	// Hand the raw bytes to the caller (typically a YAML unmarshaller)
	return f(url, body)
}
// FilenameFromUrl returns a file name for the URI: the URL's base name when
// it can be parsed, otherwise a deterministic MD5 of the URI (with a .yaml
// suffix preserved so gallery config files stay recognizable).
// The error result is always nil; it is kept for interface stability.
func (u URI) FilenameFromUrl() (string, error) {
	if f := filenameFromUrl(string(u)); f != "" {
		return f, nil
	}

	f := utils.MD5(string(u))
	if strings.HasSuffix(string(u), ".yaml") || strings.HasSuffix(string(u), ".yml") {
		f = f + ".yaml"
	}

	return f, nil
}
// filenameFromUrl extracts the (unescaped) base file name from a URL string,
// ignoring any "@revision" suffix. It returns "" when the URL cannot be
// parsed or unescaped.
func filenameFromUrl(urlstr string) string {
	// An "@branch"/"@revision" marker is not part of the path; drop
	// everything from the first '@' on before parsing.
	if before, _, found := strings.Cut(urlstr, "@"); found {
		urlstr = before
	}

	parsed, err := url.Parse(urlstr)
	if err != nil {
		return ""
	}

	unescaped, err := url.QueryUnescape(parsed.EscapedPath())
	if err != nil {
		return ""
	}

	return filepath.Base(unescaped)
}
// LooksLikeURL reports whether the URI uses any remote scheme the downloader
// knows how to fetch (HTTP(S), Hugging Face, GitHub, Ollama, OCI).
func (u URI) LooksLikeURL() bool {
	return strings.HasPrefix(string(u), HTTPPrefix) ||
		strings.HasPrefix(string(u), HTTPSPrefix) ||
		strings.HasPrefix(string(u), HuggingFacePrefix) ||
		strings.HasPrefix(string(u), HuggingFacePrefix1) ||
		strings.HasPrefix(string(u), HuggingFacePrefix2) ||
		strings.HasPrefix(string(u), GithubURI) ||
		strings.HasPrefix(string(u), OllamaPrefix) ||
		strings.HasPrefix(string(u), OCIPrefix) ||
		strings.HasPrefix(string(u), GithubURI2)
}

// LooksLikeHTTPURL reports whether the URI is a plain http:// or https:// URL.
func (u URI) LooksLikeHTTPURL() bool {
	return strings.HasPrefix(string(u), HTTPPrefix) ||
		strings.HasPrefix(string(u), HTTPSPrefix)
}

// LooksLikeDir reports whether the URI names an existing local directory.
func (u URI) LooksLikeDir() bool {
	f, err := os.Stat(string(u))

	return err == nil && f.IsDir()
}

// LooksLikeOCI reports whether the URI refers to an OCI artifact, either via
// an explicit scheme (oci://, ollama://, ocifile://) or a well-known registry
// host prefix.
func (s URI) LooksLikeOCI() bool {
	return strings.HasPrefix(string(s), "quay.io") ||
		strings.HasPrefix(string(s), OCIPrefix) ||
		strings.HasPrefix(string(s), OllamaPrefix) ||
		strings.HasPrefix(string(s), OCIFilePrefix) ||
		strings.HasPrefix(string(s), "ghcr.io") ||
		strings.HasPrefix(string(s), "docker.io")
}

// LooksLikeOCIFile reports whether the URI points at a local OCI tarball.
func (s URI) LooksLikeOCIFile() bool {
	return strings.HasPrefix(string(s), OCIFilePrefix)
}
// ResolveURL converts scheme-prefixed URIs into concrete download URLs:
//   - file://path                      -> path
//   - github:org/proj/path[@branch]   -> raw.githubusercontent.com URL
//   - github://org/proj/path[@branch] -> raw.githubusercontent.com URL
//   - huggingface:// / hf:// / hf.co/ owner/repo/file[@branch]
//     -> <HF_ENDPOINT>/owner/repo/resolve/<branch>/file
//
// Anything else (plain HTTP URLs, OCI references, local paths) is returned
// unchanged. The branch defaults to "main".
//
// Fix: the previous Hugging Face handling indexed repoID[2] whenever the file
// part contained "@", but splitting "owner/repo/file@branch" on "@" yields
// only two elements, so URIs like "hf://owner/repo/file.gguf@main" panicked
// with an index-out-of-range. The "@branch" suffix is now stripped from the
// repository string up front, which both avoids the panic and makes the
// branch selection actually take effect.
func (s URI) ResolveURL() string {
	switch {
	case strings.HasPrefix(string(s), LocalPrefix):
		return strings.TrimPrefix(string(s), LocalPrefix)
	case strings.HasPrefix(string(s), GithubURI2):
		return githubRawURL(strings.Replace(string(s), GithubURI2, "", 1))
	case strings.HasPrefix(string(s), GithubURI):
		parts := strings.Split(string(s), ":")
		return githubRawURL(parts[1])
	case strings.HasPrefix(string(s), HuggingFacePrefix) || strings.HasPrefix(string(s), HuggingFacePrefix1) || strings.HasPrefix(string(s), HuggingFacePrefix2):
		repository := strings.Replace(string(s), HuggingFacePrefix, "", 1)
		repository = strings.Replace(repository, HuggingFacePrefix1, "", 1)
		repository = strings.Replace(repository, HuggingFacePrefix2, "", 1)
		// convert repository to a full URL.
		// e.g. TheBloke/Mixtral-8x7B-v0.1-GGUF/mixtral-8x7b-v0.1.Q2_K.gguf@main -> https://huggingface.co/TheBloke/Mixtral-8x7B-v0.1-GGUF/resolve/main/mixtral-8x7b-v0.1.Q2_K.gguf
		branch := "main"
		if idx := strings.LastIndex(repository, "@"); idx != -1 {
			branch = repository[idx+1:]
			repository = repository[:idx]
		}
		repoPieces := strings.Split(repository, "/")
		if len(repoPieces) < 3 {
			// Not an owner/repo/file reference; leave it untouched.
			return string(s)
		}
		owner := repoPieces[0]
		repo := repoPieces[1]
		filepath := strings.Join(repoPieces[2:], "/")
		return fmt.Sprintf("%s/%s/%s/resolve/%s/%s", HF_ENDPOINT, owner, repo, branch, filepath)
	}

	return string(s)
}

// githubRawURL converts "org/project/path[@branch]" into the corresponding
// raw.githubusercontent.com URL, defaulting to the "main" branch.
func githubRawURL(repository string) string {
	repoParts := strings.Split(repository, "@")
	branch := "main"
	if len(repoParts) > 1 {
		branch = repoParts[1]
	}
	repoPath := strings.Split(repoParts[0], "/")
	org := repoPath[0]
	project := repoPath[1]
	projectPath := strings.Join(repoPath[2:], "/")

	return fmt.Sprintf("https://raw.githubusercontent.com/%s/%s/%s/%s", org, project, branch, projectPath)
}
// removePartialFile deletes a leftover ".partial" download file if it exists.
// A missing file is not an error; a failed removal is logged and returned.
func removePartialFile(tmpFilePath string) error {
	if _, err := os.Stat(tmpFilePath); err != nil {
		// Nothing on disk (or not accessible): nothing to clean up.
		return nil
	}
	xlog.Debug("Removing temporary file", "file", tmpFilePath)
	if err := os.Remove(tmpFilePath); err != nil {
		wrapped := fmt.Errorf("failed to remove temporary download file %s: %v", tmpFilePath, err)
		xlog.Warn("failed to remove temporary download file", "error", wrapped)
		return wrapped
	}
	return nil
}
func calculateHashForPartialFile(file *os.File) (hash.Hash, error) {
hash := sha256.New()
_, err := io.Copy(hash, file)
if err != nil {
return nil, err
}
return hash, nil
}
// checkSeverSupportsRangeHeader issues a HEAD request against the resolved
// URL and reports whether the server advertises byte-range support
// ("Accept-Ranges: bytes"), which enables resuming partial downloads.
// NOTE(review): "Sever" is a typo for "Server" in the name; kept as-is
// because it is called elsewhere in this package.
func (uri URI) checkSeverSupportsRangeHeader() (bool, error) {
	url := uri.ResolveURL()
	resp, err := http.Head(url)
	if err != nil {
		return false, err
	}
	defer resp.Body.Close()

	return resp.Header.Get("Accept-Ranges") == "bytes", nil
}
// DownloadFile is a convenience wrapper around DownloadFileWithContext using
// a background (non-cancellable) context.
func (uri URI) DownloadFile(filePath, sha string, fileN, total int, downloadStatus func(string, string, string, float64)) error {
	return uri.DownloadFileWithContext(context.Background(), filePath, sha, fileN, total, downloadStatus)
}
// DownloadFileWithContext downloads the resource behind the URI into
// filePath, honoring ctx for cancellation. It handles OCI/Ollama artifacts,
// local files and HTTP(S) URLs; resumes interrupted downloads via Range
// requests when the server supports them; verifies an optional sha256
// checksum; and transparently extracts archives next to filePath.
// fileN/total and downloadStatus drive multi-file progress reporting.
func (uri URI) DownloadFileWithContext(ctx context.Context, filePath, sha string, fileN, total int, downloadStatus func(string, string, string, float64)) error {
	url := uri.ResolveURL()
	if uri.LooksLikeOCI() {
		// Only Ollama wants to download to the file, for the rest, we want to download to the directory
		// so we check if filepath has any extension, otherwise we assume it's a directory
		if filepath.Ext(filePath) != "" && !strings.HasPrefix(url, OllamaPrefix) {
			filePath = filepath.Dir(filePath)
		}

		progressStatus := func(desc ocispec.Descriptor) io.Writer {
			return &progressWriter{
				fileName:       filePath,
				total:          desc.Size,
				hash:           sha256.New(),
				fileNo:         fileN,
				totalFiles:     total,
				downloadStatus: downloadStatus,
			}
		}

		if url, ok := strings.CutPrefix(url, OllamaPrefix); ok {
			return oci.OllamaFetchModel(ctx, url, filePath, progressStatus)
		}

		if url, ok := strings.CutPrefix(url, OCIFilePrefix); ok {
			// Open the tarball
			img, err := tarball.ImageFromPath(url, nil)
			if err != nil {
				return fmt.Errorf("failed to open tarball: %s", err.Error())
			}

			return oci.ExtractOCIImage(ctx, img, url, filePath, downloadStatus)
		}

		url = strings.TrimPrefix(url, OCIPrefix)

		img, err := oci.GetImage(url, "", nil, nil)
		if err != nil {
			return fmt.Errorf("failed to get image %q: %v", url, err)
		}

		return oci.ExtractOCIImage(ctx, img, url, filePath, downloadStatus)
	}

	// Check for cancellation before starting
	select {
	case <-ctx.Done():
		return ctx.Err()
	default:
	}

	// Check if the file already exists
	_, err := os.Stat(filePath)
	if err == nil {
		xlog.Debug("[downloader] File already exists", "filePath", filePath)

		// File exists, check SHA
		if sha != "" {
			// Verify SHA
			calculatedSHA, err := calculateSHA(filePath)
			if err != nil {
				return fmt.Errorf("failed to calculate SHA for file %q: %v", filePath, err)
			}
			if calculatedSHA == sha {
				// SHA matches, skip downloading
				xlog.Debug("File already exists and matches the SHA. Skipping download", "file", filePath)
				return nil
			}
			// SHA doesn't match, delete the file and download again
			err = os.Remove(filePath)
			if err != nil {
				return fmt.Errorf("failed to remove existing file %q: %v", filePath, err)
			}
			xlog.Debug("Removed file (SHA doesn't match)", "file", filePath)

		} else {
			// SHA is missing, skip downloading
			xlog.Debug("File already exists. Skipping download", "file", filePath)
			return nil
		}
	} else if !os.IsNotExist(err) || !URI(url).LooksLikeHTTPURL() {
		// Error occurred while checking file existence
		return fmt.Errorf("file %s does not exist (%v) and %s does not look like an HTTP URL", filePath, err, url)
	}

	xlog.Info("Downloading", "url", url)

	req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
	if err != nil {
		return fmt.Errorf("failed to create request for %q: %v", filePath, err)
	}

	// save partial download to dedicated file; resume it with a Range header
	// when the server supports byte ranges, otherwise start from scratch
	tmpFilePath := filePath + ".partial"
	tmpFileInfo, err := os.Stat(tmpFilePath)
	if err == nil && uri.LooksLikeHTTPURL() {
		support, err := uri.checkSeverSupportsRangeHeader()
		if err != nil {
			return fmt.Errorf("failed to check if uri server supports range header: %v", err)
		}
		if support {
			startPos := tmpFileInfo.Size()
			req.Header.Set("Range", fmt.Sprintf("bytes=%d-", startPos))
		} else {
			err := removePartialFile(tmpFilePath)
			if err != nil {
				return err
			}
		}
	} else if !errors.Is(err, os.ErrNotExist) {
		return fmt.Errorf("failed to check file %q existence: %v", filePath, err)
	}

	// The source is either a local file (file:// URIs or an existing path)
	// or the HTTP response body.
	var source io.ReadCloser
	var contentLength int64

	if _, e := os.Stat(uri.ResolveURL()); strings.HasPrefix(string(uri), LocalPrefix) || e == nil {
		file, err := os.Open(uri.ResolveURL())
		if err != nil {
			return fmt.Errorf("failed to open file %q: %v", uri.ResolveURL(), err)
		}
		l, err := file.Stat()
		if err != nil {
			return fmt.Errorf("failed to get file size %q: %v", uri.ResolveURL(), err)
		}
		source = file
		contentLength = l.Size()
	} else {
		// Start the request
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			// Check if error is due to context cancellation
			if errors.Is(err, context.Canceled) {
				// Clean up partial file on cancellation
				removePartialFile(tmpFilePath)
				return err
			}
			return fmt.Errorf("failed to download file %q: %v", filePath, err)
		}
		// The body is closed through `defer source.Close()` below, hence no
		// defer here.
		//defer resp.Body.Close()

		if resp.StatusCode >= 400 {
			return fmt.Errorf("failed to download url %q, invalid status code %d", url, resp.StatusCode)
		}
		source = resp.Body
		contentLength = resp.ContentLength
	}
	defer source.Close()

	// Create parent directory
	err = os.MkdirAll(filepath.Dir(filePath), 0750)
	if err != nil {
		return fmt.Errorf("failed to create parent directory for file %q: %v", filePath, err)
	}

	// Create and write file (O_APPEND so a resumed download continues where
	// the partial file left off)
	outFile, err := os.OpenFile(tmpFilePath, os.O_APPEND|os.O_RDWR|os.O_CREATE, 0644)
	if err != nil {
		return fmt.Errorf("failed to create / open file %q: %v", tmpFilePath, err)
	}
	defer outFile.Close()

	// Seed the checksum with whatever is already in the partial file.
	// NOTE(review): the underlying error is dropped here — consider wrapping
	// it with %v/%w.
	hash, err := calculateHashForPartialFile(outFile)
	if err != nil {
		return fmt.Errorf("failed to calculate hash for partial file")
	}
	progress := &progressWriter{
		fileName:       tmpFilePath,
		total:          contentLength,
		hash:           hash,
		fileNo:         fileN,
		totalFiles:     total,
		downloadStatus: downloadStatus,
		ctx:            ctx,
	}
	_, err = xio.Copy(ctx, io.MultiWriter(outFile, progress), source)
	if err != nil {
		// Check if error is due to context cancellation
		if errors.Is(err, context.Canceled) {
			// Clean up partial file on cancellation
			removePartialFile(tmpFilePath)
			return err
		}
		return fmt.Errorf("failed to write file %q: %v", filePath, err)
	}

	// Check for cancellation before finalizing
	select {
	case <-ctx.Done():
		removePartialFile(tmpFilePath)
		return ctx.Err()
	default:
	}

	err = os.Rename(tmpFilePath, filePath)
	if err != nil {
		return fmt.Errorf("failed to rename temporary file %s -> %s: %v", tmpFilePath, filePath, err)
	}

	if sha != "" {
		// Verify SHA
		calculatedSHA := fmt.Sprintf("%x", progress.hash.Sum(nil))
		if calculatedSHA != sha {
			xlog.Debug("SHA mismatch for file", "file", filePath, "calculated", calculatedSHA, "metadata", sha)
			return fmt.Errorf("SHA mismatch for file %q ( calculated: %s != metadata: %s )", filePath, calculatedSHA, sha)
		}
	} else {
		xlog.Debug("SHA missing. Skipping validation", "file", filePath)
	}

	xlog.Info("File downloaded and verified", "file", filePath)
	if utils.IsArchive(filePath) {
		basePath := filepath.Dir(filePath)
		xlog.Info("File is an archive, uncompressing", "file", filePath, "basePath", basePath)
		if err := utils.ExtractArchive(filePath, basePath); err != nil {
			xlog.Debug("Failed decompressing", "file", filePath, "error", err)
			return err
		}
	}
	return nil
}
// formatBytes renders a byte count in human-readable binary units:
// plain "N B" below 1 KiB, otherwise one decimal place with a KiB..EiB
// suffix (e.g. 1536 -> "1.5 KiB").
func formatBytes(size int64) string {
	const unit = 1024
	if size < unit {
		return strconv.FormatInt(size, 10) + " B"
	}
	divisor, suffixIdx := int64(unit), 0
	for remaining := size / unit; remaining >= unit; remaining /= unit {
		divisor *= unit
		suffixIdx++
	}
	return fmt.Sprintf("%.1f %ciB", float64(size)/float64(divisor), "KMGTPE"[suffixIdx])
}
func calculateSHA(filePath string) (string, error) {
file, err := os.Open(filePath)
if err != nil {
return "", err
}
defer file.Close()
hash := sha256.New()
if _, err := io.Copy(hash, file); err != nil {
return "", err
}
return fmt.Sprintf("%x", hash.Sum(nil)), nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/downloader/uri_test.go | pkg/downloader/uri_test.go | package downloader_test
import (
"crypto/rand"
"crypto/sha256"
"fmt"
"net/http"
"net/http/httptest"
"os"
"regexp"
"strconv"
. "github.com/mudler/LocalAI/pkg/downloader"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs verifying that URI.ResolveURL (via ReadWithCallback) turns the
// supported github: forms — and plain raw URLs — into the expected
// raw.githubusercontent.com addresses.
var _ = Describe("Gallery API tests", func() {
	Context("URI", func() {
		It("parses github with a branch", func() {
			uri := URI("github:go-skynet/model-gallery/gpt4all-j.yaml")
			Expect(
				uri.ReadWithCallback("", func(url string, i []byte) error {
					Expect(url).To(Equal("https://raw.githubusercontent.com/go-skynet/model-gallery/main/gpt4all-j.yaml"))
					return nil
				}),
			).ToNot(HaveOccurred())
		})
		It("parses github without a branch", func() {
			uri := URI("github:go-skynet/model-gallery/gpt4all-j.yaml@main")

			Expect(
				uri.ReadWithCallback("", func(url string, i []byte) error {
					Expect(url).To(Equal("https://raw.githubusercontent.com/go-skynet/model-gallery/main/gpt4all-j.yaml"))
					return nil
				}),
			).ToNot(HaveOccurred())
		})
		It("parses github with urls", func() {
			uri := URI("https://raw.githubusercontent.com/go-skynet/model-gallery/main/gpt4all-j.yaml")
			Expect(
				uri.ReadWithCallback("", func(url string, i []byte) error {
					Expect(url).To(Equal("https://raw.githubusercontent.com/go-skynet/model-gallery/main/gpt4all-j.yaml"))
					return nil
				}),
			).ToNot(HaveOccurred())
		})
	})
})
// RangeHeaderError signals a malformed or ill-formatted HTTP Range header.
type RangeHeaderError struct {
	msg string
}

// Error implements the error interface.
func (e *RangeHeaderError) Error() string {
	return e.msg
}
// Download tests exercise URI.DownloadFile against a local mock HTTP server:
// a plain download, resuming from an existing ".partial" file when the server
// advertises Range support, and restarting from zero when it does not.
var _ = Describe("Download Test", func() {
	var mockData []byte    // random payload served by the mock server
	var mockDataSha string // hex SHA-256 of mockData, passed to DownloadFile for verification
	var filePath string    // destination path for the downloaded file

	// extractRangeHeader parses "bytes=start-end" (end optional) into a
	// half-open [start, end) pair; end is -1 when unbounded.
	extractRangeHeader := func(rangeString string) (int, int, error) {
		regex := regexp.MustCompile(`^bytes=(\d+)-(\d+|)$`)
		matches := regex.FindStringSubmatch(rangeString)
		rangeErr := RangeHeaderError{msg: "invalid / ill-formatted range"}
		if matches == nil {
			return -1, -1, &rangeErr
		}
		startPos, err := strconv.Atoi(matches[1])
		if err != nil {
			return -1, -1, err
		}
		endPos := -1
		if matches[2] != "" {
			endPos, err = strconv.Atoi(matches[2])
			if err != nil {
				return -1, -1, err
			}
			endPos += 1 // because range is inclusive in rangeString
		}
		return startPos, endPos, nil
	}

	// getMockServer serves mockData over GET, optionally honoring Range
	// requests (advertised via Accept-Ranges on HEAD responses).
	getMockServer := func(supportsRangeHeader bool) *httptest.Server {
		mockServer := httptest.NewUnstartedServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			if r.Method != "HEAD" && r.Method != "GET" {
				w.WriteHeader(http.StatusNotFound)
				return
			}
			if r.Method == "HEAD" {
				if supportsRangeHeader {
					w.Header().Add("Accept-Ranges", "bytes")
				}
				w.WriteHeader(http.StatusOK)
				return
			}
			// GET method
			startPos := 0
			endPos := len(mockData)
			var err error
			var respData []byte
			rangeString := r.Header.Get("Range")
			if rangeString != "" {
				startPos, endPos, err = extractRangeHeader(rangeString)
				if err != nil {
					if _, ok := err.(*RangeHeaderError); ok {
						w.WriteHeader(http.StatusBadRequest)
						return
					}
					Expect(err).ToNot(HaveOccurred())
				}
				if endPos == -1 {
					endPos = len(mockData)
				}
				if startPos < 0 || startPos >= len(mockData) || endPos < 0 || endPos > len(mockData) || startPos > endPos {
					w.WriteHeader(http.StatusBadRequest)
					return
				}
			}
			// NOTE(review): range responses are sent with 200 rather than
			// 206 Partial Content; the client under test evidently accepts this.
			respData = mockData[startPos:endPos]
			w.WriteHeader(http.StatusOK)
			w.Write(respData)
		}))
		mockServer.EnableHTTP2 = true
		mockServer.Start()
		return mockServer
	}

	BeforeEach(func() {
		// Fresh 20 kB random payload and its SHA-256 for every spec.
		mockData = make([]byte, 20000)
		_, err := rand.Read(mockData)
		Expect(err).ToNot(HaveOccurred())

		_mockDataSha := sha256.New()
		_, err = _mockDataSha.Write(mockData)
		Expect(err).ToNot(HaveOccurred())
		mockDataSha = fmt.Sprintf("%x", _mockDataSha.Sum(nil))
		// NOTE(review): dir is used one line before its error is asserted;
		// harmless in practice but worth reordering.
		dir, err := os.Getwd()
		filePath = dir + "/my_supercool_model"
		Expect(err).NotTo(HaveOccurred())
	})

	Context("URI DownloadFile", func() {
		It("fetches files from mock server", func() {
			mockServer := getMockServer(true)
			defer mockServer.Close()
			uri := URI(mockServer.URL)
			err := uri.DownloadFile(filePath, mockDataSha, 1, 1, func(s1, s2, s3 string, f float64) {})
			Expect(err).ToNot(HaveOccurred())
		})
		It("resumes partially downloaded files", func() {
			mockServer := getMockServer(true)
			defer mockServer.Close()
			uri := URI(mockServer.URL)
			// Seed a .partial file holding the first half of the payload.
			tmpFilePath := filePath + ".partial"
			file, err := os.OpenFile(tmpFilePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
			Expect(err).ToNot(HaveOccurred())
			_, err = file.Write(mockData[0:10000])
			Expect(err).ToNot(HaveOccurred())
			err = uri.DownloadFile(filePath, mockDataSha, 1, 1, func(s1, s2, s3 string, f float64) {})
			Expect(err).ToNot(HaveOccurred())
		})
		It("restarts download from 0 if server doesn't support Range header", func() {
			mockServer := getMockServer(false)
			defer mockServer.Close()
			uri := URI(mockServer.URL)
			// Seed a .partial file holding the first half of the payload.
			tmpFilePath := filePath + ".partial"
			file, err := os.OpenFile(tmpFilePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
			Expect(err).ToNot(HaveOccurred())
			_, err = file.Write(mockData[0:10000])
			Expect(err).ToNot(HaveOccurred())
			err = uri.DownloadFile(filePath, mockDataSha, 1, 1, func(s1, s2, s3 string, f float64) {})
			Expect(err).ToNot(HaveOccurred())
		})
	})

	AfterEach(func() {
		// Cleanup: remove both the final file and any leftover .partial file.
		os.Remove(filePath)
		os.Remove(filePath + ".partial")
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/downloader/huggingface.go | pkg/downloader/huggingface.go | package downloader
import (
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"strings"
)
// HuggingFaceScanResult mirrors the JSON payload returned by the HuggingFace
// model security-scan API (<endpoint>/api/models/<owner>/<repo>/scan).
type HuggingFaceScanResult struct {
	RepositoryId string `json:"repositoryId"`
	Revision     string `json:"revision"`
	// NOTE: the JSON key really is singular ("hasUnsafeFile") while the Go
	// field name is plural — do not "fix" the tag.
	HasUnsafeFiles      bool     `json:"hasUnsafeFile"`
	ClamAVInfectedFiles []string `json:"clamAVInfectedFiles"`
	DangerousPickles    []string `json:"dangerousPickles"`
	ScansDone           bool     `json:"scansDone"`
}

// ErrNonHuggingFaceFile is returned when a URI does not reference a HuggingFace repository.
var ErrNonHuggingFaceFile = errors.New("not a huggingface repo")

// ErrUnsafeFilesFound is returned (with the scan result) when the scan reports unsafe files.
var ErrUnsafeFilesFound = errors.New("unsafe files found")
// HuggingFaceScan queries the HuggingFace security-scan API for the repository
// referenced by uri. It returns ErrNonHuggingFaceFile when the URI does not
// point at huggingface.co (or the configured HF_ENDPOINT), and
// ErrUnsafeFilesFound — together with the decoded scan result — when the scan
// reports unsafe files.
func HuggingFaceScan(uri URI) (*HuggingFaceScanResult, error) {
	// A HF URL splits as: ["https:", "", host, owner, repo, ...].
	cleanParts := strings.Split(uri.ResolveURL(), "/")
	if len(cleanParts) <= 4 || (cleanParts[2] != "huggingface.co" && cleanParts[2] != HF_ENDPOINT) {
		return nil, ErrNonHuggingFaceFile
	}
	results, err := http.Get(fmt.Sprintf("%s/api/models/%s/%s/scan", HF_ENDPOINT, cleanParts[3], cleanParts[4]))
	if err != nil {
		return nil, err
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer results.Body.Close()
	if results.StatusCode != 200 {
		return nil, fmt.Errorf("unexpected status code during HuggingFaceScan: %d", results.StatusCode)
	}
	scanResult := &HuggingFaceScanResult{}
	bodyBytes, err := io.ReadAll(results.Body)
	if err != nil {
		return nil, err
	}
	if err := json.Unmarshal(bodyBytes, scanResult); err != nil {
		return nil, err
	}
	if scanResult.HasUnsafeFiles {
		return scanResult, ErrUnsafeFilesFound
	}
	return scanResult, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/sound/int16.go | pkg/sound/int16.go | package sound
import (
"encoding/binary"
"math"
)
/*
MIT License
Copyright (c) 2024 Xbozon
*/
// calculateRMS16 calculates the root mean square of the audio buffer for int16 samples.
// CalculateRMS16 calculates the root mean square of the audio buffer for
// int16 samples. An empty buffer yields 0.
func CalculateRMS16(buffer []int16) float64 {
	// BUG FIX: the unguarded version divided by len(buffer) and returned NaN
	// for an empty buffer; report silence (0) instead.
	if len(buffer) == 0 {
		return 0
	}
	var sumSquares float64
	for _, sample := range buffer {
		val := float64(sample) // widen before squaring to avoid int16 overflow
		sumSquares += val * val
	}
	return math.Sqrt(sumSquares / float64(len(buffer)))
}
// ResampleInt16 converts input from inputRate to outputRate using linear
// interpolation. It returns an empty slice when there is nothing to produce.
func ResampleInt16(input []int16, inputRate, outputRate int) []int16 {
	// Resampling ratio: >1 means downsampling, <1 upsampling.
	ratio := float64(inputRate) / float64(outputRate)
	outputLength := int(float64(len(input)) / ratio)
	// BUG FIX: with an empty input (or extreme downsampling) outputLength is 0
	// and the unguarded version indexed output[-1] / input[-1] and panicked.
	if len(input) == 0 || outputLength <= 0 {
		return []int16{}
	}
	output := make([]int16, outputLength)
	// Linear interpolation between the two nearest input samples.
	for i := 0; i < outputLength-1; i++ {
		pos := float64(i) * ratio
		indexBefore := int(pos)
		indexAfter := indexBefore + 1
		if indexAfter >= len(input) {
			indexAfter = len(input) - 1
		}
		frac := pos - float64(indexBefore)
		output[i] = int16((1-frac)*float64(input[indexBefore]) + frac*float64(input[indexAfter]))
	}
	// Handle the last sample explicitly to avoid an out-of-range lookup.
	output[outputLength-1] = input[len(input)-1]
	return output
}
// ConvertInt16ToInt widens every sample in input from int16 to int and
// returns the converted slice.
func ConvertInt16ToInt(input []int16) []int {
	converted := make([]int, 0, len(input))
	for _, sample := range input {
		converted = append(converted, int(sample))
	}
	return converted
}
// BytesToInt16sLE reinterprets a little-endian byte stream as int16 samples.
// It panics when the byte count is odd.
func BytesToInt16sLE(bytes []byte) []int16 {
	if len(bytes)%2 != 0 {
		panic("bytesToInt16sLE: input bytes slice has odd length, must be even")
	}
	samples := make([]int16, len(bytes)/2)
	for i := range samples {
		samples[i] = int16(binary.LittleEndian.Uint16(bytes[2*i:]))
	}
	return samples
}
// Int16toBytesLE serializes int16 samples to a little-endian byte stream
// (two bytes per sample).
func Int16toBytesLE(arr []int16) []byte {
	out := make([]byte, 2*len(arr))
	for i, v := range arr {
		binary.LittleEndian.PutUint16(out[2*i:], uint16(v))
	}
	return out
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/sound/float32.go | pkg/sound/float32.go | package sound
import (
"encoding/binary"
"math"
)
// BytesFloat32 decodes the first four bytes of the slice as a little-endian
// IEEE-754 float32.
func BytesFloat32(bytes []byte) float32 {
	return math.Float32frombits(binary.LittleEndian.Uint32(bytes))
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/oci/ollama.go | pkg/oci/ollama.go | package oci
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
)
// Manifest models a Docker image manifest (schema v2) as served by the
// Ollama registry's /v2/<repo>/<name>/manifests/<tag> endpoint.
type Manifest struct {
	SchemaVersion int           `json:"schemaVersion"`
	MediaType     string        `json:"mediaType"`
	Config        Config        `json:"config"`
	Layers        []LayerDetail `json:"layers"`
}

// Config describes the manifest's "config" section: the config blob's digest,
// media type, and size in bytes.
type Config struct {
	Digest    string `json:"digest"`
	MediaType string `json:"mediaType"`
	Size      int    `json:"size"`
}

// LayerDetail describes one entry of the manifest's "layers" array.
type LayerDetail struct {
	Digest    string `json:"digest"`
	MediaType string `json:"mediaType"`
	Size      int    `json:"size"`
}
// OllamaModelManifest fetches the Docker v2 manifest for an Ollama model
// reference such as "gemma:2b" or "foobar/gemma:2b" from registry.ollama.ai.
// The tag defaults to "latest" and the repository to "library" when omitted.
func OllamaModelManifest(image string) (*Manifest, error) {
	tag, repository, image := ParseImageParts(image)

	// e.g. https://registry.ollama.ai/v2/library/llama3/manifests/latest
	req, err := http.NewRequest("GET", "https://registry.ollama.ai/v2/"+repository+"/"+image+"/manifests/"+tag, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Accept", "application/vnd.docker.distribution.manifest.v2+json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	// BUG FIX: the response body was never closed, leaking the connection.
	defer resp.Body.Close()
	// BUG FIX: a non-2xx response previously fell through to JSON decoding of
	// an error payload; fail fast with an explicit error instead.
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return nil, fmt.Errorf("unexpected status code fetching manifest: %d", resp.StatusCode)
	}

	var manifest Manifest
	if err := json.NewDecoder(resp.Body).Decode(&manifest); err != nil {
		return nil, err
	}
	return &manifest, nil
}
// OllamaModelBlob resolves the digest of the model-weights layer (media type
// "application/vnd.ollama.image.model") for the given image reference.
// It returns an empty string (and nil error) when no such layer exists.
func OllamaModelBlob(image string) (string, error) {
	manifest, err := OllamaModelManifest(image)
	if err != nil {
		return "", err
	}
	for _, l := range manifest.Layers {
		if l.MediaType != "application/vnd.ollama.image.model" {
			continue
		}
		return l.Digest, nil
	}
	return "", nil
}
// OllamaFetchModel downloads the model-weights blob of an Ollama image into
// the file at output, optionally reporting progress through statusWriter.
func OllamaFetchModel(ctx context.Context, image string, output string, statusWriter func(ocispec.Descriptor) io.Writer) error {
	_, repo, name := ParseImageParts(image)
	blobID, err := OllamaModelBlob(image)
	if err != nil {
		return err
	}
	ref := fmt.Sprintf("registry.ollama.ai/%s/%s", repo, name)
	return FetchImageBlob(ctx, ref, blobID, output, statusWriter)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/oci/tarball.go | pkg/oci/tarball.go | package oci
import (
"io"
"os"
containerdCompression "github.com/containerd/containerd/archive/compression"
"github.com/google/go-containerregistry/pkg/name"
v1 "github.com/google/go-containerregistry/pkg/v1"
"github.com/google/go-containerregistry/pkg/v1/empty"
"github.com/google/go-containerregistry/pkg/v1/mutate"
"github.com/google/go-containerregistry/pkg/v1/tarball"
"github.com/pkg/errors"
)
// imageFromTar assembles a single-layer image for the given architecture/OS,
// using opener to stream the layer contents, and returns the parsed reference
// together with the built image.
func imageFromTar(imagename, architecture, OS string, opener func() (io.ReadCloser, error)) (name.Reference, v1.Image, error) {
	ref, err := name.ParseReference(imagename)
	if err != nil {
		return nil, nil, err
	}
	tarLayer, err := tarball.LayerFromOpener(opener)
	if err != nil {
		return nil, nil, err
	}

	// Start from the empty image and stamp the target platform on its config.
	base := empty.Image
	cfg, err := base.ConfigFile()
	if err != nil {
		return nil, nil, err
	}
	cfg.Architecture = architecture
	cfg.OS = OS
	if base, err = mutate.ConfigFile(base, cfg); err != nil {
		return nil, nil, err
	}

	img, err := mutate.Append(base, mutate.Addendum{
		Layer: tarLayer,
		History: v1.History{
			CreatedBy: "localai",
			Comment:   "Custom image",
		},
	})
	if err != nil {
		return nil, nil, err
	}
	return ref, img, nil
}
// CreateTar builds an OCI image tarball at dstimageTar from a (possibly
// compressed) plain tarball srctar, tagged as imagename for the given
// architecture and OS.
func CreateTar(srctar, dstimageTar, imagename, architecture, OS string) error {
	out, err := os.Create(dstimageTar)
	if err != nil {
		return errors.Wrap(err, "Cannot create "+dstimageTar)
	}
	defer out.Close()

	// The opener re-opens and transparently decompresses the source tarball
	// each time the layer is read.
	open := func() (io.ReadCloser, error) {
		src, err := os.Open(srctar)
		if err != nil {
			return nil, errors.Wrap(err, "Cannot open "+srctar)
		}
		stream, err := containerdCompression.DecompressStream(src)
		if err != nil {
			return nil, errors.Wrap(err, "Cannot open "+srctar)
		}
		return stream, nil
	}

	ref, img, err := imageFromTar(imagename, architecture, OS, open)
	if err != nil {
		return err
	}
	// NOTE: we might also stream this back to the daemon with daemon.Write(tag, img).
	return tarball.Write(ref, img, out)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/oci/ollama_test.go | pkg/oci/ollama_test.go | package oci_test
import (
"context"
"os"
. "github.com/mudler/LocalAI/pkg/oci" // Update with your module path
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Network-dependent smoke test: pulls the "gemma:2b" model-weights blob from
// registry.ollama.ai into a temporary file. Requires internet access.
var _ = Describe("OCI", func() {
	Context("ollama", func() {
		It("pulls model files", func() {
			f, err := os.CreateTemp("", "ollama")
			Expect(err).NotTo(HaveOccurred())
			defer os.RemoveAll(f.Name())
			err = OllamaFetchModel(context.TODO(), "gemma:2b", f.Name(), nil)
			Expect(err).NotTo(HaveOccurred())
		})
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/oci/oci_suite_test.go | pkg/oci/oci_suite_test.go | package oci_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestOCI wires the Ginkgo specs of this package into the standard
// `go test` runner.
func TestOCI(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "OCI test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/oci/image.go | pkg/oci/image.go | package oci
import (
"context"
"errors"
"fmt"
"io"
"net/http"
"os"
"runtime"
"strconv"
"strings"
"syscall"
"time"
"github.com/containerd/containerd/archive"
registrytypes "github.com/docker/docker/api/types/registry"
"github.com/google/go-containerregistry/pkg/authn"
"github.com/google/go-containerregistry/pkg/logs"
"github.com/google/go-containerregistry/pkg/name"
v1 "github.com/google/go-containerregistry/pkg/v1"
"github.com/google/go-containerregistry/pkg/v1/mutate"
"github.com/google/go-containerregistry/pkg/v1/remote"
"github.com/google/go-containerregistry/pkg/v1/remote/transport"
"github.com/google/go-containerregistry/pkg/v1/tarball"
"github.com/mudler/LocalAI/pkg/xio"
)
// ref: https://github.com/mudler/luet/blob/master/pkg/helpers/docker/docker.go#L117

// staticAuth adapts a fixed Docker registry AuthConfig to the
// go-containerregistry authn.Authenticator interface.
type staticAuth struct {
	auth *registrytypes.AuthConfig
}

// Authorization returns the wrapped credentials, or (nil, nil) when no
// auth was configured.
func (s staticAuth) Authorization() (*authn.AuthConfig, error) {
	if s.auth == nil {
		return nil, nil
	}
	return &authn.AuthConfig{
		Username:      s.auth.Username,
		Password:      s.auth.Password,
		Auth:          s.auth.Auth,
		IdentityToken: s.auth.IdentityToken,
		RegistryToken: s.auth.RegistryToken,
	}, nil
}
// defaultRetryBackoff retries up to 3 times with exponential backoff:
// 1s base duration, factor 3, 10% jitter.
var defaultRetryBackoff = remote.Backoff{
	Duration: 1.0 * time.Second,
	Factor:   3.0,
	Jitter:   0.1,
	Steps:    3,
}

// defaultRetryPredicate retries only on transient network failures:
// (unexpected) EOFs, broken pipes, connection resets, and refused connections.
var defaultRetryPredicate = func(err error) bool {
	if err == nil {
		return false
	}
	if errors.Is(err, io.ErrUnexpectedEOF) || errors.Is(err, io.EOF) || errors.Is(err, syscall.EPIPE) || errors.Is(err, syscall.ECONNRESET) || strings.Contains(err.Error(), "connection refused") {
		logs.Warn.Printf("retrying %v", err)
		return true
	}
	return false
}
// progressWriter is an io.Writer that counts the bytes written through it and
// reports progress via the downloadStatus callback on every Write.
type progressWriter struct {
	written  int64  // bytes written so far
	total    int64  // expected total; <= 0 when unknown
	fileName string // label passed through to the callback
	// downloadStatus receives (label, written, total, percentage).
	downloadStatus func(string, string, string, float64)
}
// formatBytes renders a byte count using human-readable binary units
// (B, KiB, MiB, GiB, ...), with one decimal place above 1 KiB.
func formatBytes(bytes int64) string {
	const unit = 1024
	if bytes < unit {
		return strconv.FormatInt(bytes, 10) + " B"
	}
	exp := 0
	div := int64(unit)
	for reduced := bytes / unit; reduced >= unit; reduced /= unit {
		div *= unit
		exp++
	}
	return fmt.Sprintf("%.1f %ciB", float64(bytes)/float64(div), "KMGTPE"[exp])
}
// Write implements io.Writer: it accumulates the byte count and emits one
// progress update per call. It never returns an error.
func (pw *progressWriter) Write(p []byte) (int, error) {
	n := len(p)
	pw.written += int64(n)
	if pw.total <= 0 {
		// Total size unknown: report the running count without a percentage.
		pw.downloadStatus(pw.fileName, formatBytes(pw.written), "", 0)
		return n, nil
	}
	pct := 100 * float64(pw.written) / float64(pw.total)
	pw.downloadStatus(pw.fileName, formatBytes(pw.written), formatBytes(pw.total), pct)
	return n, nil
}
// ExtractOCIImage will extract a given targetImage into a given targetDestination
func ExtractOCIImage(ctx context.Context, img v1.Image, imageRef string, targetDestination string, downloadStatus func(string, string, string, float64)) error {
	// Stage the image as an uncompressed tar in a temporary file first.
	tmpTar, err := os.CreateTemp("", "localai-oci-*.tar")
	if err != nil {
		return fmt.Errorf("failed to create temporary tar file: %v", err)
	}
	defer os.Remove(tmpTar.Name())
	defer tmpTar.Close()

	// Download with progress tracking, then unpack into the destination.
	if err := DownloadOCIImageTar(ctx, img, imageRef, tmpTar.Name(), downloadStatus); err != nil {
		return fmt.Errorf("failed to download image tar: %v", err)
	}
	if err := ExtractOCIImageFromTar(ctx, tmpTar.Name(), imageRef, targetDestination, downloadStatus); err != nil {
		return fmt.Errorf("failed to extract image tar: %v", err)
	}
	return nil
}
// ParseImageParts splits an image reference such as "repo/name:tag" into its
// tag, repository, and image-name components. Missing parts default to tag
// "latest" and repository "library" (e.g. "gemma:2b" -> "2b", "library", "gemma").
func ParseImageParts(image string) (tag, repository, dstimage string) {
	tag = "latest"
	repository = "library"
	if name, t, ok := strings.Cut(image, ":"); ok {
		image = name
		tag = t
	}
	// BUG FIX: the original called strings.Contains("/", image) — arguments
	// swapped — so a "repo/name" prefix was never detected (and an empty image
	// string panicked on parts[1]).
	if repo, name, ok := strings.Cut(image, "/"); ok {
		repository = repo
		image = name
	}
	dstimage = image
	return tag, repository, dstimage
}
// GetImage resolves targetImage to a v1.Image for the requested platform
// (defaulting to the host's GOOS/GOARCH), pulling through a retrying
// transport. When auth is nil the default keychain is used:
// https://github.com/google/go-containerregistry/tree/main/pkg/authn#tldr-for-consumers-of-this-package
// NOTE(review): the original comment claimed a local-daemon-first lookup, but
// only a remote pull is visible here — confirm before relying on that.
func GetImage(targetImage, targetPlatform string, auth *registrytypes.AuthConfig, t http.RoundTripper) (v1.Image, error) {
	var platform *v1.Platform
	var image v1.Image
	var err error

	// Pick the requested platform, or default to the host's.
	if targetPlatform != "" {
		platform, err = v1.ParsePlatform(targetPlatform)
		if err != nil {
			return image, err
		}
	} else {
		platform, err = v1.ParsePlatform(fmt.Sprintf("%s/%s", runtime.GOOS, runtime.GOARCH))
		if err != nil {
			return image, err
		}
	}

	ref, err := name.ParseReference(targetImage)
	if err != nil {
		return image, err
	}

	if t == nil {
		t = http.DefaultTransport
	}
	// Wrap the transport with retries for transient network failures.
	tr := transport.NewRetry(t,
		transport.WithRetryBackoff(defaultRetryBackoff),
		transport.WithRetryPredicate(defaultRetryPredicate),
	)

	opts := []remote.Option{
		remote.WithTransport(tr),
		remote.WithPlatform(*platform),
	}
	// Explicit credentials win; otherwise fall back to the default keychain.
	if auth != nil {
		opts = append(opts, remote.WithAuth(staticAuth{auth}))
	} else {
		opts = append(opts, remote.WithAuthFromKeychain(authn.DefaultKeychain))
	}

	image, err = remote.Image(ref, opts...)
	return image, err
}
// GetOCIImageSize returns the total compressed size in bytes of all layers of
// targetImage for the given platform (see GetImage for auth/transport rules).
func GetOCIImageSize(targetImage, targetPlatform string, auth *registrytypes.AuthConfig, t http.RoundTripper) (int64, error) {
	var size int64

	img, err := GetImage(targetImage, targetPlatform, auth, t)
	if err != nil {
		return size, err
	}
	// BUG FIX: errors from Layers() and Size() were silently discarded, which
	// could report a partial (or zero) size as success.
	layers, err := img.Layers()
	if err != nil {
		return size, err
	}
	for _, layer := range layers {
		s, err := layer.Size()
		if err != nil {
			return size, err
		}
		size += s
	}
	return size, nil
}
// DownloadOCIImageTar downloads the compressed layers of an image and then creates an uncompressed tar
// at tarFilePath. Downloading the compressed layers first gives accurate
// progress reporting and allows extraction to happen later, offline.
func DownloadOCIImageTar(ctx context.Context, img v1.Image, imageRef string, tarFilePath string, downloadStatus func(string, string, string, float64)) error {
	// Get layers to calculate total compressed size for estimation
	layers, err := img.Layers()
	if err != nil {
		return fmt.Errorf("failed to get layers: %v", err)
	}

	// Calculate total compressed size for progress tracking
	var totalCompressedSize int64
	for _, layer := range layers {
		size, err := layer.Size()
		if err != nil {
			return fmt.Errorf("failed to get layer size: %v", err)
		}
		totalCompressedSize += size
	}

	// Create a temporary directory to store the compressed layers
	tmpDir, err := os.MkdirTemp("", "localai-oci-layers-*")
	if err != nil {
		return fmt.Errorf("failed to create temporary directory: %v", err)
	}
	defer os.RemoveAll(tmpDir)

	// Download all compressed layers with progress tracking
	var downloadedLayers []v1.Layer
	var downloadedSize int64

	// Extract image name from the reference for display
	imageName := imageRef

	for i, layer := range layers {
		layerSize, err := layer.Size()
		if err != nil {
			return fmt.Errorf("failed to get layer size: %v", err)
		}

		// Create a temporary file for this layer
		layerFile := fmt.Sprintf("%s/layer-%d.tar.gz", tmpDir, i)
		file, err := os.Create(layerFile)
		if err != nil {
			return fmt.Errorf("failed to create layer file: %v", err)
		}

		// Tee the download through a progressWriter so progress is reported
		// against the total compressed size of all layers.
		var writer io.Writer = file
		if downloadStatus != nil {
			writer = io.MultiWriter(file, &progressWriter{
				total:          totalCompressedSize,
				fileName:       fmt.Sprintf("Downloading %d/%d %s", i+1, len(layers), imageName),
				downloadStatus: downloadStatus,
			})
		}

		// Download the compressed layer
		layerReader, err := layer.Compressed()
		if err != nil {
			file.Close()
			return fmt.Errorf("failed to get compressed layer: %v", err)
		}

		_, err = xio.Copy(ctx, writer, layerReader)
		// Closed explicitly (not deferred) so each iteration releases its file.
		file.Close()
		if err != nil {
			return fmt.Errorf("failed to download layer %d: %v", i, err)
		}

		// Re-load the layer from its on-disk copy
		downloadedLayer, err := tarball.LayerFromFile(layerFile)
		if err != nil {
			return fmt.Errorf("failed to load downloaded layer: %v", err)
		}
		downloadedLayers = append(downloadedLayers, downloadedLayer)
		downloadedSize += layerSize
	}

	// Create a local image from the downloaded layers.
	// NOTE(review): this appends the on-disk copies to img, which already
	// carries the same layers — confirm mutate.Extract does not materialize
	// them twice in the resulting tar.
	localImg, err := mutate.AppendLayers(img, downloadedLayers...)
	if err != nil {
		return fmt.Errorf("failed to create local image: %v", err)
	}

	// Now extract the uncompressed tar from the local image
	tarFile, err := os.Create(tarFilePath)
	if err != nil {
		return fmt.Errorf("failed to create tar file: %v", err)
	}
	defer tarFile.Close()

	// Extract uncompressed tar from local image
	extractReader := mutate.Extract(localImg)
	_, err = xio.Copy(ctx, tarFile, extractReader)
	if err != nil {
		return fmt.Errorf("failed to extract uncompressed tar: %v", err)
	}

	return nil
}
// ExtractOCIImageFromTar extracts an image from a previously downloaded tar file
func ExtractOCIImageFromTar(ctx context.Context, tarFilePath, imageRef, targetDestination string, downloadStatus func(string, string, string, float64)) error {
	src, err := os.Open(tarFilePath)
	if err != nil {
		return fmt.Errorf("failed to open tar file: %v", err)
	}
	defer src.Close()

	// File size drives the extraction progress percentage.
	info, err := src.Stat()
	if err != nil {
		return fmt.Errorf("failed to get file info: %v", err)
	}

	var in io.Reader = src
	if downloadStatus != nil {
		// Tee all reads through a progressWriter so callers see extraction progress.
		in = io.TeeReader(src, &progressWriter{
			total:          info.Size(),
			fileName:       fmt.Sprintf("Extracting %s", imageRef),
			downloadStatus: downloadStatus,
		})
	}

	_, err = archive.Apply(ctx, targetDestination, in, archive.WithNoSameOwner())
	return err
}
// GetOCIImageUncompressedSize returns the total uncompressed size of an image.
// Note: layer.Size() reports the compressed size, so this is an approximation
// (a lower bound) of the true uncompressed size.
func GetOCIImageUncompressedSize(targetImage, targetPlatform string, auth *registrytypes.AuthConfig, t http.RoundTripper) (int64, error) {
	var totalSize int64

	img, err := GetImage(targetImage, targetPlatform, auth, t)
	if err != nil {
		return totalSize, err
	}
	layers, err := img.Layers()
	if err != nil {
		return totalSize, err
	}
	for _, l := range layers {
		// Use compressed size as an approximation since uncompressed size is not directly available
		sz, err := l.Size()
		if err != nil {
			return totalSize, err
		}
		totalSize += sz
	}
	return totalSize, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/oci/image_test.go | pkg/oci/image_test.go | package oci_test
import (
"context"
"os"
"runtime"
. "github.com/mudler/LocalAI/pkg/oci" // Update with your module path
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Network-dependent integration test: pulls the "alpine" image, checks that a
// non-zero size is reported, and extracts it into a temporary directory.
// Skipped on darwin.
var _ = Describe("OCI", func() {
	Context("when template is loaded successfully", func() {
		It("should evaluate the template correctly", func() {
			if runtime.GOOS == "darwin" {
				Skip("Skipping test on darwin")
			}
			imageName := "alpine"

			img, err := GetImage(imageName, "", nil, nil)
			Expect(err).NotTo(HaveOccurred())

			size, err := GetOCIImageSize(imageName, "", nil, nil)
			Expect(err).NotTo(HaveOccurred())
			Expect(size).ToNot(Equal(int64(0)))

			// Create tempdir
			dir, err := os.MkdirTemp("", "example")
			Expect(err).NotTo(HaveOccurred())
			defer os.RemoveAll(dir)

			err = ExtractOCIImage(context.TODO(), img, imageName, dir, nil)
			Expect(err).NotTo(HaveOccurred())
		})
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/oci/blob_test.go | pkg/oci/blob_test.go | package oci_test
import (
"context"
"os"
. "github.com/mudler/LocalAI/pkg/oci" // Update with your module path
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Network-dependent smoke test: fetches a known gemma blob by digest from
// registry.ollama.ai into a temporary file. Requires internet access.
var _ = Describe("OCI", func() {
	Context("pulling images", func() {
		It("should fetch blobs correctly", func() {
			f, err := os.CreateTemp("", "ollama")
			Expect(err).NotTo(HaveOccurred())
			defer os.RemoveAll(f.Name())
			err = FetchImageBlob(context.TODO(), "registry.ollama.ai/library/gemma", "sha256:c1864a5eb19305c40519da12cc543519e48a0697ecd30e15d5ac228644957d12", f.Name(), nil)
			Expect(err).NotTo(HaveOccurred())
		})
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/oci/blob.go | pkg/oci/blob.go | package oci
import (
"context"
"fmt"
"io"
"os"
"github.com/mudler/LocalAI/pkg/xio"
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
oras "oras.land/oras-go/v2"
"oras.land/oras-go/v2/registry/remote"
)
// FetchImageBlob downloads the blob identified by reference (a digest such as
// "sha256:...") from repository r into the file dst. When statusReader is
// non-nil, the downloaded bytes are also streamed to the writer it returns
// for the blob's descriptor (e.g. for progress reporting).
func FetchImageBlob(ctx context.Context, r, reference, dst string, statusReader func(ocispec.Descriptor) io.Writer) error {
	// 0. Create the output file.
	fs, err := os.Create(dst)
	if err != nil {
		return err
	}
	defer fs.Close()

	// 1. Connect to the remote repository.
	repo, err := remote.NewRepository(r)
	if err != nil {
		return fmt.Errorf("failed to create repository: %v", err)
	}
	repo.SkipReferrersGC = true

	// https://github.com/oras-project/oras/blob/main/cmd/oras/internal/option/remote.go#L364
	// https://github.com/oras-project/oras/blob/main/cmd/oras/root/blob/fetch.go#L136
	desc, reader, err := oras.Fetch(ctx, repo.Blobs(), reference, oras.DefaultFetchOptions)
	if err != nil {
		return fmt.Errorf("failed to fetch image: %v", err)
	}
	// BUG FIX: the fetched ReadCloser was never closed, leaking the connection.
	defer reader.Close()

	// 2. Stream the blob to the file (and, optionally, the status writer).
	var w io.Writer = fs
	if statusReader != nil {
		w = io.MultiWriter(fs, statusReader(desc))
	}
	if _, err := xio.Copy(ctx, w, reader); err != nil {
		return err
	}
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/function_structure.go | pkg/functions/function_structure.go | package functions
import (
"encoding/json"
"github.com/mudler/LocalAI/pkg/functions/grammars"
)
// Item is a single JSON-schema alternative: its "type" plus a free-form
// property map.
type Item struct {
	Type       string                 `json:"type"`
	Properties map[string]interface{} `json:"properties"`
}

// JSONFunctionStructure is a JSON schema describing the shape of a function
// call, expressed as oneOf/anyOf alternatives plus optional shared $defs.
type JSONFunctionStructure struct {
	OneOf []Item                 `json:"oneOf,omitempty"`
	AnyOf []Item                 `json:"anyOf,omitempty"`
	Defs  map[string]interface{} `json:"$defs,omitempty"`
}
// Grammar serializes the structure to JSON and converts it into a grammar
// string using the converter selected by the supplied options.
func (j JSONFunctionStructure) Grammar(options ...func(*grammars.GrammarOption)) (string, error) {
	opts := &grammars.GrammarOption{}
	opts.Apply(options...)

	payload, err := json.Marshal(j)
	if err != nil {
		return "", err
	}
	return NewSchemaConverter(*opts).GrammarFromBytes(payload, options...)
}
// SchemaConverter turns a raw JSON schema into a grammar string.
type SchemaConverter interface {
	GrammarFromBytes([]byte, ...func(*grammars.GrammarOption)) (string, error)
}

// NewSchemaConverter picks the converter implementation matching the
// configured schema type, defaulting to the JSON-schema converter.
func NewSchemaConverter(opt grammars.GrammarOption) SchemaConverter {
	if opt.SchemaType == grammars.LLama31Schema {
		return grammars.NewLLama31SchemaConverter(opt.FunctionName)
	}
	return grammars.NewJSONSchemaConverter(opt.PropOrder)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/parse_test.go | pkg/functions/parse_test.go | package functions_test
import (
"encoding/json"
"regexp"
"strings"
. "github.com/mudler/LocalAI/pkg/functions"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("LocalAI function parse tests", func() {
var functionConfig FunctionsConfig
BeforeEach(func() {
// Default configuration setup
functionConfig = FunctionsConfig{}
})
Context("when using grammars and single result expected", func() {
It("should parse the function name and arguments correctly", func() {
input := `{"name": "add", "arguments": {"x": 5, "y": 3}}`
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
})
})
Context("when not using grammars and regex is needed", func() {
It("should extract function name and arguments from the regex", func() {
input := `add({"x":5,"y":3})`
functionConfig.ResponseRegex = []string{`(?P<name>\w+)\s*\((?P<arguments>.*)\)`}
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
})
It("should extract function name and arguments from the regex", func() {
input := `add({"x":5,"y":3})`
functionConfig.ResponseRegex = []string{`(?P<function>\w+)\s*\((?P<arguments>.*)\)`}
functionConfig.FunctionNameKey = "function"
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
})
})
Context("when having invalid input", func() {
It("returns no results when there is no input", func() {
input := ""
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(0))
})
It("returns no results when is invalid", func() {
input := "invalid input"
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(0))
})
})
Context("when parallel calls are enabled", func() {
It("should handle multiple function calls", func() {
input := `[{"name": "add", "arguments": {"x": 5, "y": 3}}, {"name": "subtract", "arguments": {"x": 10, "y": 7}}]`
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(2))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
Expect(results[1].Name).To(Equal("subtract"))
Expect(results[1].Arguments).To(Equal(`{"x":10,"y":7}`))
})
})
Context("without grammars and without regex", func() {
It("should parse the function name and arguments correctly with the name key", func() {
input := `{"function": "add", "arguments": {"x": 5, "y": 3}}`
functionConfig.FunctionNameKey = "function"
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
})
It("should parse the function name and arguments correctly with the function key", func() {
input := `{"name": "add", "arguments": {"x": 5, "y": 3}}`
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
})
It("should parse the result by matching the JSONRegexMatch", func() {
input := `
<tool_call>
{"name": "add", "arguments": {"x": 5, "y": 3}}
</tool_call>`
functionConfig.JSONRegexMatch = []string{`(?s)<tool_call>(.*?)</tool_call>`}
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
})
It("should parse the result by matching the JSONRegexMatch", func() {
input := `
{"name": "add", "arguments": {"x": 5, "y": 3}}
</tool_call>`
functionConfig.JSONRegexMatch = []string{`(?s)(.*?)</tool_call>`}
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
})
It("should parse the result even with invalid JSON", func() {
input := `{"name": "add", "arguments": {"x": 5, "y": 3}} invalid {"name": "add", "arguments": {"x": 5, "y": 3}}`
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(2))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
})
})
Context("when using ReplaceResults to clean up input", func() {
It("should replace text before and after JSON blob", func() {
input := `
Some text before the JSON
{"name": "add", "arguments": {"x": 5, "y": 3}}
Some text after the JSON
`
functionConfig.ReplaceFunctionResults = []ReplaceResult{
{Key: `(?s)^[^{\[]*`, Value: ""},
{Key: `(?s)[^}\]]*$`, Value: ""},
}
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
})
It("should replace text before and after array JSON blob", func() {
input := `
Some text before the JSON
[{"name": "add", "arguments": {"x": 5, "y": 3}}, {"name": "subtract", "arguments": {"x": 10, "y": 7}}]
Some text after the JSON
`
functionConfig.ReplaceFunctionResults = []ReplaceResult{
{Key: `(?s)^[^{\[]*`, Value: ""},
{Key: `(?s)[^}\]]*$`, Value: ""},
}
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(2))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
Expect(results[1].Name).To(Equal("subtract"))
Expect(results[1].Arguments).To(Equal(`{"x":10,"y":7}`))
})
It("should convert single-quoted key-value pairs to double-quoted and escape double quotes within values", func() {
input := `
Some text before the JSON
{'name': '"add"', 'arguments': {'x': 5, 'z': '"v"', 'y': 'v"value"'}}
Some text after the JSON
`
functionConfig.JSONRegexMatch = []string{`(?s)<tool_call>(.*?)</tool_call>`}
// Regex to match non-JSON characters before the JSON structure
//reBefore := regexp.MustCompile(`(?s)^.*?(?=\{|\[)`)
// Regex to match non-JSON characters after the JSON structure
//reAfter := regexp.MustCompile(`(?s)(?<=\}|\]).*$`)
functionConfig.ReplaceFunctionResults = []ReplaceResult{
{Key: `(?s)^[^{\[]*`, Value: ""},
{Key: `(?s)[^}\]]*$`, Value: ""},
// Regex pattern to match single quotes around keys and values
// Step 1: Replace single quotes around keys and values with double quotes
{Key: `'([^']*?)'`, Value: `_DQUOTE_${1}_DQUOTE_`},
// Step 2: Replace double quotes inside values with placeholders
{Key: `\\"`, Value: `__TEMP_QUOTE__`},
{Key: `"`, Value: `\"`},
{Key: `\'`, Value: `'`},
{Key: `_DQUOTE_`, Value: `"`},
{Key: `__TEMP_QUOTE__`, Value: `"`},
}
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("\"add\""))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":"v\"value\"","z":"\"v\""}`))
})
It("should convert single-quoted key-value pairs to double-quoted and escape double quotes within values", func() {
input := `
Some text before the JSON
<tool_call>{'name': '"add"', 'arguments': {'x': 5, 'z': '"v"', 'y': 'v"value"'}}</tool_call>
Some text after the JSON
`
functionConfig.JSONRegexMatch = []string{`(?s)<tool_call>(.*?)</tool_call>`}
// Regex to match non-JSON characters before the JSON structure
//reBefore := regexp.MustCompile(`(?s)^.*?(?=\{|\[)`)
// Regex to match non-JSON characters after the JSON structure
//reAfter := regexp.MustCompile(`(?s)(?<=\}|\]).*$`)
functionConfig.ReplaceFunctionResults = []ReplaceResult{
{Key: `(?s)^[^{\[]*`, Value: ""},
{Key: `(?s)[^}\]]*$`, Value: ""},
// Regex pattern to match single quotes around keys and values
// Step 1: Replace single quotes around keys and values with double quotes
{Key: `'([^']*?)'`, Value: `_DQUOTE_${1}_DQUOTE_`},
// Step 2: Replace double quotes inside values with placeholders
{Key: `\\"`, Value: `__TEMP_QUOTE__`},
{Key: `"`, Value: `\"`},
{Key: `\'`, Value: `'`},
{Key: `_DQUOTE_`, Value: `"`},
{Key: `__TEMP_QUOTE__`, Value: `"`},
}
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("\"add\""))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":"v\"value\"","z":"\"v\""}`))
})
It("should detect multiple functions call where the JSONRegexMatch is repeated", func() {
input := `
Some text before the JSON
<tool_call>{"name": "add", "arguments": {"x": 5, "y": 3}}</tool_call>
<tool_call>{"name": "subtract", "arguments": {"x": 10, "y": 7}}</tool_call>
Some text after the JSON
`
functionConfig.JSONRegexMatch = []string{`(?s)<tool_call>(.*?)</tool_call>`}
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(2))
Expect(results[0].Name).To(Equal("add"))
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
Expect(results[1].Name).To(Equal("subtract"))
Expect(results[1].Arguments).To(Equal(`{"x":10,"y":7}`))
})
})
Context("ParseTextContent", func() {
It("Can extract notes from the LLM result", func() {
input := `
Some text before the JSON
<sketchpad>
roses are red
</sketchpad>
<tool_call>{"name": "subtract", "arguments": {"x": 10, "y": 7}}</tool_call>
Some text after the JSON
`
functionConfig.CaptureLLMResult = []string{`(?s)<sketchpad>(.*?)</sketchpad>`}
results := ParseTextContent(input, functionConfig)
Expect(results).To(Equal("roses are red"))
})
It("Defaults to empty if doesn't catch any", func() {
input := `
Some text before the JSON
<tool_call>{"name": "subtract", "arguments": {"x": 10, "y": 7}}</tool_call>
Some text after the JSON
`
functionConfig.CaptureLLMResult = []string{`(?s)<sketchpad>(.*?)</sketchpad>`}
results := ParseTextContent(input, functionConfig)
Expect(results).To(Equal(""))
})
})
Context("ParseJSON - when given valid JSON strings", func() {
It("should parse multiple JSON objects", func() {
input := `{"key1": "value1"} {"key2": "value2"}`
expected := []map[string]any{
{"key1": "value1"},
{"key2": "value2"},
}
result, err := ParseJSON(input)
Expect(err).NotTo(HaveOccurred())
Expect(result).To(Equal(expected))
})
It("should parse a single JSON object with various types", func() {
input := `{"key1": "value1", "key2": 2}`
expected := []map[string]any{
{"key1": "value1", "key2": float64(2)},
}
result, err := ParseJSON(input)
Expect(err).NotTo(HaveOccurred())
Expect(result).To(Equal(expected))
})
It("should handle JSON without syntax errors gracefully", func() {
input := `{"key1": "value1"}`
expected := []map[string]any{
{"key1": "value1"},
}
result, err := ParseJSON(input)
Expect(err).NotTo(HaveOccurred())
Expect(result).To(Equal(expected))
})
It("should handle JSON without syntax errors gracefully", func() {
input := `[{"key1": "value1"}]`
expected := []map[string]any{
{"key1": "value1"},
}
result, err := ParseJSON(input)
Expect(err).NotTo(HaveOccurred())
Expect(result).To(Equal(expected))
})
})
Context("ParseJSON - when given invalid JSON strings", func() {
It("should return an error for completely invalid JSON", func() {
input := `invalid json`
result, err := ParseJSON(input)
Expect(err).To(HaveOccurred())
Expect(result).To(BeNil())
})
It("should skip invalid JSON parts and parse valid parts", func() {
input := `{"key1": "value1"} invalid {"key2": "value2"}`
expected := []map[string]any{
{"key1": "value1"},
{"key2": "value2"},
}
result, err := ParseJSON(input)
Expect(err).NotTo(HaveOccurred())
Expect(result).To(Equal(expected))
})
PIt("should handle JSON with syntax errors gracefully", func() {
input := `{"key1": "value1", "key2": }`
expected := []map[string]any{
{"key1": "value1"},
}
result, err := ParseJSON(input)
Expect(err).NotTo(HaveOccurred())
Expect(result).To(Equal(expected))
})
})
Context("ParseXML - when given XML tool call strings", func() {
It("should parse a basic XML tool call with tool_call wrapper", func() {
input := `<tool_call>
<function=glob>
<parameter=pattern>
**/package.json
</parameter>
</function>
</tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("glob"))
Expect(results[0].Arguments).To(Equal(`{"pattern":"**/package.json"}`))
})
It("should parse XML tool call without tool_call wrapper", func() {
input := `<function=add>
<parameter=x>
5
</parameter>
<parameter=y>
3
</parameter>
</function>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("add"))
// JSON parsing converts numeric strings to numbers (matching llama.cpp behavior)
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
})
It("should parse XML tool call with multiple parameters", func() {
input := `<tool_call>
<function=function_name>
<parameter=param_1>
param_1_Value
</parameter>
<parameter=param_2>
param_2_Value
</parameter>
</function>
</tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("function_name"))
Expect(results[0].Arguments).To(Equal(`{"param_1":"param_1_Value","param_2":"param_2_Value"}`))
})
It("should parse multiple XML tool calls", func() {
input := `<tool_call>
<function=add>
<parameter=x>
5
</parameter>
<parameter=y>
3
</parameter>
</function>
</tool_call>
<tool_call>
<function=subtract>
<parameter=x>
10
</parameter>
<parameter=y>
7
</parameter>
</function>
</tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(2))
Expect(results[0].Name).To(Equal("add"))
// JSON parsing converts numeric strings to numbers (matching llama.cpp behavior)
Expect(results[0].Arguments).To(Equal(`{"x":5,"y":3}`))
Expect(results[1].Name).To(Equal("subtract"))
Expect(results[1].Arguments).To(Equal(`{"x":10,"y":7}`))
})
It("should handle mixed text and XML tool calls", func() {
input := `A message from the LLM
<tool_call>
<function=glob>
<parameter=pattern>
**/package.json
</parameter>
</function>
</tool_call>
Some text after the tool call`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("glob"))
Expect(results[0].Arguments).To(Equal(`{"pattern":"**/package.json"}`))
})
It("should handle parameter values with newlines and whitespace", func() {
input := `<tool_call>
<function=search>
<parameter=query>
This is a multi-line
parameter value
with whitespace
</parameter>
</function>
</tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("search"))
// The value should be trimmed but preserve internal structure
args := results[0].Arguments
Expect(args).To(ContainSubstring("query"))
Expect(args).To(ContainSubstring("multi-line"))
})
It("should return empty results for invalid XML", func() {
input := `<tool_call>
<function=test>
<parameter=x>
</function>
</tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
// Should handle gracefully, might return partial results or empty
Expect(results).NotTo(BeNil())
// Results may be empty for incomplete input, which is acceptable
})
It("should return empty results when no XML tool calls found", func() {
input := `Just some regular text without any XML tool calls`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(0))
})
It("should handle parameter values that are JSON", func() {
input := `<tool_call>
<function=process>
<parameter=config>
{"key": "value", "number": 42}
</parameter>
</function>
</tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("process"))
// JSON values should be parsed as JSON objects
Expect(results[0].Arguments).To(ContainSubstring("key"))
Expect(results[0].Arguments).To(ContainSubstring("value"))
})
It("should auto-detect Qwen3-Coder format", func() {
input := `<tool_call>
<function=test>
<parameter=key>
value
</parameter>
</function>
</tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("test"))
})
It("should auto-detect GLM 4.5 format", func() {
input := `<tool_call>
test_function
<arg_key>key1</arg_key>
<arg_value>value1</arg_value>
<arg_key>key2</arg_key>
<arg_value>value2</arg_value>
</tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("test_function"))
Expect(results[0].Arguments).To(ContainSubstring("key1"))
Expect(results[0].Arguments).To(ContainSubstring("value1"))
})
It("should auto-detect MiniMax-M2 format", func() {
input := `<minimax:tool_call>
<invoke name="test_function">
<parameter name="key1">value1</parameter>
<parameter name="key2">value2</parameter>
</invoke>
</minimax:tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("test_function"))
Expect(results[0].Arguments).To(ContainSubstring("key1"))
})
It("should auto-detect Functionary format", func() {
input := `<function=test_function>{"key1": "value1", "key2": "value2"}</function>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("test_function"))
Expect(results[0].Arguments).To(ContainSubstring("key1"))
})
It("should use forced format when preset is specified via config", func() {
input := `<tool_call>
<function=test>
<parameter=key>
value
</parameter>
</function>
</tool_call>`
functionConfig.XMLFormatPreset = "qwen3-coder"
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("test"))
})
It("should handle GLM 4.5 format with arg_key/arg_value pairs", func() {
input := `<tool_call>
search_function
<arg_key>query</arg_key>
<arg_value>test search</arg_value>
<arg_key>limit</arg_key>
<arg_value>10</arg_value>
</tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("search_function"))
Expect(results[0].Arguments).To(ContainSubstring("query"))
Expect(results[0].Arguments).To(ContainSubstring("test search"))
})
It("should strip Kimi-K2 function name prefixes", func() {
// Kimi-K2 format: <|tool_calls_section_begin|><|tool_call_begin|>functions.name:index<|tool_call_argument_begin|>{JSON}<|tool_call_end|><|tool_calls_section_end|>
// The function name is between tool_start and tool_sep, arguments are JSON between tool_sep and tool_end
input := `<|tool_calls_section_begin|>
<|tool_call_begin|>
functions.search:0<|tool_call_argument_begin|>{"query": "test", "limit": 10}<|tool_call_end|>
<|tool_calls_section_end|>`
// Test auto-detection should find Kimi-K2 format
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("search"))
Expect(results[0].Arguments).To(ContainSubstring("query"))
})
It("should handle formats with last_val_end for last parameter", func() {
// Apriel-1.5 format uses last_val_end (empty string) for last parameter
input := `<tool_calls>[
{"name": "test_function", "arguments": {"key1": "value1", "key2": "value2"}}
]</tool_calls>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
// Should parse JSON-like format
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("test_function"))
})
It("should validate scope_start has only whitespace before it", func() {
// This should NOT match because there's non-whitespace before scope_start
input := `text<minimax:tool_call>
<invoke name="test">
<parameter name="key">value</parameter>
</invoke>
</minimax:tool_call>`
// The scope validation should prevent matching when there's text before scope_start
// However, our current implementation will still match because regex is greedy
// This is a limitation of regex-based parsing vs streaming parser
results, err := ParseXML(input, nil)
// The iterative parser should reject this (scope validation), but ParseXML falls back to regex
// So it should succeed with regex parser
Expect(err).NotTo(HaveOccurred())
// Regex parser accepts it (this is a known limitation)
Expect(results).NotTo(BeNil())
})
It("should handle empty tool calls with no arguments", func() {
// Tool call with no parameters should return empty arguments object
input := `<tool_call>
<function=test_function>
</function>
</tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("test_function"))
Expect(results[0].Arguments).To(Equal("{}"))
})
It("should support partial parsing for streaming", func() {
// Partial XML that ends mid-tag should be detected as partial
input := `<tool_call>
<function=test>
<parameter=key>
value
</parameter>`
partialResult, err := ParseXMLPartial(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(partialResult).NotTo(BeNil())
// Should detect partial content
Expect(partialResult).NotTo(BeNil())
Expect(partialResult.IsPartial).To(BeTrue())
})
It("should parse JSON values correctly in all formats", func() {
// Test that numeric strings are parsed as numbers (not strings)
input := `<tool_call>
<function=test>
<parameter=count>
42
</parameter>
<parameter=enabled>
true
</parameter>
</function>
</tool_call>`
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
// JSON parsing should convert "42" to number 42 and "true" to boolean true
Expect(results[0].Arguments).To(ContainSubstring(`"count":42`))
Expect(results[0].Arguments).To(ContainSubstring(`"enabled":true`))
})
It("should handle reasoning blocks with tool calls", func() {
// Test parsing tool calls that appear after reasoning blocks
// Note: parseMsgWithXMLToolCalls is currently internal, so we test through ParseXML
// which should still parse tool calls even with reasoning blocks present
input := `<think>
I need to search for information.
</think>
<tool_call>
<function=search>
<parameter=query>
test query
</parameter>
</function>
</tool_call>`
// ParseXML should extract tool calls even with reasoning blocks
results, err := ParseXML(input, nil)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("search"))
})
It("should use iterative parser for streaming scenarios", func() {
// Test that ParseXMLIterative works correctly
input := `<tool_call>
<function=test_function>
<parameter=key1>
value1
</parameter>
<parameter=key2>
value2
</parameter>
</function>
</tool_call>`
results, err := ParseXMLIterative(input, nil, false)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("test_function"))
Expect(results[0].Arguments).To(ContainSubstring("key1"))
Expect(results[0].Arguments).To(ContainSubstring("value1"))
})
It("should handle partial parsing with iterative parser", func() {
// Test partial parsing with iterative parser
input := `<tool_call>
<function=test>
<parameter=key>
value
</parameter>`
results, err := ParseXMLIterative(input, nil, true)
// Should handle partial content gracefully
// Either returns partial results or empty, but should not error
Expect(err).NotTo(HaveOccurred())
// Results may be empty or contain partial tool call
Expect(results).NotTo(BeNil())
})
})
Context("ParseFunctionCall with XML tool calls", func() {
It("should parse XML tool calls when JSON parsing fails", func() {
input := `A message from the LLM
<tool_call>
<function=glob>
<parameter=pattern>
**/package.json
</parameter>
</function>
</tool_call>`
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(1))
Expect(results[0].Name).To(Equal("glob"))
Expect(results[0].Arguments).To(Equal(`{"pattern":"**/package.json"}`))
})
It("should parse XML tool calls alongside JSON tool calls", func() {
input := `{"name": "add", "arguments": {"x": 5, "y": 3}}
<tool_call>
<function=subtract>
<parameter=x>
10
</parameter>
<parameter=y>
7
</parameter>
</function>
</tool_call>`
results := ParseFunctionCall(input, functionConfig)
// Should find both JSON and XML tool calls
Expect(results).To(HaveLen(2))
// First result should be from JSON
Expect(results[0].Name).To(Equal("add"))
// Second result should be from XML
Expect(results[1].Name).To(Equal("subtract"))
})
It("should handle mixed content with text, JSON, and XML", func() {
input := `Some introductory text
{"name": "first", "arguments": {"a": 1}}
More text in between
<tool_call>
<function=second>
<parameter=b>
2
</parameter>
</function>
</tool_call>
Final text`
results := ParseFunctionCall(input, functionConfig)
Expect(results).To(HaveLen(2))
Expect(results[0].Name).To(Equal("first"))
Expect(results[1].Name).To(Equal("second"))
})
})
Context("Iterative Parser (ChatMsgParser)", func() {
Describe("Basic functionality", func() {
It("should track position correctly", func() {
parser := NewChatMsgParser("hello world", false)
Expect(parser.Pos()).To(Equal(0))
Expect(parser.Input()).To(Equal("hello world"))
Expect(parser.IsPartial()).To(BeFalse())
err := parser.MoveTo(5)
Expect(err).NotTo(HaveOccurred())
Expect(parser.Pos()).To(Equal(5))
err = parser.MoveBack(2)
Expect(err).NotTo(HaveOccurred())
Expect(parser.Pos()).To(Equal(3))
})
It("should handle position errors", func() {
parser := NewChatMsgParser("test", false)
err := parser.MoveTo(10)
Expect(err).To(HaveOccurred())
err = parser.MoveBack(10)
Expect(err).To(HaveOccurred())
})
It("should find literals correctly", func() {
parser := NewChatMsgParser("hello world test", false)
result := parser.TryFindLiteral("world")
Expect(result).NotTo(BeNil())
Expect(result.Prelude).To(Equal("hello "))
Expect(parser.Pos()).To(Equal(11)) // After "world"
})
It("should consume literals correctly", func() {
parser := NewChatMsgParser("hello world", false)
success := parser.TryConsumeLiteral("hello")
Expect(success).To(BeTrue())
Expect(parser.Pos()).To(Equal(5))
success = parser.TryConsumeLiteral("invalid")
Expect(success).To(BeFalse())
})
It("should consume spaces", func() {
parser := NewChatMsgParser(" hello", false)
consumed := parser.ConsumeSpaces()
Expect(consumed).To(BeTrue())
Expect(parser.Pos()).To(Equal(3))
})
It("should add content and tool calls", func() {
parser := NewChatMsgParser("test", false)
parser.AddContent("hello")
parser.AddReasoningContent("thinking")
parser.AddToolCall("test_func", "", `{"arg":"value"}`)
Expect(parser.Content()).To(Equal("hello"))
Expect(parser.Reasoning()).To(Equal("thinking"))
Expect(parser.ToolCalls()).To(HaveLen(1))
Expect(parser.ToolCalls()[0].Name).To(Equal("test_func"))
})
It("should not add tool call with empty name", func() {
parser := NewChatMsgParser("test", false)
success := parser.AddToolCall("", "", `{}`)
Expect(success).To(BeFalse())
Expect(parser.ToolCalls()).To(HaveLen(0))
})
})
Describe("JSON parsing", func() {
It("should parse complete JSON objects", func() {
parser := NewChatMsgParser(`{"name":"test","value":42}`, false)
jsonValue, isPartial, jsonDumpMarker, err := parser.TryConsumeJSON()
Expect(err).NotTo(HaveOccurred())
Expect(isPartial).To(BeFalse())
Expect(jsonDumpMarker).To(Equal(""), "Complete JSON should have empty jsonDumpMarker")
Expect(jsonValue).NotTo(BeNil())
// Type assert to map[string]any
obj, ok := jsonValue.(map[string]any)
Expect(ok).To(BeTrue())
Expect(obj["name"]).To(Equal("test"))
Expect(obj["value"]).To(Equal(float64(42)))
})
It("should parse JSON arrays (matching llama.cpp behavior)", func() {
parser := NewChatMsgParser(`[{"a":1},{"b":2}]`, false)
jsonValue, isPartial, jsonDumpMarker, err := parser.TryConsumeJSON()
// TryConsumeJSON now supports arrays (matching llama.cpp's try_consume_json)
Expect(err).NotTo(HaveOccurred())
Expect(isPartial).To(BeFalse())
Expect(jsonDumpMarker).To(Equal(""), "Complete JSON should have empty jsonDumpMarker")
Expect(jsonValue).NotTo(BeNil())
// Should be an array
arr, ok := jsonValue.([]any)
Expect(ok).To(BeTrue())
Expect(arr).To(HaveLen(2))
// First element should be an object
obj1, ok := arr[0].(map[string]any)
Expect(ok).To(BeTrue())
Expect(obj1["a"]).To(Equal(float64(1)))
})
It("should heal incomplete JSON in partial mode", func() {
parser := NewChatMsgParser(`{"name":"test","value":`, true)
jsonValue, isPartial, jsonDumpMarker, err := parser.TryConsumeJSON()
// TryConsumeJSON attempts to heal incomplete JSON in partial mode
// For this input, healing should succeed (adds closing quote and brace)
Expect(err).NotTo(HaveOccurred())
Expect(isPartial).To(BeTrue())
Expect(jsonDumpMarker).NotTo(Equal(""), "Healed JSON should have non-empty jsonDumpMarker")
Expect(jsonValue).NotTo(BeNil())
// Type assert to map[string]any
obj, ok := jsonValue.(map[string]any)
Expect(ok).To(BeTrue())
Expect(obj["name"]).To(Equal("test"))
})
It("should reject non-JSON input", func() {
parser := NewChatMsgParser("not json", false)
jsonValue, isPartial, jsonDumpMarker, err := parser.TryConsumeJSON()
Expect(err).To(HaveOccurred())
Expect(isPartial).To(BeFalse())
Expect(jsonDumpMarker).To(Equal(""), "Error case should have empty jsonDumpMarker")
Expect(jsonValue).To(BeNil())
})
It("should parse multiple JSON objects", func() {
input := `{"a":1} {"b":2}`
results, err := ParseJSONIterative(input, false)
Expect(err).NotTo(HaveOccurred())
Expect(results).To(HaveLen(2))
})
})
Describe("XML parsing", func() {
It("should parse XML tool calls with iterative parser", func() {
input := `<tool_call>
<function=test>
<parameter=key>
value
</parameter>
</function>
</tool_call>`
format := GetXMLFormatPreset("qwen3-coder")
parser := NewChatMsgParser(input, false)
success, err := parser.TryConsumeXMLToolCalls(format)
Expect(err).NotTo(HaveOccurred())
Expect(success).To(BeTrue())
Expect(parser.ToolCalls()).To(HaveLen(1))
Expect(parser.ToolCalls()[0].Name).To(Equal("test"))
})
It("should return partial exception for incomplete XML tool calls", func() {
input := `<tool_call>
<function=test>
<parameter=key>
value
</parameter>`
format := GetXMLFormatPreset("qwen3-coder")
parser := NewChatMsgParser(input, true)
success, err := parser.TryConsumeXMLToolCalls(format)
// Should return partial exception for incomplete XML
Expect(err).To(HaveOccurred())
_, isPartialErr := err.(*ChatMsgPartialException)
Expect(isPartialErr).To(BeTrue(), "Should return ChatMsgPartialException for incomplete XML")
Expect(success).To(BeFalse())
})
It("should return partial exception for incomplete literals", func() {
input := `<tool_call>
<function=test>
<parameter=key>`
format := GetXMLFormatPreset("qwen3-coder")
parser := NewChatMsgParser(input, true)
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | true |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/iterative_parser.go | pkg/functions/iterative_parser.go | package functions
import (
"encoding/json"
"errors"
"fmt"
"math/rand"
"regexp"
"strings"
"unicode"
"unicode/utf8"
"github.com/mudler/xlog"
)
// ChatMsgPartialException represents a partial parsing exception: the
// input looks like a valid-but-truncated message, so the caller may
// retry once more data arrives. It is recoverable, unlike a hard parse
// error.
type ChatMsgPartialException struct {
	Message string // human-readable description of what was incomplete
}

// Error implements the error interface so the exception can travel
// through ordinary error returns and be detected with a type assertion.
func (e *ChatMsgPartialException) Error() string {
	return e.Message
}
// StringRange is a half-open [Begin, End) byte range into the parser's
// input string (see ChatMsgParser.Str).
type StringRange struct {
	Begin int // inclusive start offset
	End   int // exclusive end offset
}
// FindLiteralResult is the outcome of locating a literal in the input.
// Prelude is the text that preceded the match; Groups holds ranges of
// any captured sub-spans.
type FindLiteralResult struct {
	Prelude string
	Groups  []StringRange
}
// ChatMsgParser is an iterative parser similar to llama.cpp's
// common_chat_msg_parser. It tracks a cursor into the input and can
// parse incrementally, supporting partial (streamed) input.
type ChatMsgParser struct {
	input         string // full text being parsed
	isPartial     bool   // true when input may be a truncated stream chunk
	pos           int    // current byte offset into input
	healingMarker string // sentinel absent from input, used to heal truncated JSON
	content       strings.Builder // accumulated plain message content
	reasoning     strings.Builder // accumulated reasoning content
	toolCalls     []FuncCallResults // tool calls extracted so far
}
// NewChatMsgParser builds a parser positioned at the start of input.
// isPartial marks the input as a possibly-truncated stream chunk,
// enabling the JSON-healing behavior used elsewhere in the parser.
func NewChatMsgParser(input string, isPartial bool) *ChatMsgParser {
	return &ChatMsgParser{
		input:     input,
		isPartial: isPartial,
		// A marker guaranteed absent from the input lets healed JSON be
		// told apart from genuine content later on.
		healingMarker: generateHealingMarker(input),
		toolCalls:     make([]FuncCallResults, 0),
	}
}
// generateHealingMarker returns a decimal-digit string that does not
// occur anywhere in input, suitable as a sentinel when healing
// truncated JSON. It keeps drawing random candidates until one misses.
func generateHealingMarker(input string) string {
	for {
		candidate := fmt.Sprintf("%d", rand.Int63())
		if strings.Contains(input, candidate) {
			continue // collided with the input; try another
		}
		return candidate
	}
}
// SetHealingMarker sets a custom healing marker for testing purposes.
func (p *ChatMsgParser) SetHealingMarker(marker string) {
	p.healingMarker = marker
}

// Input returns the full input string being parsed.
func (p *ChatMsgParser) Input() string {
	return p.input
}

// Pos returns the current byte offset into the input.
func (p *ChatMsgParser) Pos() int {
	return p.pos
}

// IsPartial reports whether the parser operates on a partial (streaming)
// input, i.e. more tokens may still arrive.
func (p *ChatMsgParser) IsPartial() bool {
	return p.isPartial
}

// HealingMarker returns the marker used to "heal" (terminate) partial JSON.
func (p *ChatMsgParser) HealingMarker() string {
	return p.healingMarker
}
// MoveTo moves the parser position to the absolute byte offset pos.
// It returns an error when pos lies outside [0, len(input)]; len(input)
// itself is a valid (end-of-input) position.
func (p *ChatMsgParser) MoveTo(pos int) error {
	if pos < 0 || pos > len(p.input) {
		return fmt.Errorf("invalid position: %d (input length: %d)", pos, len(p.input))
	}
	p.pos = pos
	return nil
}

// MoveBack moves the parser position back by n bytes.
// It returns an error when that would move before the start of the input.
func (p *ChatMsgParser) MoveBack(n int) error {
	if p.pos < n {
		return fmt.Errorf("can't move back %d characters from position %d", n, p.pos)
	}
	p.pos -= n
	return nil
}
// Str returns the substring of the input covered by rng, or "" when the
// range is out of bounds or inverted (Begin > End).
func (p *ChatMsgParser) Str(rng StringRange) string {
	if rng.Begin < 0 || rng.End > len(p.input) || rng.Begin > rng.End {
		return ""
	}
	return p.input[rng.Begin:rng.End]
}

// ConsumeRest returns everything from the current position to the end of the
// input and advances the position to the end, so subsequent reads see nothing.
func (p *ChatMsgParser) ConsumeRest() string {
	if p.pos >= len(p.input) {
		return ""
	}
	result := p.input[p.pos:]
	p.pos = len(p.input)
	return result
}
// AddContent appends plain assistant content to the result.
func (p *ChatMsgParser) AddContent(content string) {
	p.content.WriteString(content)
}

// AddReasoningContent appends reasoning ("thinking") content to the result.
func (p *ChatMsgParser) AddReasoningContent(reasoning string) {
	p.reasoning.WriteString(reasoning)
}

// AddToolCall records a parsed tool call and reports whether it was added.
// Calls with an empty name are rejected.
// NOTE(review): the id parameter is currently ignored — FuncCallResults
// (declared elsewhere) is populated with name and arguments only; confirm
// whether tool-call IDs should be propagated.
func (p *ChatMsgParser) AddToolCall(name, id, arguments string) bool {
	if name == "" {
		return false
	}
	p.toolCalls = append(p.toolCalls, FuncCallResults{
		Name:      name,
		Arguments: arguments,
	})
	return true
}

// ToolCalls returns the tool calls parsed so far.
func (p *ChatMsgParser) ToolCalls() []FuncCallResults {
	return p.toolCalls
}

// Content returns the accumulated plain content.
func (p *ChatMsgParser) Content() string {
	return p.content.String()
}

// Reasoning returns the accumulated reasoning content.
func (p *ChatMsgParser) Reasoning() string {
	return p.reasoning.String()
}
// rstrip returns s with all trailing Unicode whitespace removed.
func rstrip(s string) string {
	end := len(s)
	for end > 0 {
		r, size := utf8.DecodeLastRuneInString(s[:end])
		if !unicode.IsSpace(r) {
			break
		}
		end -= size
	}
	return s[:end]
}
// eraseSpaces erases the span str[l:r] together with the whitespace
// immediately surrounding it, replacing the removed region with one or two
// newlines, and returns the edited string plus the updated left index.
//
// NOTE(review): whitespace is detected byte-wise via rune(str[i]), so only
// ASCII whitespace is handled correctly; multi-byte Unicode spaces are not.
// NOTE(review): newL is advanced by 2 even when only a single "\n" was
// inserted (the l+1 >= r case) — looks suspicious; confirm against the
// llama.cpp reference before changing.
//
// Reference: llama.cpp/common/chat-parser-xml-toolcall.cpp lines 659-668
func eraseSpaces(str string, l, r int) (string, int) {
	// Out-of-range or inverted ranges are returned unchanged.
	if l < 0 || r < 0 || l > len(str) || r > len(str) || l > r {
		return str, l
	}
	// Move l left to include leading spaces
	for l > 0 && l < len(str) && unicode.IsSpace(rune(str[l-1])) {
		l--
	}
	// Move r right to include trailing spaces
	for r < len(str) && unicode.IsSpace(rune(str[r])) {
		r++
	}
	// Replace the erased span with "\n" (or "\n\n" when it spanned more than
	// one byte).
	result := str[:l]
	if l < r {
		result += "\n"
		if l+1 < r {
			result += "\n"
		}
	}
	newL := l
	if newL != 0 {
		newL += 2
	}
	// Append the remainder: normally everything after r, otherwise (when newL
	// overshoots r) everything after newL.
	if newL < len(str) && newL <= r {
		result += str[r:]
	} else if newL < len(str) {
		result += str[newL:]
	}
	return result, newL
}
// ClearTools clears all parsed tool calls while keeping the backing array
// allocated for reuse.
func (p *ChatMsgParser) ClearTools() {
	p.toolCalls = p.toolCalls[:0]
}
// TryConsumeLiteral attempts to consume literal at the current position.
// On success the position advances past the literal and true is returned;
// otherwise the position is left untouched and false is returned. The empty
// literal always succeeds without consuming anything.
func (p *ChatMsgParser) TryConsumeLiteral(literal string) bool {
	if literal == "" {
		return true
	}
	end := p.pos + len(literal)
	if end > len(p.input) || p.input[p.pos:end] != literal {
		return false
	}
	p.pos = end
	return true
}

// ConsumeLiteral consumes literal at the current position, returning a
// recoverable ChatMsgPartialException when it is not present.
func (p *ChatMsgParser) ConsumeLiteral(literal string) error {
	if p.TryConsumeLiteral(literal) {
		return nil
	}
	return &ChatMsgPartialException{Message: fmt.Sprintf("Expected literal: %s", literal)}
}
// TryFindLiteral searches for literal at or after the current position.
//
// On a full match it returns the text between the current position and the
// match (Prelude) plus the match's range, and advances the position past the
// literal. When no full match exists and partial parsing is enabled, a
// trailing prefix of literal at the end of the input counts as a partial
// match: the returned range extends to the end of the input and the position
// is moved to the end. Returns nil when nothing was found.
//
// Similar to llama.cpp's try_find_literal.
func (p *ChatMsgParser) TryFindLiteral(literal string) *FindLiteralResult {
	if len(literal) == 0 {
		return nil
	}
	// Search for the literal starting from current position.
	idx := strings.Index(p.input[p.pos:], literal)
	if idx == -1 {
		// No complete occurrence; in streaming mode the literal may be cut
		// off at the end of the buffer, so look for a suffix of the input
		// that is a prefix of the literal.
		// (The previous `partialIdx != -1 && partialIdx >= 0` double check
		// was redundant; a single >= 0 test is equivalent.)
		if p.isPartial {
			if partialIdx := stringFindPartialStop(p.input[p.pos:], literal); partialIdx >= 0 {
				result := &FindLiteralResult{
					Prelude: p.input[p.pos : p.pos+partialIdx],
					Groups: []StringRange{
						{Begin: p.pos + partialIdx, End: len(p.input)},
					},
				}
				p.pos = len(p.input)
				return result
			}
		}
		return nil
	}
	idx += p.pos
	result := &FindLiteralResult{
		Prelude: p.input[p.pos:idx],
		Groups: []StringRange{
			{Begin: idx, End: idx + len(literal)},
		},
	}
	p.pos = idx + len(literal)
	return result
}
// stringFindPartialStop returns the byte index where a suffix of s begins
// that is also a prefix of needle (preferring the longest such suffix), or
// -1 when no suffix of s could be the start of needle. It is used during
// streaming/partial parsing to detect literals cut off at the end of input.
func stringFindPartialStop(s, needle string) int {
	if s == "" || needle == "" {
		return -1
	}
	limit := len(needle)
	if limit > len(s) {
		limit = len(s)
	}
	// Try the longest candidate first so the earliest stop position wins.
	for length := limit; length > 0; length-- {
		if strings.HasSuffix(s, needle[:length]) {
			return len(s) - length
		}
	}
	return -1
}
// ConsumeSpaces advances the position past any run of whitespace bytes and
// reports whether at least one byte was consumed. Whitespace is tested
// byte-wise, so only single-byte (ASCII) whitespace is recognized.
func (p *ChatMsgParser) ConsumeSpaces() bool {
	start := p.pos
	for p.pos < len(p.input) && unicode.IsSpace(rune(p.input[p.pos])) {
		p.pos++
	}
	return p.pos != start
}
// AllSpace reports whether s is empty or consists entirely of Unicode
// whitespace.
func AllSpace(s string) bool {
	for _, r := range s {
		if !unicode.IsSpace(r) {
			return false
		}
	}
	return true
}
// TryConsumeJSON attempts to consume a JSON value (object or array) starting
// at the current position.
//
// It returns:
//   - the parsed value (map[string]any, []any, plus nested scalars),
//   - whether the value was only partially present,
//   - the jsonDumpMarker (non-empty only when partial JSON was "healed" by
//     inserting the healing marker before re-parsing),
//   - an error when nothing consumable was found.
//
// Matches llama.cpp's try_consume_json() which returns common_json containing any JSON type and healing_marker.
func (p *ChatMsgParser) TryConsumeJSON() (any, bool, string, error) {
	// Skip whitespace
	p.ConsumeSpaces()
	if p.pos >= len(p.input) {
		return nil, false, "", errors.New("end of input")
	}
	// Only object/array openers are accepted here; JSON primitives are
	// handled separately by tryConsumeJSONPrimitive.
	jsonStart := p.pos
	if p.input[p.pos] != '{' && p.input[p.pos] != '[' {
		return nil, false, "", errors.New("not a JSON object or array")
	}
	// Fast path: a complete JSON value. Decoder.InputOffset reports exactly
	// how many bytes the decoder consumed, so the parser position can be
	// advanced precisely past the value.
	// Use any to support objects, arrays, and other JSON types (matching llama.cpp).
	decoder := json.NewDecoder(strings.NewReader(p.input[jsonStart:]))
	var jsonValue any
	if err := decoder.Decode(&jsonValue); err == nil {
		p.pos = jsonStart + int(decoder.InputOffset())
		return jsonValue, false, "", nil
	}
	// Slow path: hand-scan for the matching close brace/bracket, tracking
	// string literals and backslash escapes so structural characters inside
	// strings are ignored.
	depth := 0
	inString := false
	escape := false
	jsonEnd := -1
	for i := p.pos; i < len(p.input); i++ {
		ch := p.input[i]
		if escape {
			escape = false
			continue
		}
		if ch == '\\' {
			escape = true
			continue
		}
		if ch == '"' {
			inString = !inString
			continue
		}
		if inString {
			continue
		}
		if ch == '{' || ch == '[' {
			depth++
		} else if ch == '}' || ch == ']' {
			depth--
			if depth == 0 {
				jsonEnd = i + 1
				break
			}
		}
	}
	if jsonEnd == -1 {
		// The value never closes: it is truncated. In partial (streaming)
		// mode, "heal" it by completing the open scopes with the healing
		// marker, parse the healed text, then strip the marker again.
		if p.isPartial {
			// Use stack-based healing matching llama.cpp's implementation
			partialInput := p.input[jsonStart:]
			healedValue, wasHealed, jsonDumpMarker, err := parseJSONWithStack(partialInput, p.healingMarker)
			if err == nil && wasHealed {
				// Successfully healed - remove healing marker from result
				cleaned := removeHealingMarkerFromJSONAny(healedValue, p.healingMarker)
				p.pos = len(p.input)
				return cleaned, true, jsonDumpMarker, nil
			}
		}
		return nil, true, "", errors.New("incomplete JSON")
	}
	// A closing delimiter was found even though the streaming decode failed;
	// re-parse just that span.
	// NOTE(review): if Decode failed above, this Unmarshal will usually fail
	// on the same bytes too — confirm this branch is intended as a defensive
	// fallback rather than a reachable fast path.
	jsonStr := p.input[jsonStart:jsonEnd]
	if err := json.Unmarshal([]byte(jsonStr), &jsonValue); err != nil {
		return nil, false, "", err
	}
	p.pos = jsonEnd
	return jsonValue, false, "", nil
}
// jsonNumberRe matches a JSON-style number prefix: [0-9-][0-9]*(\.\d*)?([eE][+-]?\d*)?.
// Hoisted to package level so the regexp is compiled once rather than on
// every call (previously it was recompiled inside the function).
var jsonNumberRe = regexp.MustCompile(`^[0-9-][0-9]*(\.\d*)?([eE][+-]?\d*)?`)

// tryConsumeJSONPrimitive attempts to consume a JSON primitive (null, true,
// false, or a number) at the current position. It is a fallback for
// TryConsumeJSON, which only accepts objects and arrays.
//
// It returns the parsed value and true when a complete primitive — one that
// either ends the input or is followed by a JSON terminator — was consumed;
// otherwise it returns (nil, false) with the position unchanged.
//
// Bug fix: the previous implementation returned false as the "found" flag on
// every success path (null/true/false/number), so the caller in
// TryConsumeXMLToolCalls always discarded the parsed primitive and rewound;
// primitives were effectively never accepted.
//
// Reference: llama.cpp/common/chat-parser-xml-toolcall.cpp lines 506-520
func (p *ChatMsgParser) tryConsumeJSONPrimitive() (any, bool) {
	// Consume spaces first
	p.ConsumeSpaces()
	if p.pos >= len(p.input) {
		return nil, false
	}
	// Work on a UTF-8 safe view so we never slice through a partial rune.
	safeView := utf8TruncateSafeView(p.input[p.pos:])
	// terminated reports whether a token of length n is complete: it either
	// ends the view or is followed by a JSON terminator character.
	terminated := func(n int) bool {
		return len(safeView) == n || (len(safeView) > n && isJSONTerminator(safeView[n]))
	}
	// Check the three literal keywords.
	literals := []struct {
		text  string
		value any
	}{
		{"null", nil},
		{"true", true},
		{"false", false},
	}
	for _, lit := range literals {
		if strings.HasPrefix(safeView, lit.text) && terminated(len(lit.text)) {
			p.pos += len(lit.text)
			return lit.value, true
		}
	}
	// Check for a number; accept it only when it parses and is terminated
	// like the literals above.
	if match := jsonNumberRe.FindString(safeView); match != "" {
		var numValue float64
		if _, err := fmt.Sscanf(match, "%f", &numValue); err == nil && terminated(len(match)) {
			p.pos += len(match)
			return numValue, true
		}
	}
	return nil, false
}
// isJSONTerminator reports whether ch may legally follow a JSON primitive in
// this parser: ASCII whitespace, a JSON structural character, or '<' (the
// start of surrounding XML markup).
func isJSONTerminator(ch byte) bool {
	switch ch {
	case ' ', '\t', '\n', '\r', ',', '}', ']', ':', '<':
		return true
	}
	return false
}
// utf8TruncateSafeView truncates s at a safe UTF-8 boundary near its end,
// preferring the longest prefix that is valid UTF-8. When none of the last
// few prefixes are valid, it conservatively drops the final three bytes
// (or everything, for inputs of three bytes or fewer).
func utf8TruncateSafeView(s string) string {
	if len(s) == 0 {
		return s
	}
	// A UTF-8 rune occupies at most 4 bytes, so only the last few prefix
	// lengths need probing for a valid boundary.
	for drop := 0; drop < 4 && drop < len(s); drop++ {
		if candidate := s[:len(s)-drop]; utf8.ValidString(candidate) {
			return candidate
		}
	}
	if len(s) <= 3 {
		return ""
	}
	return s[:len(s)-3]
}
// isJSONObjectOrArray reports whether v is a decoded JSON container
// (object as map[string]any, or array as []any).
func isJSONObjectOrArray(v any) bool {
	if _, isMap := v.(map[string]any); isMap {
		return true
	}
	_, isSlice := v.([]any)
	return isSlice
}

// isJSONString reports whether v is a decoded JSON string.
func isJSONString(v any) bool {
	switch v.(type) {
	case string:
		return true
	default:
		return false
	}
}
// trimPotentialPartialWord removes from the end of content any suffix that
// could be the beginning of an XML tool-call marker (or think tag) still in
// flight, so incomplete tags are never emitted as content during streaming.
// Reference: llama.cpp/common/chat-parser-xml-toolcall.cpp lines 684-692
func trimPotentialPartialWord(content string, format *XMLToolCallFormat, startThink, endThink string) string {
	// Every marker the format can produce; any trailing prefix of one of
	// these must be held back.
	patterns := []string{
		startThink,
		endThink,
		format.ScopeStart,
		format.ToolStart,
		format.ToolSep,
		format.KeyStart,
		format.KeyValSep,
	}
	if format.KeyValSep2 != nil {
		patterns = append(patterns, *format.KeyValSep2)
	}
	patterns = append(patterns, format.ValEnd)
	if format.LastValEnd != nil {
		patterns = append(patterns, *format.LastValEnd)
	}
	patterns = append(patterns, format.ToolEnd)
	if format.LastToolEnd != nil {
		patterns = append(patterns, *format.LastToolEnd)
	}
	patterns = append(patterns, format.ScopeEnd)
	// bestMatch is the earliest cut point found; len(content) means no cut.
	bestMatch := len(content)
	for _, pattern := range patterns {
		if len(pattern) == 0 {
			continue
		}
		// Check for suffix matches from end of content backwards
		maxStart := len(content) - len(pattern)
		if maxStart < 0 {
			maxStart = 0
		}
		for matchIdx := len(content); matchIdx > maxStart; matchIdx-- {
			matchLen := len(content) - matchIdx
			// matchIdx == len(content) gives matchLen == 0 and is skipped.
			if matchLen > 0 && matchIdx < len(content) {
				// NOTE(review): this bounds check is always true given the
				// loop ranges; kept for fidelity with the reference code.
				if matchIdx+matchLen <= len(content) {
					substr := content[matchIdx : matchIdx+matchLen]
					// The suffix qualifies when it is a prefix of the marker.
					if len(substr) <= len(pattern) && strings.HasPrefix(pattern, substr) {
						if matchIdx < bestMatch {
							bestMatch = matchIdx
						}
					}
				}
			}
		}
	}
	if len(content) > bestMatch {
		return content[:bestMatch]
	}
	return content
}
// removeHealingMarkerFromJSON removes healing markers from every value of a
// parsed JSON object. String values are truncated at the first occurrence of
// marker; nested objects AND arrays are cleaned recursively.
//
// Fix: the previous version only recursed into string and map values, so a
// marker inside an array nested in an object (e.g. {"args": ["xMARKER"]})
// survived cleaning. Delegating each value to removeHealingMarkerFromJSONAny
// covers all container shapes.
func removeHealingMarkerFromJSON(value map[string]any, marker string) map[string]any {
	result := make(map[string]any, len(value))
	for k, v := range value {
		result[k] = removeHealingMarkerFromJSONAny(v, marker)
	}
	return result
}

// removeHealingMarkerFromJSONAny removes healing markers from any JSON value
// (object, array, string, or scalar). Strings are truncated at the first
// occurrence of marker; containers are cleaned recursively; other values are
// returned unchanged.
func removeHealingMarkerFromJSONAny(value any, marker string) any {
	switch v := value.(type) {
	case map[string]any:
		return removeHealingMarkerFromJSON(v, marker)
	case []any:
		result := make([]any, len(v))
		for i, item := range v {
			result[i] = removeHealingMarkerFromJSONAny(item, marker)
		}
		return result
	case string:
		if idx := strings.Index(v, marker); idx != -1 {
			return v[:idx]
		}
		return v
	default:
		return v
	}
}
// TryConsumeXMLToolCalls attempts to parse XML tool calls using the iterative parser
// Returns true if tool calls were found and parsed, false otherwise
// Similar to llama.cpp's parse_xml_tool_calls
func (p *ChatMsgParser) TryConsumeXMLToolCalls(format *XMLToolCallFormat) (bool, error) {
if format == nil {
return false, errors.New("format is required")
}
// Handle Functionary format (JSON parameters inside XML tags) - use regex parser
if format.KeyStart == "" && format.ToolStart == "<function=" {
// Fall back to regex-based parser for Functionary format
results, err := parseFunctionaryFormat(p.input[p.pos:], format)
if err != nil || len(results) == 0 {
return false, nil
}
for _, result := range results {
p.AddToolCall(result.Name, "", result.Arguments)
}
return true, nil
}
// Handle JSON-like formats (Apriel-1.5, Xiaomi-MiMo) - use regex parser
if format.ToolStart != "" && strings.Contains(format.ToolStart, "{\"name\"") {
results, err := parseJSONLikeXMLFormat(p.input[p.pos:], format)
if err != nil || len(results) == 0 {
return false, nil
}
for _, result := range results {
p.AddToolCall(result.Name, "", result.Arguments)
}
return true, nil
}
// Validate required fields for standard XML formats
if format.ToolStart == "" || format.KeyStart == "" || format.KeyValSep == "" ||
format.ValEnd == "" || format.ToolEnd == "" {
return false, errors.New("required format fields missing")
}
startPos := p.pos
recovery := true
// Helper to return error with optional recovery
returnError := func(err error, canRecover bool) (bool, error) {
xlog.Debug("Failed to parse XML tool call", "error", err, "position", p.pos)
if canRecover && recovery {
p.MoveTo(startPos)
return false, nil
}
return false, fmt.Errorf("tool call parsing failed with unrecoverable errors: %w", err)
}
// Helper to find val_end or last_val_end
tryFindValEnd := func() (int, *FindLiteralResult) {
savedPos := p.pos
tc := p.TryFindLiteral(format.ValEnd)
valEndSize := len(format.ValEnd)
if format.LastValEnd != nil {
p.MoveTo(savedPos)
tc2 := p.tryFind2LiteralSplitBySpaces(*format.LastValEnd, format.ToolEnd)
if format.LastToolEnd != nil {
p.MoveTo(savedPos)
tc3 := p.tryFind2LiteralSplitBySpaces(*format.LastValEnd, *format.LastToolEnd)
if tc3 != nil && (tc2 == nil || len(tc2.Prelude) > len(tc3.Prelude)) {
tc2 = tc3
}
}
if tc2 != nil && (tc == nil || len(tc.Prelude) > len(tc2.Prelude)) {
tc = tc2
if tc.Groups[0].End > len(p.input) {
tc.Groups[0].End = len(p.input)
}
if tc.Groups[0].Begin+len(*format.LastValEnd) < len(p.input) {
tc.Groups[0].End = tc.Groups[0].Begin + len(*format.LastValEnd)
}
p.MoveTo(tc.Groups[0].End)
valEndSize = len(*format.LastValEnd)
} else {
p.MoveTo(savedPos)
}
}
return valEndSize, tc
}
// Helper to find tool_end or last_tool_end
tryFindToolEnd := func() (int, *FindLiteralResult) {
savedPos := p.pos
tc := p.TryFindLiteral(format.ToolEnd)
toolEndSize := len(format.ToolEnd)
if format.LastToolEnd != nil {
p.MoveTo(savedPos)
tc2 := p.tryFind2LiteralSplitBySpaces(*format.LastToolEnd, format.ScopeEnd)
if tc2 != nil && (tc == nil || len(tc.Prelude) > len(tc2.Prelude)) {
tc = tc2
if tc.Groups[0].End > len(p.input) {
tc.Groups[0].End = len(p.input)
}
if tc.Groups[0].Begin+len(*format.LastToolEnd) < len(p.input) {
tc.Groups[0].End = tc.Groups[0].Begin + len(*format.LastToolEnd)
}
p.MoveTo(tc.Groups[0].End)
toolEndSize = len(*format.LastToolEnd)
} else {
p.MoveTo(savedPos)
}
}
return toolEndSize, tc
}
// Parse multiple scopes (for formats like qwen3-coder that can have multiple <tool_call> blocks)
// Continue parsing until no more scopes are found
for {
// Parse scope_start if present
if format.ScopeStart != "" && !AllSpace(format.ScopeStart) {
tc := p.TryFindLiteral(format.ScopeStart)
if tc == nil {
// No more scopes found, break
break
}
if !AllSpace(tc.Prelude) {
// Non-whitespace before scope_start, stop parsing
p.MoveTo(tc.Groups[0].Begin - len(tc.Prelude))
break
}
// Validate size match (partial detection)
if len(tc.Groups) > 0 {
matchedSize := tc.Groups[0].End - tc.Groups[0].Begin
if matchedSize != len(format.ScopeStart) {
return false, &ChatMsgPartialException{Message: fmt.Sprintf("Partial literal: %s", format.ScopeStart)}
}
}
}
// Parse tool calls within this scope
scopeToolCallsFound := false
for {
tc := p.TryFindLiteral(format.ToolStart)
if tc == nil {
break
}
if !AllSpace(tc.Prelude) {
// Non-whitespace before tool_start, stop parsing
p.MoveTo(tc.Groups[0].Begin - len(tc.Prelude))
break
}
// Find function name
var funcName *FindLiteralResult
if AllSpace(format.ToolSep) {
// GLM 4.5 format: function name is between tool_start and key_start
funcName = p.TryFindLiteral(format.KeyStart)
} else {
// Standard format: function name is between tool_start and tool_sep
funcName = p.TryFindLiteral(format.ToolSep)
}
if funcName == nil {
// Try to find tool_end instead (empty tool call)
_, toolEnd := tryFindToolEnd()
if toolEnd != nil {
// Empty tool call - extract function name from between tool_start and tool_end
nameStart := tc.Groups[0].End
nameEnd := toolEnd.Groups[0].Begin
functionName := ""
if nameEnd > nameStart {
functionName = strings.TrimSpace(p.input[nameStart:nameEnd])
}
argsJSON, _ := json.Marshal(map[string]any{})
p.AddToolCall(functionName, "", string(argsJSON))
recovery = false
continue
}
// Partial tool name not supported
return false, &ChatMsgPartialException{Message: "incomplete tool_call"}
}
// Check if tool_end appears in function name prelude (empty tool call)
functionNamePrelude := funcName.Prelude
if strings.Contains(functionNamePrelude, format.ToolEnd) ||
(format.LastToolEnd != nil && strings.Contains(functionNamePrelude, *format.LastToolEnd)) {
// Empty tool call - function name is empty, tool_end is in the prelude
// Move back to start of tool_start and find tool_end
p.MoveTo(tc.Groups[0].Begin)
_, toolEnd := tryFindToolEnd()
if toolEnd != nil {
// Extract function name from between tool_start and tool_end
nameStart := tc.Groups[0].End
nameEnd := toolEnd.Groups[0].Begin
functionName := ""
if nameEnd > nameStart {
functionName = strings.TrimSpace(p.input[nameStart:nameEnd])
// Remove tool_sep if present
if !AllSpace(format.ToolSep) && strings.HasSuffix(functionName, format.ToolSep) {
functionName = strings.TrimSpace(functionName[:len(functionName)-len(format.ToolSep)])
}
}
argsJSON, _ := json.Marshal(map[string]any{})
p.AddToolCall(functionName, "", string(argsJSON))
recovery = false
continue
}
}
// Extract function name from prelude
// Move to appropriate position based on format
if AllSpace(format.ToolSep) {
// GLM 4.5 format: function name is on a separate line after tool_start, before key_start
// The prelude contains the function name
p.MoveTo(funcName.Groups[0].Begin)
} else {
// Standard format: function name is before tool_sep
p.MoveTo(funcName.Groups[0].End)
}
functionName := strings.TrimSpace(funcName.Prelude)
// Handle Kimi-K2 function name stripping
if strings.HasPrefix(functionName, "functions.") {
functionName = functionName[10:]
if idx := strings.LastIndex(functionName, ":"); idx != -1 {
suffix := functionName[idx+1:]
allDigits := true
for _, r := range suffix {
if r < '0' || r > '9' {
allDigits = false
break
}
}
if allDigits {
functionName = functionName[:idx]
}
}
}
// Parse arguments
arguments := make(map[string]any)
for {
keyStart := p.TryFindLiteral(format.KeyStart)
if keyStart == nil {
break
}
if !AllSpace(keyStart.Prelude) {
// Non-whitespace before key_start, stop parsing parameters
p.MoveTo(keyStart.Groups[0].Begin - len(keyStart.Prelude))
break
}
// Validate size match (partial detection)
if len(keyStart.Groups) > 0 {
matchedSize := keyStart.Groups[0].End - keyStart.Groups[0].Begin
if matchedSize != len(format.KeyStart) {
// Partial key_start, emit tool call with current args
argsJSON, _ := json.Marshal(arguments)
if len(argsJSON) > 0 && argsJSON[len(argsJSON)-1] == '}' {
argsJSON = argsJSON[:len(argsJSON)-1]
}
p.AddToolCall(functionName, "", string(argsJSON))
return false, &ChatMsgPartialException{Message: fmt.Sprintf("Partial literal: %s", format.KeyStart)}
}
}
// Find key_val_sep
keyValSep := p.TryFindLiteral(format.KeyValSep)
if keyValSep == nil {
// Generate partial args
rest := p.ConsumeRest()
arguments[rest+"XML_TOOL_CALL_PARTIAL_FLAG"] = ""
argsJSON, _ := json.Marshal(arguments)
toolStr := string(argsJSON)
if cleaned, isPartial := partialJSON(toolStr); isPartial {
p.AddToolCall(functionName, "", cleaned)
} else {
p.AddToolCall(functionName, "", toolStr)
}
return false, &ChatMsgPartialException{
Message: fmt.Sprintf("Expected %s after %s", format.KeyValSep, format.KeyStart),
}
}
// Validate size match
if len(keyValSep.Groups) > 0 {
matchedSize := keyValSep.Groups[0].End - keyValSep.Groups[0].Begin
if matchedSize != len(format.KeyValSep) {
// Partial key_val_sep
rest := keyValSep.Prelude
arguments[rest+"XML_TOOL_CALL_PARTIAL_FLAG"] = ""
argsJSON, _ := json.Marshal(arguments)
toolStr := string(argsJSON)
if cleaned, isPartial := partialJSON(toolStr); isPartial {
p.AddToolCall(functionName, "", cleaned)
} else {
p.AddToolCall(functionName, "", toolStr)
}
return false, &ChatMsgPartialException{Message: fmt.Sprintf("Partial literal: %s", format.KeyValSep)}
}
}
key := strings.TrimSpace(keyValSep.Prelude)
recovery = false
// Handle key_val_sep2 if present (GLM 4.5 format)
// For GLM 4.5, key_val_sep2 is "</arg_key>\n<arg_value>"
// We need to consume it but it's optional - if not found, the value might be empty
if format.KeyValSep2 != nil {
// Try to consume it, but don't fail if not found (might be empty value)
p.TryConsumeLiteral(*format.KeyValSep2)
}
// Save position before attempting JSON parsing
// Reference: llama.cpp/common/chat-parser-xml-toolcall.cpp lines 499-555
valStart := p.pos
// Try to parse JSON first (if raw_argval is false/null)
// This matches llama.cpp's approach: try JSON before finding val_end
var jsonValue any
var jsonHealingMarker string
jsonParsed := false
if format.RawArgVal == nil || !*format.RawArgVal {
// Try JSON parsing (objects/arrays)
jsonVal, _, jsonDumpMarker, err := p.TryConsumeJSON()
if err == nil {
jsonValue = jsonVal
jsonHealingMarker = jsonDumpMarker
jsonParsed = true
} else {
// Try primitive fallback (null, true, false, numbers)
primitiveVal, found := p.tryConsumeJSONPrimitive()
if found {
jsonValue = primitiveVal
jsonParsed = true
} else {
// Reset position if JSON parsing failed
p.MoveTo(valStart)
}
}
}
// If JSON was parsed, check if val_end follows
if jsonParsed {
jsonEnd := p.pos
p.ConsumeSpaces()
// Check if at end of input (partial case)
if p.pos >= len(p.input) {
// Partial JSON - handle based on format and JSON type
if format.RawArgVal != nil && !*format.RawArgVal {
// raw_argval is false - only JSON allowed
if isJSONObjectOrArray(jsonValue) || isJSONString(jsonValue) {
arguments[key] = jsonValue
argsJSON, _ := json.Marshal(arguments)
toolStr := string(argsJSON)
// Use jsonDumpMarker to cut precisely (matching llama.cpp lines 532-538)
if jsonHealingMarker != "" {
// Find jsonDumpMarker in the JSON string and cut there
// Matching llama.cpp: GGML_ASSERT(std::string::npos != json_str.rfind(...))
idx := strings.LastIndex(toolStr, jsonHealingMarker)
if idx != -1 {
toolStr = toolStr[:idx]
} else {
// Marker should always be found if it was returned from parseJSONWithStack
// Log warning but continue with fallback
jsonPreview := toolStr
if len(jsonPreview) > 100 {
jsonPreview = jsonPreview[:100]
}
xlog.Debug("jsonDumpMarker not found in JSON string, using fallback", "marker", jsonHealingMarker, "json", jsonPreview)
// Fallback: remove trailing } if present
if len(toolStr) > 0 && toolStr[len(toolStr)-1] == '}' {
toolStr = toolStr[:len(toolStr)-1]
}
}
} else {
// Remove trailing } if present (matching llama.cpp line 537)
if len(toolStr) > 0 && toolStr[len(toolStr)-1] == '}' {
toolStr = toolStr[:len(toolStr)-1]
}
}
p.AddToolCall(functionName, "", toolStr)
return false, &ChatMsgPartialException{
Message: "JSON arg_value detected. Waiting for more tokens for validations.",
}
}
}
// Generate partial args
genPartialArgs := func(needle string) {
arguments[key] = needle
argsJSON, _ := json.Marshal(arguments)
toolStr := string(argsJSON)
if cleaned, isPartial := partialJSON(toolStr); isPartial {
p.AddToolCall(functionName, "", cleaned)
} else {
p.AddToolCall(functionName, "", toolStr)
}
}
genPartialArgs("XML_TOOL_CALL_PARTIAL_FLAG")
return false, &ChatMsgPartialException{
Message: "JSON arg_value detected. Waiting for more tokens for validations.",
}
}
// Rewind to json_end and check if val_end follows
p.MoveTo(jsonEnd)
valEndSize, valEnd := tryFindValEnd()
if valEnd != nil && AllSpace(valEnd.Prelude) && jsonHealingMarker == "" {
// val_end follows JSON
if len(valEnd.Groups) > 0 {
matchedSize := valEnd.Groups[0].End - valEnd.Groups[0].Begin
if matchedSize == valEndSize {
// Complete val_end - use JSON value
arguments[key] = jsonValue
} else {
// Partial val_end
genPartialArgs := func(needle string) {
arguments[key] = needle
argsJSON, _ := json.Marshal(arguments)
toolStr := string(argsJSON)
if cleaned, isPartial := partialJSON(toolStr); isPartial {
p.AddToolCall(functionName, "", cleaned)
} else {
p.AddToolCall(functionName, "", toolStr)
}
}
genPartialArgs("XML_TOOL_CALL_PARTIAL_FLAG")
return false, &ChatMsgPartialException{
Message: fmt.Sprintf("Partial literal: %s", format.ValEnd),
}
}
}
} else {
// val_end doesn't follow - rewind and parse as text
p.MoveTo(valStart)
jsonParsed = false
}
}
// If JSON wasn't parsed or val_end didn't follow, parse as plain text
if !jsonParsed {
valEndSize, valEnd := tryFindValEnd()
if valEnd == nil {
// Partial value
rest := p.ConsumeRest()
if format.TrimRawArgVal {
rest = strings.TrimSpace(rest)
}
arguments[key] = rest + "XML_TOOL_CALL_PARTIAL_FLAG"
argsJSON, _ := json.Marshal(arguments)
toolStr := string(argsJSON)
if cleaned, isPartial := partialJSON(toolStr); isPartial {
p.AddToolCall(functionName, "", cleaned)
} else {
p.AddToolCall(functionName, "", toolStr)
}
return false, &ChatMsgPartialException{
Message: fmt.Sprintf("Expected %s after %s", format.ValEnd, format.KeyValSep),
}
}
// Validate size match
if len(valEnd.Groups) > 0 {
matchedSize := valEnd.Groups[0].End - valEnd.Groups[0].Begin
if matchedSize != valEndSize {
// Partial val_end
rest := valEnd.Prelude
if format.TrimRawArgVal {
rest = strings.TrimSpace(rest)
}
arguments[key] = rest + "XML_TOOL_CALL_PARTIAL_FLAG"
argsJSON, _ := json.Marshal(arguments)
toolStr := string(argsJSON)
if cleaned, isPartial := partialJSON(toolStr); isPartial {
p.AddToolCall(functionName, "", cleaned)
} else {
p.AddToolCall(functionName, "", toolStr)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | true |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/functions.go | pkg/functions/functions.go | package functions
import (
"encoding/json"
"github.com/mudler/xlog"
)
const (
	// Default JSON keys used by ToJSONStructure when callers pass empty
	// overrides for the function-name and arguments field names.
	defaultFunctionNameKey = "name"
	defaultFunctionArgumentsKey = "arguments"
)
// Function is a function (tool) definition exposed to the model, in the
// OpenAI function-calling shape.
type Function struct {
	Name string `json:"name"`
	Description string `json:"description"`
	// Strict presumably maps to OpenAI's strict function-calling mode —
	// TODO confirm; nothing in this file reads it.
	Strict bool `json:"strict"`
	// Parameters holds a JSON-schema fragment (keys like "type",
	// "properties", "$defs") describing the function's arguments.
	Parameters map[string]interface{} `json:"parameters"`
}

// Functions is a list of function definitions.
type Functions []Function

// FunctionName pins a JSON-schema property to a single constant value; it is
// used to force the model to emit an exact function name.
type FunctionName struct {
	Const string `json:"const"`
}

// Argument is the JSON-schema object describing a function's arguments.
type Argument struct {
	Type string `json:"type"`
	Properties map[string]interface{} `json:"properties"`
}

// Tool wraps a Function in the OpenAI "tool" envelope.
type Tool struct {
	Type string `json:"type"`
	Function Function `json:"function,omitempty"`
}

// Tools is a list of tool wrappers.
type Tools []Tool
// ToJSONStructure converts a list of functions to a JSON structure that can be parsed to a grammar
// This allows the LLM to return a response of the type: { "name": "function_name", "arguments": { "arg1": "value1", "arg2": "value2" } }
//
// name and args override the JSON keys used for the function name and its
// arguments; empty strings fall back to "name" and "arguments".
func (f Functions) ToJSONStructure(name, args string) JSONFunctionStructure {
	nameKey := defaultFunctionNameKey
	argsKey := defaultFunctionArgumentsKey
	if name != "" {
		nameKey = name
	}
	if args != "" {
		argsKey = args
	}
	js := JSONFunctionStructure{}
	for _, function := range f {
		// t := function.Parameters["type"]
		//tt := t.(string)

		// Deep-copy the "properties" and "$defs" sub-schemas via a JSON
		// marshal/unmarshal round trip so the returned structure does not
		// alias the caller's maps. Errors are logged and the (empty) maps
		// are used as-is.
		properties := function.Parameters["properties"]
		defs := function.Parameters["$defs"]
		dat, _ := json.Marshal(properties)
		dat2, _ := json.Marshal(defs)
		prop := map[string]interface{}{}
		defsD := map[string]interface{}{}
		err := json.Unmarshal(dat, &prop)
		if err != nil {
			xlog.Error("error unmarshalling dat", "error", err)
		}
		err = json.Unmarshal(dat2, &defsD)
		if err != nil {
			xlog.Error("error unmarshalling dat2", "error", err)
		}
		// NOTE(review): only the first function's $defs are kept; later
		// functions' $defs are silently dropped — confirm this is intended.
		if js.Defs == nil {
			js.Defs = defsD
		}
		// One oneOf branch per function: a const-pinned name plus an object
		// schema for its arguments.
		property := map[string]interface{}{}
		property[nameKey] = FunctionName{Const: function.Name}
		property[argsKey] = Argument{
			Type: "object",
			Properties: prop,
		}
		js.OneOf = append(js.OneOf, Item{
			Type: "object",
			Properties: property,
		})
		/*
			js.AnyOf = append(js.OneOf, Item{
				Type: "object",
				Properties: property,
			})
		*/
	}
	return js
}
// Select returns a list containing only the function named name, or an
// empty (nil) list when no function matches.
func (f Functions) Select(name string) Functions {
	for _, fn := range f {
		if fn.Name == name {
			return Functions{fn}
		}
	}
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/functions_test.go | pkg/functions/functions_test.go | package functions_test
import (
. "github.com/mudler/LocalAI/pkg/functions"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Ginkgo spec covering the grammar helpers in pkg/functions:
// Functions.ToJSONStructure and Functions.Select.
var _ = Describe("LocalAI grammar functions", func() {
	Describe("ToJSONStructure()", func() {
		It("converts a list of functions to a JSON structure that can be parsed to a grammar", func() {
			// Two functions with simple string-typed parameters.
			var functions Functions = []Function{
				{
					Name: "create_event",
					Parameters: map[string]interface{}{
						"properties": map[string]interface{}{
							"event_name": map[string]interface{}{
								"type": "string",
							},
							"event_date": map[string]interface{}{
								"type": "string",
							},
						},
					},
				},
				{
					Name: "search",
					Parameters: map[string]interface{}{
						"properties": map[string]interface{}{
							"query": map[string]interface{}{
								"type": "string",
							},
						},
					},
				},
			}
			// Custom field names: the function name goes under "function",
			// its arguments under "arguments". One oneOf branch per function.
			js := functions.ToJSONStructure("function", "arguments")
			Expect(len(js.OneOf)).To(Equal(2))
			fnName := js.OneOf[0].Properties["function"].(FunctionName)
			fnArgs := js.OneOf[0].Properties["arguments"].(Argument)
			Expect(fnName.Const).To(Equal("create_event"))
			Expect(fnArgs.Properties["event_name"].(map[string]interface{})["type"]).To(Equal("string"))
			Expect(fnArgs.Properties["event_date"].(map[string]interface{})["type"]).To(Equal("string"))
			fnName = js.OneOf[1].Properties["function"].(FunctionName)
			fnArgs = js.OneOf[1].Properties["arguments"].(Argument)
			Expect(fnName.Const).To(Equal("search"))
			Expect(fnArgs.Properties["query"].(map[string]interface{})["type"]).To(Equal("string"))
			// Test with custom keys
			jsN := functions.ToJSONStructure("name", "arguments")
			Expect(len(jsN.OneOf)).To(Equal(2))
			fnName = jsN.OneOf[0].Properties["name"].(FunctionName)
			fnArgs = jsN.OneOf[0].Properties["arguments"].(Argument)
			Expect(fnName.Const).To(Equal("create_event"))
			Expect(fnArgs.Properties["event_name"].(map[string]interface{})["type"]).To(Equal("string"))
			Expect(fnArgs.Properties["event_date"].(map[string]interface{})["type"]).To(Equal("string"))
			fnName = jsN.OneOf[1].Properties["name"].(FunctionName)
			fnArgs = jsN.OneOf[1].Properties["arguments"].(Argument)
			Expect(fnName.Const).To(Equal("search"))
			Expect(fnArgs.Properties["query"].(map[string]interface{})["type"]).To(Equal("string"))
		})
	})
	Context("Select()", func() {
		It("selects one of the functions and returns a list containing only the selected one", func() {
			var functions Functions = []Function{
				{
					Name: "create_event",
				},
				{
					Name: "search",
				},
			}
			functions = functions.Select("create_event")
			Expect(len(functions)).To(Equal(1))
			Expect(functions[0].Name).To(Equal("create_event"))
		})
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/json_mode.go | pkg/functions/json_mode.go | package functions
// JSONBNF is a BNF-style (llama.cpp GBNF) grammar describing arbitrary JSON
// documents rooted at an object. It is used to constrain grammar-capable
// backends to emit only valid JSON ("JSON mode"). Note the grammar body is a
// raw string: its exact whitespace and line breaks are part of the value.
const (
	JSONBNF = `root ::= object
value ::= object | array | string | number | ("true" | "false" | "null") ws
object ::=
"{" ws (
string ":" ws value
("," ws string ":" ws value)*
)? "}" ws
array ::=
"[" ws (
value
("," ws value)*
)? "]" ws
string ::=
"\"" (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) # escapes
)* "\"" ws
number ::= ("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? ws
ws ::= ([ \t\n] ws)?`
)
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/parse.go | pkg/functions/parse.go | package functions
import (
"encoding/json"
"errors"
"io"
"regexp"
"slices"
"strings"
"unicode/utf8"
"github.com/mudler/LocalAI/pkg/functions/grammars"
"github.com/mudler/LocalAI/pkg/utils"
"github.com/mudler/xlog"
)
// @Description GrammarConfig contains configuration for grammar parsing
type GrammarConfig struct {
	// ParallelCalls enables the LLM to return multiple function calls in the same response
	ParallelCalls bool `yaml:"parallel_calls,omitempty" json:"parallel_calls,omitempty"`

	// DisableParallelNewLines disables newline separators between parallel calls
	// in the generated grammar (mapped to grammars.DisableParallelNewLines)
	DisableParallelNewLines bool `yaml:"disable_parallel_new_lines,omitempty" json:"disable_parallel_new_lines,omitempty"`

	// MixedMode enables the LLM to return strings and not only JSON objects
	// This is useful for models to not constraining returning only JSON and also messages back to the user
	MixedMode bool `yaml:"mixed_mode,omitempty" json:"mixed_mode,omitempty"`

	// NoMixedFreeString disables the mixed mode for free strings
	// In this way if the LLM selects a free string, it won't be mixed necessarily with JSON objects.
	// For example, if enabled the LLM or returns a JSON object or a free string, but not a mix of both
	// If disabled(default): the LLM can return a JSON object surrounded by free strings (e.g. `this is the JSON result: { "bar": "baz" } for your question`). This forces the LLM to return at least a JSON object, but its not going to be strict
	NoMixedFreeString bool `yaml:"no_mixed_free_string,omitempty" json:"no_mixed_free_string,omitempty"`

	// NoGrammar disables the grammar parsing and parses the responses directly from the LLM
	NoGrammar bool `yaml:"disable,omitempty" json:"disable,omitempty"`

	// Prefix is a string prepended to the grammar output (it is passed to
	// grammars.SetPrefix). This is useful when models prepend a tag before
	// returning JSON.
	Prefix string `yaml:"prefix,omitempty" json:"prefix,omitempty"`

	// ExpectStringsAfterJSON enables mixed string suffix
	ExpectStringsAfterJSON bool `yaml:"expect_strings_after_json,omitempty" json:"expect_strings_after_json,omitempty"`

	// PropOrder selects what order to print properties
	// for instance name,arguments will make print { "name": "foo", "arguments": { "bar": "baz" } }
	// instead of { "arguments": { "bar": "baz" }, "name": "foo" }
	PropOrder string `yaml:"properties_order,omitempty" json:"properties_order,omitempty"`

	// SchemaType can be configured to use a specific schema type to force the grammar
	// available : json, llama3.1
	SchemaType string `yaml:"schema_type,omitempty" json:"schema_type,omitempty"`

	// GrammarTriggers is the list of trigger words for grammar parsing (see GrammarTrigger)
	GrammarTriggers []GrammarTrigger `yaml:"triggers,omitempty" json:"triggers,omitempty"`
}
// @Description GrammarTrigger defines a trigger word for grammar parsing
type GrammarTrigger struct {
	// Word is the string that triggers the grammar
	Word string `yaml:"word,omitempty" json:"word,omitempty"`
}
// @Description FunctionsConfig is the configuration for the tool/function call.
// It includes setting to map the function name and arguments from the response
// and, for instance, also if processing the requests with BNF grammars.
type FunctionsConfig struct {
	// DisableNoAction disables the "no action" tool
	// By default we inject a tool that does nothing and is used to return an answer from the LLM
	DisableNoAction bool `yaml:"disable_no_action,omitempty" json:"disable_no_action,omitempty"`

	// GrammarConfig is the configuration for the grammar
	GrammarConfig GrammarConfig `yaml:"grammar,omitempty" json:"grammar,omitempty"`

	// NoActionFunctionName is the name of the function that does nothing. It defaults to "answer"
	NoActionFunctionName string `yaml:"no_action_function_name,omitempty" json:"no_action_function_name,omitempty"`

	// NoActionDescriptionName is the name of the function that returns the description of the no action function
	NoActionDescriptionName string `yaml:"no_action_description_name,omitempty" json:"no_action_description_name,omitempty"`

	// ResponseRegex is a named regex to extract the function name and arguments from the response
	ResponseRegex []string `yaml:"response_regex,omitempty" json:"response_regex,omitempty"`

	// JSONRegexMatch is a regex to extract the JSON object from the response
	JSONRegexMatch []string `yaml:"json_regex_match,omitempty" json:"json_regex_match,omitempty"`

	// ArgumentRegex is a named regex to extract the arguments from the response. Use ArgumentRegexKey and ArgumentRegexValue to set the names of the named regex for key and value of the arguments.
	ArgumentRegex []string `yaml:"argument_regex,omitempty" json:"argument_regex,omitempty"`
	// ArgumentRegexKey / ArgumentRegexValue are the named-group names used by
	// ArgumentRegex for key and value extraction. default: key and value
	ArgumentRegexKey   string `yaml:"argument_regex_key_name,omitempty" json:"argument_regex_key_name,omitempty"`     // default: key
	ArgumentRegexValue string `yaml:"argument_regex_value_name,omitempty" json:"argument_regex_value_name,omitempty"` // default: value

	// ReplaceFunctionResults allow to replace strings in the results before parsing them
	ReplaceFunctionResults []ReplaceResult `yaml:"replace_function_results,omitempty" json:"replace_function_results,omitempty"`

	// ReplaceLLMResult allow to replace strings in the results before parsing them
	// (applied by CleanupLLMResult; each Key is a regex pattern)
	ReplaceLLMResult []ReplaceResult `yaml:"replace_llm_results,omitempty" json:"replace_llm_results,omitempty"`

	// CaptureLLMResult is a regex to extract a string from the LLM response
	// that is used as return string when using tools.
	// This is useful for e.g. if the LLM outputs a reasoning and we want to get the reasoning as a string back
	// (the captured text must be in the pattern's first capture group; see ParseTextContent)
	CaptureLLMResult []string `yaml:"capture_llm_results,omitempty" json:"capture_llm_results,omitempty"`

	// FunctionNameKey enables the LLM to return { "name": "function_name", "arguments": { "arg1": "value1", "arg2": "value2" } }
	// instead of { "function": "function_name", "arguments": { "arg1": "value1", "arg2": "value2" } }.
	// This might be useful for certain models trained with the function name as the first token.
	FunctionNameKey string `yaml:"function_name_key,omitempty" json:"function_name_key,omitempty"`
	// FunctionArgumentsKey overrides the JSON key under which the arguments
	// object is expected (presumably defaults to "arguments" — confirm at call sites)
	FunctionArgumentsKey string `yaml:"function_arguments_key,omitempty" json:"function_arguments_key,omitempty"`

	// XMLFormatPreset is an optional preset format name to force (e.g., "qwen3-coder", "glm-4.5", "minimax-m2")
	// If empty, auto-detection will try all formats
	XMLFormatPreset string `yaml:"xml_format_preset,omitempty" json:"xml_format_preset,omitempty"`

	// XMLFormat is an optional custom XML format configuration
	// If set, only this format will be tried (overrides XMLFormatPreset)
	XMLFormat *XMLToolCallFormat `yaml:"xml_format,omitempty" json:"xml_format,omitempty"`
}
// @Description ReplaceResult defines a key-value replacement for function results.
// Key is compiled as a regular expression and every match in the processed
// string is replaced with Value (see CleanupLLMResult for the LLM-result case).
type ReplaceResult struct {
	// Key is the regular expression pattern to match
	Key string `yaml:"key,omitempty" json:"key,omitempty"`
	// Value is the replacement text for every match of Key
	Value string `yaml:"value,omitempty" json:"value,omitempty"`
}
// @Description XMLToolCallFormat defines the structure for parsing XML-style tool calls
// This mirrors llama.cpp's xml_tool_call_format structure.
// All tag strings are matched literally by the parsers in this package
// (they are escaped with regexp.QuoteMeta before being used in patterns).
type XMLToolCallFormat struct {
	// ScopeStart is the optional wrapper start tag (e.g., "<minimax:tool_call>")
	ScopeStart string `yaml:"scope_start,omitempty" json:"scope_start,omitempty"`
	// ToolStart is the tool call start tag (e.g., "<tool_call>", "<invoke name=\"")
	ToolStart string `yaml:"tool_start,omitempty" json:"tool_start,omitempty"`
	// ToolSep is the separator after tool name (e.g., ">", "\">")
	ToolSep string `yaml:"tool_sep,omitempty" json:"tool_sep,omitempty"`
	// KeyStart is the parameter key start tag (e.g., "<parameter=", "<arg_key>")
	KeyStart string `yaml:"key_start,omitempty" json:"key_start,omitempty"`
	// KeyValSep is the separator between key and value (e.g., ">", "</arg_key>")
	KeyValSep string `yaml:"key_val_sep,omitempty" json:"key_val_sep,omitempty"`
	// ValEnd is the parameter value end tag (e.g., "</parameter>", "</arg_value>")
	ValEnd string `yaml:"val_end,omitempty" json:"val_end,omitempty"`
	// ToolEnd is the tool call end tag (e.g., "</tool_call>", "</invoke>")
	ToolEnd string `yaml:"tool_end,omitempty" json:"tool_end,omitempty"`
	// ScopeEnd is the optional wrapper end tag (e.g., "</minimax:tool_call>")
	ScopeEnd string `yaml:"scope_end,omitempty" json:"scope_end,omitempty"`
	// KeyValSep2 is the optional second separator (for GLM 4.5 format: "</arg_key>\n<arg_value>")
	KeyValSep2 *string `yaml:"key_val_sep2,omitempty" json:"key_val_sep2,omitempty"`
	// RawArgVal indicates whether to treat values as raw strings (true) vs JSON (false), nil means both allowed
	RawArgVal *bool `yaml:"raw_argval,omitempty" json:"raw_argval,omitempty"`
	// LastValEnd is the alternative value end for last parameter
	LastValEnd *string `yaml:"last_val_end,omitempty" json:"last_val_end,omitempty"`
	// LastToolEnd is the alternative tool end for last tool call
	LastToolEnd *string `yaml:"last_tool_end,omitempty" json:"last_tool_end,omitempty"`
	// TrimRawArgVal indicates whether to trim whitespace from raw values
	TrimRawArgVal bool `yaml:"trim_raw_argval,omitempty" json:"trim_raw_argval,omitempty"`
	// AllowToolcallInThink allows tool calls inside thinking/reasoning blocks
	AllowToolcallInThink bool `yaml:"allow_toolcall_in_think,omitempty" json:"allow_toolcall_in_think,omitempty"`
}
// FuncCallResults holds a single parsed tool/function call extracted from an
// LLM response.
type FuncCallResults struct {
	// Name is the function name to invoke
	Name string
	// Arguments is the arguments object serialized as a JSON string
	Arguments string
}
// GrammarOptions translates this FunctionsConfig into the list of option
// functions consumed by the grammars package when building a sampling grammar.
// Boolean/stringly-typed settings are only emitted when set; the property
// order option is always appended last.
func (g FunctionsConfig) GrammarOptions() []func(o *grammars.GrammarOption) {
	var options []func(o *grammars.GrammarOption)
	cfg := g.GrammarConfig

	if cfg.MixedMode {
		options = append(options, grammars.EnableMaybeString)
	}
	if cfg.ParallelCalls {
		options = append(options, grammars.EnableMaybeArray)
	}
	if cfg.DisableParallelNewLines {
		options = append(options, grammars.DisableParallelNewLines)
	}
	if cfg.Prefix != "" {
		options = append(options, grammars.SetPrefix(cfg.Prefix))
	}
	if cfg.NoMixedFreeString {
		options = append(options, grammars.NoMixedFreeString)
	}
	if cfg.ExpectStringsAfterJSON {
		options = append(options, grammars.ExpectStringsAfterJSON)
	}
	if cfg.SchemaType != "" {
		options = append(options, grammars.WithSchemaType(grammars.NewType(cfg.SchemaType)))
	}
	if g.FunctionNameKey != "" {
		options = append(options, grammars.WithFunctionName(g.FunctionNameKey))
	}

	return append(options, grammars.SetPropOrder(cfg.PropOrder))
}
// CleanupLLMResult applies the configured ReplaceLLMResult replacements to the
// raw LLM output and returns the processed string. Each entry's Key is
// compiled as a regular expression and all matches are replaced with Value.
//
// The patterns come from user configuration, so instead of panicking via
// regexp.MustCompile on an invalid pattern (which would crash the server on a
// config typo), invalid entries are logged and skipped.
func CleanupLLMResult(llmresult string, functionConfig FunctionsConfig) string {
	xlog.Debug("LLM result", "result", llmresult)

	for _, item := range functionConfig.ReplaceLLMResult {
		k, v := item.Key, item.Value
		xlog.Debug("Replacing", "key", k, "value", v)
		re, err := regexp.Compile(k)
		if err != nil {
			// Best-effort: skip the broken replacement rather than aborting.
			xlog.Debug("invalid replace_llm_results regex, skipping", "key", k, "error", err)
			continue
		}
		llmresult = re.ReplaceAllString(llmresult, v)
	}
	xlog.Debug("LLM result(processed)", "result", llmresult)
	return llmresult
}
// ParseTextContent extracts free-form text from the LLM response using the
// configured CaptureLLMResult regular expressions. The first pattern whose
// first capture group matches wins; the captured text is returned with
// surrounding whitespace trimmed. Returns "" when nothing matches.
func ParseTextContent(llmresult string, functionConfig FunctionsConfig) string {
	xlog.Debug("ParseTextContent", "result", llmresult)
	xlog.Debug("CaptureLLMResult", "config", functionConfig.CaptureLLMResult)

	for _, r := range functionConfig.CaptureLLMResult {
		// Patterns are user-supplied configuration: compile defensively
		// instead of panicking via MustCompile on a bad pattern.
		respRegex, err := regexp.Compile(r)
		if err != nil {
			xlog.Debug("invalid capture_llm_results regex, skipping", "pattern", r, "error", err)
			continue
		}
		match := respRegex.FindStringSubmatch(llmresult)
		// match[0] is the full match; the captured text is at match[1].
		// Requiring len(match) >= 2 fixes an index-out-of-range panic the
		// previous ">= 1" check caused for patterns without a capture group.
		if len(match) >= 2 {
			return strings.TrimSpace(match[1])
		}
	}
	return ""
}
// ParseJSON extracts all JSON objects from a string that may contain several
// objects with arbitrary garbage between them, e.g.
//
//	{ "foo": "bar" } invalid { "baz": "qux" }
//
// becomes [ { "foo": "bar" }, { "baz": "qux" } ].
//
// Credits to Michael Yang (https://github.com/mxyng) for the original
// offset-shifting implementation.
//
// It first runs the iterative parser (better suited for streaming); when that
// errors or yields nothing, it falls back to the legacy decoder-based parser
// for backward compatibility.
func ParseJSON(s string) ([]map[string]any, error) {
	if results, err := ParseJSONIterative(s, false); err == nil && len(results) > 0 {
		return results, nil
	}
	return parseJSONLegacy(s)
}
// ParseJSONIterative parses JSON using the iterative ChatMsgParser.
// Supports partial parsing for streaming scenarios (isPartial=true).
// Returns objects and arrays (matching llama.cpp behavior): top-level objects
// are collected directly, and objects inside top-level arrays are flattened
// into the result; other top-level values (strings, numbers, ...) are dropped.
func ParseJSONIterative(s string, isPartial bool) ([]map[string]any, error) {
	parser := NewChatMsgParser(s, isPartial)
	var results []map[string]any

	// Try to parse JSON values one by one until the cursor reaches the end.
	for parser.Pos() < len(parser.Input()) {
		jsonValue, isPartialJSON, _, err := parser.TryConsumeJSON()
		if err != nil {
			// If it's a partial exception and we're in partial mode, stop and
			// return whatever complete values were collected so far.
			if _, ok := err.(*ChatMsgPartialException); ok && isPartial {
				break
			}
			// For non-partial errors or when not in partial mode, fall back to
			// the legacy decoder-based parsing of the whole input.
			return parseJSONLegacy(s)
		}
		if jsonValue != nil {
			// Collect objects; flatten arrays into their object elements.
			if obj, ok := jsonValue.(map[string]any); ok {
				results = append(results, obj)
			} else if arr, ok := jsonValue.([]any); ok {
				for _, item := range arr {
					if obj, ok := item.(map[string]any); ok {
						results = append(results, obj)
					}
				}
			}
		}
		// A partial (truncated) JSON value means the rest of the input cannot
		// contain further complete values — stop here.
		if isPartialJSON {
			break
		}
		// Skip whitespace between JSON values
		parser.ConsumeSpaces()
	}

	if len(results) > 0 {
		return results, nil
	}
	// Fallback to legacy parsing if iterative parser found nothing
	return parseJSONLegacy(s)
}
// parseJSONLegacy is the original decoder-based JSON parsing (kept for
// compatibility). It scans the input left to right, decoding one object at a
// time; on a syntax or type error it advances the offset past the error
// position and retries, which lets it skip garbage between objects.
func parseJSONLegacy(s string) ([]map[string]any, error) {
	var parsed []map[string]any

	pos := 0
	for pos < len(s) {
		dec := json.NewDecoder(strings.NewReader(s[pos:]))

		var current map[string]any
		err := dec.Decode(&current)
		if errors.Is(err, io.EOF) {
			// Nothing but whitespace left.
			return parsed, nil
		}
		if err == nil {
			pos += int(dec.InputOffset())
			parsed = append(parsed, current)
			continue
		}

		// Recoverable decode errors: skip past the reported offset and retry.
		var syntaxErr *json.SyntaxError
		var typeErr *json.UnmarshalTypeError
		switch {
		case errors.As(err, &syntaxErr):
			pos += int(syntaxErr.Offset)
		case errors.As(err, &typeErr):
			pos += int(typeErr.Offset)
		default:
			// Anything else is fatal; return what was collected so far.
			return parsed, err
		}
	}
	return parsed, nil
}
// GetXMLFormatPreset returns a preset XML format by name, or nil if not found.
// This is exported for use in chat.go streaming integration.
func GetXMLFormatPreset(name string) *XMLToolCallFormat {
	for _, preset := range getAllXMLFormats() {
		if preset.name == name {
			return preset.format
		}
	}
	return nil
}
// xmlFormatPreset holds a preset format with its name
type xmlFormatPreset struct {
	// name is the preset identifier (e.g. "qwen3-coder") used for lookup and
	// in the XMLFormatPreset configuration field
	name string
	// format describes the tag layout this model family uses for tool calls
	format *XMLToolCallFormat
}
// getAllXMLFormats returns all preset XML formats matching llama.cpp's formats.
// The slice order matters: auto-detection (parseXMLAutoDetect and
// ParseXMLIterative with a nil format) tries the presets in this order and
// stops at the first one that yields tool calls.
func getAllXMLFormats() []xmlFormatPreset {
	// Shared pointer targets for the *bool / *string optional fields below.
	falseVal := false
	commaSpace := ", "
	emptyValEnd := ""
	return []xmlFormatPreset{
		{
			// Functionary: <function=name>{...json args...}</function>
			name: "functionary",
			format: &XMLToolCallFormat{
				ScopeStart: "",
				ToolStart:  "<function=",
				ToolSep:    ">",
				KeyStart:   "", // Parameters are JSON, not XML tags
				KeyValSep:  "",
				ValEnd:     "",
				ToolEnd:    "</function>",
				ScopeEnd:   "",
				RawArgVal:  &falseVal, // JSON only
			},
		},
		{
			// Qwen3-Coder: <tool_call><function=f><parameter=k>v</parameter></function></tool_call>
			name: "qwen3-coder",
			format: &XMLToolCallFormat{
				ScopeStart:    "<tool_call>",
				ToolStart:     "<function=",
				ToolSep:       ">",
				KeyStart:      "<parameter=",
				KeyValSep:     ">",
				ValEnd:        "</parameter>",
				ToolEnd:       "</function>",
				ScopeEnd:      "</tool_call>",
				TrimRawArgVal: true,
			},
		},
		{
			// GLM 4.5: <tool_call>name <arg_key>k</arg_key><arg_value>v</arg_value></tool_call>
			name: "glm-4.5",
			format: &XMLToolCallFormat{
				ScopeStart: "",
				ToolStart:  "<tool_call>",
				ToolSep:    "",
				KeyStart:   "<arg_key>",
				KeyValSep:  "</arg_key>",
				KeyValSep2: func() *string { s := "<arg_value>"; return &s }(),
				ValEnd:     "</arg_value>",
				ToolEnd:    "</tool_call>",
				ScopeEnd:   "",
			},
		},
		{
			// MiniMax-M2: <minimax:tool_call><invoke name="f"><parameter name="k">v</parameter></invoke></minimax:tool_call>
			name: "minimax-m2",
			format: &XMLToolCallFormat{
				ScopeStart: "<minimax:tool_call>",
				ToolStart:  "<invoke name=\"",
				ToolSep:    "\">",
				KeyStart:   "<parameter name=\"",
				KeyValSep:  "\">",
				ValEnd:     "</parameter>",
				ToolEnd:    "</invoke>",
				ScopeEnd:   "</minimax:tool_call>",
			},
		},
		{
			// Kimi-K2: special tokens around a JSON arguments object.
			name: "kimi-k2",
			format: &XMLToolCallFormat{
				ScopeStart: "<|tool_calls_section_begin|>",
				ToolStart:  "<|tool_call_begin|>",
				ToolSep:    "<|tool_call_argument_begin|>{",
				KeyStart:   "\"",
				KeyValSep:  "\":",
				ValEnd:     ",",
				ToolEnd:    "}<|tool_call_end|>",
				ScopeEnd:   "<|tool_calls_section_end|>",
				LastValEnd: &emptyValEnd,
				RawArgVal:  &falseVal,
				AllowToolcallInThink: true, // Kimi-K2 supports tool calls in thinking blocks
			},
		},
		{
			// Apriel-1.5: a JSON array of {"name": ..., "arguments": {...}} inside <tool_calls>.
			name: "apriel-1.5",
			format: &XMLToolCallFormat{
				ScopeStart:  "<tool_calls>[",
				ToolStart:   "{\"name\": \"",
				ToolSep:     "\", \"arguments\": {",
				KeyStart:    "\"",
				KeyValSep:   "\": ",
				ValEnd:      commaSpace,
				ToolEnd:     "}, ",
				ScopeEnd:    "]</tool_calls>",
				LastValEnd:  &emptyValEnd,
				LastToolEnd: func() *string { s := "}"; return &s }(),
				RawArgVal:   &falseVal,
			},
		},
		{
			// Xiaomi MiMo: a single JSON object per <tool_call> block.
			name: "xiaomi-mimo",
			format: &XMLToolCallFormat{
				ScopeStart: "",
				ToolStart:  "<tool_call>\n{\"name\": \"",
				ToolSep:    "\", \"arguments\": {",
				KeyStart:   "\"",
				KeyValSep:  "\": ",
				ValEnd:     commaSpace,
				ToolEnd:    "}\n</tool_call>",
				ScopeEnd:   "",
				LastValEnd: &emptyValEnd,
				RawArgVal:  &falseVal,
			},
		},
	}
}
// parseXMLAutoDetect tries all preset formats in sequence and returns the
// results from the first one that parses at least one tool call. When no
// preset matches it returns (nil, nil).
func parseXMLAutoDetect(s string) ([]FuncCallResults, error) {
	for _, preset := range getAllXMLFormats() {
		results, err := parseXMLWithFormat(s, preset.format)
		if err != nil || len(results) == 0 {
			continue
		}
		xlog.Debug("XML auto-detection succeeded", "format", preset.name, "count", len(results))
		return results, nil
	}
	return nil, nil
}
// ParseXML parses XML-style tool calls from a string that might contain text
// and valid XML tool calls. If format is nil, it auto-detects by trying all
// preset formats. Returns a slice of FuncCallResults with function names and
// JSON-encoded arguments.
//
// The iterative parser is tried first (better streaming/partial support); when
// it errors or finds nothing, the regex-based parsers are used as a
// backward-compatible fallback.
func ParseXML(s string, format *XMLToolCallFormat) ([]FuncCallResults, error) {
	if results, err := ParseXMLIterative(s, format, false); err == nil && len(results) > 0 {
		return results, nil
	}
	if format != nil {
		return parseXMLWithFormat(s, format)
	}
	return parseXMLAutoDetect(s)
}
// ParseXMLIterative parses XML tool calls using the iterative ChatMsgParser.
// This provides better streaming and partial parsing support than the regex
// parsers. With a nil format it auto-detects by trying every preset in order;
// a partial exception during any attempt returns the tool calls collected so
// far (useful mid-stream), while other errors simply move on to the next
// preset.
func ParseXMLIterative(s string, format *XMLToolCallFormat, isPartial bool) ([]FuncCallResults, error) {
	parser := NewChatMsgParser(s, isPartial)

	// Auto-detect format if not provided
	if format == nil {
		formats := getAllXMLFormats()
		for _, fmtPreset := range formats {
			if fmtPreset.format != nil {
				// Rewind the parser and drop any tool calls collected by a
				// previous (failed) preset before trying the next one.
				parser.MoveTo(0)
				parser.ClearTools()
				success, err := parser.TryConsumeXMLToolCalls(fmtPreset.format)
				if err != nil {
					// Check if it's a partial exception (recoverable)
					if _, ok := err.(*ChatMsgPartialException); ok {
						// Partial parse, return what we have
						return parser.ToolCalls(), nil
					}
					// Try next format
					continue
				}
				if success && len(parser.ToolCalls()) > 0 {
					return parser.ToolCalls(), nil
				}
			}
		}
		// No format matched, return empty
		return []FuncCallResults{}, nil
	}

	// Use specified format
	success, err := parser.TryConsumeXMLToolCalls(format)
	if err != nil {
		// Check if it's a partial exception (recoverable)
		if _, ok := err.(*ChatMsgPartialException); ok {
			// Partial parse, return what we have
			return parser.ToolCalls(), nil
		}
		return nil, err
	}
	if !success {
		return []FuncCallResults{}, nil
	}
	return parser.ToolCalls(), nil
}
// ParseXMLPartial parses XML tool calls that may be incomplete (for streaming
// support). It returns both the tool calls parsed so far and a flag telling
// whether the tail of the input looks like a truncated tag, so callers can
// decide to keep buffering instead of emitting. Reference: llama.cpp's partial
// parsing support. Uses the iterative parser for the actual extraction; the
// IsPartial flag is computed with string-suffix heuristics below.
func ParseXMLPartial(s string, format *XMLToolCallFormat) (*PartialXMLResult, error) {
	// Use iterative parser with partial flag enabled for better streaming support
	results, err := ParseXMLIterative(s, format, true)
	if err != nil {
		return nil, err
	}

	// Check if the input ends with incomplete XML tags (indicating partial content)
	isPartial := false
	trimmed := strings.TrimSpace(s)

	// Auto-detect format if not provided to check for partial content.
	// NOTE(review): only the FIRST preset is used for the partial-suffix
	// heuristic here (unlike the extraction above, which tries all) — confirm
	// this is intended.
	if format == nil {
		formats := getAllXMLFormats()
		for _, fmtPreset := range formats {
			if fmtPreset.format != nil {
				format = fmtPreset.format
				break
			}
		}
	}

	if format != nil {
		// Check if string ends with incomplete tool_end or val_end
		// Also check for incomplete tags like "</parameter" (missing >)
		if !strings.HasSuffix(trimmed, format.ToolEnd) {
			if format.LastToolEnd != nil && !strings.HasSuffix(trimmed, *format.LastToolEnd) {
				// Check if it ends with a strict prefix of tool_end (i.e. the
				// closing tag is currently being streamed).
				if len(trimmed) > 0 && len(format.ToolEnd) > 0 {
					suffix := trimmed[max(0, len(trimmed)-len(format.ToolEnd)):]
					if strings.HasPrefix(format.ToolEnd, suffix) && suffix != format.ToolEnd {
						isPartial = true
					}
				}
			}
			// Also check for incomplete closing tags (ends with < but not complete)
			if strings.HasSuffix(trimmed, "<") || strings.HasSuffix(trimmed, "</") {
				isPartial = true
			}
		}
		if !strings.HasSuffix(trimmed, format.ValEnd) {
			if format.LastValEnd != nil && !strings.HasSuffix(trimmed, *format.LastValEnd) {
				// Same prefix test for a value-closing tag in flight.
				if len(trimmed) > 0 && len(format.ValEnd) > 0 {
					suffix := trimmed[max(0, len(trimmed)-len(format.ValEnd)):]
					if strings.HasPrefix(format.ValEnd, suffix) && suffix != format.ValEnd {
						isPartial = true
					}
				}
			}
			// Check for incomplete closing tags
			if strings.HasSuffix(trimmed, "<") || strings.HasSuffix(trimmed, "</") {
				isPartial = true
			}
		}
		// Check for incomplete parameter tags
		if format.KeyStart != "" && (strings.HasSuffix(trimmed, "<parameter") || strings.HasSuffix(trimmed, "<parameter=")) {
			isPartial = true
		}
		// Check if we have tool_start but missing tool_end (incomplete tool call)
		if strings.Contains(trimmed, format.ToolStart) && !strings.HasSuffix(trimmed, format.ToolEnd) {
			if format.LastToolEnd == nil || !strings.HasSuffix(trimmed, *format.LastToolEnd) {
				// Check if tool_end appears anywhere (if not, it's partial)
				if !strings.Contains(trimmed, format.ToolEnd) {
					isPartial = true
				}
			}
		}
	}

	return &PartialXMLResult{
		Results:   results,
		IsPartial: isPartial,
	}, nil
}
// max returns the larger of two ints. (Kept for compatibility; Go 1.21+ also
// provides a builtin max, which this package-level function shadows.)
func max(a, b int) int {
	if a < b {
		return b
	}
	return a
}
// parseXMLWithFormat parses XML tool calls using a specific format configuration
// Returns parsed results and error. Handles errors gracefully by continuing to parse other tool calls.
func parseXMLWithFormat(s string, format *XMLToolCallFormat) ([]FuncCallResults, error) {
var results []FuncCallResults
// Handle Functionary format (JSON parameters inside XML tags)
if format.KeyStart == "" && format.ToolStart == "<function=" {
return parseFunctionaryFormat(s, format)
}
// Handle formats with JSON-like structure (Apriel-1.5, Xiaomi-MiMo)
// Note: Kimi-K2 is NOT JSON-like - it uses standard XML format with JSON arguments
if format.ToolStart != "" && strings.Contains(format.ToolStart, "{\"name\"") {
return parseJSONLikeXMLFormat(s, format)
}
// Handle GLM 4.5 format specially (function name on separate line after <tool_call>)
if format.ToolStart == "<tool_call>" && format.ToolSep == "" && format.KeyStart == "<arg_key>" {
return parseGLM45Format(s, format)
}
// Build regex patterns from format configuration
// Escape special regex characters in format strings
escapeRegex := func(str string) string {
return regexp.QuoteMeta(str)
}
// Build scope pattern (optional)
// llama.cpp validates that only whitespace appears before scope_start
var scopePattern *regexp.Regexp
if format.ScopeStart != "" {
// Match scope_start with optional whitespace before it, but validate it's only whitespace
scopeRegex := `(?s)(\s*)` + escapeRegex(format.ScopeStart) + `\s*(.*?)\s*` + escapeRegex(format.ScopeEnd)
scopePattern = regexp.MustCompile(scopeRegex)
}
// Build tool call patterns - try both primary and alternative tool_end
var toolCallPatterns []*regexp.Regexp
buildToolCallPattern := func(toolEnd string) string {
toolCallRegex := `(?s)` + escapeRegex(format.ToolStart)
if format.ToolSep != "" {
// Tool name is between ToolStart and ToolSep
// Use non-greedy match to capture function name until ToolSep
// We can't use [^...] for multi-character strings, so use .*? with ToolSep
toolCallRegex += `(.*?)` + escapeRegex(format.ToolSep)
toolCallRegex += `(.*?)` + escapeRegex(toolEnd)
} else {
// Tool name might be on a separate line (GLM 4.5) or after ToolStart
// For GLM 4.5: <tool_call>\nfunction_name\n<arg_key>...
// Match function name until we find key_start or newline
if format.KeyStart != "" {
// Match whitespace/newlines, then function name, then whitespace, then key_start
// We'll capture the function name and the rest (including key_start)
toolCallRegex += `\s*([^\n` + escapeRegex(format.KeyStart) + `]+?)\s*` + escapeRegex(format.KeyStart) + `(.*?)` + escapeRegex(toolEnd)
} else {
// Match until newline
toolCallRegex += `\s*([^\n]+)\s*(.*?)` + escapeRegex(toolEnd)
}
}
return toolCallRegex
}
// Primary pattern with tool_end
toolCallPatterns = append(toolCallPatterns, regexp.MustCompile(buildToolCallPattern(format.ToolEnd)))
// Alternative pattern with last_tool_end if specified
if format.LastToolEnd != nil && *format.LastToolEnd != "" {
toolCallPatterns = append(toolCallPatterns, regexp.MustCompile(buildToolCallPattern(*format.LastToolEnd)))
}
// Extract content to search in
searchContent := s
if scopePattern != nil {
scopeMatches := scopePattern.FindAllStringSubmatch(s, -1)
if len(scopeMatches) == 0 {
// Scope not found
// If scope_end is not empty/whitespace, this might be an error
// But scope is optional, so try parsing without scope
if strings.TrimSpace(format.ScopeEnd) != "" {
// Scope expected but not found - this might indicate incomplete input
// For now, try parsing without scope (scope is optional)
xlog.Debug("scope_start not found but scope_end is non-empty", "scope_end", format.ScopeEnd)
}
searchContent = s
} else {
// Process each scope match separately
for _, scopeMatch := range scopeMatches {
if len(scopeMatch) >= 3 {
// scopeMatch[1] is the whitespace before scope_start (we validate it's only whitespace)
// scopeMatch[2] is the content inside the scope
prelude := scopeMatch[1]
// Validate that prelude contains only whitespace (llama.cpp behavior)
allWhitespace := true
for _, r := range prelude {
if !strings.ContainsRune(" \t\n\r", r) {
allWhitespace = false
break
}
}
if !allWhitespace {
// Non-whitespace before scope_start, skip this match
// This matches llama.cpp's behavior (line 394)
xlog.Debug("non-whitespace before scope_start, skipping match", "prelude", prelude)
continue
}
scopeContent := scopeMatch[2]
// Validate scope_end is present in the match (scope pattern should include it)
// The regex pattern already includes scope_end, so if we matched, it should be there
// But we can verify the match is complete
// Find all tool calls within this scope - try both patterns
var toolCallMatches [][]string
for _, pattern := range toolCallPatterns {
matches := pattern.FindAllStringSubmatch(scopeContent, -1)
toolCallMatches = append(toolCallMatches, matches...)
}
for _, match := range toolCallMatches {
if len(match) >= 3 {
functionName := strings.TrimSpace(match[1])
// Handle Kimi-K2 function name prefix stripping: "functions.name:index" -> "name"
if strings.HasPrefix(functionName, "functions.") {
// Remove "functions." prefix
functionName = functionName[10:]
// Remove ":index" suffix if present
if idx := strings.LastIndex(functionName, ":"); idx != -1 {
// Check if what follows ":" is all digits
suffix := functionName[idx+1:]
if len(suffix) > 0 {
allDigits := true
for _, r := range suffix {
if r < '0' || r > '9' {
allDigits = false
break
}
}
if allDigits {
functionName = functionName[:idx]
}
}
}
}
var functionContent string
if format.ToolSep == "" && format.KeyStart != "" {
// Content includes key_start, so prepend it
functionContent = format.KeyStart + match[2]
} else {
functionContent = match[2]
}
// Check for empty tool call: if tool_end appears in function name or content is empty
// This matches llama.cpp's behavior (lines 419-424)
if strings.Contains(functionName, format.ToolEnd) || (format.LastToolEnd != nil && strings.Contains(functionName, *format.LastToolEnd)) {
// Empty tool call - emit with empty arguments
cleanName := strings.TrimSpace(functionName)
if idx := strings.Index(cleanName, format.ToolEnd); idx != -1 {
cleanName = strings.TrimSpace(cleanName[:idx])
} else if format.LastToolEnd != nil {
if idx := strings.Index(cleanName, *format.LastToolEnd); idx != -1 {
cleanName = strings.TrimSpace(cleanName[:idx])
}
}
results = append(results, FuncCallResults{
Name: cleanName,
Arguments: "{}",
})
continue
}
// Check if content is empty or only whitespace
if strings.TrimSpace(functionContent) == "" {
// Empty tool call - emit with empty arguments
results = append(results, FuncCallResults{
Name: functionName,
Arguments: "{}",
})
continue
}
// Parse parameters based on format
args, err := parseXMLParametersWithFormat(functionContent, format)
if err != nil {
xlog.Debug("error parsing XML parameters", "error", err, "content", functionContent)
continue
}
// If no parameters were parsed and content was not empty, still create tool call with empty args
if len(args) == 0 && strings.TrimSpace(functionContent) != "" {
// Check if there's any parameter-like content that just didn't match
if !strings.Contains(functionContent, format.KeyStart) {
argsJSON, _ := json.Marshal(args)
results = append(results, FuncCallResults{
Name: functionName,
Arguments: string(argsJSON),
})
continue
}
}
argsJSON, _ := json.Marshal(args)
results = append(results, FuncCallResults{
Name: functionName,
Arguments: string(argsJSON),
})
}
}
}
}
return results, nil
}
}
// No scope, find all tool calls directly in the string - try both patterns
var toolCallMatches [][]string
for _, pattern := range toolCallPatterns {
matches := pattern.FindAllStringSubmatch(searchContent, -1)
toolCallMatches = append(toolCallMatches, matches...)
}
if len(toolCallMatches) == 0 {
return nil, nil
}
// Process each tool call
for _, match := range toolCallMatches {
if len(match) < 3 {
continue
}
// Validate tool_end is complete (exact size match)
// This matches llama.cpp's behavior (line 595)
fullMatch := match[0]
expectedToolEnd := format.ToolEnd
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | true |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/json_stack_parser.go | pkg/functions/json_stack_parser.go | package functions
import (
"encoding/json"
"errors"
"regexp"
"strings"
"unicode"
)
// JSONStackElementType represents the type of JSON stack element
type JSONStackElementType int
const (
JSONStackElementObject JSONStackElementType = iota
JSONStackElementKey
JSONStackElementArray
)
// JSONStackElement represents an element in the JSON parsing stack
type JSONStackElement struct {
Type JSONStackElementType
Key string
}
// JSONErrorLocator tracks JSON parsing state and errors
type JSONErrorLocator struct {
position int
foundError bool
lastToken string
exceptionMessage string
stack []JSONStackElement
}
// parseJSONWithStack parses JSON with stack tracking, matching llama.cpp's common_json_parse
// Returns the parsed JSON value, whether it was healed, and any error
func parseJSONWithStack(input string, healingMarker string) (any, bool, string, error) {
if healingMarker == "" {
// No healing marker, just try to parse normally
var result any
if err := json.Unmarshal([]byte(input), &result); err != nil {
return nil, false, "", err
}
return result, false, "", nil
}
// Try to parse complete JSON first
var result any
if err := json.Unmarshal([]byte(input), &result); err == nil {
return result, false, "", nil
}
// Parsing failed, need to track stack and heal
errLoc := &JSONErrorLocator{
position: 0,
foundError: false,
stack: make([]JSONStackElement, 0),
}
// Parse with stack tracking to find where error occurs
errorPos, err := parseJSONWithStackTracking(input, errLoc)
if err == nil && !errLoc.foundError {
// No error found, should have parsed successfully
var result any
if err := json.Unmarshal([]byte(input), &result); err != nil {
return nil, false, "", err
}
return result, false, "", nil
}
if !errLoc.foundError || len(errLoc.stack) == 0 {
// Can't heal without stack information
return nil, false, "", errors.New("incomplete JSON")
}
// Build closing braces/brackets from stack
closing := ""
for i := len(errLoc.stack) - 1; i >= 0; i-- {
el := errLoc.stack[i]
if el.Type == JSONStackElementObject {
closing += "}"
} else if el.Type == JSONStackElementArray {
closing += "]"
}
// Keys don't add closing characters
}
// Get the partial input up to error position
partialInput := input
if errorPos > 0 && errorPos < len(input) {
partialInput = input[:errorPos]
}
// Find last non-space character
lastNonSpacePos := strings.LastIndexFunc(partialInput, func(r rune) bool {
return !unicode.IsSpace(r)
})
if lastNonSpacePos == -1 {
return nil, false, "", errors.New("cannot heal a truncated JSON that stopped in an unknown location")
}
lastNonSpaceChar := rune(partialInput[lastNonSpacePos])
// Check if we stopped on a number
wasMaybeNumber := func() bool {
if len(partialInput) > 0 && unicode.IsSpace(rune(partialInput[len(partialInput)-1])) {
return false
}
return unicode.IsDigit(lastNonSpaceChar) ||
lastNonSpaceChar == '.' ||
lastNonSpaceChar == 'e' ||
lastNonSpaceChar == 'E' ||
lastNonSpaceChar == '-'
}
// Check for partial unicode escape sequences
partialUnicodeRegex := regexp.MustCompile(`\\u(?:[0-9a-fA-F](?:[0-9a-fA-F](?:[0-9a-fA-F](?:[0-9a-fA-F])?)?)?)?$`)
unicodeMarkerPadding := "udc00"
lastUnicodeMatch := partialUnicodeRegex.FindStringSubmatch(partialInput)
if lastUnicodeMatch != nil {
// Pad the escape sequence
unicodeMarkerPadding = strings.Repeat("0", 6-len(lastUnicodeMatch[0]))
// Check if it's a high surrogate
if len(lastUnicodeMatch[0]) >= 4 {
seq := lastUnicodeMatch[0]
if seq[0] == '\\' && seq[1] == 'u' {
third := strings.ToLower(string(seq[2]))
if third == "d" {
fourth := strings.ToLower(string(seq[3]))
if fourth == "8" || fourth == "9" || fourth == "a" || fourth == "b" {
// High surrogate, add low surrogate
unicodeMarkerPadding += "\\udc00"
}
}
}
}
}
canParse := func(str string) bool {
var test any
return json.Unmarshal([]byte(str), &test) == nil
}
// Heal based on stack top element type
healedJSON := partialInput
jsonDumpMarker := ""
topElement := errLoc.stack[len(errLoc.stack)-1]
if topElement.Type == JSONStackElementKey {
// We're inside an object value
if lastNonSpaceChar == ':' && canParse(healedJSON+"1"+closing) {
jsonDumpMarker = "\"" + healingMarker
healedJSON += jsonDumpMarker + "\"" + closing
} else if canParse(healedJSON + ": 1" + closing) {
jsonDumpMarker = ":\"" + healingMarker
healedJSON += jsonDumpMarker + "\"" + closing
} else if lastNonSpaceChar == '{' && canParse(healedJSON+closing) {
jsonDumpMarker = "\"" + healingMarker
healedJSON += jsonDumpMarker + "\": 1" + closing
} else if canParse(healedJSON + "\"" + closing) {
jsonDumpMarker = healingMarker
healedJSON += jsonDumpMarker + "\"" + closing
} else if len(healedJSON) > 0 && healedJSON[len(healedJSON)-1] == '\\' && canParse(healedJSON+"\\\""+closing) {
jsonDumpMarker = "\\" + healingMarker
healedJSON += jsonDumpMarker + "\"" + closing
} else if canParse(healedJSON + unicodeMarkerPadding + "\"" + closing) {
jsonDumpMarker = unicodeMarkerPadding + healingMarker
healedJSON += jsonDumpMarker + "\"" + closing
} else {
// Find last colon and cut back
lastColon := strings.LastIndex(healedJSON, ":")
if lastColon == -1 {
return nil, false, "", errors.New("cannot heal a truncated JSON that stopped in an unknown location")
}
jsonDumpMarker = "\"" + healingMarker
healedJSON = healedJSON[:lastColon+1] + jsonDumpMarker + "\"" + closing
}
} else if topElement.Type == JSONStackElementArray {
// We're inside an array
if (lastNonSpaceChar == ',' || lastNonSpaceChar == '[') && canParse(healedJSON+"1"+closing) {
jsonDumpMarker = "\"" + healingMarker
healedJSON += jsonDumpMarker + "\"" + closing
} else if canParse(healedJSON + "\"" + closing) {
jsonDumpMarker = healingMarker
healedJSON += jsonDumpMarker + "\"" + closing
} else if len(healedJSON) > 0 && healedJSON[len(healedJSON)-1] == '\\' && canParse(healedJSON+"\\\""+closing) {
jsonDumpMarker = "\\" + healingMarker
healedJSON += jsonDumpMarker + "\"" + closing
} else if canParse(healedJSON + unicodeMarkerPadding + "\"" + closing) {
jsonDumpMarker = unicodeMarkerPadding + healingMarker
healedJSON += jsonDumpMarker + "\"" + closing
} else if !wasMaybeNumber() && canParse(healedJSON+", 1"+closing) {
jsonDumpMarker = ",\"" + healingMarker
healedJSON += jsonDumpMarker + "\"" + closing
} else {
lastBracketOrComma := strings.LastIndexAny(healedJSON, "[,")
if lastBracketOrComma == -1 {
return nil, false, "", errors.New("cannot heal a truncated JSON array stopped in an unknown location")
}
jsonDumpMarker = "\"" + healingMarker
healedJSON = healedJSON[:lastBracketOrComma+1] + jsonDumpMarker + "\"" + closing
}
} else if topElement.Type == JSONStackElementObject {
// We're inside an object (expecting a key)
if (lastNonSpaceChar == '{' && canParse(healedJSON+closing)) ||
(lastNonSpaceChar == ',' && canParse(healedJSON+"\"\": 1"+closing)) {
jsonDumpMarker = "\"" + healingMarker
healedJSON += jsonDumpMarker + "\": 1" + closing
} else if !wasMaybeNumber() && canParse(healedJSON+",\"\": 1"+closing) {
jsonDumpMarker = ",\"" + healingMarker
healedJSON += jsonDumpMarker + "\": 1" + closing
} else if canParse(healedJSON + "\": 1" + closing) {
jsonDumpMarker = healingMarker
healedJSON += jsonDumpMarker + "\": 1" + closing
} else if len(healedJSON) > 0 && healedJSON[len(healedJSON)-1] == '\\' && canParse(healedJSON+"\\\": 1"+closing) {
jsonDumpMarker = "\\" + healingMarker
healedJSON += jsonDumpMarker + "\": 1" + closing
} else if canParse(healedJSON + unicodeMarkerPadding + "\": 1" + closing) {
jsonDumpMarker = unicodeMarkerPadding + healingMarker
healedJSON += jsonDumpMarker + "\": 1" + closing
} else {
lastColon := strings.LastIndex(healedJSON, ":")
if lastColon == -1 {
return nil, false, "", errors.New("cannot heal a truncated JSON object stopped in an unknown location")
}
jsonDumpMarker = "\"" + healingMarker
healedJSON = healedJSON[:lastColon+1] + jsonDumpMarker + "\"" + closing
}
} else {
return nil, false, "", errors.New("cannot heal a truncated JSON object stopped in an unknown location")
}
// Try to parse the healed JSON
var healedValue any
if err := json.Unmarshal([]byte(healedJSON), &healedValue); err != nil {
return nil, false, "", err
}
// Remove healing marker from result
cleaned := removeHealingMarkerFromJSONAny(healedValue, healingMarker)
return cleaned, true, jsonDumpMarker, nil
}
// parseJSONWithStackTracking parses JSON while tracking the stack structure
// Returns the error position and any error encountered
// This implements stack tracking similar to llama.cpp's json_error_locator
func parseJSONWithStackTracking(input string, errLoc *JSONErrorLocator) (int, error) {
// First, try to parse to get exact error position
decoder := json.NewDecoder(strings.NewReader(input))
var test any
err := decoder.Decode(&test)
if err != nil {
errLoc.foundError = true
errLoc.exceptionMessage = err.Error()
var errorPos int
if syntaxErr, ok := err.(*json.SyntaxError); ok {
errorPos = int(syntaxErr.Offset)
errLoc.position = errorPos
} else {
// Fallback: use end of input
errorPos = len(input)
errLoc.position = errorPos
}
// Now build the stack by parsing up to the error position
// This matches llama.cpp's approach of tracking stack during SAX parsing
partialInput := input
if errorPos > 0 && errorPos < len(input) {
partialInput = input[:errorPos]
}
// Track stack by parsing character by character up to error
pos := 0
inString := false
escape := false
keyStart := -1
keyEnd := -1
for pos < len(partialInput) {
ch := partialInput[pos]
if escape {
escape = false
pos++
continue
}
if ch == '\\' {
escape = true
pos++
continue
}
if ch == '"' {
if !inString {
// Starting a string
inString = true
// Check if we're in an object context (expecting a key)
if len(errLoc.stack) > 0 {
top := errLoc.stack[len(errLoc.stack)-1]
if top.Type == JSONStackElementObject {
// This could be a key
keyStart = pos + 1 // Start after quote
}
}
} else {
// Ending a string
inString = false
if keyStart != -1 {
// This was potentially a key, extract it
keyEnd = pos
key := partialInput[keyStart:keyEnd]
// Look ahead to see if next non-whitespace is ':'
nextPos := pos + 1
for nextPos < len(partialInput) && unicode.IsSpace(rune(partialInput[nextPos])) {
nextPos++
}
if nextPos < len(partialInput) && partialInput[nextPos] == ':' {
// This is a key, add it to stack
errLoc.stack = append(errLoc.stack, JSONStackElement{Type: JSONStackElementKey, Key: key})
}
keyStart = -1
keyEnd = -1
}
}
pos++
continue
}
if inString {
pos++
continue
}
// Handle stack operations (outside strings)
if ch == '{' {
errLoc.stack = append(errLoc.stack, JSONStackElement{Type: JSONStackElementObject})
} else if ch == '}' {
// Pop object and any key on top (keys are popped when value starts, but handle here too)
for len(errLoc.stack) > 0 {
top := errLoc.stack[len(errLoc.stack)-1]
errLoc.stack = errLoc.stack[:len(errLoc.stack)-1]
if top.Type == JSONStackElementObject {
break
}
}
} else if ch == '[' {
errLoc.stack = append(errLoc.stack, JSONStackElement{Type: JSONStackElementArray})
} else if ch == ']' {
// Pop array
for len(errLoc.stack) > 0 {
top := errLoc.stack[len(errLoc.stack)-1]
errLoc.stack = errLoc.stack[:len(errLoc.stack)-1]
if top.Type == JSONStackElementArray {
break
}
}
} else if ch == ':' {
// Colon means we're starting a value, pop the key if it's on stack
if len(errLoc.stack) > 0 && errLoc.stack[len(errLoc.stack)-1].Type == JSONStackElementKey {
errLoc.stack = errLoc.stack[:len(errLoc.stack)-1]
}
}
// Note: commas and whitespace don't affect stack structure
pos++
}
return errorPos, err
}
// No error, parse was successful - build stack anyway for completeness
// (though we shouldn't need healing in this case)
pos := 0
inString := false
escape := false
for pos < len(input) {
ch := input[pos]
if escape {
escape = false
pos++
continue
}
if ch == '\\' {
escape = true
pos++
continue
}
if ch == '"' {
inString = !inString
pos++
continue
}
if inString {
pos++
continue
}
if ch == '{' {
errLoc.stack = append(errLoc.stack, JSONStackElement{Type: JSONStackElementObject})
} else if ch == '}' {
for len(errLoc.stack) > 0 {
top := errLoc.stack[len(errLoc.stack)-1]
errLoc.stack = errLoc.stack[:len(errLoc.stack)-1]
if top.Type == JSONStackElementObject {
break
}
}
} else if ch == '[' {
errLoc.stack = append(errLoc.stack, JSONStackElement{Type: JSONStackElementArray})
} else if ch == ']' {
for len(errLoc.stack) > 0 {
top := errLoc.stack[len(errLoc.stack)-1]
errLoc.stack = errLoc.stack[:len(errLoc.stack)-1]
if top.Type == JSONStackElementArray {
break
}
}
}
pos++
}
return len(input), nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/functions_suite_test.go | pkg/functions/functions_suite_test.go | package functions_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
func TestFunctions(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Functions test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/grammars/llama31_schema_test.go | pkg/functions/grammars/llama31_schema_test.go | package grammars_test
import (
"strings"
. "github.com/mudler/LocalAI/pkg/functions/grammars"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
const (
testllama31Input1 = `
{
"oneOf": [
{
"type": "object",
"properties": {
"function": {"const": "create_event"},
"arguments": {
"type": "object",
"properties": {
"title": {"type": "string"},
"date": {"type": "string"},
"time": {"type": "string"}
}
}
}
},
{
"type": "object",
"properties": {
"function": {"const": "search"},
"arguments": {
"type": "object",
"properties": {
"query": {"type": "string"}
}
}
}
}
]
}`
// <function=example_function_name>{{"example_name": "example_value"}}</function>
testllama31inputResult1 = `root-0-function ::= "create_event"
freestring ::= (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* space
root-0 ::= "<function=" root-0-function ">{" root-0-arguments "}</function>"
root-1-arguments ::= "{" space "\"query\"" space ":" space string "}" space
root ::= root-0 | root-1
space ::= " "?
root-0-arguments ::= "{" space "\"date\"" space ":" space string "," space "\"time\"" space ":" space string "," space "\"title\"" space ":" space string "}" space
root-1 ::= "<function=" root-1-function ">{" root-1-arguments "}</function>"
string ::= "\"" (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* "\"" space
root-1-function ::= "search"`
)
var _ = Describe("JSON schema grammar tests", func() {
Context("JSON", func() {
It("generates a valid grammar from JSON schema", func() {
grammar, err := NewLLama31SchemaConverter("function").GrammarFromBytes([]byte(testllama31Input1))
Expect(err).ToNot(HaveOccurred())
results := strings.Split(testllama31inputResult1, "\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))))
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/grammars/rules.go | pkg/functions/grammars/rules.go | package grammars
import (
"fmt"
"strings"
"github.com/mudler/LocalAI/pkg/utils"
)
type Rules map[string]string
func (rules Rules) ToGrammar(options ...func(*GrammarOption)) string {
grammarOpts := &GrammarOption{}
grammarOpts.Apply(options...)
prefix := grammarOpts.Prefix
maybeArray := grammarOpts.MaybeArray
disableParallelNewLines := grammarOpts.DisableParallelNewLines
maybeString := grammarOpts.MaybeString
noMixedFreeString := grammarOpts.NoMixedFreeString
var lines []string
swapRoot := maybeArray || maybeString || prefix != ""
// write down the computed rules.
// if maybeArray is true, we need to add the array rule and slightly tweak the root rule
for name, rule := range rules {
if swapRoot && name == "root" {
name = "realvalue"
}
lines = append(lines, fmt.Sprintf("%s ::= %s", name, rule))
}
if !swapRoot {
return strings.Join(lines, "\n")
}
newRoot := "realvalue"
if maybeArray {
newRoot = "arr | realvalue"
}
freestringRule := "mixedstring"
if noMixedFreeString {
freestringRule = "freestring"
}
if prefix != "" {
// quote newlines in suffix
prefix = utils.EscapeNewLines(prefix)
if maybeArray && maybeString {
newRoot = "(" + newRoot + ")"
}
if maybeString {
//newRoot = "( (\"" + suffix + "\" " + newRoot + ") | freestring ) "
newRoot = "( \"" + prefix + "\" " + newRoot + " | " + freestringRule + " ) "
} else {
newRoot = "\"" + prefix + "\" " + "" + newRoot + ""
}
} else if maybeString {
if maybeArray {
// newRoot = "(" + newRoot + ")"
}
newRoot = freestringRule + " | " + newRoot
}
lines = append(lines, fmt.Sprintf("%s ::= %s", "root", newRoot))
if disableParallelNewLines {
lines = append(lines, array)
} else {
lines = append(lines, arrayNewLines)
}
if maybeArray {
if grammarOpts.ExpectStringsAfterJSON {
lines = append(lines, `mixedstring ::= freestring | freestring arr freestring | (freestring realvalue freestring)* | realvalue | arr`)
} else {
lines = append(lines, `mixedstring ::= freestring | freestring arr | freestring realvalue | realvalue | arr`)
}
} else {
if grammarOpts.ExpectStringsAfterJSON {
lines = append(lines, `mixedstring ::= freestring | (freestring realvalue freestring)* | realvalue`)
} else {
lines = append(lines, `mixedstring ::= freestring | freestring realvalue | realvalue`)
}
}
return strings.Join(lines, "\n")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/grammars/types.go | pkg/functions/grammars/types.go | package grammars
type SchemaConverterType int
const (
JSONSchema SchemaConverterType = iota
LLama31Schema
)
const (
LlamaType string = "llama3.1"
JSONType string = "json"
)
func (s SchemaConverterType) String() string {
switch s {
case JSONSchema:
return JSONType
case LLama31Schema:
return LlamaType
}
return "unknown"
}
func NewType(t string) SchemaConverterType {
switch t {
case JSONType:
return JSONSchema
case LlamaType:
return LLama31Schema
}
return JSONSchema
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/grammars/llama31_schema.go | pkg/functions/grammars/llama31_schema.go | package grammars
import (
"encoding/json"
"fmt"
"regexp"
"sort"
"strings"
)
type LLama31SchemaConverter struct {
fnName string
rules Rules
}
func NewLLama31SchemaConverter(fnName string) *LLama31SchemaConverter {
rules := make(map[string]string)
rules["space"] = SPACE_RULE
if fnName == "" {
fnName = "name"
}
return &LLama31SchemaConverter{
rules: rules,
fnName: fnName,
}
}
var GRAMMAR_LITERAL_ESCAPESLlama = map[string]string{
"\r": `\r`,
"\n": `\n`,
}
var GRAMMAR_LITERAL_ESCAPE_RELlama = regexp.MustCompile(`[\r\n]`)
func (sc *LLama31SchemaConverter) formatLiteral(literal interface{}) (string, error) {
jLiteral, err := jsonString(literal)
if err != nil {
return "", err
}
escaped := GRAMMAR_LITERAL_ESCAPE_RELlama.ReplaceAllStringFunc(jLiteral, func(match string) string {
return GRAMMAR_LITERAL_ESCAPESLlama[match]
})
return escaped, nil
}
func (sc *LLama31SchemaConverter) formatLiteralQuoted(literal interface{}) (string, error) {
jLiteral, err := jsonString(literal)
if err != nil {
return "", err
}
escaped := GRAMMAR_LITERAL_ESCAPE_RE.ReplaceAllStringFunc(jLiteral, func(match string) string {
return GRAMMAR_LITERAL_ESCAPES[match]
})
return fmt.Sprintf(`"%s"`, escaped), nil
}
func (sc *LLama31SchemaConverter) addRule(name, rule string) string {
escName := INVALID_RULE_CHARS_RE.ReplaceAllString(name, "-")
key := escName
if existingRule, ok := sc.rules[escName]; ok && existingRule != rule {
i := 0
for {
key = fmt.Sprintf("%s%d", escName, i)
if _, ok := sc.rules[key]; !ok {
break
}
i++
}
}
sc.rules[key] = rule
return key
}
func (sc *LLama31SchemaConverter) visit(schema map[string]interface{}, name string, rootSchema map[string]interface{}) (string, error) {
st, existType := schema["type"]
var schemaType string
if existType {
schemaType = st.(string)
}
ruleName := name
if name == "" {
ruleName = "root"
}
_, oneOfExists := schema["oneOf"]
_, anyOfExists := schema["anyOf"]
if oneOfExists || anyOfExists {
var alternatives []string
oneOfSchemas, oneOfExists := schema["oneOf"].([]interface{})
anyOfSchemas, anyOfExists := schema["anyOf"].([]interface{})
if oneOfExists {
for i, altSchema := range oneOfSchemas {
alternative, err := sc.visit(altSchema.(map[string]interface{}), fmt.Sprintf("%s-%d", ruleName, i), rootSchema)
if err != nil {
return "", err
}
alternatives = append(alternatives, alternative)
}
} else if anyOfExists {
for i, altSchema := range anyOfSchemas {
alternative, err := sc.visit(altSchema.(map[string]interface{}), fmt.Sprintf("%s-%d", ruleName, i), rootSchema)
if err != nil {
return "", err
}
alternatives = append(alternatives, alternative)
}
}
rule := strings.Join(alternatives, " | ")
return sc.addRule(ruleName, rule), nil
} else if ref, exists := schema["$ref"].(string); exists {
referencedSchema, err := sc.resolveReference(ref, rootSchema)
if err != nil {
return "", err
}
return sc.visit(referencedSchema, name, rootSchema)
} else if constVal, exists := schema["const"]; exists {
literal, err := sc.formatLiteral((constVal))
if err != nil {
return "", err
}
return sc.addRule(ruleName, literal), nil
} else if enumVals, exists := schema["enum"].([]interface{}); exists {
var enumRules []string
for _, enumVal := range enumVals {
enumRule, err := sc.formatLiteralQuoted(enumVal)
if err != nil {
return "", err
}
enumRules = append(enumRules, enumRule)
}
rule := strings.Join(enumRules, " | ")
return sc.addRule(ruleName, rule), nil
} else if properties, exists := schema["properties"].(map[string]interface{}); schemaType == "object" && exists {
baseProperty := false
depth := strings.Split(name, "-")
if len(depth) == 2 {
baseProperty = true
}
type propData []struct {
propName string
propSchema map[string]interface{}
}
var propPairs propData
for propName, propSchema := range properties {
propPairs = append(propPairs, struct {
propName string
propSchema map[string]interface{}
}{propName: propName, propSchema: propSchema.(map[string]interface{})})
}
sort.Slice(propPairs, func(i, j int) bool {
return propPairs[i].propName < propPairs[j].propName
})
var rule strings.Builder
if baseProperty {
rule.WriteString(`"<function="`)
} else {
rule.WriteString(`"{" space`)
}
if baseProperty {
namePair := propData{}
for i, propPair := range propPairs {
propName := propPair.propName
if propName == sc.fnName {
namePair = append(namePair, propPair)
// remove namePair from propPairs
propPairs = append(propPairs[:i], propPairs[i+1:]...)
break
}
}
if len(namePair) == 0 {
return "", fmt.Errorf("no function name found in the schema: %s", schema)
}
propRuleName, err := sc.visit(namePair[0].propSchema, fmt.Sprintf("%s-%s", ruleName, sc.fnName), rootSchema)
if err != nil {
return "", err
}
rule.WriteString(fmt.Sprintf(` %s ">{" `, propRuleName))
for _, propPair := range propPairs {
propName := propPair.propName
propSchema := propPair.propSchema
propRuleName, err := sc.visit(propSchema, fmt.Sprintf("%s-%s", ruleName, propName), rootSchema)
if err != nil {
return "", err
}
rule.WriteString(propRuleName)
}
rule.WriteString(` "}</function>"`)
} else {
for i, propPair := range propPairs {
propName := propPair.propName
propSchema := propPair.propSchema
propRuleName, err := sc.visit(propSchema, fmt.Sprintf("%s-%s", ruleName, propName), rootSchema)
if err != nil {
return "", err
}
lPropName, err := sc.formatLiteralQuoted(propName)
if err != nil {
return "", err
}
if i > 0 {
rule.WriteString(` "," space`)
}
rule.WriteString(fmt.Sprintf(` %s space ":" space %s`, lPropName, propRuleName))
}
}
if !baseProperty {
rule.WriteString(` "}" space`)
}
return sc.addRule(ruleName, rule.String()), nil
} else if items, exists := schema["items"].(map[string]interface{}); schemaType == "array" && exists {
itemRuleName, err := sc.visit(items, fmt.Sprintf("%s-item", ruleName), rootSchema)
if err != nil {
return "", err
}
rule := fmt.Sprintf(`"[" space (%s ("," space %s)*)? "]" space`, itemRuleName, itemRuleName)
return sc.addRule(ruleName, rule), nil
} else {
primitiveRule, exists := PRIMITIVE_RULES[schemaType]
if !exists {
return "", fmt.Errorf("unrecognized schema: %v", schema)
}
if ruleName == "root" {
schemaType = "root"
}
return sc.addRule(schemaType, primitiveRule), nil
}
}
func (sc *LLama31SchemaConverter) resolveReference(ref string, rootSchema map[string]interface{}) (map[string]interface{}, error) {
if !strings.HasPrefix(ref, "#/$defs/") {
return nil, fmt.Errorf("invalid reference format: %s", ref)
}
defKey := strings.TrimPrefix(ref, "#/$defs/")
definitions, exists := rootSchema["$defs"].(map[string]interface{})
if !exists {
return nil, fmt.Errorf("no definitions found in the schema: %s", rootSchema)
}
def, exists := definitions[defKey].(map[string]interface{})
if !exists {
return nil, fmt.Errorf("definition not found: %s %+v", defKey, definitions)
}
return def, nil
}
func (sc *LLama31SchemaConverter) Grammar(schema map[string]interface{}, options ...func(*GrammarOption)) (string, error) {
sc.addRule("freestring", PRIMITIVE_RULES["freestring"])
_, err := sc.visit(schema, "", schema)
if err != nil {
return "", err
}
return sc.rules.ToGrammar(options...), nil
}
func (sc *LLama31SchemaConverter) GrammarFromBytes(b []byte, options ...func(*GrammarOption)) (string, error) {
var schema map[string]interface{}
err := json.Unmarshal(b, &schema)
if err != nil {
return "", err
}
return sc.Grammar(schema, options...)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/grammars/json_schema_test.go | pkg/functions/grammars/json_schema_test.go | package grammars_test
import (
"strings"
. "github.com/mudler/LocalAI/pkg/functions"
. "github.com/mudler/LocalAI/pkg/functions/grammars"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var testFunctions = []Item{
{
Type: "object",
Properties: createFunction(
"function",
"arguments",
"create_event",
map[string]interface{}{
"title": map[string]string{"type": "string"},
"date": map[string]string{"type": "string"},
"time": map[string]string{"type": "string"},
},
),
},
{
Type: "object",
Properties: createFunction(
"function",
"arguments",
"search",
map[string]interface{}{
"query": map[string]string{"type": "string"},
}),
},
}
var testFunctionsName = []Item{
{
Type: "object",
Properties: createFunction(
"name",
"arguments",
"create_event",
map[string]interface{}{
"title": map[string]string{"type": "string"},
"date": map[string]string{"type": "string"},
"time": map[string]string{"type": "string"},
},
),
},
{
Type: "object",
Properties: createFunction(
"name",
"arguments",
"search",
map[string]interface{}{
"query": map[string]string{"type": "string"},
}),
},
}
func rootResult(s string) string {
return `root-0-name ::= "\"create_event\""
freestring ::= (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* space
root-0 ::= "{" space "\"arguments\"" space ":" space root-0-arguments "," space "\"name\"" space ":" space root-0-name "}" space
root-1-arguments ::= "{" space "\"query\"" space ":" space string "}" space
realvalue ::= root-0 | root-1
root ::= ` + s + `
space ::= " "?
root-0-arguments ::= "{" space "\"date\"" space ":" space string "," space "\"time\"" space ":" space string "," space "\"title\"" space ":" space string "}" space
root-1 ::= "{" space "\"arguments\"" space ":" space root-1-arguments "," space "\"name\"" space ":" space root-1-name "}" space
string ::= "\"" (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* "\"" space
arr ::=
"[\n" (
realvalue
(",\n" realvalue)*
)? "]"
root-1-name ::= "\"search\""`
}
const (
testInput1 = `
{
"oneOf": [
{
"type": "object",
"properties": {
"function": {"const": "create_event"},
"arguments": {
"type": "object",
"properties": {
"title": {"type": "string"},
"date": {"type": "string"},
"time": {"type": "string"}
}
}
}
},
{
"type": "object",
"properties": {
"function": {"const": "search"},
"arguments": {
"type": "object",
"properties": {
"query": {"type": "string"}
}
}
}
}
]
}`
inputResult1 = `root-0-function ::= "\"create_event\""
freestring ::= (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* space
root-0 ::= "{" space "\"arguments\"" space ":" space root-0-arguments "," space "\"function\"" space ":" space root-0-function "}" space
root-1-arguments ::= "{" space "\"query\"" space ":" space string "}" space
root ::= root-0 | root-1
space ::= " "?
root-0-arguments ::= "{" space "\"date\"" space ":" space string "," space "\"time\"" space ":" space string "," space "\"title\"" space ":" space string "}" space
root-1 ::= "{" space "\"arguments\"" space ":" space root-1-arguments "," space "\"function\"" space ":" space root-1-function "}" space
string ::= "\"" (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* "\"" space
root-1-function ::= "\"search\""`
inputResult2 = `root-0-function ::= "\"create_event\""
freestring ::= (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* space
root-0 ::= "{" space "\"arguments\"" space ":" space root-0-arguments "," space "\"function\"" space ":" space root-0-function "}" space
root-1-arguments ::= "{" space "\"query\"" space ":" space string "}" space
realvalue ::= root-0 | root-1
root ::= arr | realvalue
space ::= " "?
root-0-arguments ::= "{" space "\"date\"" space ":" space string "," space "\"time\"" space ":" space string "," space "\"title\"" space ":" space string "}" space
root-1 ::= "{" space "\"arguments\"" space ":" space root-1-arguments "," space "\"function\"" space ":" space root-1-function "}" space
string ::= "\"" (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* "\"" space
arr ::=
"[\n" (
realvalue
(",\n" realvalue)*
)? "]"
root-1-function ::= "\"search\""`
testInput2 = `
{
"oneOf": [
{
"type": "object",
"properties": {
"name": {"const": "create_event"},
"arguments": {
"type": "object",
"properties": {
"title": {"type": "string"},
"date": {"type": "string"},
"time": {"type": "string"}
}
}
}
},
{
"type": "object",
"properties": {
"name": {"const": "search"},
"arguments": {
"type": "object",
"properties": {
"query": {"type": "string"}
}
}
}
}
]
}`
inputResult3 = `root-0-name ::= "\"create_event\""
freestring ::= (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* space
root-0 ::= "{" space "\"arguments\"" space ":" space root-0-arguments "," space "\"name\"" space ":" space root-0-name "}" space
root-1-arguments ::= "{" space "\"query\"" space ":" space string "}" space
root ::= root-0 | root-1
space ::= " "?
root-0-arguments ::= "{" space "\"date\"" space ":" space string "," space "\"time\"" space ":" space string "," space "\"title\"" space ":" space string "}" space
root-1 ::= "{" space "\"arguments\"" space ":" space root-1-arguments "," space "\"name\"" space ":" space root-1-name "}" space
string ::= "\"" (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* "\"" space
root-1-name ::= "\"search\""`
inputResult4 = `root-0-name ::= "\"create_event\""
freestring ::= (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* space
root-0 ::= "{" space "\"arguments\"" space ":" space root-0-arguments "," space "\"name\"" space ":" space root-0-name "}" space
root-1-arguments ::= "{" space "\"query\"" space ":" space string "}" space
realvalue ::= root-0 | root-1
root ::= arr | realvalue
space ::= " "?
root-0-arguments ::= "{" space "\"date\"" space ":" space string "," space "\"time\"" space ":" space string "," space "\"title\"" space ":" space string "}" space
root-1 ::= "{" space "\"arguments\"" space ":" space root-1-arguments "," space "\"name\"" space ":" space root-1-name "}" space
string ::= "\"" (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* "\"" space
arr ::=
"[\n" (
realvalue
(",\n" realvalue)*
)? "]"
root-1-name ::= "\"search\""`
)
var _ = Describe("JSON schema grammar tests", func() {
Context("JSON", func() {
It("generates a valid grammar from JSON schema", func() {
grammar, err := NewJSONSchemaConverter("").GrammarFromBytes([]byte(testInput1))
Expect(err).To(BeNil())
results := strings.Split(inputResult1, "\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))))
})
It("generates a valid grammar from JSON schema", func() {
grammar, err := NewJSONSchemaConverter("").GrammarFromBytes([]byte(testInput2))
Expect(err).To(BeNil())
results := strings.Split(inputResult3, "\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))))
})
It("generates a valid grammar from JSON Objects", func() {
structuredGrammar := JSONFunctionStructure{
OneOf: testFunctions}
grammar, err := structuredGrammar.Grammar()
Expect(err).To(BeNil())
results := strings.Split(inputResult1, "\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))))
})
It("generates a valid grammar from JSON Objects for multiple function return", func() {
structuredGrammar := JSONFunctionStructure{
OneOf: testFunctions}
grammar, err := structuredGrammar.Grammar(EnableMaybeArray)
Expect(err).To(BeNil())
results := strings.Split(
strings.Join([]string{
inputResult2,
"mixedstring ::= freestring | freestring arr | freestring realvalue"}, "\n"),
"\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))), grammar)
})
It("generates a valid grammar from JSON Objects for multiple function return", func() {
structuredGrammar := JSONFunctionStructure{
OneOf: testFunctionsName}
grammar, err := structuredGrammar.Grammar(EnableMaybeArray)
Expect(err).To(BeNil())
results := strings.Split(
strings.Join([]string{
inputResult4,
"mixedstring ::= freestring | freestring arr | freestring realvalue"}, "\n"),
"\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))), grammar)
})
It("generates a valid grammar from JSON Objects for multiple function return with a suffix and array", func() {
structuredGrammar := JSONFunctionStructure{
OneOf: testFunctionsName}
grammar, err := structuredGrammar.Grammar(
SetPrefix("suffix"),
EnableMaybeArray,
)
Expect(err).To(BeNil())
results := strings.Split(
strings.Join([]string{
rootResult(`"suffix" arr | realvalue`),
"mixedstring ::= freestring | freestring arr | freestring realvalue"}, "\n"),
"\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))), grammar)
})
It("generates a valid grammar from JSON Objects with a suffix", func() {
structuredGrammar := JSONFunctionStructure{
OneOf: testFunctionsName}
grammar, err := structuredGrammar.Grammar(SetPrefix("suffix"))
Expect(err).To(BeNil())
results := strings.Split(
strings.Join([]string{
rootResult(`"suffix" realvalue`),
"mixedstring ::= freestring | freestring realvalue"}, "\n"),
"\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))), grammar)
})
It("generates a valid grammar from JSON Objects with a suffix and could return string", func() {
structuredGrammar := JSONFunctionStructure{
OneOf: testFunctionsName}
grammar, err := structuredGrammar.Grammar(SetPrefix("suffix"), EnableMaybeString)
Expect(err).To(BeNil())
results := strings.Split(
strings.Join([]string{
rootResult(`( "suffix" realvalue | mixedstring )`),
"mixedstring ::= freestring | freestring realvalue"}, "\n"),
"\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))), grammar)
})
It("generates a valid grammar from JSON Objects with a suffix that could return text or an array of tools", func() {
structuredGrammar := JSONFunctionStructure{
OneOf: testFunctionsName}
grammar, err := structuredGrammar.Grammar(SetPrefix("suffix"), EnableMaybeString, EnableMaybeArray)
Expect(err).To(BeNil())
results := strings.Split(
strings.Join([]string{
rootResult(`( "suffix" (arr | realvalue) | mixedstring )`),
"mixedstring ::= freestring | freestring arr | freestring realvalue"}, "\n"),
"\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))), grammar)
})
It("generates a valid grammar from JSON Objects without a suffix that could return text or an array of tools or just string", func() {
structuredGrammar := JSONFunctionStructure{
OneOf: testFunctionsName}
grammar, err := structuredGrammar.Grammar(EnableMaybeString, EnableMaybeArray)
Expect(err).To(BeNil())
results := strings.Split(
strings.Join([]string{
rootResult(`mixedstring | arr | realvalue`),
"mixedstring ::= freestring | freestring arr | freestring realvalue"}, "\n"),
"\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))), grammar)
})
It("generates a valid grammar from JSON Objects without a suffix that could return text or an array of tools or just string. Disables mixedstring", func() {
structuredGrammar := JSONFunctionStructure{
OneOf: testFunctionsName}
grammar, err := structuredGrammar.Grammar(EnableMaybeString, EnableMaybeArray, NoMixedFreeString)
Expect(err).To(BeNil())
results := strings.Split(
strings.Join([]string{
rootResult(`freestring | arr | realvalue`),
"mixedstring ::= freestring | freestring arr | freestring realvalue"}, "\n"),
"\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
Expect(len(results)).To(Equal(len(strings.Split(grammar, "\n"))), grammar)
})
It("generates parallel tools without newlines in JSON", func() {
structuredGrammar := JSONFunctionStructure{
OneOf: testFunctionsName}
content := `arr ::=
"[" (
realvalue
("," realvalue)*
)? "]"`
grammar, err := structuredGrammar.Grammar(EnableMaybeString, EnableMaybeArray, DisableParallelNewLines)
Expect(err).To(BeNil())
results := strings.Split(content, "\n")
for _, r := range results {
if r != "" {
Expect(grammar).To(ContainSubstring(r))
}
}
})
It("handles empty object schema without properties", func() {
// Test case for the bug fix: schema with empty properties map
emptyObjectSchema := `{
"type": "object",
"properties": {}
}`
grammar, err := NewJSONSchemaConverter("").GrammarFromBytes([]byte(emptyObjectSchema))
Expect(err).To(BeNil())
Expect(grammar).To(ContainSubstring(`root ::= "{" space "}" space`))
})
It("handles object schema without properties field", func() {
// Test case for object schema without properties field at all
objectWithoutProperties := `{
"type": "object"
}`
grammar, err := NewJSONSchemaConverter("").GrammarFromBytes([]byte(objectWithoutProperties))
Expect(err).To(BeNil())
Expect(grammar).To(ContainSubstring(`root ::= "{" space "}" space`))
})
It("handles schema with properties but no type field", func() {
// Test case for the exact scenario causing the panic: schema with properties but no type
schemaWithPropertiesNoType := `{
"properties": {}
}`
grammar, err := NewJSONSchemaConverter("").GrammarFromBytes([]byte(schemaWithPropertiesNoType))
Expect(err).To(BeNil())
Expect(grammar).To(ContainSubstring(`root ::= "{" space "}" space`))
})
It("handles multi-type array definitions like [string, null]", func() {
// Type defined as an array should not panic
multiTypeSchema := `{
"type": "object",
"properties": {
"street": {
"description": "The given street name where the company resides.",
"type": ["string", "null"]
},
"city": {
"description": "The given city where the company resides.",
"type": ["string", "null"]
}
}
}`
grammar, err := NewJSONSchemaConverter("").GrammarFromBytes([]byte(multiTypeSchema))
Expect(err).To(BeNil())
// The grammar should contain rules for both string and null types
Expect(grammar).To(ContainSubstring("string"))
Expect(grammar).To(ContainSubstring("null"))
// Should not panic and should generate valid grammar
Expect(grammar).ToNot(BeEmpty())
})
It("handles complex nested schema with multi-type arrays (issue #5572)", func() {
complexSchema := `{
"type": "object",
"properties": {
"companylist": {
"type": "array",
"items": {
"type": "object",
"properties": {
"companyname": {
"description": "The given name of the company.",
"type": "string"
},
"street": {
"description": "The given street name where the company resides.",
"type": ["string", "null"]
},
"city": {
"description": "The given city where the company resides.",
"type": ["string", "null"]
}
},
"additionalProperties": false,
"required": ["companyname", "street", "city"]
}
},
"filter": {
"description": "The type we should filter the list of companies by.",
"type": "string"
}
},
"required": ["companylist", "filter"],
"additionalProperties": false
}`
grammar, err := NewJSONSchemaConverter("").GrammarFromBytes([]byte(complexSchema))
Expect(err).To(BeNil())
// The grammar should be generated without panic
Expect(grammar).ToNot(BeEmpty())
// Should contain object and array structures
Expect(grammar).To(ContainSubstring("{"))
Expect(grammar).To(ContainSubstring("["))
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/grammars/json_schema.go | pkg/functions/grammars/json_schema.go | package grammars
// a golang port of https://github.com/ggerganov/llama.cpp/pull/1887
import (
"encoding/json"
"fmt"
"sort"
"strings"
)
type JSONSchemaConverter struct {
propOrder map[string]int
rules Rules
}
func NewJSONSchemaConverter(propOrder string) *JSONSchemaConverter {
propOrderSlice := strings.Split(propOrder, ",")
propOrderMap := make(map[string]int)
for idx, name := range propOrderSlice {
propOrderMap[name] = idx
}
rules := make(map[string]string)
rules["space"] = SPACE_RULE
return &JSONSchemaConverter{
propOrder: propOrderMap,
rules: rules,
}
}
func (sc *JSONSchemaConverter) formatLiteral(literal interface{}) (string, error) {
jLiteral, err := jsonString(literal)
if err != nil {
return "", err
}
escaped := GRAMMAR_LITERAL_ESCAPE_RE.ReplaceAllStringFunc(jLiteral, func(match string) string {
return GRAMMAR_LITERAL_ESCAPES[match]
})
return fmt.Sprintf(`"%s"`, escaped), nil
}
func (sc *JSONSchemaConverter) addRule(name, rule string) string {
escName := INVALID_RULE_CHARS_RE.ReplaceAllString(name, "-")
key := escName
if existingRule, ok := sc.rules[escName]; ok && existingRule != rule {
i := 0
for {
key = fmt.Sprintf("%s%d", escName, i)
if _, ok := sc.rules[key]; !ok {
break
}
i++
}
}
sc.rules[key] = rule
return key
}
func (sc *JSONSchemaConverter) visit(schema map[string]interface{}, name string, rootSchema map[string]interface{}) (string, error) {
st, existType := schema["type"]
var schemaType string
var schemaTypes []string
if existType {
// Handle both single type strings and arrays of types (e.g., ["string", "null"])
switch v := st.(type) {
case string:
// Single type: "type": "string"
schemaType = v
schemaTypes = []string{v}
case []interface{}:
// Multiple types: "type": ["string", "null"]
for _, item := range v {
if typeStr, ok := item.(string); ok {
schemaTypes = append(schemaTypes, typeStr)
}
}
// Use the first type as the primary schema type for compatibility
if len(schemaTypes) > 0 {
schemaType = schemaTypes[0]
}
}
}
ruleName := name
if name == "" {
ruleName = "root"
}
_, oneOfExists := schema["oneOf"]
_, anyOfExists := schema["anyOf"]
if oneOfExists || anyOfExists {
var alternatives []string
oneOfSchemas, oneOfExists := schema["oneOf"].([]interface{})
anyOfSchemas, anyOfExists := schema["anyOf"].([]interface{})
if oneOfExists {
for i, altSchema := range oneOfSchemas {
alternative, err := sc.visit(altSchema.(map[string]interface{}), fmt.Sprintf("%s-%d", ruleName, i), rootSchema)
if err != nil {
return "", err
}
alternatives = append(alternatives, alternative)
}
} else if anyOfExists {
for i, altSchema := range anyOfSchemas {
alternative, err := sc.visit(altSchema.(map[string]interface{}), fmt.Sprintf("%s-%d", ruleName, i), rootSchema)
if err != nil {
return "", err
}
alternatives = append(alternatives, alternative)
}
}
rule := strings.Join(alternatives, " | ")
return sc.addRule(ruleName, rule), nil
} else if ref, exists := schema["$ref"].(string); exists {
referencedSchema, err := sc.resolveReference(ref, rootSchema)
if err != nil {
return "", err
}
return sc.visit(referencedSchema, name, rootSchema)
} else if constVal, exists := schema["const"]; exists {
literal, err := sc.formatLiteral((constVal))
if err != nil {
return "", err
}
return sc.addRule(ruleName, literal), nil
} else if enumVals, exists := schema["enum"].([]interface{}); exists {
var enumRules []string
for _, enumVal := range enumVals {
enumRule, err := sc.formatLiteral(enumVal)
if err != nil {
return "", err
}
enumRules = append(enumRules, enumRule)
}
rule := strings.Join(enumRules, " | ")
return sc.addRule(ruleName, rule), nil
} else if properties, exists := schema["properties"].(map[string]interface{}); schemaType == "object" && exists {
propOrder := sc.propOrder
var propPairs []struct {
propName string
propSchema map[string]interface{}
}
for propName, propSchema := range properties {
propPairs = append(propPairs, struct {
propName string
propSchema map[string]interface{}
}{propName: propName, propSchema: propSchema.(map[string]interface{})})
}
sort.Slice(propPairs, func(i, j int) bool {
iOrder := propOrder[propPairs[i].propName]
jOrder := propOrder[propPairs[j].propName]
if iOrder != 0 && jOrder != 0 {
return iOrder < jOrder
}
return propPairs[i].propName < propPairs[j].propName
})
var rule strings.Builder
rule.WriteString(`"{" space`)
for i, propPair := range propPairs {
propName := propPair.propName
propSchema := propPair.propSchema
propRuleName, err := sc.visit(propSchema, fmt.Sprintf("%s-%s", ruleName, propName), rootSchema)
if err != nil {
return "", err
}
lPropName, err := sc.formatLiteral(propName)
if err != nil {
return "", err
}
if i > 0 {
rule.WriteString(` "," space`)
}
rule.WriteString(fmt.Sprintf(` %s space ":" space %s`, lPropName, propRuleName))
}
rule.WriteString(` "}" space`)
return sc.addRule(ruleName, rule.String()), nil
} else if items, exists := schema["items"].(map[string]interface{}); schemaType == "array" && exists {
itemRuleName, err := sc.visit(items, fmt.Sprintf("%s-item", ruleName), rootSchema)
if err != nil {
return "", err
}
rule := fmt.Sprintf(`"[" space (%s ("," space %s)*)? "]" space`, itemRuleName, itemRuleName)
return sc.addRule(ruleName, rule), nil
} else if properties, _ := schema["properties"].(map[string]interface{}); (schemaType == "object" || schemaType == "") && len(properties) == 0 {
// Handle empty object schema (no properties)
rule := `"{" space "}" space`
return sc.addRule(ruleName, rule), nil
} else {
// Handle primitive types, including multi-type arrays like ["string", "null"]
if len(schemaTypes) > 1 {
// Generate a union of multiple primitive types
var typeRules []string
for _, t := range schemaTypes {
primitiveRule, exists := PRIMITIVE_RULES[t]
if !exists {
return "", fmt.Errorf("unrecognized type in multi-type schema: %s (schema: %v)", t, schema)
}
typeRules = append(typeRules, primitiveRule)
}
rule := "(" + strings.Join(typeRules, " | ") + ")"
return sc.addRule(ruleName, rule), nil
} else {
// Single type
primitiveRule, exists := PRIMITIVE_RULES[schemaType]
if !exists {
return "", fmt.Errorf("unrecognized schema: %v (type: %s)", schema, schemaType)
}
if ruleName == "root" {
schemaType = "root"
}
return sc.addRule(schemaType, primitiveRule), nil
}
}
}
func (sc *JSONSchemaConverter) resolveReference(ref string, rootSchema map[string]interface{}) (map[string]interface{}, error) {
if !strings.HasPrefix(ref, "#/$defs/") {
return nil, fmt.Errorf("invalid reference format: %s", ref)
}
defKey := strings.TrimPrefix(ref, "#/$defs/")
definitions, exists := rootSchema["$defs"].(map[string]interface{})
if !exists {
return nil, fmt.Errorf("no definitions found in the schema: %s", rootSchema)
}
def, exists := definitions[defKey].(map[string]interface{})
if !exists {
return nil, fmt.Errorf("definition not found: %s %+v", defKey, definitions)
}
return def, nil
}
func (sc *JSONSchemaConverter) Grammar(schema map[string]interface{}, options ...func(*GrammarOption)) (string, error) {
sc.addRule("freestring", PRIMITIVE_RULES["freestring"])
_, err := sc.visit(schema, "", schema)
if err != nil {
return "", err
}
return sc.rules.ToGrammar(options...), nil
}
func (sc *JSONSchemaConverter) GrammarFromBytes(b []byte, options ...func(*GrammarOption)) (string, error) {
var schema map[string]interface{}
err := json.Unmarshal(b, &schema)
if err != nil {
return "", err
}
return sc.Grammar(schema, options...)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/grammars/options.go | pkg/functions/grammars/options.go | package grammars
type GrammarOption struct {
PropOrder string
Prefix string
MaybeArray bool
DisableParallelNewLines bool
MaybeString bool
NoMixedFreeString bool
ExpectStringsAfterJSON bool
FunctionName string
SchemaType SchemaConverterType
}
func (o *GrammarOption) Apply(options ...func(*GrammarOption)) {
for _, l := range options {
l(o)
}
}
var EnableMaybeArray = func(o *GrammarOption) {
o.MaybeArray = true
}
var DisableParallelNewLines = func(o *GrammarOption) {
o.DisableParallelNewLines = true
}
var EnableMaybeString = func(o *GrammarOption) {
o.MaybeString = true
}
var NoMixedFreeString func(*GrammarOption) = func(o *GrammarOption) {
o.NoMixedFreeString = true
}
// ExpectStringsAfterJSON enables mixed string suffix
var ExpectStringsAfterJSON func(*GrammarOption) = func(o *GrammarOption) {
o.ExpectStringsAfterJSON = true
}
func SetPrefix(suffix string) func(*GrammarOption) {
return func(o *GrammarOption) {
o.Prefix = suffix
}
}
func SetPropOrder(order string) func(*GrammarOption) {
return func(o *GrammarOption) {
o.PropOrder = order
}
}
func WithSchemaType(schemaType SchemaConverterType) func(*GrammarOption) {
return func(o *GrammarOption) {
o.SchemaType = schemaType
}
}
func WithFunctionName(name string) func(*GrammarOption) {
return func(o *GrammarOption) {
o.FunctionName = name
}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/grammars/bnf_rules.go | pkg/functions/grammars/bnf_rules.go | package grammars
import (
"encoding/json"
"regexp"
)
var (
PRIMITIVE_RULES = map[string]string{
"boolean": `("true" | "false") space`,
"number": `("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? space`,
"integer": `("-"? ([0-9] | [1-9] [0-9]*)) space`,
"string": `"\"" (
[^"\\] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* "\"" space`,
// TODO: we shouldn't forbid \" and \\ or all unicode and have this branch here,
// however, if we don't have it, the grammar will be ambiguous and
// empirically results are way worse.
"freestring": `(
[^\x00] |
"\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
)* space`,
"null": `"null" space`,
}
INVALID_RULE_CHARS_RE = regexp.MustCompile(`[^a-zA-Z0-9-]+`)
GRAMMAR_LITERAL_ESCAPE_RE = regexp.MustCompile(`[\r\n"]`)
GRAMMAR_LITERAL_ESCAPES = map[string]string{
"\r": `\r`,
"\n": `\n`,
`"`: `\"`,
}
)
const (
SPACE_RULE = `" "?`
arrayNewLines = `arr ::=
"[\n" (
realvalue
(",\n" realvalue)*
)? "]"`
array = `arr ::=
"[" (
realvalue
("," realvalue)*
)? "]"`
)
func jsonString(v interface{}) (string, error) {
b, err := json.Marshal(v)
if err != nil {
return "", err
}
return string(b), nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/functions/grammars/grammars_suite_test.go | pkg/functions/grammars/grammars_suite_test.go | package grammars_test
import (
"testing"
. "github.com/mudler/LocalAI/pkg/functions"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
func TestGrammar(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Grammar test suite")
}
func createFunction(field1 string, field2 string, name string, properties map[string]interface{}) map[string]interface{} {
property := map[string]interface{}{}
property[field1] = FunctionName{Const: name}
property[field2] = Argument{
Type: "object",
Properties: properties,
}
return property
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/xsync/map_test.go | pkg/xsync/map_test.go | package xsync_test
import (
. "github.com/mudler/LocalAI/pkg/xsync"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("SyncMap", func() {
Context("Syncmap", func() {
It("sets and gets", func() {
m := NewSyncedMap[string, string]()
m.Set("foo", "bar")
Expect(m.Get("foo")).To(Equal("bar"))
})
It("deletes", func() {
m := NewSyncedMap[string, string]()
m.Set("foo", "bar")
m.Delete("foo")
Expect(m.Get("foo")).To(Equal(""))
Expect(m.Exists("foo")).To(Equal(false))
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/xsync/sync_suite_test.go | pkg/xsync/sync_suite_test.go | package xsync_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
func TestSync(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "LocalAI sync test")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/pkg/xsync/map.go | pkg/xsync/map.go | package xsync
import (
"sync"
)
type SyncedMap[K comparable, V any] struct {
mu sync.RWMutex
m map[K]V
}
func NewSyncedMap[K comparable, V any]() *SyncedMap[K, V] {
return &SyncedMap[K, V]{
m: make(map[K]V),
}
}
func (m *SyncedMap[K, V]) Map() map[K]V {
m.mu.RLock()
defer m.mu.RUnlock()
return m.m
}
func (m *SyncedMap[K, V]) Get(key K) V {
m.mu.RLock()
defer m.mu.RUnlock()
return m.m[key]
}
func (m *SyncedMap[K, V]) Keys() []K {
m.mu.RLock()
defer m.mu.RUnlock()
keys := make([]K, 0, len(m.m))
for k := range m.m {
keys = append(keys, k)
}
return keys
}
func (m *SyncedMap[K, V]) Values() []V {
m.mu.RLock()
defer m.mu.RUnlock()
values := make([]V, 0, len(m.m))
for _, v := range m.m {
values = append(values, v)
}
return values
}
func (m *SyncedMap[K, V]) Len() int {
m.mu.RLock()
defer m.mu.RUnlock()
return len(m.m)
}
func (m *SyncedMap[K, V]) Iterate(f func(key K, value V) bool) {
m.mu.RLock()
defer m.mu.RUnlock()
for k, v := range m.m {
if !f(k, v) {
break
}
}
}
func (m *SyncedMap[K, V]) Set(key K, value V) {
m.mu.Lock()
m.m[key] = value
m.mu.Unlock()
}
func (m *SyncedMap[K, V]) Delete(key K) {
m.mu.Lock()
delete(m.m, key)
m.mu.Unlock()
}
func (m *SyncedMap[K, V]) Exists(key K) bool {
m.mu.RLock()
defer m.mu.RUnlock()
_, ok := m.m[key]
return ok
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/cmd/launcher/icon.go | cmd/launcher/icon.go | package main
import (
_ "embed"
"fyne.io/fyne/v2"
)
//go:embed logo.png
var logoData []byte
// resourceIconPng is the LocalAI logo icon
var resourceIconPng = &fyne.StaticResource{
StaticName: "logo.png",
StaticContent: logoData,
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/cmd/launcher/main.go | cmd/launcher/main.go | package main
import (
"log"
"fyne.io/fyne/v2"
"fyne.io/fyne/v2/app"
"fyne.io/fyne/v2/driver/desktop"
coreLauncher "github.com/mudler/LocalAI/cmd/launcher/internal"
"github.com/mudler/LocalAI/pkg/signals"
)
func main() {
// Create the application with unique ID
myApp := app.NewWithID("com.localai.launcher")
myApp.SetIcon(resourceIconPng)
myWindow := myApp.NewWindow("LocalAI Launcher")
myWindow.Resize(fyne.NewSize(800, 600))
// Create the launcher UI
ui := coreLauncher.NewLauncherUI()
// Initialize the launcher with UI context
launcher := coreLauncher.NewLauncher(ui, myWindow, myApp)
// Setup the UI
content := ui.CreateMainUI(launcher)
myWindow.SetContent(content)
// Setup window close behavior - minimize to tray instead of closing
myWindow.SetCloseIntercept(func() {
myWindow.Hide()
})
// Setup system tray using Fyne's built-in approach``
if desk, ok := myApp.(desktop.App); ok {
// Create a dynamic systray manager
systray := coreLauncher.NewSystrayManager(launcher, myWindow, desk, myApp, resourceIconPng)
launcher.SetSystray(systray)
}
// Setup signal handling for graceful shutdown
signals.RegisterGracefulTerminationHandler(func() {
// Perform cleanup
if err := launcher.Shutdown(); err != nil {
log.Printf("Error during shutdown: %v", err)
}
})
// Initialize the launcher state
go func() {
if err := launcher.Initialize(); err != nil {
log.Printf("Failed to initialize launcher: %v", err)
if launcher.GetUI() != nil {
launcher.GetUI().UpdateStatus("Failed to initialize: " + err.Error())
}
} else {
// Load configuration into UI
launcher.GetUI().LoadConfiguration()
launcher.GetUI().UpdateStatus("Ready")
// Show welcome window if configured to do so
config := launcher.GetConfig()
if *config.ShowWelcome {
launcher.GetUI().ShowWelcomeWindow()
}
}
}()
// Run the application in background (window only shown when "Settings" is clicked)
myApp.Run()
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/cmd/launcher/internal/launcher_test.go | cmd/launcher/internal/launcher_test.go | package launcher_test
import (
"os"
"path/filepath"
"strings"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"fyne.io/fyne/v2/app"
launcher "github.com/mudler/LocalAI/cmd/launcher/internal"
)
var _ = Describe("Launcher", func() {
var (
launcherInstance *launcher.Launcher
tempDir string
)
BeforeEach(func() {
var err error
tempDir, err = os.MkdirTemp("", "launcher-test-*")
Expect(err).ToNot(HaveOccurred())
ui := launcher.NewLauncherUI()
app := app.NewWithID("com.localai.launcher")
launcherInstance = launcher.NewLauncher(ui, nil, app)
})
AfterEach(func() {
os.RemoveAll(tempDir)
})
Describe("NewLauncher", func() {
It("should create a launcher with default configuration", func() {
Expect(launcherInstance.GetConfig()).ToNot(BeNil())
})
})
Describe("Initialize", func() {
It("should set default paths when not configured", func() {
err := launcherInstance.Initialize()
Expect(err).ToNot(HaveOccurred())
config := launcherInstance.GetConfig()
Expect(config.ModelsPath).ToNot(BeEmpty())
Expect(config.BackendsPath).ToNot(BeEmpty())
})
It("should set default ShowWelcome to true", func() {
err := launcherInstance.Initialize()
Expect(err).ToNot(HaveOccurred())
config := launcherInstance.GetConfig()
Expect(config.ShowWelcome).To(BeTrue())
Expect(config.Address).To(Equal("127.0.0.1:8080"))
Expect(config.LogLevel).To(Equal("info"))
})
It("should create models and backends directories", func() {
// Set custom paths for testing
config := launcherInstance.GetConfig()
config.ModelsPath = filepath.Join(tempDir, "models")
config.BackendsPath = filepath.Join(tempDir, "backends")
launcherInstance.SetConfig(config)
err := launcherInstance.Initialize()
Expect(err).ToNot(HaveOccurred())
// Check if directories were created
_, err = os.Stat(config.ModelsPath)
Expect(err).ToNot(HaveOccurred())
_, err = os.Stat(config.BackendsPath)
Expect(err).ToNot(HaveOccurred())
})
})
Describe("Configuration", func() {
It("should get and set configuration", func() {
config := launcherInstance.GetConfig()
config.ModelsPath = "/test/models"
config.BackendsPath = "/test/backends"
config.Address = ":9090"
config.LogLevel = "debug"
err := launcherInstance.SetConfig(config)
Expect(err).ToNot(HaveOccurred())
retrievedConfig := launcherInstance.GetConfig()
Expect(retrievedConfig.ModelsPath).To(Equal("/test/models"))
Expect(retrievedConfig.BackendsPath).To(Equal("/test/backends"))
Expect(retrievedConfig.Address).To(Equal(":9090"))
Expect(retrievedConfig.LogLevel).To(Equal("debug"))
})
})
Describe("WebUI URL", func() {
It("should return correct WebUI URL for localhost", func() {
config := launcherInstance.GetConfig()
config.Address = ":8080"
launcherInstance.SetConfig(config)
url := launcherInstance.GetWebUIURL()
Expect(url).To(Equal("http://localhost:8080"))
})
It("should return correct WebUI URL for full address", func() {
config := launcherInstance.GetConfig()
config.Address = "127.0.0.1:8080"
launcherInstance.SetConfig(config)
url := launcherInstance.GetWebUIURL()
Expect(url).To(Equal("http://127.0.0.1:8080"))
})
It("should handle http prefix correctly", func() {
config := launcherInstance.GetConfig()
config.Address = "http://localhost:8080"
launcherInstance.SetConfig(config)
url := launcherInstance.GetWebUIURL()
Expect(url).To(Equal("http://localhost:8080"))
})
})
Describe("Process Management", func() {
It("should not be running initially", func() {
Expect(launcherInstance.IsRunning()).To(BeFalse())
})
It("should handle start when binary doesn't exist", func() {
err := launcherInstance.StartLocalAI()
Expect(err).To(HaveOccurred())
// Could be either "not found" or "permission denied" depending on test environment
errMsg := err.Error()
hasExpectedError := strings.Contains(errMsg, "LocalAI binary") ||
strings.Contains(errMsg, "permission denied")
Expect(hasExpectedError).To(BeTrue(), "Expected error about binary not found or permission denied, got: %s", errMsg)
})
It("should handle stop when not running", func() {
err := launcherInstance.StopLocalAI()
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("LocalAI is not running"))
})
})
Describe("Logs", func() {
It("should return empty logs initially", func() {
logs := launcherInstance.GetLogs()
Expect(logs).To(BeEmpty())
})
})
Describe("Version Management", func() {
It("should return empty version when no binary installed", func() {
version := launcherInstance.GetCurrentVersion()
Expect(version).To(BeEmpty()) // No binary installed in test environment
})
It("should handle update checks", func() {
// This test would require mocking HTTP responses
// For now, we'll just test that the method doesn't panic
_, _, err := launcherInstance.CheckForUpdates()
// We expect either success or a network error, not a panic
if err != nil {
// Network error is acceptable in tests
Expect(err.Error()).To(ContainSubstring("failed to fetch"))
}
})
})
})
var _ = Describe("Config", func() {
It("should have proper JSON tags", func() {
config := &launcher.Config{
ModelsPath: "/test/models",
BackendsPath: "/test/backends",
Address: ":8080",
AutoStart: true,
LogLevel: "info",
EnvironmentVars: map[string]string{"TEST": "value"},
}
Expect(config.ModelsPath).To(Equal("/test/models"))
Expect(config.BackendsPath).To(Equal("/test/backends"))
Expect(config.Address).To(Equal(":8080"))
Expect(config.AutoStart).To(BeTrue())
Expect(config.LogLevel).To(Equal("info"))
Expect(config.EnvironmentVars).To(HaveKeyWithValue("TEST", "value"))
})
It("should initialize environment variables map", func() {
config := &launcher.Config{}
Expect(config.EnvironmentVars).To(BeNil())
ui := launcher.NewLauncherUI()
app := app.NewWithID("com.localai.launcher")
launcher := launcher.NewLauncher(ui, nil, app)
err := launcher.Initialize()
Expect(err).ToNot(HaveOccurred())
retrievedConfig := launcher.GetConfig()
Expect(retrievedConfig.EnvironmentVars).ToNot(BeNil())
Expect(retrievedConfig.EnvironmentVars).To(BeEmpty())
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/cmd/launcher/internal/launcher.go | cmd/launcher/internal/launcher.go | package launcher
import (
"bufio"
"context"
"encoding/json"
"fmt"
"io"
"log"
"net/url"
"os"
"os/exec"
"path/filepath"
"strings"
"sync"
"syscall"
"time"
"fyne.io/fyne/v2"
"fyne.io/fyne/v2/container"
"fyne.io/fyne/v2/dialog"
"fyne.io/fyne/v2/widget"
)
// Config represents the launcher configuration. It is persisted as JSON at
// ~/.localai/launcher.json (see loadConfig/saveConfig).
type Config struct {
	// ModelsPath is the directory passed to LocalAI via --models-path.
	ModelsPath string `json:"models_path"`
	// BackendsPath is the directory passed to LocalAI via --backends-path.
	BackendsPath string `json:"backends_path"`
	// Address is the host:port LocalAI listens on (--address).
	Address string `json:"address"`
	AutoStart bool `json:"auto_start"`
	// StartOnBoot, when true, makes Initialize start LocalAI automatically.
	StartOnBoot bool `json:"start_on_boot"`
	// LogLevel is forwarded to LocalAI via --log-level.
	LogLevel string `json:"log_level"`
	// EnvironmentVars are extra KEY=VALUE pairs added to the LocalAI
	// process environment. NOTE(review): these may contain secrets.
	EnvironmentVars map[string]string `json:"environment_vars"`
	// ShowWelcome is a tri-state: nil means "not yet decided" and is
	// defaulted to true in Initialize.
	ShowWelcome *bool `json:"show_welcome"`
}
// Launcher represents the main launcher application. It owns the LocalAI
// child process, its captured logs, the persisted configuration, and the
// UI/systray references used for status notifications.
type Launcher struct {
	// Core components
	releaseManager *ReleaseManager
	config         *Config
	ui             *LauncherUI
	systray        *SystrayManager
	ctx            context.Context
	window         fyne.Window
	app            fyne.App

	// Process management
	localaiCmd *exec.Cmd
	// isRunning tracks the child process state.
	// NOTE(review): read/written from several goroutines without
	// synchronization — consider atomic.Bool; verify with -race.
	isRunning bool
	// logBuffer accumulates process output; guarded by logMutex.
	logBuffer *strings.Builder
	logMutex  sync.RWMutex
	// statusChannel is a best-effort (buffered, drop-on-full) status feed.
	statusChannel chan string

	// Logging
	logFile *os.File
	logPath string

	// UI state
	lastUpdateCheck time.Time
}
// NewLauncher creates a new launcher instance wired to the given UI,
// window and Fyne app. Call Initialize before using it.
func NewLauncher(ui *LauncherUI, window fyne.Window, app fyne.App) *Launcher {
	l := new(Launcher)
	l.releaseManager = NewReleaseManager()
	l.config = new(Config)
	l.logBuffer = new(strings.Builder)
	// Buffered so status publishers never block the UI path.
	l.statusChannel = make(chan string, 100)
	l.ctx = context.Background()
	l.ui = ui
	l.window = window
	l.app = app
	return l
}
// setupLogging creates a timestamped log file under <data>/logs that
// receives the LocalAI process output.
func (l *Launcher) setupLogging() error {
	logsDir := filepath.Join(l.GetDataPath(), "logs")
	if err := os.MkdirAll(logsDir, 0755); err != nil {
		return fmt.Errorf("failed to create logs directory: %w", err)
	}
	// One file per launcher session, named by start time.
	stamp := time.Now().Format("2006-01-02_15-04-05")
	l.logPath = filepath.Join(logsDir, fmt.Sprintf("localai_%s.log", stamp))
	f, err := os.Create(l.logPath)
	if err != nil {
		return fmt.Errorf("failed to create log file: %w", err)
	}
	l.logFile = f
	return nil
}
// Initialize sets up the launcher: logging, configuration (with defaults
// applied and persisted), cleanup of partial downloads, optional
// auto-start, and a background periodic update check. It must be called
// before starting or stopping LocalAI.
func (l *Launcher) Initialize() error {
	if l.app == nil {
		return fmt.Errorf("app is nil")
	}
	log.Printf("Initializing launcher...")
	// Setup logging
	if err := l.setupLogging(); err != nil {
		return fmt.Errorf("failed to setup logging: %w", err)
	}
	// Load configuration
	log.Printf("Loading configuration...")
	if err := l.loadConfig(); err != nil {
		return fmt.Errorf("failed to load config: %w", err)
	}
	log.Printf("Configuration loaded, current state: ModelsPath=%s, BackendsPath=%s, Address=%s, LogLevel=%s",
		l.config.ModelsPath, l.config.BackendsPath, l.config.Address, l.config.LogLevel)
	// Clean up any partial downloads
	log.Printf("Cleaning up partial downloads...")
	if err := l.releaseManager.CleanupPartialDownloads(); err != nil {
		log.Printf("Warning: failed to cleanup partial downloads: %v", err)
	}
	// Set default paths if not configured (only if not already loaded from config)
	if l.config.ModelsPath == "" {
		homeDir, _ := os.UserHomeDir()
		l.config.ModelsPath = filepath.Join(homeDir, ".localai", "models")
		log.Printf("Setting default ModelsPath: %s", l.config.ModelsPath)
	}
	if l.config.BackendsPath == "" {
		homeDir, _ := os.UserHomeDir()
		l.config.BackendsPath = filepath.Join(homeDir, ".localai", "backends")
		log.Printf("Setting default BackendsPath: %s", l.config.BackendsPath)
	}
	if l.config.Address == "" {
		l.config.Address = "127.0.0.1:8080"
		log.Printf("Setting default Address: %s", l.config.Address)
	}
	if l.config.LogLevel == "" {
		l.config.LogLevel = "info"
		log.Printf("Setting default LogLevel: %s", l.config.LogLevel)
	}
	if l.config.EnvironmentVars == nil {
		l.config.EnvironmentVars = make(map[string]string)
		log.Printf("Initializing empty EnvironmentVars map")
	}
	// Set default welcome window preference.
	// (Fix: this previously did `true := true`, shadowing the predeclared
	// identifier "true".)
	if l.config.ShowWelcome == nil {
		show := true
		l.config.ShowWelcome = &show
		log.Printf("Setting default ShowWelcome: true")
	}
	// Create directories; failures are logged rather than fatal — a hard
	// error surfaces later from StartLocalAI if the paths are unusable.
	if err := os.MkdirAll(l.config.ModelsPath, 0755); err != nil {
		log.Printf("Warning: failed to create models directory: %v", err)
	}
	if err := os.MkdirAll(l.config.BackendsPath, 0755); err != nil {
		log.Printf("Warning: failed to create backends directory: %v", err)
	}
	// Save the configuration with default values
	if err := l.saveConfig(); err != nil {
		log.Printf("Warning: failed to save default configuration: %v", err)
	}
	// Auto-start only after defaults have been applied. (Fix: this
	// previously ran before the default paths were set, so a fresh config
	// with StartOnBoot could launch LocalAI with empty models/backends
	// paths, and the returned error was silently discarded.)
	if l.config.StartOnBoot {
		if err := l.StartLocalAI(); err != nil {
			log.Printf("Warning: failed to auto-start LocalAI: %v", err)
		}
	}
	// System tray is now handled in main.go using Fyne's built-in approach
	// Check if LocalAI is installed
	if !l.releaseManager.IsLocalAIInstalled() {
		log.Printf("No LocalAI installation found")
		fyne.Do(func() {
			l.updateStatus("No LocalAI installation found")
			if l.ui != nil {
				// Show dialog offering to download LocalAI
				l.showDownloadLocalAIDialog()
			}
		})
	}
	// Check for updates periodically
	go l.periodicUpdateCheck()
	return nil
}
// StartLocalAI starts the LocalAI server as a child process.
// It verifies the installed binary, wires stdout/stderr into the log
// monitor, and spawns goroutines that (a) wait for process exit and
// (b) detect a dead process ~10s after launch.
// Returns an error if LocalAI is already running, the binary is missing
// or corrupted, or the process fails to spawn.
func (l *Launcher) StartLocalAI() error {
	// NOTE(review): isRunning is read/written here and in the goroutines
	// below without synchronization — verify with -race.
	if l.isRunning {
		return fmt.Errorf("LocalAI is already running")
	}
	// Verify binary integrity before starting
	if err := l.releaseManager.VerifyInstalledBinary(); err != nil {
		// Binary is corrupted, remove it and offer to reinstall
		binaryPath := l.releaseManager.GetBinaryPath()
		if removeErr := os.Remove(binaryPath); removeErr != nil {
			log.Printf("Failed to remove corrupted binary: %v", removeErr)
		}
		return fmt.Errorf("LocalAI binary is corrupted: %v. Please reinstall LocalAI", err)
	}
	binaryPath := l.releaseManager.GetBinaryPath()
	if _, err := os.Stat(binaryPath); os.IsNotExist(err) {
		return fmt.Errorf("LocalAI binary not found. Please download a release first")
	}
	// Build command arguments from the persisted configuration.
	args := []string{
		"run",
		"--models-path", l.config.ModelsPath,
		"--backends-path", l.config.BackendsPath,
		"--address", l.config.Address,
		"--log-level", l.config.LogLevel,
	}
	// CommandContext ties the child's lifetime to the launcher context.
	l.localaiCmd = exec.CommandContext(l.ctx, binaryPath, args...)
	// Apply environment variables on top of the inherited environment.
	if len(l.config.EnvironmentVars) > 0 {
		env := os.Environ()
		for key, value := range l.config.EnvironmentVars {
			env = append(env, fmt.Sprintf("%s=%s", key, value))
		}
		l.localaiCmd.Env = env
	}
	// Setup logging: pipes must be created before Start.
	stdout, err := l.localaiCmd.StdoutPipe()
	if err != nil {
		return fmt.Errorf("failed to create stdout pipe: %w", err)
	}
	stderr, err := l.localaiCmd.StderrPipe()
	if err != nil {
		return fmt.Errorf("failed to create stderr pipe: %w", err)
	}
	// Start the process
	if err := l.localaiCmd.Start(); err != nil {
		return fmt.Errorf("failed to start LocalAI: %w", err)
	}
	l.isRunning = true
	fyne.Do(func() {
		l.updateStatus("LocalAI is starting...")
		l.updateRunningState(true)
	})
	// Start log monitoring (one goroutine per pipe).
	go l.monitorLogs(stdout, "STDOUT")
	go l.monitorLogs(stderr, "STDERR")
	// Monitor process: Wait blocks until exit, then flips state and
	// reports the outcome to the UI.
	go func() {
		// Wait for process to start or fail
		err := l.localaiCmd.Wait()
		l.isRunning = false
		fyne.Do(func() {
			l.updateRunningState(false)
			if err != nil {
				l.updateStatus(fmt.Sprintf("LocalAI stopped with error: %v", err))
			} else {
				l.updateStatus("LocalAI stopped")
			}
		})
	}()
	// Add startup timeout detection: signal 0 probes process liveness
	// without affecting it.
	go func() {
		time.Sleep(10 * time.Second) // Wait 10 seconds for startup
		if l.isRunning {
			// Check if process is still alive
			if l.localaiCmd.Process != nil {
				if err := l.localaiCmd.Process.Signal(syscall.Signal(0)); err != nil {
					// Process is dead, mark as not running
					l.isRunning = false
					fyne.Do(func() {
						l.updateRunningState(false)
						l.updateStatus("LocalAI failed to start properly")
					})
				}
			}
		}
	}()
	return nil
}
// StopLocalAI stops the LocalAI server: it sends SIGINT for a graceful
// shutdown and falls back to Kill if the signal cannot be delivered.
// Returns an error if LocalAI is not running or the kill fails.
func (l *Launcher) StopLocalAI() error {
	if !l.isRunning || l.localaiCmd == nil {
		return fmt.Errorf("LocalAI is not running")
	}
	// Gracefully terminate the process
	if err := l.localaiCmd.Process.Signal(os.Interrupt); err != nil {
		// If graceful termination fails, force kill
		if killErr := l.localaiCmd.Process.Kill(); killErr != nil {
			return fmt.Errorf("failed to kill LocalAI process: %w", killErr)
		}
	}
	// NOTE(review): state is flipped here immediately; the Wait goroutine
	// in StartLocalAI will also flip it when the process actually exits.
	l.isRunning = false
	fyne.Do(func() {
		l.updateRunningState(false)
		l.updateStatus("LocalAI stopped")
	})
	return nil
}
// IsRunning returns whether LocalAI is currently running.
// NOTE(review): unsynchronized read of isRunning; callers on other
// goroutines may see a stale value.
func (l *Launcher) IsRunning() bool {
	return l.isRunning
}
// Shutdown performs cleanup when the application is closing: it stops a
// running LocalAI instance and closes the process log file.
func (l *Launcher) Shutdown() error {
	log.Printf("Launcher shutting down, stopping LocalAI...")
	if l.isRunning {
		if stopErr := l.StopLocalAI(); stopErr != nil {
			log.Printf("Error stopping LocalAI during shutdown: %v", stopErr)
		}
	}
	if f := l.logFile; f != nil {
		if closeErr := f.Close(); closeErr != nil {
			log.Printf("Error closing log file: %v", closeErr)
		}
		l.logFile = nil
	}
	log.Printf("Launcher shutdown complete")
	return nil
}
// GetLogs returns the current log buffer (the full captured, size-capped
// process output). Safe for concurrent use via logMutex.
func (l *Launcher) GetLogs() string {
	l.logMutex.RLock()
	defer l.logMutex.RUnlock()
	return l.logBuffer.String()
}
// GetRecentLogs returns the most recent logs (last 50 lines) for better
// error display. Safe for concurrent use via logMutex.
func (l *Launcher) GetRecentLogs() string {
	const keep = 50
	l.logMutex.RLock()
	defer l.logMutex.RUnlock()
	lines := strings.Split(l.logBuffer.String(), "\n")
	if n := len(lines); n > keep {
		lines = lines[n-keep:]
	}
	return strings.Join(lines, "\n")
}
// GetConfig returns the current configuration.
// Note: this is the live pointer, not a copy — mutations by the caller
// are visible to the launcher.
func (l *Launcher) GetConfig() *Config {
	return l.config
}
// SetConfig replaces the configuration wholesale and persists it to disk.
// Returns the save error, if any; the in-memory config is replaced either way.
func (l *Launcher) SetConfig(config *Config) error {
	l.config = config
	return l.saveConfig()
}
// GetUI returns the launcher's UI component (may be nil in headless/test use).
func (l *Launcher) GetUI() *LauncherUI {
	return l.ui
}
// SetSystray attaches the systray manager so status/running-state changes
// can be propagated to the tray menu.
func (l *Launcher) SetSystray(systray *SystrayManager) {
	l.systray = systray
}
// GetReleaseManager returns the release manager used for install/update
// operations.
func (l *Launcher) GetReleaseManager() *ReleaseManager {
	return l.releaseManager
}
// GetWebUIURL returns the URL for the WebUI, derived from the configured
// listen address.
func (l *Launcher) GetWebUIURL() string {
	addr := l.config.Address
	// ":8080"-style addresses carry no host; assume localhost.
	if strings.HasPrefix(addr, ":") {
		addr = "localhost" + addr
	}
	if strings.HasPrefix(addr, "http") {
		return addr
	}
	return "http://" + addr
}
// GetDataPath returns the path where LocalAI data and logs are stored:
// the parent of the configured models directory when set, otherwise
// ~/.localai (or "." if the home directory cannot be determined).
func (l *Launcher) GetDataPath() string {
	if cfg := l.config; cfg != nil && cfg.ModelsPath != "" {
		return filepath.Dir(cfg.ModelsPath)
	}
	home, err := os.UserHomeDir()
	if err != nil {
		return "."
	}
	return filepath.Join(home, ".localai")
}
// CheckForUpdates asks the release manager whether a newer version exists.
// On success it records the check time and returns (available, version).
func (l *Launcher) CheckForUpdates() (bool, string, error) {
	log.Printf("CheckForUpdates: checking for available updates...")
	available, version, err := l.releaseManager.IsUpdateAvailable()
	if err == nil {
		log.Printf("CheckForUpdates: result - available=%v, version=%s", available, version)
		l.lastUpdateCheck = time.Now()
		return available, version, nil
	}
	log.Printf("CheckForUpdates: error occurred: %v", err)
	return false, "", err
}
// DownloadUpdate downloads and installs the given version, reporting
// progress in [0, 1] through progressCallback.
func (l *Launcher) DownloadUpdate(version string, progressCallback func(float64)) error {
	return l.releaseManager.DownloadRelease(version, progressCallback)
}
// GetCurrentVersion returns the currently installed LocalAI version
// (empty when no binary is installed).
func (l *Launcher) GetCurrentVersion() string {
	return l.releaseManager.GetInstalledVersion()
}
// GetCurrentStatus returns the current status.
// NOTE(review): this CONSUMES one pending message from statusChannel when
// one is available; repeated calls can drain updates other readers might
// expect. Falls back to a synthesized status when the channel is empty.
func (l *Launcher) GetCurrentStatus() string {
	select {
	case status := <-l.statusChannel:
		return status
	default:
		if l.isRunning {
			return "LocalAI is running"
		}
		return "Ready"
	}
}
// GetLastStatus returns the last known status without consuming from the
// status channel: running > not-installed > ready.
func (l *Launcher) GetLastStatus() string {
	switch {
	case l.isRunning:
		return "LocalAI is running"
	case !l.releaseManager.IsLocalAIInstalled():
		return "LocalAI not installed"
	default:
		return "Ready"
	}
}
// githubReleaseNotesURL builds the GitHub release-notes URL for a version.
func (l *Launcher) githubReleaseNotesURL(version string) (*url.URL, error) {
	rm := l.releaseManager
	raw := fmt.Sprintf("https://github.com/%s/%s/releases/tag/%s",
		rm.GitHubOwner, rm.GitHubRepo, version)
	return url.Parse(raw)
}
// showDownloadLocalAIDialog shows a dialog offering to download LocalAI.
// It builds a standalone window (not a modal on the main window) so it can
// be shown even when the main window is hidden in the tray.
func (l *Launcher) showDownloadLocalAIDialog() {
	if l.app == nil {
		log.Printf("Cannot show download dialog: app is nil")
		return
	}
	// All widget construction must happen on the Fyne UI thread.
	fyne.DoAndWait(func() {
		// Create a standalone window for the download dialog
		dialogWindow := l.app.NewWindow("LocalAI Installation Required")
		dialogWindow.Resize(fyne.NewSize(500, 350))
		dialogWindow.CenterOnScreen()
		dialogWindow.SetCloseIntercept(func() {
			dialogWindow.Close()
		})
		// Create the dialog content
		titleLabel := widget.NewLabel("LocalAI Not Found")
		titleLabel.TextStyle = fyne.TextStyle{Bold: true}
		titleLabel.Alignment = fyne.TextAlignCenter
		messageLabel := widget.NewLabel("LocalAI is not installed on your system.\n\nWould you like to download and install the latest version?")
		messageLabel.Wrapping = fyne.TextWrapWord
		messageLabel.Alignment = fyne.TextAlignCenter
		// Buttons
		downloadButton := widget.NewButton("Download & Install", func() {
			dialogWindow.Close()
			l.downloadAndInstallLocalAI()
			// Refresh the tray menu so the install entry disappears.
			if l.systray != nil {
				l.systray.recreateMenu()
			}
		})
		downloadButton.Importance = widget.HighImportance
		// Release notes button
		releaseNotesButton := widget.NewButton("View Release Notes", func() {
			// Get latest release info and open release notes.
			// Runs on a goroutine: GetLatestRelease performs network I/O.
			go func() {
				release, err := l.releaseManager.GetLatestRelease()
				if err != nil {
					log.Printf("Failed to get latest release info: %v", err)
					return
				}
				releaseNotesURL, err := l.githubReleaseNotesURL(release.Version)
				if err != nil {
					log.Printf("Failed to parse URL: %v", err)
					return
				}
				l.app.OpenURL(releaseNotesURL)
			}()
		})
		skipButton := widget.NewButton("Skip for Now", func() {
			dialogWindow.Close()
		})
		// Layout - put release notes button above the main action buttons
		actionButtons := container.NewHBox(skipButton, downloadButton)
		content := container.NewVBox(
			titleLabel,
			widget.NewSeparator(),
			messageLabel,
			widget.NewSeparator(),
			releaseNotesButton,
			widget.NewSeparator(),
			actionButtons,
		)
		dialogWindow.SetContent(content)
		dialogWindow.Show()
	})
}
// downloadAndInstallLocalAI resolves the latest LocalAI version and, when
// one is found, opens the download-progress window which performs the
// actual download. Errors are surfaced through showDownloadError.
func (l *Launcher) downloadAndInstallLocalAI() {
	if l.app == nil {
		log.Printf("Cannot download LocalAI: app is nil")
		return
	}
	// First check what the latest version is (network I/O, so off the UI
	// thread).
	go func() {
		log.Printf("Checking for latest LocalAI version...")
		available, version, err := l.CheckForUpdates()
		if err != nil {
			log.Printf("Failed to check for updates: %v", err)
			l.showDownloadError("Failed to check for latest version", err.Error())
			return
		}
		if !available {
			log.Printf("No updates available, but LocalAI is not installed")
			l.showDownloadError("No Version Available", "Could not determine the latest LocalAI version. Please check your internet connection and try again.")
			return
		}
		log.Printf("Latest version available: %s", version)
		// Show progress window with the specific version
		l.showDownloadProgress(version, fmt.Sprintf("Downloading LocalAI %s...", version))
	}()
}
// showDownloadError shows an error dialog for download failures.
func (l *Launcher) showDownloadError(title, message string) {
	fyne.DoAndWait(func() {
		// Standalone window so the error is visible without the main window.
		win := l.app.NewWindow("Download Error")
		win.Resize(fyne.NewSize(400, 200))
		win.CenterOnScreen()
		win.SetCloseIntercept(win.Close)

		heading := widget.NewLabel(title)
		heading.TextStyle = fyne.TextStyle{Bold: true}
		heading.Alignment = fyne.TextAlignCenter

		body := widget.NewLabel(message)
		body.Wrapping = fyne.TextWrapWord
		body.Alignment = fyne.TextAlignCenter

		dismiss := widget.NewButton("Close", win.Close)

		win.SetContent(container.NewVBox(
			heading,
			widget.NewSeparator(),
			body,
			widget.NewSeparator(),
			dismiss,
		))
		win.Show()
	})
}
// showDownloadProgress shows a standalone progress window and starts the
// download of the given version in the background, updating the bar via
// the progress callback and closing with a confirmation dialog on success.
func (l *Launcher) showDownloadProgress(version, title string) {
	fyne.DoAndWait(func() {
		// Create progress window
		progressWindow := l.app.NewWindow("Downloading LocalAI")
		progressWindow.Resize(fyne.NewSize(400, 250))
		progressWindow.CenterOnScreen()
		progressWindow.SetCloseIntercept(func() {
			progressWindow.Close()
		})
		// Progress bar
		progressBar := widget.NewProgressBar()
		progressBar.SetValue(0)
		// Status label
		statusLabel := widget.NewLabel("Preparing download...")
		// Release notes button
		releaseNotesButton := widget.NewButton("View Release Notes", func() {
			releaseNotesURL, err := l.githubReleaseNotesURL(version)
			if err != nil {
				log.Printf("Failed to parse URL: %v", err)
				return
			}
			l.app.OpenURL(releaseNotesURL)
		})
		// Progress container
		progressContainer := container.NewVBox(
			widget.NewLabel(title),
			progressBar,
			statusLabel,
			widget.NewSeparator(),
			releaseNotesButton,
		)
		progressWindow.SetContent(progressContainer)
		progressWindow.Show()
		// Start download in background; UI mutations are marshalled back
		// onto the Fyne thread with fyne.Do.
		go func() {
			err := l.DownloadUpdate(version, func(progress float64) {
				// Update progress bar (progress is in [0, 1]).
				fyne.Do(func() {
					progressBar.SetValue(progress)
					percentage := int(progress * 100)
					statusLabel.SetText(fmt.Sprintf("Downloading... %d%%", percentage))
				})
			})
			// Handle completion
			fyne.Do(func() {
				if err != nil {
					statusLabel.SetText(fmt.Sprintf("Download failed: %v", err))
					// Show error dialog
					dialog.ShowError(err, progressWindow)
				} else {
					statusLabel.SetText("Download completed successfully!")
					progressBar.SetValue(1.0)
					// Show success dialog
					dialog.ShowConfirm("Installation Complete",
						"LocalAI has been downloaded and installed successfully. You can now start LocalAI from the launcher.",
						func(close bool) {
							progressWindow.Close()
							// Update status and refresh systray menu
							l.updateStatus("LocalAI installed successfully")
							if l.systray != nil {
								l.systray.recreateMenu()
							}
						}, progressWindow)
				}
			})
		}()
	})
}
// monitorLogs streams one of the LocalAI process pipes line-by-line into
// the in-memory log buffer, the on-disk log file, and the UI. It also
// watches for the "API server listening" marker to flip the status to
// running. prefix tags each line (e.g. "STDOUT"/"STDERR").
func (l *Launcher) monitorLogs(reader io.Reader, prefix string) {
	scanner := bufio.NewScanner(reader)
	// Fix: allow long log lines. bufio.Scanner's default 64KiB token limit
	// would make Scan return false on an oversized line and silently stop
	// log capture for the rest of the process lifetime.
	scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024)
	for scanner.Scan() {
		line := scanner.Text()
		timestamp := time.Now().Format("15:04:05")
		logLine := fmt.Sprintf("[%s] %s: %s\n", timestamp, prefix, line)
		l.logMutex.Lock()
		l.logBuffer.WriteString(logLine)
		// Keep log buffer size reasonable
		if l.logBuffer.Len() > 100000 { // 100KB
			content := l.logBuffer.String()
			// Keep last 50KB
			if len(content) > 50000 {
				l.logBuffer.Reset()
				l.logBuffer.WriteString(content[len(content)-50000:])
			}
		}
		l.logMutex.Unlock()
		// Write to log file if available
		if l.logFile != nil {
			if _, err := l.logFile.WriteString(logLine); err != nil {
				log.Printf("Failed to write to log file: %v", err)
			}
		}
		fyne.Do(func() {
			// Notify UI of new log content
			if l.ui != nil {
				l.ui.OnLogUpdate(logLine)
			}
			// Check for startup completion
			if strings.Contains(line, "API server listening") {
				l.updateStatus("LocalAI is running")
			}
		})
	}
	// Fix: scanner errors were previously dropped; surface them so a
	// broken pipe or read failure doesn't end log capture unnoticed.
	if err := scanner.Err(); err != nil {
		log.Printf("Log monitor (%s) stopped with error: %v", prefix, err)
	}
}
// updateStatus publishes a status string to the status channel (best
// effort) and forwards it to the UI and systray when present.
func (l *Launcher) updateStatus(status string) {
	// Non-blocking send: if nobody is draining the channel, drop it.
	select {
	case l.statusChannel <- status:
	default:
	}
	if ui := l.ui; ui != nil {
		ui.UpdateStatus(status)
	}
	if tray := l.systray; tray != nil {
		tray.UpdateStatus(status)
	}
}
// updateRunningState propagates the running flag to the UI and systray
// components when they are attached.
func (l *Launcher) updateRunningState(isRunning bool) {
	if ui := l.ui; ui != nil {
		ui.UpdateRunningState(isRunning)
	}
	if tray := l.systray; tray != nil {
		tray.UpdateRunningState(isRunning)
	}
}
// periodicUpdateCheck checks for updates every hour until the launcher
// context is cancelled, notifying UI and systray when one is found.
// Intended to run on its own goroutine (started by Initialize).
func (l *Launcher) periodicUpdateCheck() {
	ticker := time.NewTicker(1 * time.Hour)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			available, version, err := l.CheckForUpdates()
			// Errors are intentionally ignored here; the next tick retries.
			if err == nil && available {
				fyne.Do(func() {
					l.updateStatus(fmt.Sprintf("Update available: %s", version))
					if l.systray != nil {
						l.systray.NotifyUpdateAvailable(version)
					}
					if l.ui != nil {
						l.ui.NotifyUpdateAvailable(version)
					}
				})
			}
		case <-l.ctx.Done():
			return
		}
	}
}
// loadConfig loads configuration from ~/.localai/launcher.json into
// l.config, creating a default file when none exists.
func (l *Launcher) loadConfig() error {
	homeDir, err := os.UserHomeDir()
	if err != nil {
		return fmt.Errorf("failed to get home directory: %w", err)
	}
	configPath := filepath.Join(homeDir, ".localai", "launcher.json")
	log.Printf("Loading config from: %s", configPath)
	if _, err := os.Stat(configPath); os.IsNotExist(err) {
		log.Printf("Config file not found, creating default config")
		// Create default config
		return l.saveConfig()
	}
	// Load existing config
	configData, err := os.ReadFile(configPath)
	if err != nil {
		return fmt.Errorf("failed to read config file: %w", err)
	}
	// NOTE(review): the next two log lines dump the full config file and
	// EnvironmentVars, which may contain secrets (API keys); consider
	// redacting before shipping.
	log.Printf("Config file content: %s", string(configData))
	log.Printf("loadConfig: about to unmarshal JSON data")
	if err := json.Unmarshal(configData, l.config); err != nil {
		return fmt.Errorf("failed to parse config file: %w", err)
	}
	log.Printf("loadConfig: JSON unmarshaled successfully")
	log.Printf("Loaded config: ModelsPath=%s, BackendsPath=%s, Address=%s, LogLevel=%s",
		l.config.ModelsPath, l.config.BackendsPath, l.config.Address, l.config.LogLevel)
	log.Printf("Environment vars: %v", l.config.EnvironmentVars)
	return nil
}
// saveConfig persists l.config as indented JSON to
// ~/.localai/launcher.json, creating the directory if needed.
func (l *Launcher) saveConfig() error {
	homeDir, err := os.UserHomeDir()
	if err != nil {
		return fmt.Errorf("failed to get home directory: %w", err)
	}
	configDir := filepath.Join(homeDir, ".localai")
	if err := os.MkdirAll(configDir, 0755); err != nil {
		return fmt.Errorf("failed to create config directory: %w", err)
	}
	// Marshal config to JSON. (Fix: previously the full config — including
	// EnvironmentVars, which may hold API keys — was dumped to the log;
	// only the size is logged now.)
	configData, err := json.MarshalIndent(l.config, "", "  ")
	if err != nil {
		return fmt.Errorf("failed to marshal config: %w", err)
	}
	log.Printf("saveConfig: JSON marshaled successfully, length: %d", len(configData))
	configPath := filepath.Join(configDir, "launcher.json")
	log.Printf("Saving config to: %s", configPath)
	// Fix: 0600 instead of 0644 — the file can contain secrets and is
	// per-user configuration, so other users need no read access.
	if err := os.WriteFile(configPath, configData, 0600); err != nil {
		return fmt.Errorf("failed to write config file: %w", err)
	}
	log.Printf("Config saved successfully")
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/cmd/launcher/internal/launcher_suite_test.go | cmd/launcher/internal/launcher_suite_test.go | package launcher_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestLauncher is the ginkgo entry point for the Launcher test suite.
func TestLauncher(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Launcher Suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/cmd/launcher/internal/systray_manager.go | cmd/launcher/internal/systray_manager.go | package launcher
import (
"fmt"
"log"
"net/url"
"fyne.io/fyne/v2"
"fyne.io/fyne/v2/container"
"fyne.io/fyne/v2/dialog"
"fyne.io/fyne/v2/driver/desktop"
"fyne.io/fyne/v2/widget"
)
// SystrayManager manages the system tray functionality: it owns the tray
// icon/menu and rebuilds the menu whenever launcher state changes.
type SystrayManager struct {
	launcher *Launcher
	window   fyne.Window
	app      fyne.App
	// desk is the desktop-capable Fyne app used to set the tray icon/menu.
	desk desktop.App

	// Menu items that need dynamic updates.
	// NOTE(review): startStopItem is assigned in setupMenu but the menu is
	// rebuilt from scratch by recreateMenu — confirm whether the field is
	// still needed.
	startStopItem      *fyne.MenuItem
	hasUpdateAvailable bool
	latestVersion      string
	icon               *fyne.StaticResource
}
// NewSystrayManager creates a new systray manager and installs the tray
// icon and initial menu on the given desktop app.
// (Fix: the third parameter was previously named "desktop", shadowing the
// imported fyne desktop package inside the function body.)
func NewSystrayManager(launcher *Launcher, window fyne.Window, deskApp desktop.App, app fyne.App, icon *fyne.StaticResource) *SystrayManager {
	sm := &SystrayManager{
		launcher: launcher,
		window:   window,
		app:      app,
		desk:     deskApp,
		icon:     icon,
	}
	sm.setupMenu(deskApp)
	return sm
}
// setupMenu sets up the system tray icon and builds the initial menu.
func (sm *SystrayManager) setupMenu(desk desktop.App) {
	sm.desk = desk
	// Create the start/stop toggle item.
	// NOTE(review): this item is never added to a menu here — recreateMenu
	// builds its own items — so this assignment may be vestigial.
	sm.startStopItem = fyne.NewMenuItem("Start LocalAI", func() {
		sm.toggleLocalAI()
	})
	desk.SetSystemTrayIcon(sm.icon)
	// Initialize the menu state using recreateMenu
	sm.recreateMenu()
}
// toggleLocalAI starts or stops LocalAI based on the current running
// state. The actual work runs on a goroutine so the tray menu stays
// responsive; failures are surfaced via dialogs.
func (sm *SystrayManager) toggleLocalAI() {
	// Decide the action synchronously, matching the state at click time.
	running := sm.launcher.IsRunning()
	go func() {
		if running {
			if err := sm.launcher.StopLocalAI(); err != nil {
				log.Printf("Failed to stop LocalAI: %v", err)
				sm.showErrorDialog("Failed to Stop LocalAI", err.Error())
			}
			return
		}
		if err := sm.launcher.StartLocalAI(); err != nil {
			log.Printf("Failed to start LocalAI: %v", err)
			sm.showStartupErrorDialog(err)
		}
	}()
}
// openWebUI opens the LocalAI WebUI in the default browser; it is a
// no-op while LocalAI is not running or if the URL does not parse.
func (sm *SystrayManager) openWebUI() {
	if !sm.launcher.IsRunning() {
		return // LocalAI is not running
	}
	parsed, err := url.Parse(sm.launcher.GetWebUIURL())
	if err != nil {
		return
	}
	sm.app.OpenURL(parsed)
}
// openDocumentation opens the LocalAI documentation site in the browser.
func (sm *SystrayManager) openDocumentation() {
	docs, err := url.Parse("https://localai.io")
	if err != nil {
		return
	}
	sm.app.OpenURL(docs)
}
// updateStartStopItem updates the start/stop menu item based on current state.
// Since Fyne menu items can't change text dynamically, the whole menu is
// rebuilt instead.
func (sm *SystrayManager) updateStartStopItem() {
	sm.recreateMenu()
}
// recreateMenu recreates the entire tray menu to reflect current state:
// install/start/stop action, status and version rows, optional update
// banner, and the static navigation/utility entries.
func (sm *SystrayManager) recreateMenu() {
	if sm.desk == nil {
		return
	}
	// Determine the primary action from installation and running state.
	var actionItem *fyne.MenuItem
	if !sm.launcher.GetReleaseManager().IsLocalAIInstalled() {
		// LocalAI not installed - show install option
		actionItem = fyne.NewMenuItem("📥 Install Latest Version", func() {
			sm.launcher.showDownloadLocalAIDialog()
		})
	} else if sm.launcher.IsRunning() {
		// LocalAI is running - show stop option
		actionItem = fyne.NewMenuItem("🛑 Stop LocalAI", func() {
			sm.toggleLocalAI()
		})
	} else {
		// LocalAI is installed but not running - show start option
		actionItem = fyne.NewMenuItem("▶️ Start LocalAI", func() {
			sm.toggleLocalAI()
		})
	}
	menuItems := []*fyne.MenuItem{}
	// Add status at the top (clickable for details); truncated so the
	// menu row stays narrow.
	status := sm.launcher.GetLastStatus()
	statusText := sm.truncateText(status, 30)
	statusItem := fyne.NewMenuItem("📊 Status: "+statusText, func() {
		sm.showStatusDetails(status, "")
	})
	menuItems = append(menuItems, statusItem)
	// Only show version if LocalAI is installed
	if sm.launcher.GetReleaseManager().IsLocalAIInstalled() {
		version := sm.launcher.GetCurrentVersion()
		versionText := sm.truncateText(version, 25)
		versionItem := fyne.NewMenuItem("🔧 Version: "+versionText, func() {
			sm.showStatusDetails(status, version)
		})
		menuItems = append(menuItems, versionItem)
	}
	menuItems = append(menuItems, fyne.NewMenuItemSeparator())
	// Add update notification if available
	if sm.hasUpdateAvailable {
		updateItem := fyne.NewMenuItem("🔔 New version available ("+sm.latestVersion+")", func() {
			sm.downloadUpdate()
		})
		menuItems = append(menuItems, updateItem)
		menuItems = append(menuItems, fyne.NewMenuItemSeparator())
	}
	// Core actions
	menuItems = append(menuItems,
		actionItem,
	)
	// Only show WebUI option if LocalAI is installed and running.
	if sm.launcher.GetReleaseManager().IsLocalAIInstalled() && sm.launcher.IsRunning() {
		menuItems = append(menuItems,
			fyne.NewMenuItem("Open WebUI", func() {
				sm.openWebUI()
			}),
		)
	}
	// Static entries: updates, settings, welcome, data folder, docs, quit.
	menuItems = append(menuItems,
		fyne.NewMenuItemSeparator(),
		fyne.NewMenuItem("Check for Updates", func() {
			sm.checkForUpdates()
		}),
		fyne.NewMenuItemSeparator(),
		fyne.NewMenuItem("Settings", func() {
			sm.showSettings()
		}),
		fyne.NewMenuItem("Show Welcome Window", func() {
			sm.showWelcomeWindow()
		}),
		fyne.NewMenuItem("Open Data Folder", func() {
			sm.openDataFolder()
		}),
		fyne.NewMenuItemSeparator(),
		fyne.NewMenuItem("Documentation", func() {
			sm.openDocumentation()
		}),
		fyne.NewMenuItemSeparator(),
		fyne.NewMenuItem("Quit", func() {
			// Perform cleanup before quitting
			if err := sm.launcher.Shutdown(); err != nil {
				log.Printf("Error during shutdown: %v", err)
			}
			sm.app.Quit()
		}),
	)
	menu := fyne.NewMenu("LocalAI", menuItems...)
	sm.desk.SetSystemTrayMenu(menu)
}
// UpdateRunningState updates the systray based on running state.
// The isRunning argument is unused because the menu is rebuilt from the
// launcher's live state.
func (sm *SystrayManager) UpdateRunningState(isRunning bool) {
	sm.updateStartStopItem()
}
// UpdateStatus updates the systray menu to reflect status changes.
// The status argument is unused: the rebuilt menu re-reads launcher state.
func (sm *SystrayManager) UpdateStatus(status string) {
	sm.recreateMenu()
}
// checkForUpdates queries for available updates on a background goroutine
// and, when one exists, records it and refreshes the tray menu.
func (sm *SystrayManager) checkForUpdates() {
	go func() {
		log.Printf("Checking for updates...")
		available, version, err := sm.launcher.CheckForUpdates()
		if err != nil {
			log.Printf("Failed to check for updates: %v", err)
			return
		}
		log.Printf("Update check result: available=%v, version=%s", available, version)
		if !available {
			return
		}
		sm.hasUpdateAvailable = true
		sm.latestVersion = version
		sm.recreateMenu()
	}()
}
// downloadUpdate opens the download-progress flow for the recorded latest
// version; no-op when no update has been flagged.
func (sm *SystrayManager) downloadUpdate() {
	if sm.hasUpdateAvailable {
		sm.showDownloadProgress(sm.latestVersion)
	}
}
// showSettings shows the settings window (the main launcher window) and
// brings it to the foreground.
func (sm *SystrayManager) showSettings() {
	sm.window.Show()
	sm.window.RequestFocus()
}
// showWelcomeWindow shows the welcome window via the launcher UI, if one
// is attached.
func (sm *SystrayManager) showWelcomeWindow() {
	ui := sm.launcher.GetUI()
	if ui == nil {
		return
	}
	ui.ShowWelcomeWindow()
}
// openDataFolder opens the data folder in the system file manager by
// handing a file:// URL to the OS.
// NOTE(review): url.Parse on a raw path may mis-handle paths containing
// spaces or special characters (and Windows drive letters) — verify on
// all target platforms.
func (sm *SystrayManager) openDataFolder() {
	dataPath := sm.launcher.GetDataPath()
	if parsedURL, err := url.Parse("file://" + dataPath); err == nil {
		sm.app.OpenURL(parsedURL)
	}
}
// NotifyUpdateAvailable records that a newer version exists and refreshes
// the tray menu so the update entry appears.
func (sm *SystrayManager) NotifyUpdateAvailable(version string) {
	sm.latestVersion = version
	sm.hasUpdateAvailable = true
	sm.recreateMenu()
}
// truncateText truncates text to at most maxLength characters, appending
// "..." when something was cut.
// Fixes: the old version sliced bytes, which could split a multi-byte
// UTF-8 rune (the statuses passed in contain emoji) and produced a slice
// bounds panic for maxLength < 3.
func (sm *SystrayManager) truncateText(text string, maxLength int) string {
	runes := []rune(text)
	if len(runes) <= maxLength {
		return text
	}
	if maxLength <= 3 {
		// Too short for an ellipsis; hard-cut instead of panicking.
		return string(runes[:maxLength])
	}
	return string(runes[:maxLength-3]) + "..."
}
// showStatusDetails shows a detailed status window: current status,
// optional installed version, running state, WebUI URL, and the most
// recent process logs, with refresh/WebUI/close actions.
// version may be empty, in which case the version section is omitted.
func (sm *SystrayManager) showStatusDetails(status, version string) {
	fyne.DoAndWait(func() {
		// Create status details window
		statusWindow := sm.app.NewWindow("LocalAI Status Details")
		statusWindow.Resize(fyne.NewSize(500, 400))
		statusWindow.CenterOnScreen()
		// Status information
		statusLabel := widget.NewLabel("Current Status:")
		statusValue := widget.NewLabel(status)
		statusValue.Wrapping = fyne.TextWrapWord
		// Version information (only show if version exists)
		var versionContainer fyne.CanvasObject
		if version != "" {
			versionLabel := widget.NewLabel("Installed Version:")
			versionValue := widget.NewLabel(version)
			versionValue.Wrapping = fyne.TextWrapWord
			versionContainer = container.NewVBox(versionLabel, versionValue)
		}
		// Running state
		runningLabel := widget.NewLabel("Running State:")
		runningValue := widget.NewLabel("")
		if sm.launcher.IsRunning() {
			runningValue.SetText("🟢 Running")
		} else {
			runningValue.SetText("🔴 Stopped")
		}
		// WebUI URL
		webuiLabel := widget.NewLabel("WebUI URL:")
		webuiValue := widget.NewLabel(sm.launcher.GetWebUIURL())
		webuiValue.Wrapping = fyne.TextWrapWord
		// Recent logs (last 20 lines)
		// NOTE(review): GetRecentLogs actually returns up to 50 lines.
		logsLabel := widget.NewLabel("Recent Logs:")
		logsText := widget.NewMultiLineEntry()
		logsText.SetText(sm.launcher.GetRecentLogs())
		logsText.Wrapping = fyne.TextWrapWord
		logsText.Disable() // Make it read-only
		// Buttons
		closeButton := widget.NewButton("Close", func() {
			statusWindow.Close()
		})
		refreshButton := widget.NewButton("Refresh", func() {
			// Refresh the status information
			statusValue.SetText(sm.launcher.GetLastStatus())
			// Note: Version refresh is not implemented for simplicity
			// The version will be updated when the status details window is reopened
			if sm.launcher.IsRunning() {
				runningValue.SetText("🟢 Running")
			} else {
				runningValue.SetText("🔴 Stopped")
			}
			logsText.SetText(sm.launcher.GetRecentLogs())
		})
		openWebUIButton := widget.NewButton("Open WebUI", func() {
			sm.openWebUI()
		})
		// Layout
		buttons := container.NewHBox(closeButton, refreshButton, openWebUIButton)
		// Build info container dynamically
		infoItems := []fyne.CanvasObject{
			statusLabel, statusValue,
			widget.NewSeparator(),
		}
		// Add version section if it exists
		if versionContainer != nil {
			infoItems = append(infoItems, versionContainer, widget.NewSeparator())
		}
		infoItems = append(infoItems,
			runningLabel, runningValue,
			widget.NewSeparator(),
			webuiLabel, webuiValue,
		)
		infoContainer := container.NewVBox(infoItems...)
		content := container.NewVBox(
			infoContainer,
			widget.NewSeparator(),
			logsLabel,
			logsText,
			widget.NewSeparator(),
			buttons,
		)
		statusWindow.SetContent(content)
		statusWindow.Show()
	})
}
// showErrorDialog shows a simple error dialog
func (sm *SystrayManager) showErrorDialog(title, message string) {
fyne.DoAndWait(func() {
dialog.ShowError(fmt.Errorf("%s", message), sm.window)
})
}
// showStartupErrorDialog shows a detailed error dialog with process logs
func (sm *SystrayManager) showStartupErrorDialog(err error) {
fyne.DoAndWait(func() {
// Get the recent process logs (more useful for debugging)
logs := sm.launcher.GetRecentLogs()
// Create error window
errorWindow := sm.app.NewWindow("LocalAI Startup Failed")
errorWindow.Resize(fyne.NewSize(600, 500))
errorWindow.CenterOnScreen()
// Error message
errorLabel := widget.NewLabel(fmt.Sprintf("Failed to start LocalAI:\n%s", err.Error()))
errorLabel.Wrapping = fyne.TextWrapWord
// Logs display
logsLabel := widget.NewLabel("Process Logs:")
logsText := widget.NewMultiLineEntry()
logsText.SetText(logs)
logsText.Wrapping = fyne.TextWrapWord
logsText.Disable() // Make it read-only
// Buttons
closeButton := widget.NewButton("Close", func() {
errorWindow.Close()
})
retryButton := widget.NewButton("Retry", func() {
errorWindow.Close()
// Try to start again
go func() {
if retryErr := sm.launcher.StartLocalAI(); retryErr != nil {
sm.showStartupErrorDialog(retryErr)
}
}()
})
openLogsButton := widget.NewButton("Open Logs Folder", func() {
sm.openDataFolder()
})
// Layout
buttons := container.NewHBox(closeButton, retryButton, openLogsButton)
content := container.NewVBox(
errorLabel,
widget.NewSeparator(),
logsLabel,
logsText,
widget.NewSeparator(),
buttons,
)
errorWindow.SetContent(content)
errorWindow.Show()
})
}
// showDownloadProgress shows a progress window for downloading updates
func (sm *SystrayManager) showDownloadProgress(version string) {
// Create a new window for download progress
progressWindow := sm.app.NewWindow("Downloading LocalAI Update")
progressWindow.Resize(fyne.NewSize(400, 250))
progressWindow.CenterOnScreen()
// Progress bar
progressBar := widget.NewProgressBar()
progressBar.SetValue(0)
// Status label
statusLabel := widget.NewLabel("Preparing download...")
// Release notes button
releaseNotesButton := widget.NewButton("View Release Notes", func() {
releaseNotesURL, err := sm.launcher.githubReleaseNotesURL(version)
if err != nil {
log.Printf("Failed to parse URL: %v", err)
return
}
sm.app.OpenURL(releaseNotesURL)
})
// Progress container
progressContainer := container.NewVBox(
widget.NewLabel(fmt.Sprintf("Downloading LocalAI version %s", version)),
progressBar,
statusLabel,
widget.NewSeparator(),
releaseNotesButton,
)
progressWindow.SetContent(progressContainer)
progressWindow.Show()
// Start download in background
go func() {
err := sm.launcher.DownloadUpdate(version, func(progress float64) {
// Update progress bar
fyne.Do(func() {
progressBar.SetValue(progress)
percentage := int(progress * 100)
statusLabel.SetText(fmt.Sprintf("Downloading... %d%%", percentage))
})
})
// Handle completion
fyne.Do(func() {
if err != nil {
statusLabel.SetText(fmt.Sprintf("Download failed: %v", err))
// Show error dialog
dialog.ShowError(err, progressWindow)
} else {
statusLabel.SetText("Download completed successfully!")
progressBar.SetValue(1.0)
// Show restart dialog
dialog.ShowConfirm("Update Downloaded",
"LocalAI has been updated successfully. Please restart the launcher to use the new version.",
func(restart bool) {
if restart {
sm.app.Quit()
}
progressWindow.Close()
}, progressWindow)
}
})
// Update systray menu
if err == nil {
sm.hasUpdateAvailable = false
sm.latestVersion = ""
sm.recreateMenu()
}
}()
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/cmd/launcher/internal/release_manager_test.go | cmd/launcher/internal/release_manager_test.go | package launcher_test
import (
"os"
"path/filepath"
"runtime"
"time"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
launcher "github.com/mudler/LocalAI/cmd/launcher/internal"
)
var _ = Describe("ReleaseManager", func() {
var (
rm *launcher.ReleaseManager
tempDir string
)
BeforeEach(func() {
var err error
tempDir, err = os.MkdirTemp("", "launcher-test-*")
Expect(err).ToNot(HaveOccurred())
rm = launcher.NewReleaseManager()
// Override binary path for testing
rm.BinaryPath = tempDir
})
AfterEach(func() {
os.RemoveAll(tempDir)
})
Describe("NewReleaseManager", func() {
It("should create a release manager with correct defaults", func() {
newRM := launcher.NewReleaseManager()
Expect(newRM.GitHubOwner).To(Equal("mudler"))
Expect(newRM.GitHubRepo).To(Equal("LocalAI"))
Expect(newRM.BinaryPath).To(ContainSubstring(".localai"))
Expect(newRM.HTTPClient).ToNot(BeNil())
Expect(newRM.HTTPClient.Timeout).To(Equal(30 * time.Second))
})
})
Describe("GetBinaryName", func() {
It("should return correct binary name for current platform", func() {
binaryName := rm.GetBinaryName("v3.4.0")
expectedOS := runtime.GOOS
expectedArch := runtime.GOARCH
expected := "local-ai-v3.4.0-" + expectedOS + "-" + expectedArch
Expect(binaryName).To(Equal(expected))
})
It("should handle version with and without 'v' prefix", func() {
withV := rm.GetBinaryName("v3.4.0")
withoutV := rm.GetBinaryName("3.4.0")
// Both should produce the same result
Expect(withV).To(Equal(withoutV))
})
})
Describe("GetBinaryPath", func() {
It("should return the correct binary path", func() {
path := rm.GetBinaryPath()
expected := filepath.Join(tempDir, "local-ai")
Expect(path).To(Equal(expected))
})
})
Describe("GetInstalledVersion", func() {
It("should return empty when no binary exists", func() {
version := rm.GetInstalledVersion()
Expect(version).To(BeEmpty()) // No binary installed in test
})
It("should return empty version when binary exists but no metadata", func() {
// Create a fake binary for testing
err := os.MkdirAll(rm.BinaryPath, 0755)
Expect(err).ToNot(HaveOccurred())
binaryPath := rm.GetBinaryPath()
err = os.WriteFile(binaryPath, []byte("fake binary"), 0755)
Expect(err).ToNot(HaveOccurred())
version := rm.GetInstalledVersion()
Expect(version).To(BeEmpty())
})
})
Context("with mocked responses", func() {
// Note: In a real implementation, we'd mock HTTP responses
// For now, we'll test the structure and error handling
Describe("GetLatestRelease", func() {
It("should handle network errors gracefully", func() {
// This test would require mocking HTTP client
// For demonstration, we're just testing the method exists
_, err := rm.GetLatestRelease()
// We expect either success or a network error, not a panic
// In a real test, we'd mock the HTTP response
if err != nil {
Expect(err.Error()).To(ContainSubstring("failed to fetch"))
}
})
})
Describe("DownloadRelease", func() {
It("should create binary directory if it doesn't exist", func() {
// Remove the temp directory to test creation
os.RemoveAll(tempDir)
// This will fail due to network, but should create the directory
rm.DownloadRelease("v3.4.0", nil)
// Check if directory was created
_, err := os.Stat(tempDir)
Expect(err).ToNot(HaveOccurred())
})
})
})
Describe("VerifyChecksum functionality", func() {
var (
testFile string
checksumFile string
)
BeforeEach(func() {
testFile = filepath.Join(tempDir, "test-binary")
checksumFile = filepath.Join(tempDir, "checksums.txt")
})
It("should verify checksums correctly", func() {
// Create a test file with known content
testContent := []byte("test content for checksum")
err := os.WriteFile(testFile, testContent, 0644)
Expect(err).ToNot(HaveOccurred())
// Calculate expected SHA256
// This is a simplified test - in practice we'd use the actual checksum
checksumContent := "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 test-binary\n"
err = os.WriteFile(checksumFile, []byte(checksumContent), 0644)
Expect(err).ToNot(HaveOccurred())
// Test checksum verification
// Note: This will fail because our content doesn't match the empty string hash
// In a real test, we'd calculate the actual hash
err = rm.VerifyChecksum(testFile, checksumFile, "test-binary")
// We expect this to fail since we're using a dummy checksum
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("checksum mismatch"))
})
It("should handle missing checksum file", func() {
// Create test file but no checksum file
err := os.WriteFile(testFile, []byte("test"), 0644)
Expect(err).ToNot(HaveOccurred())
err = rm.VerifyChecksum(testFile, checksumFile, "test-binary")
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("failed to open checksums file"))
})
It("should handle missing binary in checksums", func() {
// Create files but checksum doesn't contain our binary
err := os.WriteFile(testFile, []byte("test"), 0644)
Expect(err).ToNot(HaveOccurred())
checksumContent := "hash other-binary\n"
err = os.WriteFile(checksumFile, []byte(checksumContent), 0644)
Expect(err).ToNot(HaveOccurred())
err = rm.VerifyChecksum(testFile, checksumFile, "test-binary")
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("checksum not found"))
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/cmd/launcher/internal/ui.go | cmd/launcher/internal/ui.go | package launcher
import (
"fmt"
"log"
"net/url"
"fyne.io/fyne/v2"
"fyne.io/fyne/v2/container"
"fyne.io/fyne/v2/dialog"
"fyne.io/fyne/v2/widget"
)
// EnvVar represents an environment variable
type EnvVar struct {
Key string
Value string
}
// LauncherUI handles the user interface
type LauncherUI struct {
// Status display
statusLabel *widget.Label
versionLabel *widget.Label
// Control buttons
startStopButton *widget.Button
webUIButton *widget.Button
updateButton *widget.Button
downloadButton *widget.Button
// Configuration
modelsPathEntry *widget.Entry
backendsPathEntry *widget.Entry
addressEntry *widget.Entry
logLevelSelect *widget.Select
startOnBootCheck *widget.Check
// Environment Variables
envVarsData []EnvVar
newEnvKeyEntry *widget.Entry
newEnvValueEntry *widget.Entry
updateEnvironmentDisplay func()
// Logs
logText *widget.Entry
// Progress
progressBar *widget.ProgressBar
// Update management
latestVersion string
// Reference to launcher
launcher *Launcher
}
// NewLauncherUI creates a new UI instance
func NewLauncherUI() *LauncherUI {
return &LauncherUI{
statusLabel: widget.NewLabel("Initializing..."),
versionLabel: widget.NewLabel("Version: Unknown"),
startStopButton: widget.NewButton("Start LocalAI", nil),
webUIButton: widget.NewButton("Open WebUI", nil),
updateButton: widget.NewButton("Check for Updates", nil),
modelsPathEntry: widget.NewEntry(),
backendsPathEntry: widget.NewEntry(),
addressEntry: widget.NewEntry(),
logLevelSelect: widget.NewSelect([]string{"error", "warn", "info", "debug", "trace"}, nil),
startOnBootCheck: widget.NewCheck("Start LocalAI on system boot", nil),
logText: widget.NewMultiLineEntry(),
progressBar: widget.NewProgressBar(),
envVarsData: []EnvVar{}, // Initialize the environment variables slice
}
}
// CreateMainUI creates the main UI layout
func (ui *LauncherUI) CreateMainUI(launcher *Launcher) *fyne.Container {
ui.launcher = launcher
ui.setupBindings()
// Main tab with status and controls
// Configuration is now the main content
configTab := ui.createConfigTab()
// Create a simple container instead of tabs since we only have settings
tabs := container.NewVBox(
widget.NewCard("LocalAI Launcher Settings", "", configTab),
)
return tabs
}
// createConfigTab creates the configuration tab
func (ui *LauncherUI) createConfigTab() *fyne.Container {
// Path configuration
pathsCard := widget.NewCard("Paths", "", container.NewGridWithColumns(2,
widget.NewLabel("Models Path:"),
ui.modelsPathEntry,
widget.NewLabel("Backends Path:"),
ui.backendsPathEntry,
))
// Server configuration
serverCard := widget.NewCard("Server", "", container.NewVBox(
container.NewGridWithColumns(2,
widget.NewLabel("Address:"),
ui.addressEntry,
widget.NewLabel("Log Level:"),
ui.logLevelSelect,
),
ui.startOnBootCheck,
))
// Save button
saveButton := widget.NewButton("Save Configuration", func() {
ui.saveConfiguration()
})
// Environment Variables section
envCard := ui.createEnvironmentSection()
return container.NewVBox(
pathsCard,
serverCard,
envCard,
saveButton,
)
}
// createEnvironmentSection creates the environment variables section for the config tab
func (ui *LauncherUI) createEnvironmentSection() *fyne.Container {
// Initialize environment variables widgets
ui.newEnvKeyEntry = widget.NewEntry()
ui.newEnvKeyEntry.SetPlaceHolder("Environment Variable Name")
ui.newEnvValueEntry = widget.NewEntry()
ui.newEnvValueEntry.SetPlaceHolder("Environment Variable Value")
// Add button
addButton := widget.NewButton("Add Environment Variable", func() {
ui.addEnvironmentVariable()
})
// Environment variables list with delete buttons
ui.envVarsData = []EnvVar{}
// Create container for environment variables
envVarsContainer := container.NewVBox()
// Update function to rebuild the environment variables display
ui.updateEnvironmentDisplay = func() {
envVarsContainer.Objects = nil
for i, envVar := range ui.envVarsData {
index := i // Capture index for closure
// Create row with label and delete button
envLabel := widget.NewLabel(fmt.Sprintf("%s = %s", envVar.Key, envVar.Value))
deleteBtn := widget.NewButton("Delete", func() {
ui.confirmDeleteEnvironmentVariable(index)
})
deleteBtn.Importance = widget.DangerImportance
row := container.NewBorder(nil, nil, nil, deleteBtn, envLabel)
envVarsContainer.Add(row)
}
envVarsContainer.Refresh()
}
// Create a scrollable container for the environment variables
envScroll := container.NewScroll(envVarsContainer)
envScroll.SetMinSize(fyne.NewSize(400, 150))
// Input section for adding new environment variables
inputSection := container.NewVBox(
container.NewGridWithColumns(2,
ui.newEnvKeyEntry,
ui.newEnvValueEntry,
),
addButton,
)
// Environment variables card
envCard := widget.NewCard("Environment Variables", "", container.NewVBox(
inputSection,
widget.NewSeparator(),
envScroll,
))
return container.NewVBox(envCard)
}
// addEnvironmentVariable adds a new environment variable
func (ui *LauncherUI) addEnvironmentVariable() {
key := ui.newEnvKeyEntry.Text
value := ui.newEnvValueEntry.Text
log.Printf("addEnvironmentVariable: attempting to add %s=%s", key, value)
log.Printf("addEnvironmentVariable: current ui.envVarsData has %d items: %v", len(ui.envVarsData), ui.envVarsData)
if key == "" {
log.Printf("addEnvironmentVariable: key is empty, showing error")
dialog.ShowError(fmt.Errorf("environment variable name cannot be empty"), ui.launcher.window)
return
}
// Check if key already exists
for _, envVar := range ui.envVarsData {
if envVar.Key == key {
log.Printf("addEnvironmentVariable: key %s already exists, showing error", key)
dialog.ShowError(fmt.Errorf("environment variable '%s' already exists", key), ui.launcher.window)
return
}
}
log.Printf("addEnvironmentVariable: adding new env var %s=%s", key, value)
ui.envVarsData = append(ui.envVarsData, EnvVar{Key: key, Value: value})
log.Printf("addEnvironmentVariable: after adding, ui.envVarsData has %d items: %v", len(ui.envVarsData), ui.envVarsData)
fyne.Do(func() {
if ui.updateEnvironmentDisplay != nil {
ui.updateEnvironmentDisplay()
}
// Clear input fields
ui.newEnvKeyEntry.SetText("")
ui.newEnvValueEntry.SetText("")
})
log.Printf("addEnvironmentVariable: calling saveEnvironmentVariables")
// Save to configuration
ui.saveEnvironmentVariables()
}
// removeEnvironmentVariable removes an environment variable by index
func (ui *LauncherUI) removeEnvironmentVariable(index int) {
if index >= 0 && index < len(ui.envVarsData) {
ui.envVarsData = append(ui.envVarsData[:index], ui.envVarsData[index+1:]...)
fyne.Do(func() {
if ui.updateEnvironmentDisplay != nil {
ui.updateEnvironmentDisplay()
}
})
ui.saveEnvironmentVariables()
}
}
// saveEnvironmentVariables saves environment variables to the configuration
func (ui *LauncherUI) saveEnvironmentVariables() {
if ui.launcher == nil {
log.Printf("saveEnvironmentVariables: launcher is nil")
return
}
config := ui.launcher.GetConfig()
log.Printf("saveEnvironmentVariables: before - Environment vars: %v", config.EnvironmentVars)
config.EnvironmentVars = make(map[string]string)
for _, envVar := range ui.envVarsData {
config.EnvironmentVars[envVar.Key] = envVar.Value
log.Printf("saveEnvironmentVariables: adding %s=%s", envVar.Key, envVar.Value)
}
log.Printf("saveEnvironmentVariables: after - Environment vars: %v", config.EnvironmentVars)
log.Printf("saveEnvironmentVariables: calling SetConfig with %d environment variables", len(config.EnvironmentVars))
err := ui.launcher.SetConfig(config)
if err != nil {
log.Printf("saveEnvironmentVariables: failed to save config: %v", err)
} else {
log.Printf("saveEnvironmentVariables: config saved successfully")
}
}
// confirmDeleteEnvironmentVariable shows confirmation dialog for deleting an environment variable
func (ui *LauncherUI) confirmDeleteEnvironmentVariable(index int) {
if index >= 0 && index < len(ui.envVarsData) {
envVar := ui.envVarsData[index]
dialog.ShowConfirm("Remove Environment Variable",
fmt.Sprintf("Remove environment variable '%s'?", envVar.Key),
func(remove bool) {
if remove {
ui.removeEnvironmentVariable(index)
}
}, ui.launcher.window)
}
}
// setupBindings sets up event handlers for UI elements
func (ui *LauncherUI) setupBindings() {
// Start/Stop button
ui.startStopButton.OnTapped = func() {
if ui.launcher.IsRunning() {
ui.stopLocalAI()
} else {
ui.startLocalAI()
}
}
// WebUI button
ui.webUIButton.OnTapped = func() {
ui.openWebUI()
}
ui.webUIButton.Disable() // Disabled until LocalAI is running
// Update button
ui.updateButton.OnTapped = func() {
ui.checkForUpdates()
}
// Log level selection
ui.logLevelSelect.OnChanged = func(selected string) {
if ui.launcher != nil {
config := ui.launcher.GetConfig()
config.LogLevel = selected
ui.launcher.SetConfig(config)
}
}
}
// startLocalAI starts the LocalAI service
func (ui *LauncherUI) startLocalAI() {
fyne.Do(func() {
ui.startStopButton.Disable()
})
ui.UpdateStatus("Starting LocalAI...")
go func() {
err := ui.launcher.StartLocalAI()
if err != nil {
ui.UpdateStatus("Failed to start: " + err.Error())
fyne.DoAndWait(func() {
dialog.ShowError(err, ui.launcher.window)
})
} else {
fyne.Do(func() {
ui.startStopButton.SetText("Stop LocalAI")
ui.webUIButton.Enable()
})
}
fyne.Do(func() {
ui.startStopButton.Enable()
})
}()
}
// stopLocalAI stops the LocalAI service
func (ui *LauncherUI) stopLocalAI() {
fyne.Do(func() {
ui.startStopButton.Disable()
})
ui.UpdateStatus("Stopping LocalAI...")
go func() {
err := ui.launcher.StopLocalAI()
if err != nil {
fyne.DoAndWait(func() {
dialog.ShowError(err, ui.launcher.window)
})
} else {
fyne.Do(func() {
ui.startStopButton.SetText("Start LocalAI")
ui.webUIButton.Disable()
})
}
fyne.Do(func() {
ui.startStopButton.Enable()
})
}()
}
// openWebUI opens the LocalAI WebUI in the default browser
func (ui *LauncherUI) openWebUI() {
webURL := ui.launcher.GetWebUIURL()
parsedURL, err := url.Parse(webURL)
if err != nil {
dialog.ShowError(err, ui.launcher.window)
return
}
// Open URL in default browser
fyne.CurrentApp().OpenURL(parsedURL)
}
// saveConfiguration saves the current configuration
func (ui *LauncherUI) saveConfiguration() {
log.Printf("saveConfiguration: starting to save configuration")
config := ui.launcher.GetConfig()
log.Printf("saveConfiguration: current config Environment vars: %v", config.EnvironmentVars)
log.Printf("saveConfiguration: ui.envVarsData has %d items: %v", len(ui.envVarsData), ui.envVarsData)
config.ModelsPath = ui.modelsPathEntry.Text
config.BackendsPath = ui.backendsPathEntry.Text
config.Address = ui.addressEntry.Text
config.LogLevel = ui.logLevelSelect.Selected
config.StartOnBoot = ui.startOnBootCheck.Checked
// Ensure environment variables are included in the configuration
config.EnvironmentVars = make(map[string]string)
for _, envVar := range ui.envVarsData {
config.EnvironmentVars[envVar.Key] = envVar.Value
log.Printf("saveConfiguration: adding env var %s=%s", envVar.Key, envVar.Value)
}
log.Printf("saveConfiguration: final config Environment vars: %v", config.EnvironmentVars)
err := ui.launcher.SetConfig(config)
if err != nil {
log.Printf("saveConfiguration: failed to save config: %v", err)
dialog.ShowError(err, ui.launcher.window)
} else {
log.Printf("saveConfiguration: config saved successfully")
dialog.ShowInformation("Configuration", "Configuration saved successfully", ui.launcher.window)
}
}
// checkForUpdates checks for available updates
func (ui *LauncherUI) checkForUpdates() {
fyne.Do(func() {
ui.updateButton.Disable()
})
ui.UpdateStatus("Checking for updates...")
go func() {
available, version, err := ui.launcher.CheckForUpdates()
if err != nil {
ui.UpdateStatus("Failed to check updates: " + err.Error())
fyne.DoAndWait(func() {
dialog.ShowError(err, ui.launcher.window)
})
} else if available {
ui.latestVersion = version // Store the latest version
ui.UpdateStatus("Update available: " + version)
fyne.Do(func() {
if ui.downloadButton != nil {
ui.downloadButton.Enable()
}
})
ui.NotifyUpdateAvailable(version)
} else {
ui.UpdateStatus("No updates available")
fyne.DoAndWait(func() {
dialog.ShowInformation("Updates", "You are running the latest version", ui.launcher.window)
})
}
fyne.Do(func() {
ui.updateButton.Enable()
})
}()
}
// downloadUpdate downloads the latest update
func (ui *LauncherUI) downloadUpdate() {
// Use stored version or check for updates
version := ui.latestVersion
if version == "" {
_, v, err := ui.launcher.CheckForUpdates()
if err != nil {
dialog.ShowError(err, ui.launcher.window)
return
}
version = v
ui.latestVersion = version
}
if version == "" {
dialog.ShowError(fmt.Errorf("no version information available"), ui.launcher.window)
return
}
// Disable buttons during download
if ui.downloadButton != nil {
fyne.Do(func() {
ui.downloadButton.Disable()
})
}
fyne.Do(func() {
ui.progressBar.Show()
ui.progressBar.SetValue(0)
})
ui.UpdateStatus("Downloading update " + version + "...")
go func() {
err := ui.launcher.DownloadUpdate(version, func(progress float64) {
// Update progress bar
fyne.Do(func() {
ui.progressBar.SetValue(progress)
})
// Update status with percentage
percentage := int(progress * 100)
ui.UpdateStatus(fmt.Sprintf("Downloading update %s... %d%%", version, percentage))
})
fyne.Do(func() {
ui.progressBar.Hide()
})
// Re-enable buttons after download
if ui.downloadButton != nil {
fyne.Do(func() {
ui.downloadButton.Enable()
})
}
if err != nil {
fyne.DoAndWait(func() {
ui.UpdateStatus("Failed to download update: " + err.Error())
dialog.ShowError(err, ui.launcher.window)
})
} else {
fyne.DoAndWait(func() {
ui.UpdateStatus("Update downloaded successfully")
dialog.ShowInformation("Update", "Update downloaded successfully. Please restart the launcher to use the new version.", ui.launcher.window)
})
}
}()
}
// UpdateStatus updates the status label
func (ui *LauncherUI) UpdateStatus(status string) {
if ui.statusLabel != nil {
fyne.Do(func() {
ui.statusLabel.SetText(status)
})
}
}
// OnLogUpdate handles new log content
func (ui *LauncherUI) OnLogUpdate(logLine string) {
if ui.logText != nil {
fyne.Do(func() {
currentText := ui.logText.Text
ui.logText.SetText(currentText + logLine)
// Auto-scroll to bottom (simplified)
ui.logText.CursorRow = len(ui.logText.Text)
})
}
}
// NotifyUpdateAvailable shows an update notification
func (ui *LauncherUI) NotifyUpdateAvailable(version string) {
if ui.launcher != nil && ui.launcher.window != nil {
fyne.DoAndWait(func() {
dialog.ShowConfirm("Update Available",
"A new version ("+version+") is available. Would you like to download it?",
func(confirmed bool) {
if confirmed {
ui.downloadUpdate()
}
}, ui.launcher.window)
})
}
}
// LoadConfiguration loads the current configuration into UI elements
func (ui *LauncherUI) LoadConfiguration() {
if ui.launcher == nil {
log.Printf("UI LoadConfiguration: launcher is nil")
return
}
config := ui.launcher.GetConfig()
log.Printf("UI LoadConfiguration: loading config - ModelsPath=%s, BackendsPath=%s, Address=%s, LogLevel=%s",
config.ModelsPath, config.BackendsPath, config.Address, config.LogLevel)
log.Printf("UI LoadConfiguration: Environment vars: %v", config.EnvironmentVars)
ui.modelsPathEntry.SetText(config.ModelsPath)
ui.backendsPathEntry.SetText(config.BackendsPath)
ui.addressEntry.SetText(config.Address)
ui.logLevelSelect.SetSelected(config.LogLevel)
ui.startOnBootCheck.SetChecked(config.StartOnBoot)
// Load environment variables
ui.envVarsData = []EnvVar{}
for key, value := range config.EnvironmentVars {
ui.envVarsData = append(ui.envVarsData, EnvVar{Key: key, Value: value})
}
if ui.updateEnvironmentDisplay != nil {
fyne.Do(func() {
ui.updateEnvironmentDisplay()
})
}
// Update version display
version := ui.launcher.GetCurrentVersion()
ui.versionLabel.SetText("Version: " + version)
log.Printf("UI LoadConfiguration: configuration loaded successfully")
}
// showDownloadProgress shows a progress window for downloading LocalAI
func (ui *LauncherUI) showDownloadProgress(version, title string) {
fyne.DoAndWait(func() {
// Create progress window using the launcher's app
progressWindow := ui.launcher.app.NewWindow("Downloading LocalAI")
progressWindow.Resize(fyne.NewSize(400, 250))
progressWindow.CenterOnScreen()
// Progress bar
progressBar := widget.NewProgressBar()
progressBar.SetValue(0)
// Status label
statusLabel := widget.NewLabel("Preparing download...")
// Release notes button
releaseNotesButton := widget.NewButton("View Release Notes", func() {
releaseNotesURL, err := ui.launcher.githubReleaseNotesURL(version)
if err != nil {
log.Printf("Failed to parse URL: %v", err)
return
}
ui.launcher.app.OpenURL(releaseNotesURL)
})
// Progress container
progressContainer := container.NewVBox(
widget.NewLabel(title),
progressBar,
statusLabel,
widget.NewSeparator(),
releaseNotesButton,
)
progressWindow.SetContent(progressContainer)
progressWindow.Show()
// Start download in background
go func() {
err := ui.launcher.DownloadUpdate(version, func(progress float64) {
// Update progress bar
fyne.Do(func() {
progressBar.SetValue(progress)
percentage := int(progress * 100)
statusLabel.SetText(fmt.Sprintf("Downloading... %d%%", percentage))
})
})
// Handle completion
fyne.Do(func() {
if err != nil {
statusLabel.SetText(fmt.Sprintf("Download failed: %v", err))
// Show error dialog
dialog.ShowError(err, progressWindow)
} else {
statusLabel.SetText("Download completed successfully!")
progressBar.SetValue(1.0)
// Show success dialog
dialog.ShowConfirm("Installation Complete",
"LocalAI has been downloaded and installed successfully. You can now start LocalAI from the launcher.",
func(close bool) {
progressWindow.Close()
// Update status
ui.UpdateStatus("LocalAI installed successfully")
}, progressWindow)
}
})
}()
})
}
// UpdateRunningState updates UI based on LocalAI running state
func (ui *LauncherUI) UpdateRunningState(isRunning bool) {
fyne.Do(func() {
if isRunning {
ui.startStopButton.SetText("Stop LocalAI")
ui.webUIButton.Enable()
} else {
ui.startStopButton.SetText("Start LocalAI")
ui.webUIButton.Disable()
}
})
}
// ShowWelcomeWindow displays the welcome window with helpful information
func (ui *LauncherUI) ShowWelcomeWindow() {
if ui.launcher == nil || ui.launcher.window == nil {
log.Printf("Cannot show welcome window: launcher or window is nil")
return
}
fyne.DoAndWait(func() {
// Create welcome window
welcomeWindow := ui.launcher.app.NewWindow("Welcome to LocalAI Launcher")
welcomeWindow.Resize(fyne.NewSize(600, 500))
welcomeWindow.CenterOnScreen()
welcomeWindow.SetCloseIntercept(func() {
welcomeWindow.Close()
})
// Title
titleLabel := widget.NewLabel("Welcome to LocalAI Launcher!")
titleLabel.TextStyle = fyne.TextStyle{Bold: true}
titleLabel.Alignment = fyne.TextAlignCenter
// Welcome message
welcomeText := `LocalAI Launcher makes it easy to run LocalAI on your system.
What you can do:
• Start and stop LocalAI server
• Configure models and backends paths
• Set environment variables
• Check for updates automatically
• Access LocalAI WebUI when running
Getting Started:
1. Configure your models and backends paths
2. Click "Start LocalAI" to begin
3. Use "Open WebUI" to access the interface
4. Check the system tray for quick access`
welcomeLabel := widget.NewLabel(welcomeText)
welcomeLabel.Wrapping = fyne.TextWrapWord
// Useful links section
linksTitle := widget.NewLabel("Useful Links:")
linksTitle.TextStyle = fyne.TextStyle{Bold: true}
// Create link buttons
docsButton := widget.NewButton("📚 Documentation", func() {
ui.openURL("https://localai.io/docs/")
})
githubButton := widget.NewButton("🐙 GitHub Repository", func() {
ui.openURL("https://github.com/mudler/LocalAI")
})
modelsButton := widget.NewButton("🤖 Model Gallery", func() {
ui.openURL("https://localai.io/models/")
})
communityButton := widget.NewButton("💬 Community", func() {
ui.openURL("https://discord.gg/XgwjKptP7Z")
})
// Checkbox to disable welcome window
dontShowAgainCheck := widget.NewCheck("Don't show this welcome window again", func(checked bool) {
if ui.launcher != nil {
config := ui.launcher.GetConfig()
v := !checked
config.ShowWelcome = &v
ui.launcher.SetConfig(config)
}
})
config := ui.launcher.GetConfig()
if config.ShowWelcome != nil {
dontShowAgainCheck.SetChecked(*config.ShowWelcome)
}
// Close button
closeButton := widget.NewButton("Get Started", func() {
welcomeWindow.Close()
})
closeButton.Importance = widget.HighImportance
// Layout
linksContainer := container.NewVBox(
linksTitle,
docsButton,
githubButton,
modelsButton,
communityButton,
)
content := container.NewVBox(
titleLabel,
widget.NewSeparator(),
welcomeLabel,
widget.NewSeparator(),
linksContainer,
widget.NewSeparator(),
dontShowAgainCheck,
widget.NewSeparator(),
closeButton,
)
welcomeWindow.SetContent(content)
welcomeWindow.Show()
})
}
// openURL opens a URL in the default browser
func (ui *LauncherUI) openURL(urlString string) {
parsedURL, err := url.Parse(urlString)
if err != nil {
log.Printf("Failed to parse URL %s: %v", urlString, err)
return
}
fyne.CurrentApp().OpenURL(parsedURL)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/cmd/launcher/internal/release_manager.go | cmd/launcher/internal/release_manager.go | package launcher
import (
"bufio"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"
"time"
"github.com/mudler/LocalAI/internal"
)
// Release represents a LocalAI release
type Release struct {
Version string `json:"tag_name"`
Name string `json:"name"`
Body string `json:"body"`
PublishedAt time.Time `json:"published_at"`
Assets []Asset `json:"assets"`
}
// Asset represents a release asset
type Asset struct {
Name string `json:"name"`
BrowserDownloadURL string `json:"browser_download_url"`
Size int64 `json:"size"`
}
// ReleaseManager handles LocalAI release management
type ReleaseManager struct {
// GitHubOwner is the GitHub repository owner
GitHubOwner string
// GitHubRepo is the GitHub repository name
GitHubRepo string
// BinaryPath is where the LocalAI binary is stored locally
BinaryPath string
// CurrentVersion is the currently installed version
CurrentVersion string
// ChecksumsPath is where checksums are stored
ChecksumsPath string
// MetadataPath is where version metadata is stored
MetadataPath string
// HTTPClient is the HTTP client used for downloads
HTTPClient *http.Client
}
// NewReleaseManager creates a release manager rooted at ~/.localai with the
// upstream GitHub repository defaults and a 30s HTTP timeout.
func NewReleaseManager() *ReleaseManager {
	homeDir, err := os.UserHomeDir()
	if err != nil {
		// Previously this error was silently discarded, which made path
		// problems hard to diagnose; paths then degrade to a relative
		// ".localai" tree rooted at the working directory.
		log.Printf("Warning: could not determine home directory: %v", err)
	}
	binaryPath := filepath.Join(homeDir, ".localai", "bin")
	checksumsPath := filepath.Join(homeDir, ".localai", "checksums")
	metadataPath := filepath.Join(homeDir, ".localai", "metadata")
	return &ReleaseManager{
		GitHubOwner:    "mudler",
		GitHubRepo:     "LocalAI",
		BinaryPath:     binaryPath,
		CurrentVersion: internal.PrintableVersion(),
		ChecksumsPath:  checksumsPath,
		MetadataPath:   metadataPath,
		HTTPClient: &http.Client{
			Timeout: 30 * time.Second,
		},
	}
}
// GetLatestRelease fetches the latest release information from GitHub
func (rm *ReleaseManager) GetLatestRelease() (*Release, error) {
	endpoint := fmt.Sprintf("https://api.github.com/repos/%s/%s/releases/latest", rm.GitHubOwner, rm.GitHubRepo)
	resp, err := rm.HTTPClient.Get(endpoint)
	if err != nil {
		return nil, fmt.Errorf("failed to fetch latest release: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("failed to fetch latest release: status %d", resp.StatusCode)
	}
	// Read and decode the GitHub API JSON payload.
	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("failed to read response body: %w", err)
	}
	var release Release
	if err := json.Unmarshal(raw, &release); err != nil {
		return nil, fmt.Errorf("failed to parse JSON response: %w", err)
	}
	// A release without a tag name is unusable for version comparison.
	if release.Version == "" {
		return nil, fmt.Errorf("no version found in release data")
	}
	return &release, nil
}
// DownloadRelease downloads a specific version of LocalAI into rm.BinaryPath
// as "local-ai", verifying it against the release checksums when available.
// progressCallback, if non-nil, receives download progress values in [0,1].
// Checksum verification is best-effort: when no checksum file exists and
// downloading one fails, the install proceeds with a logged warning.
func (rm *ReleaseManager) DownloadRelease(version string, progressCallback func(float64)) error {
	// Ensure the binary directory exists
	if err := os.MkdirAll(rm.BinaryPath, 0755); err != nil {
		return fmt.Errorf("failed to create binary directory: %w", err)
	}
	// Determine the binary name based on OS and architecture
	binaryName := rm.GetBinaryName(version)
	localPath := filepath.Join(rm.BinaryPath, "local-ai")
	// Download the binary
	downloadURL := fmt.Sprintf("https://github.com/%s/%s/releases/download/%s/%s",
		rm.GitHubOwner, rm.GitHubRepo, version, binaryName)
	if err := rm.downloadFile(downloadURL, localPath, progressCallback); err != nil {
		return fmt.Errorf("failed to download binary: %w", err)
	}
	// Download and verify checksums
	checksumURL := fmt.Sprintf("https://github.com/%s/%s/releases/download/%s/LocalAI-%s-checksums.txt",
		rm.GitHubOwner, rm.GitHubRepo, version, version)
	checksumPath := filepath.Join(rm.BinaryPath, "checksums.txt")
	manualChecksumPath := filepath.Join(rm.ChecksumsPath, fmt.Sprintf("checksums-%s.txt", version))
	// First, check if there's already a checksum file (either manually placed or previously downloaded)
	// and honor that, skipping download entirely in such case
	var downloadErr error
	if _, err := os.Stat(manualChecksumPath); err == nil {
		log.Printf("Using existing checksums from: %s", manualChecksumPath)
		checksumPath = manualChecksumPath
	} else if _, err := os.Stat(checksumPath); err == nil {
		log.Printf("Using existing checksums from: %s", checksumPath)
	} else {
		// No existing checksum file found, try to download
		downloadErr = rm.downloadFile(checksumURL, checksumPath, nil)
		if downloadErr != nil {
			log.Printf("Warning: failed to download checksums: %v", downloadErr)
			log.Printf("Warning: Checksum verification will be skipped. For security, you can manually place checksums at: %s", manualChecksumPath)
			log.Printf("Download checksums from: %s", checksumURL)
			// Continue without verification - log warning but don't fail
		}
	}
	// Verify the checksum if we have a checksum file
	if _, err := os.Stat(checksumPath); err == nil {
		if err := rm.VerifyChecksum(localPath, checksumPath, binaryName); err != nil {
			return fmt.Errorf("checksum verification failed: %w", err)
		}
		log.Printf("Checksum verification successful")
		// Save checksums persistently for future verification
		if downloadErr == nil {
			if err := rm.saveChecksums(version, checksumPath, binaryName); err != nil {
				log.Printf("Warning: failed to save checksums: %v", err)
			}
		}
	} else {
		log.Printf("Warning: Proceeding without checksum verification")
	}
	// Make the binary executable
	if err := os.Chmod(localPath, 0755); err != nil {
		return fmt.Errorf("failed to make binary executable: %w", err)
	}
	return nil
}
// GetBinaryName returns the release asset name for the current platform,
// e.g. "local-ai-v2.0.0-linux-amd64" for version "v2.0.0".
func (rm *ReleaseManager) GetBinaryName(version string) string {
	ver := strings.TrimPrefix(version, "v")
	// Only amd64 and arm64 builds are published; anything else falls back to
	// amd64, matching the release naming convention.
	arch := runtime.GOARCH
	if arch != "amd64" && arch != "arm64" {
		arch = "amd64"
	}
	return fmt.Sprintf("local-ai-v%s-%s-%s", ver, runtime.GOOS, arch)
}
// downloadFile downloads a file from a URL to a local path with optional
// progress callback, retrying up to 3 times on transient failures.
func (rm *ReleaseManager) downloadFile(url, filepath string, progressCallback func(float64)) error {
	return rm.downloadFileWithRetry(url, filepath, progressCallback, 3)
}
// downloadFileWithRetry downloads a file from a URL with retry logic.
// It retries up to maxRetries times on request errors, non-200 statuses and
// copy failures, sleeping attempt seconds between attempts; a partial file is
// removed before retrying. progressCallback, if non-nil, is fed values in
// [0,1] when the server reports a Content-Length.
func (rm *ReleaseManager) downloadFileWithRetry(url, filepath string, progressCallback func(float64), maxRetries int) error {
	var lastErr error
	for attempt := 1; attempt <= maxRetries; attempt++ {
		if attempt > 1 {
			log.Printf("Retrying download (attempt %d/%d): %s", attempt, maxRetries, url)
			time.Sleep(time.Duration(attempt) * time.Second)
		}
		resp, err := rm.HTTPClient.Get(url)
		if err != nil {
			lastErr = err
			continue
		}
		if resp.StatusCode != http.StatusOK {
			resp.Body.Close()
			lastErr = fmt.Errorf("bad status: %s", resp.Status)
			continue
		}
		// A local filesystem failure is not transient: bail out immediately.
		out, err := os.Create(filepath)
		if err != nil {
			resp.Body.Close()
			return err
		}
		// Create a progress reader if callback is provided
		var reader io.Reader = resp.Body
		if progressCallback != nil && resp.ContentLength > 0 {
			reader = &progressReader{
				Reader:   resp.Body,
				Total:    resp.ContentLength,
				Callback: progressCallback,
			}
		}
		_, err = io.Copy(out, reader)
		resp.Body.Close()
		// Previously the Close error was discarded; a failed Close can mean
		// buffered data never reached disk, so treat it like a copy failure.
		if cerr := out.Close(); err == nil {
			err = cerr
		}
		if err != nil {
			lastErr = err
			os.Remove(filepath)
			continue
		}
		return nil
	}
	return fmt.Errorf("failed after %d attempts: %w", maxRetries, lastErr)
}
// saveChecksums saves checksums persistently for future verification
func (rm *ReleaseManager) saveChecksums(version, checksumPath, binaryName string) error {
	// Make sure the persistent checksum directory is present.
	if err := os.MkdirAll(rm.ChecksumsPath, 0755); err != nil {
		return fmt.Errorf("failed to create checksums directory: %w", err)
	}
	// Load the freshly downloaded checksum data.
	data, err := os.ReadFile(checksumPath)
	if err != nil {
		return fmt.Errorf("failed to read checksums file: %w", err)
	}
	// Persist one copy keyed by version, and one "latest" copy used to
	// re-verify the currently installed binary.
	targets := []struct {
		path string
		what string
	}{
		{filepath.Join(rm.ChecksumsPath, fmt.Sprintf("checksums-%s.txt", version)), "persistent"},
		{filepath.Join(rm.ChecksumsPath, "checksums-latest.txt"), "latest"},
	}
	for _, tgt := range targets {
		if err := os.WriteFile(tgt.path, data, 0644); err != nil {
			return fmt.Errorf("failed to write %s checksums: %w", tgt.what, err)
		}
	}
	// Record which version these checksums belong to (best effort).
	if err := rm.saveVersionMetadata(version); err != nil {
		log.Printf("Warning: failed to save version metadata: %v", err)
	}
	log.Printf("Checksums saved for version %s", version)
	return nil
}
// saveVersionMetadata saves the installed version information
func (rm *ReleaseManager) saveVersionMetadata(version string) error {
	if err := os.MkdirAll(rm.MetadataPath, 0755); err != nil {
		return fmt.Errorf("failed to create metadata directory: %w", err)
	}
	// The anonymous struct mirrors the on-disk JSON schema that
	// loadVersionMetadata reads back.
	record := struct {
		Version     string    `json:"version"`
		InstalledAt time.Time `json:"installed_at"`
		BinaryPath  string    `json:"binary_path"`
	}{
		Version:     version,
		InstalledAt: time.Now(),
		BinaryPath:  rm.GetBinaryPath(),
	}
	payload, err := json.MarshalIndent(record, "", " ")
	if err != nil {
		return fmt.Errorf("failed to marshal metadata: %w", err)
	}
	target := filepath.Join(rm.MetadataPath, "installed-version.json")
	if err := os.WriteFile(target, payload, 0644); err != nil {
		return fmt.Errorf("failed to write metadata file: %w", err)
	}
	log.Printf("Version metadata saved: %s", version)
	return nil
}
// progressReader wraps an io.Reader to provide download progress
type progressReader struct {
io.Reader
Total int64
Current int64
Callback func(float64)
}
func (pr *progressReader) Read(p []byte) (int, error) {
n, err := pr.Reader.Read(p)
pr.Current += int64(n)
if pr.Callback != nil {
progress := float64(pr.Current) / float64(pr.Total)
pr.Callback(progress)
}
return n, err
}
// VerifyChecksum verifies that filePath's SHA256 digest matches the entry for
// binaryName in the checksums file at checksumPath. The checksums file uses
// the conventional "<hex-sha256> <filename>" line layout; a leading '*' on
// the filename (binary-mode marker) is tolerated.
func (rm *ReleaseManager) VerifyChecksum(filePath, checksumPath, binaryName string) error {
	// Calculate the SHA256 of the downloaded file.
	file, err := os.Open(filePath)
	if err != nil {
		return fmt.Errorf("failed to open file for checksum: %w", err)
	}
	defer file.Close()
	hasher := sha256.New()
	if _, err := io.Copy(hasher, file); err != nil {
		return fmt.Errorf("failed to calculate checksum: %w", err)
	}
	calculatedHash := hex.EncodeToString(hasher.Sum(nil))
	// Scan the checksums file for the matching entry.
	checksumFile, err := os.Open(checksumPath)
	if err != nil {
		return fmt.Errorf("failed to open checksums file: %w", err)
	}
	defer checksumFile.Close()
	scanner := bufio.NewScanner(checksumFile)
	for scanner.Scan() {
		parts := strings.Fields(strings.TrimSpace(scanner.Text()))
		if len(parts) < 2 {
			continue
		}
		// Match the filename field exactly rather than substring-matching the
		// whole line: with strings.Contains, a longer asset name (e.g.
		// "<binaryName>-gpu") listed first would be picked up and cause a
		// spurious checksum mismatch.
		name := strings.TrimPrefix(parts[1], "*")
		if name != binaryName {
			continue
		}
		expectedHash := parts[0]
		if calculatedHash == expectedHash {
			return nil // Checksum verified
		}
		return fmt.Errorf("checksum mismatch: expected %s, got %s", expectedHash, calculatedHash)
	}
	// A scanner error (e.g. over-long line, I/O failure) was previously
	// silently conflated with "entry not found".
	if err := scanner.Err(); err != nil {
		return fmt.Errorf("failed to read checksums file: %w", err)
	}
	return fmt.Errorf("checksum not found for %s", binaryName)
}
// GetInstalledVersion returns the currently installed version
func (rm *ReleaseManager) GetInstalledVersion() string {
	binaryPath := rm.GetBinaryPath()
	// No binary on disk means nothing is installed.
	if _, err := os.Stat(binaryPath); os.IsNotExist(err) {
		return ""
	}
	// Prefer the recorded metadata when it is available and consistent.
	if v := rm.loadVersionMetadata(); v != "" {
		return v
	}
	// Fall back to asking the binary itself for its version.
	out, err := exec.Command(binaryPath, "--version").Output()
	if err != nil {
		// Binary exists but won't report a version; nothing more can be done.
		log.Printf("Binary exists but --version failed: %v", err)
		return ""
	}
	return strings.TrimRight(strings.TrimSpace(string(out)), "\n")
}
// loadVersionMetadata loads the installed version from metadata file
func (rm *ReleaseManager) loadVersionMetadata() string {
	metadataPath := filepath.Join(rm.MetadataPath, "installed-version.json")
	if _, err := os.Stat(metadataPath); os.IsNotExist(err) {
		return ""
	}
	raw, err := os.ReadFile(metadataPath)
	if err != nil {
		log.Printf("Failed to read metadata file: %v", err)
		return ""
	}
	// Schema written by saveVersionMetadata.
	var record struct {
		Version     string    `json:"version"`
		InstalledAt time.Time `json:"installed_at"`
		BinaryPath  string    `json:"binary_path"`
	}
	if err := json.Unmarshal(raw, &record); err != nil {
		log.Printf("Failed to parse metadata file: %v", err)
		return ""
	}
	// Stale metadata pointing at a different binary location is ignored.
	if record.BinaryPath != rm.GetBinaryPath() {
		log.Printf("Binary path mismatch in metadata, ignoring")
		return ""
	}
	log.Printf("Loaded version from metadata: %s (installed at %s)", record.Version, record.InstalledAt.Format("2006-01-02 15:04:05"))
	return record.Version
}
// GetBinaryPath returns the full path of the managed LocalAI binary,
// i.e. "<BinaryPath>/local-ai".
func (rm *ReleaseManager) GetBinaryPath() string {
	const binaryFile = "local-ai"
	return filepath.Join(rm.BinaryPath, binaryFile)
}
// IsUpdateAvailable checks if an update is available
func (rm *ReleaseManager) IsUpdateAvailable() (bool, string, error) {
	log.Printf("IsUpdateAvailable: checking for updates...")
	latest, err := rm.GetLatestRelease()
	if err != nil {
		log.Printf("IsUpdateAvailable: failed to get latest release: %v", err)
		return false, "", err
	}
	log.Printf("IsUpdateAvailable: latest release version: %s", latest.Version)
	installed := rm.GetInstalledVersion()
	log.Printf("IsUpdateAvailable: current installed version: %s", installed)
	// An empty installed version means nothing is on disk yet, so the latest
	// release is always offered.
	if installed == "" {
		log.Printf("IsUpdateAvailable: no version installed, offering latest: %s", latest.Version)
		return true, latest.Version, nil
	}
	// Any difference (not just "newer") counts as an available update.
	available := latest.Version != installed
	log.Printf("IsUpdateAvailable: update available: %v (latest: %s, current: %s)", available, latest.Version, installed)
	return available, latest.Version, nil
}
// IsLocalAIInstalled checks if LocalAI binary exists and is valid
func (rm *ReleaseManager) IsLocalAIInstalled() bool {
	binaryPath := rm.GetBinaryPath()
	if _, err := os.Stat(binaryPath); os.IsNotExist(err) {
		return false
	}
	// A binary that fails integrity verification is treated as not installed
	// and removed so the next install starts from a clean state.
	if err := rm.VerifyInstalledBinary(); err != nil {
		log.Printf("Binary integrity check failed: %v", err)
		if removeErr := os.Remove(binaryPath); removeErr != nil {
			log.Printf("Failed to remove corrupted binary: %v", removeErr)
		}
		return false
	}
	return true
}
// VerifyInstalledBinary verifies the installed binary against saved checksums
func (rm *ReleaseManager) VerifyInstalledBinary() error {
	latestChecksums := filepath.Join(rm.ChecksumsPath, "checksums-latest.txt")
	if _, err := os.Stat(latestChecksums); os.IsNotExist(err) {
		return fmt.Errorf("no saved checksums found")
	}
	// The expected asset name depends on the installed version, which can
	// only be obtained from the saved metadata here.
	version := rm.loadVersionMetadata()
	if version == "" {
		return fmt.Errorf("cannot determine current version from metadata")
	}
	return rm.VerifyChecksum(rm.GetBinaryPath(), latestChecksums, rm.GetBinaryName(version))
}
// CleanupPartialDownloads removes any partial or corrupted downloads
func (rm *ReleaseManager) CleanupPartialDownloads() error {
	binaryPath := rm.GetBinaryPath()
	// If a binary is present but fails verification, drop it together with
	// its now-stale version metadata.
	if _, statErr := os.Stat(binaryPath); statErr == nil {
		if verifyErr := rm.VerifyInstalledBinary(); verifyErr != nil {
			log.Printf("Found corrupted binary, removing: %v", verifyErr)
			if removeErr := os.Remove(binaryPath); removeErr != nil {
				log.Printf("Failed to remove corrupted binary: %v", removeErr)
			}
			rm.clearVersionMetadata()
		}
	}
	// Remove any temporary checksum file left over from an interrupted run.
	tempChecksums := filepath.Join(rm.BinaryPath, "checksums.txt")
	if _, statErr := os.Stat(tempChecksums); statErr == nil {
		if removeErr := os.Remove(tempChecksums); removeErr != nil {
			log.Printf("Failed to remove temporary checksums: %v", removeErr)
		}
	}
	return nil
}
// clearVersionMetadata clears the version metadata (used when binary is corrupted or removed)
func (rm *ReleaseManager) clearVersionMetadata() {
	target := filepath.Join(rm.MetadataPath, "installed-version.json")
	err := os.Remove(target)
	switch {
	case err == nil, os.IsNotExist(err):
		// An already-missing file counts as cleared.
		log.Printf("Version metadata cleared")
	default:
		log.Printf("Failed to clear version metadata: %v", err)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/cmd/local-ai/main.go | cmd/local-ai/main.go | package main
import (
"os"
"path/filepath"
"github.com/alecthomas/kong"
"github.com/joho/godotenv"
"github.com/mudler/LocalAI/core/cli"
"github.com/mudler/LocalAI/internal"
"github.com/mudler/xlog"
_ "github.com/mudler/LocalAI/swagger"
)
// main is the LocalAI CLI entry point: it loads env files, parses CLI
// options with kong, configures logging, and dispatches to the selected
// subcommand.
func main() {
	var err error
	// Initialize xlog at a level of INFO, we will set the desired level after we parse the CLI options
	xlog.SetLogger(xlog.NewLogger(xlog.LogLevel("info"), "text"))
	// handle loading environment variables from .env files
	// NOTE(review): godotenv.Load presumably does not override variables that
	// are already set, so earlier files in this list would take precedence —
	// confirm against the godotenv documentation.
	envFiles := []string{".env", "localai.env"}
	homeDir, err := os.UserHomeDir()
	if err == nil {
		envFiles = append(envFiles, filepath.Join(homeDir, "localai.env"), filepath.Join(homeDir, ".config/localai.env"))
	}
	envFiles = append(envFiles, "/etc/localai.env")
	for _, envFile := range envFiles {
		if _, err := os.Stat(envFile); err == nil {
			xlog.Debug("env file found, loading environment variables from file", "envFile", envFile)
			err = godotenv.Load(envFile)
			if err != nil {
				xlog.Error("failed to load environment variables from file", "error", err, "envFile", envFile)
				continue
			}
		}
	}
	// Actually parse the CLI options
	ctx := kong.Parse(&cli.CLI,
		kong.Description(
			` LocalAI is a drop-in replacement OpenAI API for running LLM, GPT and genAI models locally on CPU, GPUs with consumer grade hardware.
For a list of all available models run local-ai models list
Copyright: Ettore Di Giacinto
Version: ${version}
`,
		),
		kong.UsageOnError(),
		kong.Vars{
			"basepath":  kong.ExpandPath("."),
			"galleries": `[{"name":"localai", "url":"github:mudler/LocalAI/gallery/index.yaml@master"}]`,
			"backends":  `[{"name":"localai", "url":"github:mudler/LocalAI/backend/index.yaml@master"}]`,
			"version":   internal.PrintableVersion(),
		},
	)
	// Configure the logging level before we run the application
	// This is here to preserve the existing --debug flag functionality
	logLevel := "info"
	if cli.CLI.Debug && cli.CLI.LogLevel == nil {
		logLevel = "debug"
		cli.CLI.LogLevel = &logLevel
	}
	if cli.CLI.LogLevel == nil {
		cli.CLI.LogLevel = &logLevel
	}
	// Set xlog logger with the desired level and text format
	xlog.SetLogger(xlog.NewLogger(xlog.LogLevel(*cli.CLI.LogLevel), *cli.CLI.LogFormat))
	// Run the thing!
	err = ctx.Run(&cli.CLI.Context)
	if err != nil {
		xlog.Fatal("Error running the application", "error", err)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/tests/integration/stores_test.go | tests/integration/stores_test.go | package integration_test
import (
"context"
"math"
"math/rand"
"os"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/mudler/xlog"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/grpc"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/store"
"github.com/mudler/LocalAI/pkg/system"
)
func normalize(vecs [][]float32) {
for i, k := range vecs {
norm := float64(0)
for _, x := range k {
norm += float64(x * x)
}
norm = math.Sqrt(norm)
for j, x := range k {
vecs[i][j] = x / float32(norm)
}
}
}
var _ = Describe("Integration tests for the stores backend(s) and internal APIs", Label("stores"), func() {
Context("Embedded Store get,set and delete", func() {
var sl *model.ModelLoader
var sc grpc.Backend
var tmpdir string
BeforeEach(func() {
var err error
zerolog.SetGlobalLevel(zerolog.DebugLevel)
tmpdir, err = os.MkdirTemp("", "")
Expect(err).ToNot(HaveOccurred())
debug := true
bc := config.ModelConfig{
Name: "store test",
Debug: &debug,
Backend: model.LocalStoreBackend,
}
storeOpts := []model.Option{
model.WithBackendString(bc.Backend),
model.WithModel("test"),
}
systemState, err := system.GetSystemState(
system.WithModelPath(tmpdir),
)
Expect(err).ToNot(HaveOccurred())
sl = model.NewModelLoader(systemState)
sc, err = sl.Load(storeOpts...)
Expect(err).ToNot(HaveOccurred())
Expect(sc).ToNot(BeNil())
})
AfterEach(func() {
err := sl.StopAllGRPC()
Expect(err).ToNot(HaveOccurred())
err = os.RemoveAll(tmpdir)
Expect(err).ToNot(HaveOccurred())
})
It("should be able to set a key", func() {
err := store.SetSingle(context.Background(), sc, []float32{0.1, 0.2, 0.3}, []byte("test"))
Expect(err).ToNot(HaveOccurred())
})
It("should be able to set keys", func() {
err := store.SetCols(context.Background(), sc, [][]float32{{0.1, 0.2, 0.3}, {0.4, 0.5, 0.6}}, [][]byte{[]byte("test1"), []byte("test2")})
Expect(err).ToNot(HaveOccurred())
err = store.SetCols(context.Background(), sc, [][]float32{{0.7, 0.8, 0.9}, {0.10, 0.11, 0.12}}, [][]byte{[]byte("test3"), []byte("test4")})
Expect(err).ToNot(HaveOccurred())
})
It("should be able to get a key", func() {
err := store.SetSingle(context.Background(), sc, []float32{0.1, 0.2, 0.3}, []byte("test"))
Expect(err).ToNot(HaveOccurred())
val, err := store.GetSingle(context.Background(), sc, []float32{0.1, 0.2, 0.3})
Expect(err).ToNot(HaveOccurred())
Expect(val).To(Equal([]byte("test")))
})
It("should be able to get keys", func() {
//set 3 entries
err := store.SetCols(context.Background(), sc, [][]float32{{0.1, 0.2, 0.3}, {0.4, 0.5, 0.6}, {0.7, 0.8, 0.9}}, [][]byte{[]byte("test1"), []byte("test2"), []byte("test3")})
Expect(err).ToNot(HaveOccurred())
//get 3 entries
keys, vals, err := store.GetCols(context.Background(), sc, [][]float32{{0.1, 0.2, 0.3}, {0.4, 0.5, 0.6}, {0.7, 0.8, 0.9}})
Expect(err).ToNot(HaveOccurred())
Expect(keys).To(HaveLen(3))
Expect(vals).To(HaveLen(3))
for i, k := range keys {
v := vals[i]
if k[0] == 0.1 && k[1] == 0.2 && k[2] == 0.3 {
Expect(v).To(Equal([]byte("test1")))
} else if k[0] == 0.4 && k[1] == 0.5 && k[2] == 0.6 {
Expect(v).To(Equal([]byte("test2")))
} else {
Expect(k).To(Equal([]float32{0.7, 0.8, 0.9}))
Expect(v).To(Equal([]byte("test3")))
}
}
//get 2 entries
keys, vals, err = store.GetCols(context.Background(), sc, [][]float32{{0.7, 0.8, 0.9}, {0.1, 0.2, 0.3}})
Expect(err).ToNot(HaveOccurred())
Expect(keys).To(HaveLen(2))
Expect(vals).To(HaveLen(2))
for i, k := range keys {
v := vals[i]
if k[0] == 0.1 && k[1] == 0.2 && k[2] == 0.3 {
Expect(v).To(Equal([]byte("test1")))
} else {
Expect(k).To(Equal([]float32{0.7, 0.8, 0.9}))
Expect(v).To(Equal([]byte("test3")))
}
}
})
It("should be able to delete a key", func() {
err := store.SetSingle(context.Background(), sc, []float32{0.1, 0.2, 0.3}, []byte("test"))
Expect(err).ToNot(HaveOccurred())
err = store.DeleteSingle(context.Background(), sc, []float32{0.1, 0.2, 0.3})
Expect(err).ToNot(HaveOccurred())
val, _ := store.GetSingle(context.Background(), sc, []float32{0.1, 0.2, 0.3})
Expect(val).To(BeNil())
})
It("should be able to delete keys", func() {
//set 3 entries
err := store.SetCols(context.Background(), sc, [][]float32{{0.1, 0.2, 0.3}, {0.4, 0.5, 0.6}, {0.7, 0.8, 0.9}}, [][]byte{[]byte("test1"), []byte("test2"), []byte("test3")})
Expect(err).ToNot(HaveOccurred())
//delete 2 entries
err = store.DeleteCols(context.Background(), sc, [][]float32{{0.1, 0.2, 0.3}, {0.7, 0.8, 0.9}})
Expect(err).ToNot(HaveOccurred())
//get 1 entry
keys, vals, err := store.GetCols(context.Background(), sc, [][]float32{{0.4, 0.5, 0.6}})
Expect(err).ToNot(HaveOccurred())
Expect(keys).To(HaveLen(1))
Expect(vals).To(HaveLen(1))
Expect(keys[0]).To(Equal([]float32{0.4, 0.5, 0.6}))
Expect(vals[0]).To(Equal([]byte("test2")))
//get deleted entries
keys, vals, err = store.GetCols(context.Background(), sc, [][]float32{{0.1, 0.2, 0.3}, {0.7, 0.8, 0.9}})
Expect(err).ToNot(HaveOccurred())
Expect(keys).To(HaveLen(0))
Expect(vals).To(HaveLen(0))
})
It("should be able to find smilar keys", func() {
// set 3 vectors that are at varying angles to {0.5, 0.5, 0.5}
err := store.SetCols(context.Background(), sc, [][]float32{{0.5, 0.5, 0.5}, {0.6, 0.6, -0.6}, {0.7, -0.7, -0.7}}, [][]byte{[]byte("test1"), []byte("test2"), []byte("test3")})
Expect(err).ToNot(HaveOccurred())
// find similar keys
keys, vals, sims, err := store.Find(context.Background(), sc, []float32{0.1, 0.3, 0.5}, 2)
Expect(err).ToNot(HaveOccurred())
Expect(keys).To(HaveLen(2))
Expect(vals).To(HaveLen(2))
Expect(sims).To(HaveLen(2))
for i, k := range keys {
s := sims[i]
xlog.Debug("key", "similarity", s, "key", k)
}
Expect(keys[0]).To(Equal([]float32{0.5, 0.5, 0.5}))
Expect(vals[0]).To(Equal([]byte("test1")))
Expect(keys[1]).To(Equal([]float32{0.6, 0.6, -0.6}))
})
It("should be able to find similar normalized keys", func() {
// set 3 vectors that are at varying angles to {0.5, 0.5, 0.5}
keys := [][]float32{{0.1, 0.3, 0.5}, {0.5, 0.5, 0.5}, {0.6, 0.6, -0.6}, {0.7, -0.7, -0.7}}
vals := [][]byte{[]byte("test0"), []byte("test1"), []byte("test2"), []byte("test3")}
normalize(keys)
err := store.SetCols(context.Background(), sc, keys, vals)
Expect(err).ToNot(HaveOccurred())
// find similar keys
ks, vals, sims, err := store.Find(context.Background(), sc, keys[0], 3)
Expect(err).ToNot(HaveOccurred())
Expect(ks).To(HaveLen(3))
Expect(vals).To(HaveLen(3))
Expect(sims).To(HaveLen(3))
for i, k := range ks {
s := sims[i]
xlog.Debug("key", "similarity", s, "key", k)
}
Expect(ks[0]).To(Equal(keys[0]))
Expect(vals[0]).To(Equal(vals[0]))
Expect(sims[0]).To(BeNumerically("~", 1, 0.0001))
Expect(ks[1]).To(Equal(keys[1]))
Expect(vals[1]).To(Equal(vals[1]))
})
It("It produces the correct cosine similarities for orthogonal and opposite unit vectors", func() {
keys := [][]float32{{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0}, {-1.0, 0.0, 0.0}}
vals := [][]byte{[]byte("x"), []byte("y"), []byte("z"), []byte("-z")}
err := store.SetCols(context.Background(), sc, keys, vals)
Expect(err).ToNot(HaveOccurred())
_, _, sims, err := store.Find(context.Background(), sc, keys[0], 4)
Expect(err).ToNot(HaveOccurred())
Expect(sims).To(Equal([]float32{1.0, 0.0, 0.0, -1.0}))
})
It("It produces the correct cosine similarities for orthogonal and opposite vectors", func() {
keys := [][]float32{{1.0, 0.0, 1.0}, {0.0, 2.0, 0.0}, {0.0, 0.0, -1.0}, {-1.0, 0.0, -1.0}}
vals := [][]byte{[]byte("x"), []byte("y"), []byte("z"), []byte("-z")}
err := store.SetCols(context.Background(), sc, keys, vals)
Expect(err).ToNot(HaveOccurred())
_, _, sims, err := store.Find(context.Background(), sc, keys[0], 4)
Expect(err).ToNot(HaveOccurred())
Expect(sims[0]).To(BeNumerically("~", 1, 0.1))
Expect(sims[1]).To(BeNumerically("~", 0, 0.1))
Expect(sims[2]).To(BeNumerically("~", -0.7, 0.1))
Expect(sims[3]).To(BeNumerically("~", -1, 0.1))
})
expectTriangleEq := func(keys [][]float32, vals [][]byte) {
sims := map[string]map[string]float32{}
// compare every key vector pair and store the similarities in a lookup table
// that uses the values as keys
for i, k := range keys {
_, valsk, simsk, err := store.Find(context.Background(), sc, k, 9)
Expect(err).ToNot(HaveOccurred())
for j, v := range valsk {
p := string(vals[i])
q := string(v)
if sims[p] == nil {
sims[p] = map[string]float32{}
}
//log.Debug().Strs("vals", []string{p, q}).Float32("similarity", simsk[j]).Send()
sims[p][q] = simsk[j]
}
}
// Check that the triangle inequality holds for every combination of the triplet
// u, v and w
for _, simsu := range sims {
for w, simw := range simsu {
// acos(u,w) <= ...
uws := math.Acos(float64(simw))
// ... acos(u,v) + acos(v,w)
for v, _ := range simsu {
uvws := math.Acos(float64(simsu[v])) + math.Acos(float64(sims[v][w]))
//log.Debug().Str("u", u).Str("v", v).Str("w", w).Send()
//log.Debug().Float32("uw", simw).Float32("uv", simsu[v]).Float32("vw", sims[v][w]).Send()
Expect(uws).To(BeNumerically("<=", uvws))
}
}
}
}
It("It obeys the triangle inequality for normalized values", func() {
keys := [][]float32{
{1.0, 0.0, 0.0}, {0.0, 1.0, 0.0}, {0.0, 0.0, 1.0},
{-1.0, 0.0, 0.0}, {0.0, -1.0, 0.0}, {0.0, 0.0, -1.0},
{2.0, 3.0, 4.0}, {9.0, 7.0, 1.0}, {0.0, -1.2, 2.3},
}
vals := [][]byte{
[]byte("x"), []byte("y"), []byte("z"),
[]byte("-x"), []byte("-y"), []byte("-z"),
[]byte("u"), []byte("v"), []byte("w"),
}
normalize(keys[6:])
err := store.SetCols(context.Background(), sc, keys, vals)
Expect(err).ToNot(HaveOccurred())
expectTriangleEq(keys, vals)
})
It("It obeys the triangle inequality", func() {
rnd := rand.New(rand.NewSource(151))
keys := make([][]float32, 20)
vals := make([][]byte, 20)
for i := range keys {
k := make([]float32, 768)
for j := range k {
k[j] = rnd.Float32()
}
keys[i] = k
}
c := byte('a')
for i := range vals {
vals[i] = []byte{c}
c += 1
}
err := store.SetCols(context.Background(), sc, keys, vals)
Expect(err).ToNot(HaveOccurred())
expectTriangleEq(keys, vals)
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/tests/integration/integration_suite_test.go | tests/integration/integration_suite_test.go | package integration_test
import (
"os"
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/mudler/xlog"
)
// TestLocalAI is the Ginkgo entry point for the integration suite: it wires
// Gomega failures into the testing framework and runs every spec in this
// package.
func TestLocalAI(t *testing.T) {
	// Default logger for code under test that logs via xlog.
	xlog.SetLogger(xlog.NewLogger(xlog.LogLevel("info"), "text"))
	RegisterFailHandler(Fail)
	RunSpecs(t, "LocalAI test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/tests/e2e-aio/e2e_test.go | tests/e2e-aio/e2e_test.go | package e2e_test
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"github.com/mudler/LocalAI/core/schema"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/sashabaranov/go-openai"
"github.com/sashabaranov/go-openai/jsonschema"
)
var _ = Describe("E2E test", func() {
Context("Generating", func() {
BeforeEach(func() {
//
})
// Check that the GPU was used
AfterEach(func() {
//
})
Context("text", func() {
It("correctly", func() {
model := "gpt-4"
resp, err := client.CreateChatCompletion(context.TODO(),
openai.ChatCompletionRequest{
Model: model, Messages: []openai.ChatCompletionMessage{
{
Role: "user",
Content: "How much is 2+2?",
},
}})
Expect(err).ToNot(HaveOccurred())
Expect(len(resp.Choices)).To(Equal(1), fmt.Sprint(resp))
Expect(resp.Choices[0].Message.Content).To(Or(ContainSubstring("4"), ContainSubstring("four")), fmt.Sprint(resp.Choices[0].Message.Content))
})
})
Context("function calls", func() {
It("correctly invoke", func() {
params := jsonschema.Definition{
Type: jsonschema.Object,
Properties: map[string]jsonschema.Definition{
"location": {
Type: jsonschema.String,
Description: "The city and state, e.g. San Francisco, CA",
},
"unit": {
Type: jsonschema.String,
Enum: []string{"celsius", "fahrenheit"},
},
},
Required: []string{"location"},
}
f := openai.FunctionDefinition{
Name: "get_current_weather",
Description: "Get the current weather in a given location",
Parameters: params,
}
t := openai.Tool{
Type: openai.ToolTypeFunction,
Function: &f,
}
dialogue := []openai.ChatCompletionMessage{
{Role: openai.ChatMessageRoleUser, Content: "What is the weather in Boston today?"},
}
resp, err := client.CreateChatCompletion(context.TODO(),
openai.ChatCompletionRequest{
Model: openai.GPT4,
Messages: dialogue,
Tools: []openai.Tool{t},
},
)
Expect(err).ToNot(HaveOccurred())
Expect(len(resp.Choices)).To(Equal(1), fmt.Sprint(resp))
msg := resp.Choices[0].Message
Expect(len(msg.ToolCalls)).To(Equal(1), fmt.Sprint(msg.ToolCalls))
Expect(msg.ToolCalls[0].Function.Name).To(Equal("get_current_weather"), fmt.Sprint(msg.ToolCalls[0].Function.Name))
Expect(msg.ToolCalls[0].Function.Arguments).To(ContainSubstring("Boston"), fmt.Sprint(msg.ToolCalls[0].Function.Arguments))
})
})
Context("json", func() {
It("correctly", func() {
model := "gpt-4"
req := openai.ChatCompletionRequest{
ResponseFormat: &openai.ChatCompletionResponseFormat{Type: openai.ChatCompletionResponseFormatTypeJSONObject},
Model: model,
Messages: []openai.ChatCompletionMessage{
{
Role: "user",
Content: "Generate a JSON object of an animal with 'name', 'gender' and 'legs' fields",
},
},
}
resp, err := client.CreateChatCompletion(context.TODO(), req)
Expect(err).ToNot(HaveOccurred())
Expect(len(resp.Choices)).To(Equal(1), fmt.Sprint(resp))
var i map[string]interface{}
err = json.Unmarshal([]byte(resp.Choices[0].Message.Content), &i)
Expect(err).ToNot(HaveOccurred())
Expect(i).To(HaveKey("name"))
Expect(i).To(HaveKey("gender"))
Expect(i).To(HaveKey("legs"))
})
})
Context("images", func() {
It("correctly", func() {
req := openai.ImageRequest{
Prompt: "test",
Quality: "1",
Size: openai.CreateImageSize256x256,
}
resp, err := client.CreateImage(context.TODO(), req)
Expect(err).ToNot(HaveOccurred(), fmt.Sprintf("error sending image request %+v", req))
Expect(len(resp.Data)).To(Equal(1), fmt.Sprint(resp))
Expect(resp.Data[0].URL).To(ContainSubstring("png"), fmt.Sprint(resp.Data[0].URL))
})
It("correctly changes the response format to url", func() {
resp, err := client.CreateImage(context.TODO(),
openai.ImageRequest{
Prompt: "test",
Size: openai.CreateImageSize256x256,
Quality: "1",
ResponseFormat: openai.CreateImageResponseFormatURL,
},
)
Expect(err).ToNot(HaveOccurred())
Expect(len(resp.Data)).To(Equal(1), fmt.Sprint(resp))
Expect(resp.Data[0].URL).To(ContainSubstring("png"), fmt.Sprint(resp.Data[0].URL))
})
It("correctly changes the response format to base64", func() {
resp, err := client.CreateImage(context.TODO(),
openai.ImageRequest{
Prompt: "test",
Size: openai.CreateImageSize256x256,
Quality: "1",
ResponseFormat: openai.CreateImageResponseFormatB64JSON,
},
)
Expect(err).ToNot(HaveOccurred())
Expect(len(resp.Data)).To(Equal(1), fmt.Sprint(resp))
Expect(resp.Data[0].B64JSON).ToNot(BeEmpty(), fmt.Sprint(resp.Data[0].B64JSON))
})
})
Context("embeddings", func() {
It("correctly", func() {
resp, err := client.CreateEmbeddings(context.TODO(),
openai.EmbeddingRequestStrings{
Input: []string{"doc"},
Model: openai.AdaEmbeddingV2,
},
)
Expect(err).ToNot(HaveOccurred())
Expect(len(resp.Data)).To(Equal(1), fmt.Sprint(resp))
Expect(resp.Data[0].Embedding).ToNot(BeEmpty())
resp2, err := client.CreateEmbeddings(context.TODO(),
openai.EmbeddingRequestStrings{
Input: []string{"cat"},
Model: openai.AdaEmbeddingV2,
},
)
Expect(err).ToNot(HaveOccurred())
Expect(len(resp2.Data)).To(Equal(1), fmt.Sprint(resp))
Expect(resp2.Data[0].Embedding).ToNot(BeEmpty())
Expect(resp2.Data[0].Embedding).ToNot(Equal(resp.Data[0].Embedding))
resp3, err := client.CreateEmbeddings(context.TODO(),
openai.EmbeddingRequestStrings{
Input: []string{"doc", "cat"},
Model: openai.AdaEmbeddingV2,
},
)
Expect(err).ToNot(HaveOccurred())
Expect(len(resp3.Data)).To(Equal(2), fmt.Sprint(resp))
Expect(resp3.Data[0].Embedding).ToNot(BeEmpty())
Expect(resp3.Data[0].Embedding).To(Equal(resp.Data[0].Embedding))
Expect(resp3.Data[1].Embedding).To(Equal(resp2.Data[0].Embedding))
Expect(resp3.Data[0].Embedding).ToNot(Equal(resp3.Data[1].Embedding))
})
})
Context("vision", func() {
It("correctly", func() {
model := "gpt-4o"
resp, err := client.CreateChatCompletion(context.TODO(),
openai.ChatCompletionRequest{
Model: model, Messages: []openai.ChatCompletionMessage{
{
Role: "user",
MultiContent: []openai.ChatMessagePart{
{
Type: openai.ChatMessagePartTypeText,
Text: "What is in the image?",
},
{
Type: openai.ChatMessagePartTypeImageURL,
ImageURL: &openai.ChatMessageImageURL{
URL: "https://picsum.photos/id/22/4434/3729",
Detail: openai.ImageURLDetailLow,
},
},
},
},
}})
Expect(err).ToNot(HaveOccurred())
Expect(len(resp.Choices)).To(Equal(1), fmt.Sprint(resp))
Expect(resp.Choices[0].Message.Content).To(Or(ContainSubstring("man"), ContainSubstring("road")), fmt.Sprint(resp.Choices[0].Message.Content))
})
})
Context("text to audio", func() {
It("correctly", func() {
res, err := client.CreateSpeech(context.Background(), openai.CreateSpeechRequest{
Model: openai.TTSModel1,
Input: "Hello!",
Voice: openai.VoiceAlloy,
})
Expect(err).ToNot(HaveOccurred())
defer res.Close()
_, err = io.ReadAll(res)
Expect(err).ToNot(HaveOccurred())
})
})
Context("audio to text", func() {
It("correctly", func() {
downloadURL := "https://cdn.openai.com/whisper/draft-20220913a/micro-machines.wav"
file, err := downloadHttpFile(downloadURL)
Expect(err).ToNot(HaveOccurred())
req := openai.AudioRequest{
Model: openai.Whisper1,
FilePath: file,
}
resp, err := client.CreateTranscription(context.Background(), req)
Expect(err).ToNot(HaveOccurred())
Expect(resp.Text).To(ContainSubstring("This is the"), fmt.Sprint(resp.Text))
})
})
Context("vad", func() {
It("correctly", func() {
modelName := "silero-vad"
req := schema.VADRequest{
BasicModelRequest: schema.BasicModelRequest{
Model: modelName,
},
Audio: SampleVADAudio, // Use hardcoded sample data for now.
}
serialized, err := json.Marshal(req)
Expect(err).To(BeNil())
Expect(serialized).ToNot(BeNil())
vadEndpoint := apiEndpoint + "/vad"
resp, err := http.Post(vadEndpoint, "application/json", bytes.NewReader(serialized))
Expect(err).To(BeNil())
Expect(resp).ToNot(BeNil())
body, err := io.ReadAll(resp.Body)
Expect(err).ToNot(HaveOccurred())
Expect(resp.StatusCode).To(Equal(200))
deserializedResponse := schema.VADResponse{}
err = json.Unmarshal(body, &deserializedResponse)
Expect(err).To(BeNil())
Expect(deserializedResponse).ToNot(BeZero())
Expect(deserializedResponse.Segments).ToNot(BeZero())
})
})
Context("reranker", func() {
It("correctly", func() {
modelName := "jina-reranker-v1-base-en"
const query = "Organic skincare products for sensitive skin"
var documents = []string{
"Eco-friendly kitchenware for modern homes",
"Biodegradable cleaning supplies for eco-conscious consumers",
"Organic cotton baby clothes for sensitive skin",
"Natural organic skincare range for sensitive skin",
"Tech gadgets for smart homes: 2024 edition",
"Sustainable gardening tools and compost solutions",
"Sensitive skin-friendly facial cleansers and toners",
"Organic food wraps and storage solutions",
"All-natural pet food for dogs with allergies",
"Yoga mats made from recycled materials",
}
// Exceed len or requested results
randomValue := int(GinkgoRandomSeed()) % (len(documents) + 1)
requestResults := randomValue + 1 // at least 1 results
// Cap expectResults by the length of documents
expectResults := min(requestResults, len(documents))
var maybeSkipTopN = &requestResults
if requestResults >= len(documents) && int(GinkgoRandomSeed())%2 == 0 {
maybeSkipTopN = nil
}
resp, body := requestRerank(modelName, query, documents, maybeSkipTopN, apiEndpoint)
Expect(resp.StatusCode).To(Equal(200), fmt.Sprintf("body: %s, response: %+v", body, resp))
deserializedResponse := schema.JINARerankResponse{}
err := json.Unmarshal(body, &deserializedResponse)
Expect(err).To(BeNil())
Expect(deserializedResponse).ToNot(BeZero())
Expect(deserializedResponse.Model).To(Equal(modelName))
//Expect(len(deserializedResponse.Results)).To(BeNumerically(">", 0))
Expect(len(deserializedResponse.Results)).To(Equal(expectResults))
// Assert that relevance scores are in decreasing order
for i := 1; i < len(deserializedResponse.Results); i++ {
Expect(deserializedResponse.Results[i].RelevanceScore).To(
BeNumerically("<=", deserializedResponse.Results[i-1].RelevanceScore),
fmt.Sprintf("Result at index %d should have lower relevance score than previous result.", i),
)
}
// Assert that each result's index points to the correct document
for i, result := range deserializedResponse.Results {
Expect(result.Index).To(
And(
BeNumerically(">=", 0),
BeNumerically("<", len(documents)),
),
fmt.Sprintf("Result at position %d has index %d which should be within bounds [0, %d)", i, result.Index, len(documents)),
)
Expect(result.Document.Text).To(
Equal(documents[result.Index]),
fmt.Sprintf("Result at position %d (index %d) should have document text '%s', but got '%s'",
i, result.Index, documents[result.Index], result.Document.Text),
)
}
zeroOrNeg := int(GinkgoRandomSeed())%2 - 1 // Results in either -1 or 0
resp, body = requestRerank(modelName, query, documents, &zeroOrNeg, apiEndpoint)
Expect(resp.StatusCode).To(Equal(422), fmt.Sprintf("body: %s, response: %+v", body, resp))
})
})
})
})
func downloadHttpFile(url string) (string, error) {
resp, err := http.Get(url)
if err != nil {
return "", err
}
defer resp.Body.Close()
tmpfile, err := os.CreateTemp("", "example")
if err != nil {
return "", err
}
defer tmpfile.Close()
_, err = io.Copy(tmpfile, resp.Body)
if err != nil {
return "", err
}
return tmpfile.Name(), nil
}
// requestRerank POSTs a JINA-style rerank request for query over documents to
// apiEndpoint+"/rerank" and returns the HTTP response together with its
// fully-read body. topN may be nil to omit the field from the payload.
// The response body is closed before returning (it was previously leaked),
// so callers should use the returned byte slice rather than resp.Body.
func requestRerank(modelName, query string, documents []string, topN *int, apiEndpoint string) (*http.Response, []byte) {
	req := schema.JINARerankRequest{
		BasicModelRequest: schema.BasicModelRequest{
			Model: modelName,
		},
		Query:     query,
		Documents: documents,
		TopN:      topN,
	}
	serialized, err := json.Marshal(req)
	Expect(err).To(BeNil())
	Expect(serialized).ToNot(BeNil())
	rerankerEndpoint := apiEndpoint + "/rerank"
	resp, err := http.Post(rerankerEndpoint, "application/json", bytes.NewReader(serialized))
	Expect(err).To(BeNil())
	Expect(resp).ToNot(BeNil())
	// Close the body once it has been drained so the connection can be reused.
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	Expect(err).ToNot(HaveOccurred())
	return resp, body
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/tests/e2e-aio/sample_data_test.go | tests/e2e-aio/sample_data_test.go | package e2e_test
// e2e VAD test has had issues with wav files. Temporarily test by using a manually-dumped slice of data obtained via:
// Downloaded https://models.silero.ai/vad_models/en.wav
// Converted with:
// ffmpeg -t 15 -i en.wav -f f32le -acodec pcm_f32le - | od -An -v -t f4 | awk '{for(i=1;i<=NF;i++) printf "%s,", $i} END {print ""}' > output.txt
var SampleVADAudio []float32 = []float32{
-0.004486084,
-0.0053710938,
-0.0054016113,
-0.005126953,
-0.0046081543,
-0.0040283203,
-0.003692627,
-0.0025939941,
-0.002166748,
-0.0019226074,
-0.0012207031,
-0.0009765625,
-0.00088500977,
-0.0013122559,
-0.0021362305,
-0.0030212402,
-0.0040893555,
-0.005279541,
-0.00592041,
-0.0056152344,
-0.0049438477,
-0.0032958984,
-0.00045776367,
0.0043640137,
0.010101318,
0.014953613,
0.016235352,
0.015197754,
0.013000488,
0.008972168,
0.0054626465,
0.0043029785,
0.0050964355,
0.0037231445,
0.0026245117,
0.0017089844,
-0.0017700195,
-0.005065918,
-0.007537842,
-0.010375977,
-0.011779785,
-0.011779785,
-0.011169434,
-0.007751465,
-0.0034179688,
3.0517578e-05,
0.0029296875,
0.0044555664,
0.0049438477,
0.0045776367,
0.0037841797,
0.004211426,
0.00491333,
0.005493164,
0.006011963,
0.0053710938,
0.0038452148,
0.0014343262,
-0.0014648438,
-0.004425049,
-0.0071105957,
-0.00894165,
-0.009674072,
-0.009399414,
-0.00793457,
-0.006072998,
-0.0039978027,
-0.0017700195,
-0.0005493164,
0.000579834,
0.0018005371,
0.0029296875,
0.0041503906,
0.005554199,
0.007232666,
0.008270264,
0.008575439,
0.008300781,
0.0069885254,
0.005279541,
0.0037841797,
0.0018920898,
3.0517578e-05,
-0.001373291,
-0.0024719238,
-0.0032348633,
-0.0036010742,
-0.0040893555,
-0.004486084,
-0.0044555664,
-0.004180908,
-0.0038452148,
-0.0033874512,
-0.0020141602,
-0.0006713867,
0.00088500977,
0.0022888184,
0.0025939941,
0.002960205,
0.0026550293,
0.0020141602,
0.0014648438,
0.000579834,
-0.00015258789,
-0.0009765625,
-0.0016479492,
-0.0025024414,
-0.0033874512,
-0.0037841797,
-0.0043640137,
-0.005340576,
-0.005554199,
-0.005493164,
-0.0056152344,
-0.005065918,
-0.0041503906,
-0.0031433105,
-0.0016174316,
0,
0.0017089844,
0.0034484863,
0.004333496,
0.0051879883,
0.005584717,
0.00579834,
0.0061035156,
0.0059509277,
0.0055236816,
0.0048217773,
0.004119873,
0.0030212402,
0.0016784668,
0.000579834,
9.1552734e-05,
-0.0007324219,
-0.0016174316,
-0.0017700195,
-0.001953125,
-0.0016174316,
-0.0009765625,
-0.00024414062,
0.0006713867,
0.0012817383,
0.0017089844,
0.002166748,
0.002532959,
0.002746582,
0.0025939941,
0.0022888184,
0.0020751953,
0.0016174316,
0.0010681152,
0.00021362305,
-0.000579834,
-0.0016174316,
-0.0027160645,
-0.0034484863,
-0.0039367676,
-0.004272461,
-0.004058838,
-0.0036315918,
-0.0034484863,
-0.0033874512,
-0.0033569336,
-0.0031433105,
-0.0025634766,
-0.0020751953,
-0.0015563965,
-0.0010986328,
-0.0013122559,
-0.0013122559,
-0.00091552734,
-0.00021362305,
-0.00018310547,
-0.0004272461,
-0.0004272461,
-0.00048828125,
-0.00015258789,
0.00061035156,
0.0010986328,
0.0011291504,
0.0015869141,
0.002166748,
0.0024719238,
0.0032043457,
0.0040893555,
0.0040283203,
0.0034484863,
0.0032043457,
0.0023498535,
0.0017089844,
0.001373291,
0.0010681152,
0.0011901855,
0.000579834,
0.00018310547,
9.1552734e-05,
-0.00024414062,
-0.00048828125,
-0.00018310547,
0.00018310547,
0.00045776367,
0.00091552734,
0.00076293945,
0.0014038086,
0.002105713,
0.002105713,
0.0016784668,
0.001373291,
0.0012817383,
0.0016479492,
0.002166748,
0.0019836426,
0.0006713867,
-0.0011291504,
-0.0025634766,
-0.004760742,
-0.0059814453,
-0.005218506,
-0.004852295,
-0.005706787,
-0.0052490234,
-0.0053100586,
-0.0068969727,
-0.0072021484,
-0.007232666,
-0.008026123,
-0.008483887,
-0.009124756,
-0.009887695,
-0.009429932,
-0.009246826,
-0.00869751,
-0.0065307617,
-0.004333496,
-0.0023498535,
-3.0517578e-05,
0.0024108887,
0.0048828125,
0.007659912,
0.011291504,
0.015655518,
0.021026611,
0.02709961,
0.031097412,
0.030670166,
0.02609253,
0.01928711,
0.009887695,
0.000579834,
-0.0042419434,
-0.007232666,
-0.01083374,
-0.012298584,
-0.012878418,
-0.014923096,
-0.016174316,
-0.016815186,
-0.016540527,
-0.013824463,
-0.010131836,
-0.0052490234,
0.0007324219,
0.0067749023,
0.012298584,
0.016296387,
0.018005371,
0.017425537,
0.015075684,
0.011138916,
0.0074157715,
0.004547119,
0.0019226074,
-0.00076293945,
-0.003692627,
-0.00680542,
-0.009857178,
-0.012176514,
-0.013397217,
-0.01373291,
-0.013885498,
-0.014373779,
-0.013305664,
-0.011505127,
-0.010101318,
-0.0059509277,
0,
0.0028381348,
0.0035095215,
0.0058898926,
0.0052490234,
0.0018310547,
0.0012512207,
0.00048828125,
-0.0027160645,
-0.0049743652,
-0.008361816,
-0.012023926,
-0.0132751465,
-0.014343262,
-0.014007568,
-0.0115356445,
-0.00982666,
-0.009002686,
-0.005706787,
-0.0025939941,
0.00076293945,
0.006072998,
0.013671875,
0.021240234,
0.029174805,
0.038635254,
0.039886475,
0.036071777,
0.032073975,
0.022277832,
0.010864258,
0.0050354004,
-9.1552734e-05,
-0.0060424805,
-0.009124756,
-0.012573242,
-0.017120361,
-0.020080566,
-0.02166748,
-0.022735596,
-0.020477295,
-0.016448975,
-0.012176514,
-0.0049438477,
0.003112793,
0.01083374,
0.017913818,
0.022247314,
0.023254395,
0.02166748,
0.018066406,
0.014282227,
0.010894775,
0.007751465,
0.0046691895,
0.000579834,
-0.004119873,
-0.008392334,
-0.012664795,
-0.015472412,
-0.016571045,
-0.017059326,
-0.016967773,
-0.016204834,
-0.013336182,
-0.0099487305,
-0.008056641,
-0.0049743652,
0.0009765625,
0.0019836426,
0.00048828125,
0.004425049,
0.0038146973,
-0.0010375977,
-0.0010681152,
-0.002166748,
-0.0074157715,
-0.011505127,
-0.015380859,
-0.021209717,
-0.027832031,
-0.033233643,
-0.03555298,
-0.032318115,
-0.024932861,
-0.01626587,
-0.0050354004,
0.00592041,
0.01184082,
0.0154418945,
0.023406982,
0.03164673,
0.043518066,
0.06713867,
0.08605957,
0.08691406,
0.078430176,
0.059631348,
0.03149414,
0.005126953,
-0.013977051,
-0.021362305,
-0.030883789,
-0.038482666,
-0.037963867,
-0.04309082,
-0.04574585,
-0.042175293,
-0.041992188,
-0.038513184,
-0.028533936,
-0.019866943,
-0.0071105957,
0.011291504,
0.029296875,
0.044708252,
0.054534912,
0.056274414,
0.051330566,
0.040405273,
0.026000977,
0.013702393,
0.0034484863,
-0.0057678223,
-0.013641357,
-0.021209717,
-0.02947998,
-0.03744507,
-0.04360962,
-0.047821045,
-0.04852295,
-0.04473877,
-0.037139893,
-0.02633667,
-0.012451172,
0.0026245117,
0.016235352,
0.026489258,
0.03366089,
0.038360596,
0.038116455,
0.034729004,
0.02911377,
0.022583008,
0.014831543,
0.006439209,
-0.0022583008,
-0.00982666,
-0.01663208,
-0.026367188,
-0.031188965,
-0.032928467,
-0.036224365,
-0.03515625,
-0.029968262,
-0.02508545,
-0.017700195,
-0.007537842,
0.00088500977,
0.0068969727,
0.0121154785,
0.014892578,
0.015075684,
0.01586914,
0.015563965,
0.013763428,
0.011016846,
0.006011963,
-0.0004272461,
-0.0049743652,
-0.010314941,
-0.013519287,
-0.0115356445,
-0.009765625,
-0.009216309,
-0.00592041,
-0.0005187988,
0.0016479492,
0.0063171387,
0.018005371,
0.028198242,
0.036102295,
0.043395996,
0.0440979,
0.040771484,
0.033233643,
0.022521973,
0.013641357,
0.006164551,
-0.00289917,
-0.008026123,
-0.011108398,
-0.016937256,
-0.01864624,
-0.019134521,
-0.02053833,
-0.019226074,
-0.016723633,
-0.014709473,
-0.008331299,
-0.0010375977,
0.0065307617,
0.015258789,
0.021270752,
0.023712158,
0.0234375,
0.02166748,
0.016967773,
0.012298584,
0.007873535,
0.0020446777,
-0.003967285,
-0.009857178,
-0.015991211,
-0.021484375,
-0.025482178,
-0.028747559,
-0.030517578,
-0.030792236,
-0.028747559,
-0.023986816,
-0.017425537,
-0.009521484,
-0.0019836426,
0.00390625,
0.0077819824,
0.00869751,
0.0070495605,
0.0038757324,
-0.0025024414,
-0.011291504,
-0.020935059,
-0.029205322,
-0.033691406,
-0.032073975,
-0.028961182,
-0.026428223,
-0.02154541,
-0.019836426,
-0.023132324,
-0.023651123,
-0.015716553,
-0.006958008,
0.00079345703,
0.018188477,
0.03488159,
0.039733887,
0.050567627,
0.063964844,
0.0680542,
0.077941895,
0.09371948,
0.10159302,
0.09677124,
0.07757568,
0.047698975,
0.014373779,
-0.016845703,
-0.041931152,
-0.05126953,
-0.053619385,
-0.0592041,
-0.05722046,
-0.056549072,
-0.05770874,
-0.049804688,
-0.042114258,
-0.03390503,
-0.017150879,
-0.0010375977,
0.013702393,
0.032714844,
0.049682617,
0.061279297,
0.06661987,
0.06283569,
0.050323486,
0.03265381,
0.013641357,
-0.004058838,
-0.017730713,
-0.027832031,
-0.036743164,
-0.043518066,
-0.05001831,
-0.056488037,
-0.05883789,
-0.057678223,
-0.052734375,
-0.043151855,
-0.029907227,
-0.0138549805,
0.0040893555,
0.020904541,
0.03463745,
0.045684814,
0.051971436,
0.05215454,
0.047027588,
0.038848877,
0.027770996,
0.013244629,
-0.000579834,
-0.009521484,
-0.019592285,
-0.029815674,
-0.034942627,
-0.038391113,
-0.042175293,
-0.042755127,
-0.03768921,
-0.030731201,
-0.021728516,
-0.012451172,
-0.0036315918,
0.0045776367,
0.0093688965,
0.011779785,
0.014038086,
0.01361084,
0.009887695,
0.0061035156,
0.0015869141,
-0.004760742,
-0.012207031,
-0.01675415,
-0.019470215,
-0.022705078,
-0.027893066,
-0.028259277,
-0.023803711,
-0.024841309,
-0.023773193,
-0.01449585,
-0.0065307617,
-0.002532959,
0.010437012,
0.026428223,
0.03579712,
0.04611206,
0.059814453,
0.066101074,
0.06921387,
0.08013916,
0.086364746,
0.092315674,
0.09277344,
0.071746826,
0.043518066,
0.01361084,
-0.019683838,
-0.045135498,
-0.05432129,
-0.05682373,
-0.059448242,
-0.055267334,
-0.051757812,
-0.05154419,
-0.044891357,
-0.03643799,
-0.028167725,
-0.013793945,
0.0018920898,
0.016479492,
0.034179688,
0.05026245,
0.062194824,
0.06814575,
0.06439209,
0.05368042,
0.035461426,
0.014526367,
-0.003326416,
-0.01675415,
-0.027008057,
-0.035339355,
-0.040863037,
-0.046722412,
-0.049987793,
-0.048614502,
-0.043823242,
-0.035339355,
-0.02432251,
-0.014587402,
-0.0049438477,
0.005218506,
0.01361084,
0.02053833,
0.027496338,
0.031799316,
0.028656006,
0.023529053,
0.016296387,
0.00579834,
-0.002166748,
-0.008392334,
-0.015045166,
-0.020355225,
-0.024993896,
-0.028442383,
-0.03125,
-0.03262329,
-0.02822876,
-0.022338867,
-0.01928711,
-0.0154418945,
-0.009552002,
-0.009155273,
-0.011169434,
-0.009155273,
-0.009674072,
-0.0113220215,
-0.014434814,
-0.016998291,
-0.017303467,
-0.019744873,
-0.017730713,
-0.0093688965,
-0.006958008,
-0.0049743652,
0.0017089844,
0.0021972656,
-0.00088500977,
0.0010986328,
0.0082092285,
0.008270264,
0.0072021484,
0.016967773,
0.020935059,
0.022491455,
0.035888672,
0.043548584,
0.050872803,
0.0574646,
0.06286621,
0.07608032,
0.090148926,
0.09509277,
0.08023071,
0.05718994,
0.026031494,
-0.007843018,
-0.034301758,
-0.04486084,
-0.045959473,
-0.051452637,
-0.049438477,
-0.0463562,
-0.05267334,
-0.052093506,
-0.045715332,
-0.04260254,
-0.030303955,
-0.01550293,
-0.002380371,
0.014770508,
0.033111572,
0.048583984,
0.05807495,
0.05960083,
0.053833008,
0.03945923,
0.020233154,
0.005065918,
-0.007080078,
-0.016052246,
-0.021484375,
-0.026611328,
-0.032928467,
-0.037841797,
-0.041412354,
-0.04144287,
-0.035888672,
-0.026031494,
-0.014984131,
-0.0043029785,
0.005126953,
0.011383057,
0.016906738,
0.022827148,
0.026672363,
0.026489258,
0.02532959,
0.019958496,
0.0107421875,
0.0035095215,
-0.0034484863,
-0.01083374,
-0.015838623,
-0.020446777,
-0.025299072,
-0.02798462,
-0.030395508,
-0.028808594,
-0.024291992,
-0.020233154,
-0.016143799,
-0.011627197,
-0.008728027,
-0.00970459,
-0.010864258,
-0.011627197,
-0.0138549805,
-0.017333984,
-0.017913818,
-0.01763916,
-0.020111084,
-0.022766113,
-0.020721436,
-0.023956299,
-0.027038574,
-0.022521973,
-0.01928711,
-0.01763916,
-0.012786865,
-0.0024414062,
-0.00088500977,
0.0026855469,
0.010620117,
0.017974854,
0.018157959,
0.021270752,
0.028778076,
0.031036377,
0.03366089,
0.041503906,
0.05380249,
0.051483154,
0.05734253,
0.06298828,
0.070495605,
0.0925293,
0.10925293,
0.10235596,
0.07485962,
0.041381836,
0.00024414062,
-0.037719727,
-0.054534912,
-0.046905518,
-0.04937744,
-0.048553467,
-0.038116455,
-0.04815674,
-0.054107666,
-0.047454834,
-0.047424316,
-0.04043579,
-0.02166748,
-0.008178711,
0.008575439,
0.03012085,
0.046936035,
0.059631348,
0.06378174,
0.057647705,
0.042175293,
0.02154541,
0.0019836426,
-0.011169434,
-0.016571045,
-0.018432617,
-0.020599365,
-0.024230957,
-0.031097412,
-0.040618896,
-0.04837036,
-0.053009033,
-0.05203247,
-0.044708252,
-0.03289795,
-0.016571045,
0.00036621094,
0.014099121,
0.02444458,
0.029296875,
0.02947998,
0.026000977,
0.020721436,
0.01586914,
0.012390137,
0.0093688965,
0.007598877,
0.003326416,
-0.00491333,
-0.0119018555,
-0.019989014,
-0.028167725,
-0.03100586,
-0.03024292,
-0.029418945,
-0.026275635,
-0.020599365,
-0.017486572,
-0.015411377,
-0.009124756,
-0.008056641,
-0.010620117,
-0.008453369,
-0.008270264,
-0.011047363,
-0.010040283,
-0.009094238,
-0.0132751465,
-0.017089844,
-0.023712158,
-0.03137207,
-0.03326416,
-0.032806396,
-0.027191162,
-0.016845703,
-0.010192871,
-0.0050964355,
-0.0011901855,
-0.0032958984,
-0.0031738281,
0.005340576,
0.010437012,
0.020324707,
0.03677368,
0.042999268,
0.05078125,
0.058654785,
0.056640625,
0.05947876,
0.07019043,
0.07775879,
0.093170166,
0.117126465,
0.12197876,
0.09991455,
0.06832886,
0.031036377,
-0.0138549805,
-0.04800415,
-0.054260254,
-0.052246094,
-0.057159424,
-0.049224854,
-0.04736328,
-0.0619812,
-0.062561035,
-0.061950684,
-0.06500244,
-0.052703857,
-0.035583496,
-0.019256592,
0.007232666,
0.034973145,
0.05441284,
0.068725586,
0.069122314,
0.060028076,
0.045135498,
0.028259277,
0.017089844,
0.012176514,
0.012054443,
0.009033203,
0.0020141602,
-0.011444092,
-0.029296875,
-0.0501709,
-0.067993164,
-0.07650757,
-0.074157715,
-0.06378174,
-0.04864502,
-0.029083252,
-0.012969971,
-0.0014343262,
0.0068359375,
0.013763428,
0.018188477,
0.020996094,
0.023376465,
0.028259277,
0.034057617,
0.034973145,
0.033447266,
0.027038574,
0.014160156,
-0.0029907227,
-0.020629883,
-0.03286743,
-0.03918457,
-0.041290283,
-0.03704834,
-0.032440186,
-0.031402588,
-0.030700684,
-0.03201294,
-0.03265381,
-0.029174805,
-0.023010254,
-0.014984131,
-0.0067443848,
6.1035156e-05,
0.0007324219,
-0.0053710938,
-0.0152282715,
-0.02130127,
-0.026184082,
-0.02746582,
-0.022064209,
-0.014556885,
-0.006164551,
-0.0053710938,
-0.00021362305,
0.0048217773,
-0.0018920898,
-0.002319336,
0.008972168,
0.010925293,
0.013092041,
0.028137207,
0.03475952,
0.027160645,
0.037902832,
0.04837036,
0.04598999,
0.067718506,
0.095062256,
0.1177063,
0.1385498,
0.14031982,
0.10803223,
0.06488037,
0.023071289,
-0.013397217,
-0.03277588,
-0.030822754,
-0.024810791,
-0.033599854,
-0.038879395,
-0.053375244,
-0.07562256,
-0.08596802,
-0.08880615,
-0.0869751,
-0.07043457,
-0.046844482,
-0.02243042,
0.010131836,
0.03665161,
0.05609131,
0.06838989,
0.06719971,
0.059661865,
0.05041504,
0.04333496,
0.041259766,
0.0446167,
0.04510498,
0.03604126,
0.018615723,
-0.0082092285,
-0.03805542,
-0.0635376,
-0.080963135,
-0.08670044,
-0.081451416,
-0.07022095,
-0.05731201,
-0.044189453,
-0.033691406,
-0.024993896,
-0.015808105,
-0.006713867,
0.0039978027,
0.01550293,
0.029205322,
0.042144775,
0.05026245,
0.05291748,
0.046844482,
0.03250122,
0.013397217,
-0.004699707,
-0.019134521,
-0.027130127,
-0.03366089,
-0.037322998,
-0.039031982,
-0.045806885,
-0.049957275,
-0.049987793,
-0.046691895,
-0.039642334,
-0.027862549,
-0.015258789,
-0.0066223145,
-0.00048828125,
-0.00091552734,
-0.004486084,
-0.009246826,
-0.01687622,
-0.020843506,
-0.025390625,
-0.023376465,
-0.014770508,
-0.011993408,
-0.0058898926,
0.0054016113,
0.0055236816,
-0.003326416,
-0.00015258789,
0.0015869141,
-0.007446289,
0.006011963,
0.025909424,
0.031585693,
0.044189453,
0.056121826,
0.058898926,
0.060821533,
0.078948975,
0.10449219,
0.13085938,
0.14950562,
0.13824463,
0.10751343,
0.061798096,
0.01889038,
-0.008514404,
-0.02670288,
-0.026977539,
-0.026855469,
-0.040924072,
-0.05606079,
-0.07183838,
-0.091033936,
-0.09957886,
-0.09567261,
-0.08969116,
-0.0730896,
-0.05102539,
-0.026306152,
0.0038452148,
0.029937744,
0.049621582,
0.060760498,
0.06112671,
0.054779053,
0.05142212,
0.048797607,
0.051452637,
0.05630493,
0.051574707,
0.03778076,
0.015533447,
-0.012573242,
-0.039093018,
-0.058135986,
-0.070892334,
-0.07397461,
-0.07070923,
-0.06741333,
-0.06289673,
-0.05368042,
-0.045135498,
-0.037139893,
-0.023773193,
-0.009887695,
0.0057373047,
0.021820068,
0.036102295,
0.047332764,
0.053985596,
0.050994873,
0.040802002,
0.028442383,
0.013092041,
-0.0025024414,
-0.010528564,
-0.015625,
-0.024932861,
-0.032470703,
-0.037506104,
-0.047027588,
-0.052246094,
-0.044647217,
-0.041137695,
-0.032836914,
-0.023773193,
-0.019592285,
-0.018005371,
-0.017120361,
-0.013122559,
-0.012298584,
-0.012817383,
-0.012207031,
-0.014038086,
-0.020904541,
-0.02166748,
-0.024871826,
-0.024414062,
-0.018920898,
-0.014892578,
-0.011352539,
0.000579834,
0.006164551,
0.012298584,
0.029449463,
0.033843994,
0.0423584,
0.047546387,
0.04837036,
0.06109619,
0.07836914,
0.095184326,
0.12661743,
0.16177368,
0.16009521,
0.13150024,
0.09915161,
0.047180176,
0.0028686523,
-0.013824463,
-0.019073486,
-0.017364502,
-0.022521973,
-0.03970337,
-0.06713867,
-0.095214844,
-0.11557007,
-0.12338257,
-0.11975098,
-0.102508545,
-0.075531006,
-0.047180176,
-0.018188477,
0.010925293,
0.032104492,
0.0423584,
0.048706055,
0.051635742,
0.052337646,
0.057861328,
0.06915283,
0.07659912,
0.0758667,
0.063964844,
0.040863037,
0.009063721,
-0.02154541,
-0.04522705,
-0.06173706,
-0.068359375,
-0.06915283,
-0.06817627,
-0.06652832,
-0.064208984,
-0.06259155,
-0.05911255,
-0.052734375,
-0.039916992,
-0.020233154,
0.00088500977,
0.020599365,
0.038970947,
0.04849243,
0.049438477,
0.04522705,
0.036865234,
0.026824951,
0.017425537,
0.013031006,
0.00579834,
0.00018310547,
-0.01083374,
-0.02432251,
-0.036010742,
-0.04675293,
-0.05001831,
-0.048431396,
-0.03881836,
-0.028533936,
-0.022094727,
-0.017425537,
-0.01977539,
-0.025634766,
-0.028411865,
-0.026397705,
-0.021575928,
-0.010955811,
-0.0012207031,
-0.009979248,
-0.011230469,
-0.0073242188,
-0.0140686035,
-0.013580322,
0.0036621094,
0.006958008,
-0.0020141602,
0.003479004,
0.004486084,
0.0053100586,
0.013885498,
0.02545166,
0.036712646,
0.0546875,
0.062194824,
0.08026123,
0.12289429,
0.14099121,
0.15356445,
0.15060425,
0.10449219,
0.06594849,
0.03869629,
0.008331299,
0.009735107,
0.021026611,
0.0026855469,
-0.018585205,
-0.041656494,
-0.08218384,
-0.107055664,
-0.11453247,
-0.11782837,
-0.10391235,
-0.08432007,
-0.06863403,
-0.046325684,
-0.023254395,
-0.009460449,
0.0048217773,
0.016815186,
0.023132324,
0.035064697,
0.052947998,
0.06945801,
0.08648682,
0.09487915,
0.086883545,
0.07220459,
0.049865723,
0.024841309,
0.0049743652,
-0.010650635,
-0.023284912,
-0.031036377,
-0.04107666,
-0.055755615,
-0.06845093,
-0.078826904,
-0.08554077,
-0.08428955,
-0.07284546,
-0.056762695,
-0.039276123,
-0.020385742,
-0.002960205,
0.008239746,
0.016784668,
0.024108887,
0.027679443,
0.03125,
0.03640747,
0.039031982,
0.03677368,
0.029876709,
0.019317627,
0.0026855469,
-0.0154418945,
-0.02670288,
-0.034210205,
-0.038726807,
-0.04119873,
-0.04147339,
-0.04272461,
-0.04852295,
-0.054504395,
-0.054473877,
-0.05545044,
-0.05441284,
-0.044067383,
-0.034729004,
-0.03060913,
-0.030426025,
-0.029632568,
-0.031311035,
-0.027923584,
-0.016662598,
-0.0024108887,
0.013092041,
0.026641846,
0.034729004,
0.040985107,
0.051574707,
0.04827881,
0.057556152,
0.076934814,
0.09240723,
0.12896729,
0.1708374,
0.19329834,
0.1711731,
0.13171387,
0.09436035,
0.046081543,
0.019805908,
0.032104492,
0.03540039,
0.016448975,
-0.0028686523,
-0.046325684,
-0.098724365,
-0.124053955,
-0.14181519,
-0.14694214,
-0.12948608,
-0.11178589,
-0.09719849,
-0.076049805,
-0.054382324,
-0.042633057,
-0.028076172,
-0.014831543,
-0.002746582,
0.019226074,
0.0463562,
0.07531738,
0.100860596,
0.113586426,
0.10845947,
0.0927124,
0.071777344,
0.052337646,
0.039642334,
0.030975342,
0.02230835,
0.009674072,
-0.010864258,
-0.036071777,
-0.05822754,
-0.076660156,
-0.08773804,
-0.08999634,
-0.08679199,
-0.07562256,
-0.060455322,
-0.044891357,
-0.028747559,
-0.017944336,
-0.009002686,
0.0018920898,
0.0107421875,
0.023101807,
0.041137695,
0.053344727,
0.056762695,
0.053497314,
0.043945312,
0.025512695,
0.009002686,
-0.0006713867,
-0.008178711,
-0.011688232,
-0.018188477,
-0.023468018,
-0.033447266,
-0.046905518,
-0.056793213,
-0.058380127,
-0.05947876,
-0.058624268,
-0.051452637,
-0.051574707,
-0.049987793,
-0.04949951,
-0.047576904,
-0.04269409,
-0.032196045,
-0.024475098,
-0.02053833,
-0.0068359375,
-0.004638672,
-0.0007324219,
0.018859863,
0.027252197,
0.031402588,
0.046173096,
0.052490234,
0.053497314,
0.06878662,
0.097595215,
0.12820435,
0.15222168,
0.1581726,
0.13256836,
0.09603882,
0.06561279,
0.041778564,
0.040039062,
0.051239014,
0.046203613,
0.02822876,
-0.00061035156,
-0.039276123,
-0.06918335,
-0.09060669,
-0.101379395,
-0.10192871,
-0.09890747,
-0.09133911,
-0.08206177,
-0.07211304,
-0.06225586,
-0.05722046,
-0.054595947,
-0.04937744,
-0.036254883,
-0.012207031,
0.017425537,
0.04989624,
0.07550049,
0.085510254,
0.0836792,
0.076660156,
0.07229614,
0.074798584,
0.080078125,
0.083099365,
0.077697754,
0.060516357,
0.03491211,
0.0048217773,
-0.021881104,
-0.040802002,
-0.05419922,
-0.06314087,
-0.06814575,
-0.07019043,
-0.07324219,
-0.07559204,
-0.07345581,
-0.07034302,
-0.064208984,
-0.051818848,
-0.032928467,
-0.012969971,
0.007019043,
0.023498535,
0.032196045,
0.03692627,
0.035491943,
0.03161621,
0.030181885,
0.029968262,
0.027709961,
0.023986816,
0.015716553,
0.00012207031,
-0.01751709,
-0.036743164,
-0.050567627,
-0.059814453,
-0.06661987,
-0.069122314,
-0.07159424,
-0.07589722,
-0.07876587,
-0.07821655,
-0.08013916,
-0.0769043,
-0.06341553,
-0.05316162,
-0.04095459,
-0.023071289,
-0.0053100586,
0.008392334,
0.018920898,
0.035583496,
0.04699707,
0.06100464,
0.07901001,
0.10058594,
0.13220215,
0.16015625,
0.18157959,
0.17526245,
0.14385986,
0.11437988,
0.082611084,
0.062042236,
0.067474365,
0.07418823,
0.063446045,
0.040405273,
0.006713867,
-0.03652954,
-0.069488525,
-0.08880615,
-0.10128784,
-0.100860596,
-0.095214844,
-0.09124756,
-0.08718872,
-0.08151245,
-0.079956055,
-0.081207275,
-0.07803345,
-0.06954956,
-0.052337646,
-0.028015137,
0.00012207031,
0.026367188,
0.04446411,
0.053863525,
0.057525635,
0.058654785,
0.062042236,
0.07110596,
0.08380127,
0.09503174,
0.097351074,
0.08694458,
0.06707764,
0.043670654,
0.021575928,
0.0036621094,
-0.0063476562,
-0.011627197,
-0.019378662,
-0.02859497,
-0.036621094,
-0.046173096,
-0.05618286,
-0.0635376,
-0.064697266,
-0.05709839,
-0.046051025,
-0.030883789,
-0.015411377,
-0.005706787,
-0.0022277832,
-0.0014038086,
-0.002319336,
-0.00048828125,
0.0060424805,
0.011444092,
0.016113281,
0.016326904,
0.011779785,
0.0012512207,
-0.01473999,
-0.02670288,
-0.03643799,
-0.045837402,
-0.05041504,
-0.054748535,
-0.06085205,
-0.06765747,
-0.07369995,
-0.078826904,
-0.08074951,
-0.07876587,
-0.07635498,
-0.071746826,
-0.057678223,
-0.044769287,
-0.033599854,
-0.015991211,
-0.003479004,
0.0076293945,
0.02230835,
0.041809082,
0.057037354,
0.0758667,
0.10784912,
0.14334106,
0.16967773,
0.16799927,
0.14880371,
0.11932373,
0.088409424,
0.074920654,
0.07595825,
0.08364868,
0.080200195,
0.058135986,
0.029388428,
-0.0022888184,
-0.027191162,
-0.042419434,
-0.056762695,
-0.062164307,
-0.06085205,
-0.060272217,
-0.055389404,
-0.049072266,
-0.049926758,
-0.05618286,
-0.0642395,
-0.06796265,
-0.057373047,
-0.037963867,
-0.016143799,
0.0016784668,
0.00970459,
0.0071105957,
-0.0005187988,
-0.005493164,
-0.001739502,
0.0075683594,
0.019195557,
0.030303955,
0.0340271,
0.033843994,
0.028656006,
0.021118164,
0.013427734,
0.0077209473,
0.0053710938,
0.0061035156,
0.009429932,
0.013580322,
0.015808105,
0.013519287,
0.008239746,
0.0008239746,
-0.0014343262,
0.00036621094,
0.006958008,
0.015777588,
0.021453857,
0.023162842,
0.019042969,
0.012420654,
0.005218506,
9.1552734e-05,
-0.0026245117,
-0.0034179688,
-0.005554199,
-0.00579834,
-0.011810303,
-0.02319336,
-0.032836914,
-0.046081543,
-0.058288574,
-0.066833496,
-0.068725586,
-0.073150635,
-0.07589722,
-0.075164795,
-0.08047485,
-0.081604004,
-0.08267212,
-0.08630371,
-0.08358765,
-0.07458496,
-0.064971924,
-0.05532837,
-0.036315918,
-0.016937256,
-0.0040283203,
0.0082092285,
0.023529053,
0.03729248,
0.044311523,
0.06588745,
0.09487915,
0.12768555,
0.1546936,
0.15496826,
0.14324951,
0.123168945,
0.0982666,
0.08389282,
0.0831604,
0.08520508,
0.08105469,
0.065979004,
0.045837402,
0.024841309,
0.005004883,
-0.015319824,
-0.031921387,
-0.039733887,
-0.04397583,
-0.04437256,
-0.040802002,
-0.03475952,
-0.036590576,
-0.046539307,
-0.056518555,
-0.06286621,
-0.057861328,
-0.045440674,
-0.031799316,
-0.019561768,
-0.015533447,
-0.018981934,
-0.027435303,
-0.03363037,
-0.032806396,
-0.028717041,
-0.02255249,
-0.014923096,
-0.0067749023,
9.1552734e-05,
0.0024414062,
0.0023498535,
0.002532959,
0.0022888184,
0.005493164,
0.014831543,
0.026824951,
0.03918457,
0.048095703,
0.04827881,
0.044708252,
0.04119873,
0.037994385,
0.03894043,
0.042266846,
0.044921875,
0.043426514,
0.037231445,
0.027801514,
0.016571045,
0.003967285,
-0.007598877,
-0.015838623,
-0.02319336,
-0.025299072,
-0.026916504,
-0.02911377,
-0.035614014,
-0.046142578,
-0.05883789,
-0.06890869,
-0.072784424,
-0.074920654,
-0.07394409,
-0.071624756,
-0.07336426,
-0.07809448,
-0.07885742,
-0.080841064,
-0.08126831,
-0.07675171,
-0.06484985,
-0.054901123,
-0.043518066,
-0.03012085,
-0.019348145,
-0.010864258,
-0.0021362305,
0.01260376,
0.030059814,
0.052764893,
0.08123779,
0.11001587,
0.12145996,
0.122406006,
0.11898804,
0.111083984,
0.1043396,
0.10369873,
0.107940674,
0.105163574,
0.097839355,
0.08648682,
0.06796265,
0.05126953,
0.036010742,
0.016235352,
0.004333496,
-0.0018920898,
-0.007537842,
-0.0093688965,
-0.012176514,
-0.021026611,
-0.034729004,
-0.0491333,
-0.060424805,
-0.0625,
-0.055511475,
-0.04675293,
-0.040527344,
-0.038116455,
-0.041931152,
-0.04928589,
-0.055541992,
-0.057922363,
-0.055541992,
-0.050354004,
-0.043060303,
-0.0340271,
-0.024963379,
-0.018188477,
-0.0138549805,
-0.010223389,
-0.007537842,
-0.0017089844,
0.009521484,
0.022338867,
0.036468506,
0.0491333,
0.05670166,
0.05871582,
0.057617188,
0.056365967,
0.055541992,
0.055267334,
0.056793213,
0.057556152,
0.056854248,
0.05105591,
0.04055786,
0.029327393,
0.01260376,
-0.0031738281,
-0.015930176,
-0.022949219,
-0.028778076,
-0.032928467,
-0.03555298,
-0.043060303,
-0.050323486,
-0.059051514,
-0.06552124,
-0.06896973,
-0.07086182,
-0.06878662,
-0.06277466,
-0.061950684,
-0.060180664,
-0.059814453,
-0.0625,
-0.065093994,
-0.06576538,
-0.06109619,
-0.05831909,
-0.05154419,
-0.043304443,
-0.03643799,
-0.029724121,
-0.017578125,
-0.008361816,
0.0048828125,
0.027404785,
0.048065186,
0.06271362,
0.07437134,
0.082977295,
0.08432007,
0.087402344,
0.09152222,
0.0954895,
0.09674072,
0.096069336,
0.091918945,
0.084991455,
0.07763672,
0.069366455,
0.056488037,
0.043945312,
0.03427124,
0.025268555,
0.019805908,
0.015563965,
0.010009766,
0.0011291504,
-0.010528564,
-0.022644043,
-0.030944824,
-0.036102295,
-0.036590576,
-0.035858154,
-0.036254883,
-0.036895752,
-0.0395813,
-0.043029785,
-0.046051025,
-0.048583984,
-0.05065918,
-0.05203247,
-0.05142212,
-0.047790527,
-0.042236328,
-0.036621094,
-0.031982422,
-0.028259277,
-0.024353027,
-0.019866943,
-0.013092041,
-0.003692627,
0.0061950684,
0.016540527,
0.026641846,
0.035369873,
0.041900635,
0.047698975,
0.050689697,
0.053131104,
0.055389404,
0.0569458,
0.05734253,
0.055999756,
0.052764893,
0.046569824,
0.03793335,
0.027252197,
0.017028809,
0.0063171387,
-0.00390625,
-0.012512207,
-0.019348145,
-0.026245117,
-0.03213501,
-0.036834717,
-0.041778564,
-0.04547119,
-0.048583984,
-0.050720215,
-0.051452637,
-0.05065918,
-0.04949951,
-0.049041748,
-0.048828125,
-0.048858643,
-0.04925537,
-0.05215454,
-0.05307007,
-0.05130005,
-0.05227661,
-0.05178833,
-0.048187256,
-0.044799805,
-0.041534424,
-0.03552246,
-0.026824951,
-0.018035889,
-0.008300781,
0.0023498535,
0.012756348,
0.023620605,
0.033233643,
0.039733887,
0.044952393,
0.049316406,
0.053619385,
0.06008911,
0.06512451,
0.068115234,
0.07028198,
0.06967163,
0.06585693,
0.06402588,
0.065093994,
0.0663147,
0.06719971,
0.066467285,
0.062347412,
0.055541992,
0.04901123,
0.042297363,
0.03692627,
0.032928467,
0.026489258,
0.018371582,
0.011413574,
0.0039978027,
-0.0019836426,
-0.0072631836,
-0.012817383,
-0.019012451,
-0.026519775,
-0.034454346,
-0.041534424,
-0.04623413,
-0.04940796,
-0.051208496,
-0.05117798,
-0.04940796,
-0.047546387,
-0.04547119,
-0.043518066,
-0.040527344,
-0.036224365,
-0.030944824,
-0.024353027,
-0.016448975,
-0.008911133,
-0.0031738281,
0.0015258789,
0.005859375,
0.010406494,
0.013092041,
0.016113281,
0.018493652,
0.018829346,
0.017730713,
0.0146484375,
0.010894775,
0.0069885254,
0.0032348633,
-0.0007019043,
-0.0039367676,
-0.007904053,
-0.011566162,
-0.013946533,
-0.015930176,
-0.018249512,
-0.020263672,
-0.022277832,
-0.025604248,
-0.028930664,
-0.031555176,
-0.033355713,
-0.034973145,
-0.035339355,
-0.036102295,
-0.037902832,
-0.039733887,
-0.040924072,
-0.04107666,
-0.04043579,
-0.03668213,
-0.031097412,
-0.026184082,
-0.019897461,
-0.012329102,
-0.006591797,
-0.0014038086,
0.00680542,
0.015655518,
0.022247314,
0.027252197,
0.030181885,
0.032226562,
0.035583496,
0.041656494,
0.048095703,
0.052124023,
0.05532837,
0.056671143,
0.055999756,
0.056549072,
0.060028076,
0.06335449,
0.0652771,
0.06655884,
0.06411743,
0.058746338,
0.054016113,
0.051086426,
0.047210693,
0.043060303,
0.038024902,
0.030212402,
0.021850586,
0.014221191,
0.0070495605,
0.0010070801,
-0.0056762695,
-0.0134887695,
-0.021331787,
-0.029327393,
-0.03540039,
-0.04055786,
-0.044433594,
-0.04699707,
-0.04876709,
-0.049468994,
-0.048339844,
-0.046051025,
-0.043548584,
-0.04055786,
-0.037841797,
-0.03463745,
-0.029388428,
-0.023101807,
-0.017303467,
-0.011932373,
-0.0069274902,
-0.0028381348,
-9.1552734e-05,
0.004211426,
0.008148193,
0.011138916,
0.0134887695,
0.014709473,
0.01473999,
0.013977051,
0.013031006,
0.009857178,
0.0060424805,
0.0014953613,
-0.0019226074,
-0.0038452148,
-0.0054626465,
-0.00579834,
-0.0063476562,
-0.009033203,
-0.013336182,
-0.01651001,
-0.01965332,
-0.022766113,
-0.024627686,
-0.027648926,
-0.030181885,
-0.03164673,
-0.034332275,
-0.03704834,
-0.039367676,
-0.041656494,
-0.04449463,
-0.04437256,
-0.041015625,
-0.038269043,
-0.03375244,
-0.026824951,
-0.020843506,
-0.014343262,
-0.0065612793,
0.00079345703,
0.0066833496,
0.016113281,
0.024719238,
0.02758789,
0.031585693,
0.035980225,
0.03765869,
0.04248047,
0.051452637,
0.05493164,
0.055633545,
0.0579834,
0.05807495,
0.057800293,
0.06259155,
0.067596436,
0.06906128,
0.07098389,
0.07043457,
0.0657959,
0.061187744,
0.057891846,
0.052734375,
0.04647827,
0.04019165,
0.032440186,
0.024169922,
0.01586914,
0.007751465,
3.0517578e-05,
-0.008483887,
-0.016845703,
-0.024932861,
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | true |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/tests/e2e-aio/e2e_suite_test.go | tests/e2e-aio/e2e_suite_test.go | package e2e_test
import (
"context"
"fmt"
"os"
"runtime"
"testing"
"time"
"github.com/docker/go-connections/nat"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/sashabaranov/go-openai"
"github.com/testcontainers/testcontainers-go"
"github.com/testcontainers/testcontainers-go/wait"
)
var container testcontainers.Container
var client *openai.Client
var containerImage = os.Getenv("LOCALAI_IMAGE")
var containerImageTag = os.Getenv("LOCALAI_IMAGE_TAG")
var modelsDir = os.Getenv("LOCALAI_MODELS_DIR")
var backendDir = os.Getenv("LOCALAI_BACKEND_DIR")
var apiEndpoint = os.Getenv("LOCALAI_API_ENDPOINT")
var apiKey = os.Getenv("LOCALAI_API_KEY")
const (
defaultApiPort = "8080"
)
// TestLocalAI is the Go test entry point: it wires Gomega assertion
// failures into Ginkgo and runs the whole E2E spec suite.
func TestLocalAI(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "LocalAI E2E test suite")
}
// BeforeSuite prepares the OpenAI client used by all specs. If no external
// endpoint is configured via LOCALAI_API_ENDPOINT, a LocalAI container is
// started with testcontainers and its mapped port becomes the endpoint.
var _ = BeforeSuite(func() {
	var defaultConfig openai.ClientConfig
	if apiEndpoint == "" {
		startDockerImage()
		// Resolve the host port that Docker mapped to the container's API port.
		apiPort, err := container.MappedPort(context.Background(), nat.Port(defaultApiPort))
		Expect(err).To(Not(HaveOccurred()))

		defaultConfig = openai.DefaultConfig(apiKey)
		apiEndpoint = "http://localhost:" + apiPort.Port() + "/v1" // So that other tests can reference this value safely.
		defaultConfig.BaseURL = apiEndpoint
	} else {
		GinkgoWriter.Printf("docker apiEndpoint set from env: %q\n", apiEndpoint)
		defaultConfig = openai.DefaultConfig(apiKey)
		defaultConfig.BaseURL = apiEndpoint
	}

	// Wait for API to be ready. The generous 50m budget covers first-run
	// model downloads inside the container.
	client = openai.NewClientWithConfig(defaultConfig)
	Eventually(func() error {
		_, err := client.ListModels(context.TODO())
		return err
	}, "50m").ShouldNot(HaveOccurred())
})
// AfterSuite tears down the container if this suite started one
// (container stays nil when an external endpoint was used).
var _ = AfterSuite(func() {
	if container != nil {
		Expect(container.Terminate(context.Background())).To(Succeed())
	}
})
// AfterEach is currently a placeholder for per-spec cleanup.
var _ = AfterEach(func() {
	// Add any cleanup needed after each test
})
// logConsumer forwards container log lines to the Ginkgo output writer so
// LocalAI's logs are interleaved with the test report.
type logConsumer struct {
}

// Accept implements testcontainers.LogConsumer.
func (l *logConsumer) Accept(log testcontainers.Log) {
	GinkgoWriter.Write([]byte(log.Content))
}
// startDockerImage launches the LocalAI image named by LOCALAI_IMAGE and
// LOCALAI_IMAGE_TAG, bind-mounting host models/backends directories, and
// stores the running container in the package-level `container` variable
// for the suite to use and later terminate.
func startDockerImage() {
	// get cwd
	cwd, err := os.Getwd()
	Expect(err).To(Not(HaveOccurred()))
	md := cwd + "/models"
	bd := cwd + "/backends"

	// Environment variables override the default host paths.
	if backendDir != "" {
		bd = backendDir
	}

	if modelsDir != "" {
		md = modelsDir
	}

	proc := runtime.NumCPU()
	req := testcontainers.ContainerRequest{
		Image:        fmt.Sprintf("%s:%s", containerImage, containerImageTag),
		ExposedPorts: []string{defaultApiPort},
		// Stream container logs into the Ginkgo report for debugging.
		LogConsumerCfg: &testcontainers.LogConsumerConfig{
			Consumers: []testcontainers.LogConsumer{
				&logConsumer{},
			},
		},
		Env: map[string]string{
			"MODELS_PATH":                   "/models",
			"BACKENDS_PATH":                 "/backends",
			"DEBUG":                         "true",
			"THREADS":                       fmt.Sprint(proc),
			"LOCALAI_SINGLE_ACTIVE_BACKEND": "true",
		},
		Mounts: testcontainers.ContainerMounts{
			{
				Source: testcontainers.DockerBindMountSource{
					HostPath: md,
				},
				Target: "/models",
			},
			{
				Source: testcontainers.DockerBindMountSource{
					HostPath: bd,
				},
				Target: "/backends",
			},
		},
		// Startup can be slow (model loading), so wait for both the port
		// and a successful /v1/models response, up to 10 minutes each.
		WaitingFor: wait.ForAll(
			wait.ForListeningPort(nat.Port(defaultApiPort)).WithStartupTimeout(10*time.Minute),
			wait.ForHTTP("/v1/models").WithPort(nat.Port(defaultApiPort)).WithStartupTimeout(10*time.Minute),
		),
	}

	GinkgoWriter.Printf("Launching Docker Container %s:%s\n", containerImage, containerImageTag)

	ctx := context.Background()
	c, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{
		ContainerRequest: req,
		Started:          true,
	})
	Expect(err).To(Not(HaveOccurred()))

	container = c
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/tests/e2e/e2e_test.go | tests/e2e/e2e_test.go | package e2e_test
import (
"context"
"fmt"
"os"
"os/exec"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
openaigo "github.com/otiai10/openaigo"
"github.com/sashabaranov/go-openai"
)
// E2E spec: exercises a running LocalAI instance (URL from LOCALAI_API)
// and verifies both the chat completion output and, via docker logs, that
// the expected backend/GPU path was taken.
var _ = Describe("E2E test", func() {
	var client *openai.Client
	var client2 *openaigo.Client

	Context("API with ephemeral models", func() {
		BeforeEach(func() {
			defaultConfig := openai.DefaultConfig("")
			defaultConfig.BaseURL = localAIURL

			// NOTE(review): client2 is configured here but never used by
			// any spec below — confirm whether it can be removed.
			client2 = openaigo.NewClient("")
			client2.BaseURL = defaultConfig.BaseURL

			// Wait for API to be ready
			client = openai.NewClientWithConfig(defaultConfig)
			Eventually(func() error {
				_, err := client.ListModels(context.TODO())
				return err
			}, "2m").ShouldNot(HaveOccurred())
		})

		// Check that the GPU was used
		AfterEach(func() {
			// Pull the logs of the container built from the localai-tests image.
			cmd := exec.Command("/bin/bash", "-xce", "docker logs $(docker ps -q --filter ancestor=localai-tests)")
			out, err := cmd.CombinedOutput()
			Expect(err).ToNot(HaveOccurred(), string(out))

			// Execute docker logs $$(docker ps -q --filter ancestor=localai-tests) as a command and check the output
			if os.Getenv("BUILD_TYPE") == "cublas" {
				// CUDA builds must have actually engaged the GPU.
				Expect(string(out)).To(ContainSubstring("found 1 CUDA devices"), string(out))
				Expect(string(out)).To(ContainSubstring("using CUDA for GPU acceleration"), string(out))
			} else {
				fmt.Println("Skipping GPU check")
				// CPU builds: just verify the llama-cpp backend loaded a model.
				Expect(string(out)).To(ContainSubstring("[llama-cpp] Loads OK"), string(out))
				Expect(string(out)).To(ContainSubstring("llama_model_loader"), string(out))
			}
		})

		Context("Generates text", func() {
			It("streams chat tokens", func() {
				model := "gpt-4"
				resp, err := client.CreateChatCompletion(context.TODO(),
					openai.ChatCompletionRequest{
						Model: model, Messages: []openai.ChatCompletionMessage{
							{
								Role:    "user",
								Content: "How much is 2+2?",
							},
						}})
				Expect(err).ToNot(HaveOccurred())
				Expect(len(resp.Choices)).To(Equal(1), fmt.Sprint(resp))
				// Accept either a numeric or spelled-out answer.
				Expect(resp.Choices[0].Message.Content).To(Or(ContainSubstring("4"), ContainSubstring("four")), fmt.Sprint(resp.Choices[0].Message.Content))
			})
		})
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/tests/e2e/e2e_suite_test.go | tests/e2e/e2e_suite_test.go | package e2e_test
import (
"os"
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var (
localAIURL = os.Getenv("LOCALAI_API")
)
// TestLocalAI is the Go test entry point for the E2E suite: it registers
// the Gomega fail handler and runs the Ginkgo specs.
func TestLocalAI(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "LocalAI E2E test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/swagger/docs.go | swagger/docs.go | // Package swagger Code generated by swaggo/swag. DO NOT EDIT
package swagger
import "github.com/swaggo/swag"
const docTemplate = `{
"schemes": {{ marshal .Schemes }},
"swagger": "2.0",
"info": {
"description": "{{escape .Description}}",
"title": "{{.Title}}",
"contact": {
"name": "LocalAI",
"url": "https://localai.io"
},
"license": {
"name": "MIT",
"url": "https://raw.githubusercontent.com/mudler/LocalAI/master/LICENSE"
},
"version": "{{.Version}}"
},
"host": "{{.Host}}",
"basePath": "{{.BasePath}}",
"paths": {
"/api/agent/jobs": {
"get": {
"description": "Get a list of agent jobs, optionally filtered by task_id and status",
"produces": [
"application/json"
],
"tags": [
"agent-jobs"
],
"summary": "List agent jobs",
"parameters": [
{
"type": "string",
"description": "Filter by task ID",
"name": "task_id",
"in": "query"
},
{
"type": "string",
"description": "Filter by status (pending, running, completed, failed, cancelled)",
"name": "status",
"in": "query"
},
{
"type": "integer",
"description": "Limit number of results",
"name": "limit",
"in": "query"
}
],
"responses": {
"200": {
"description": "List of jobs",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/schema.Job"
}
}
}
}
}
},
"/api/agent/jobs/execute": {
"post": {
"description": "Create and execute a new agent job",
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"tags": [
"agent-jobs"
],
"summary": "Execute an agent job",
"parameters": [
{
"description": "Job execution request",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/schema.JobExecutionRequest"
}
}
],
"responses": {
"201": {
"description": "Job created",
"schema": {
"$ref": "#/definitions/schema.JobExecutionResponse"
}
},
"400": {
"description": "Invalid request",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
}
},
"/api/agent/jobs/{id}": {
"get": {
"description": "Get an agent job by ID",
"produces": [
"application/json"
],
"tags": [
"agent-jobs"
],
"summary": "Get an agent job",
"parameters": [
{
"type": "string",
"description": "Job ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Job details",
"schema": {
"$ref": "#/definitions/schema.Job"
}
},
"404": {
"description": "Job not found",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
},
"delete": {
"description": "Delete an agent job by ID",
"produces": [
"application/json"
],
"tags": [
"agent-jobs"
],
"summary": "Delete an agent job",
"parameters": [
{
"type": "string",
"description": "Job ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Job deleted",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"404": {
"description": "Job not found",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
}
},
"/api/agent/jobs/{id}/cancel": {
"post": {
"description": "Cancel a running or pending agent job",
"produces": [
"application/json"
],
"tags": [
"agent-jobs"
],
"summary": "Cancel an agent job",
"parameters": [
{
"type": "string",
"description": "Job ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Job cancelled",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"400": {
"description": "Job cannot be cancelled",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"404": {
"description": "Job not found",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
}
},
"/api/agent/tasks": {
"get": {
"description": "Get a list of all agent tasks",
"produces": [
"application/json"
],
"tags": [
"agent-jobs"
],
"summary": "List all agent tasks",
"responses": {
"200": {
"description": "List of tasks",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/schema.Task"
}
}
}
}
},
"post": {
"description": "Create a new reusable agent task with prompt template and configuration",
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"tags": [
"agent-jobs"
],
"summary": "Create a new agent task",
"parameters": [
{
"description": "Task definition",
"name": "task",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/schema.Task"
}
}
],
"responses": {
"201": {
"description": "Task created",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"400": {
"description": "Invalid request",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"500": {
"description": "Internal server error",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
}
},
"/api/agent/tasks/{id}": {
"get": {
"description": "Get an agent task by ID",
"produces": [
"application/json"
],
"tags": [
"agent-jobs"
],
"summary": "Get an agent task",
"parameters": [
{
"type": "string",
"description": "Task ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Task details",
"schema": {
"$ref": "#/definitions/schema.Task"
}
},
"404": {
"description": "Task not found",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
},
"put": {
"description": "Update an existing agent task",
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"tags": [
"agent-jobs"
],
"summary": "Update an agent task",
"parameters": [
{
"type": "string",
"description": "Task ID",
"name": "id",
"in": "path",
"required": true
},
{
"description": "Updated task definition",
"name": "task",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/schema.Task"
}
}
],
"responses": {
"200": {
"description": "Task updated",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"400": {
"description": "Invalid request",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"404": {
"description": "Task not found",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
},
"delete": {
"description": "Delete an agent task by ID",
"produces": [
"application/json"
],
"tags": [
"agent-jobs"
],
"summary": "Delete an agent task",
"parameters": [
{
"type": "string",
"description": "Task ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Task deleted",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"404": {
"description": "Task not found",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
}
},
"/api/agent/tasks/{name}/execute": {
"post": {
"description": "Execute an agent task by its name (convenience endpoint). Parameters can be provided in the request body as a JSON object with string values.",
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"tags": [
"agent-jobs"
],
"summary": "Execute a task by name",
"parameters": [
{
"type": "string",
"description": "Task name",
"name": "name",
"in": "path",
"required": true
},
{
"description": "Template parameters (JSON object with string values)",
"name": "request",
"in": "body",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
],
"responses": {
"201": {
"description": "Job created",
"schema": {
"$ref": "#/definitions/schema.JobExecutionResponse"
}
},
"400": {
"description": "Invalid request",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"404": {
"description": "Task not found",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
}
}
},
"/api/p2p": {
"get": {
"summary": "Returns available P2P nodes",
"responses": {
"200": {
"description": "Response",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/schema.P2PNodesResponse"
}
}
}
}
}
},
"/api/p2p/token": {
"get": {
"summary": "Show the P2P token",
"responses": {
"200": {
"description": "Response",
"schema": {
"type": "string"
}
}
}
}
},
"/backend/monitor": {
"get": {
"summary": "Backend monitor endpoint",
"parameters": [
{
"description": "Backend statistics request",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/schema.BackendMonitorRequest"
}
}
],
"responses": {
"200": {
"description": "Response",
"schema": {
"$ref": "#/definitions/proto.StatusResponse"
}
}
}
}
},
"/backend/shutdown": {
"post": {
"summary": "Backend monitor endpoint",
"parameters": [
{
"description": "Backend statistics request",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/schema.BackendMonitorRequest"
}
}
],
"responses": {}
}
},
"/backends": {
"get": {
"summary": "List all Backends",
"responses": {
"200": {
"description": "Response",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/gallery.GalleryBackend"
}
}
}
}
}
},
"/backends/apply": {
"post": {
"summary": "Install backends to LocalAI.",
"parameters": [
{
"description": "query params",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/localai.GalleryBackend"
}
}
],
"responses": {
"200": {
"description": "Response",
"schema": {
"$ref": "#/definitions/schema.BackendResponse"
}
}
}
}
},
"/backends/available": {
"get": {
"summary": "List all available Backends",
"responses": {
"200": {
"description": "Response",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/gallery.GalleryBackend"
}
}
}
}
}
},
"/backends/delete/{name}": {
"post": {
"summary": "delete backends from LocalAI.",
"parameters": [
{
"type": "string",
"description": "Backend name",
"name": "name",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Response",
"schema": {
"$ref": "#/definitions/schema.BackendResponse"
}
}
}
}
},
"/backends/galleries": {
"get": {
"summary": "List all Galleries",
"responses": {
"200": {
"description": "Response",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/config.Gallery"
}
}
}
}
}
},
"/backends/jobs": {
"get": {
"summary": "Returns all the jobs status progress",
"responses": {
"200": {
"description": "Response",
"schema": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/services.GalleryOpStatus"
}
}
}
}
}
},
"/backends/jobs/{uuid}": {
"get": {
"summary": "Returns the job status",
"responses": {
"200": {
"description": "Response",
"schema": {
"$ref": "#/definitions/services.GalleryOpStatus"
}
}
}
}
},
"/metrics": {
"get": {
"summary": "Prometheus metrics endpoint",
"parameters": [
{
"description": "Gallery details",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/config.Gallery"
}
}
],
"responses": {}
}
},
"/models/apply": {
"post": {
"summary": "Install models to LocalAI.",
"parameters": [
{
"description": "query params",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/localai.GalleryModel"
}
}
],
"responses": {
"200": {
"description": "Response",
"schema": {
"$ref": "#/definitions/schema.GalleryResponse"
}
}
}
}
},
"/models/available": {
"get": {
"summary": "List installable models.",
"responses": {
"200": {
"description": "Response",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/gallery.GalleryModel"
}
}
}
}
}
},
"/models/delete/{name}": {
"post": {
"summary": "delete models to LocalAI.",
"parameters": [
{
"type": "string",
"description": "Model name",
"name": "name",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Response",
"schema": {
"$ref": "#/definitions/schema.GalleryResponse"
}
}
}
}
},
"/models/galleries": {
"get": {
"summary": "List all Galleries",
"responses": {
"200": {
"description": "Response",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/config.Gallery"
}
}
}
}
}
},
"/models/jobs": {
"get": {
"summary": "Returns all the jobs status progress",
"responses": {
"200": {
"description": "Response",
"schema": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/services.GalleryOpStatus"
}
}
}
}
}
},
"/models/jobs/{uuid}": {
"get": {
"summary": "Returns the job status",
"responses": {
"200": {
"description": "Response",
"schema": {
"$ref": "#/definitions/services.GalleryOpStatus"
}
}
}
}
},
"/system": {
"get": {
"summary": "Show the LocalAI instance information",
"responses": {
"200": {
"description": "Response",
"schema": {
"$ref": "#/definitions/schema.SystemInformationResponse"
}
}
}
}
},
"/tokenMetrics": {
"get": {
"consumes": [
"application/json"
],
"produces": [
"audio/x-wav"
],
"summary": "Get TokenMetrics for Active Slot.",
"responses": {
"200": {
"description": "generated audio/wav file",
"schema": {
"type": "string"
}
}
}
}
},
"/tts": {
"post": {
"consumes": [
"application/json"
],
"produces": [
"audio/x-wav"
],
"summary": "Generates audio from the input text.",
"parameters": [
{
"description": "query params",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/schema.TTSRequest"
}
}
],
"responses": {
"200": {
"description": "generated audio/wav file",
"schema": {
"type": "string"
}
}
}
}
},
"/v1/audio/speech": {
"post": {
"consumes": [
"application/json"
],
"produces": [
"audio/x-wav"
],
"summary": "Generates audio from the input text.",
"parameters": [
{
"description": "query params",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/schema.TTSRequest"
}
}
],
"responses": {
"200": {
"description": "generated audio/wav file",
"schema": {
"type": "string"
}
}
}
}
},
"/v1/audio/transcriptions": {
"post": {
"consumes": [
"multipart/form-data"
],
"summary": "Transcribes audio into the input language.",
"parameters": [
{
"type": "string",
"description": "model",
"name": "model",
"in": "formData",
"required": true
},
{
"type": "file",
"description": "file",
"name": "file",
"in": "formData",
"required": true
}
],
"responses": {
"200": {
"description": "Response",
"schema": {
"type": "object",
"additionalProperties": {
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | true |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/bark-cpp/gobark.go | backend/go/bark-cpp/gobark.go | package main
// #cgo CXXFLAGS: -I${SRCDIR}/sources/bark.cpp/ -I${SRCDIR}/sources/bark.cpp/encodec.cpp -I${SRCDIR}/sources/bark.cpp/encodec.cpp/ggml/include -I${SRCDIR}/sources/bark.cpp/examples -I${SRCDIR}/sources/bark.cpp/spm-headers
// #cgo LDFLAGS: -L${SRCDIR}/ -L${SRCDIR}/sources/bark.cpp/build/examples -L${SRCDIR}/sources/bark.cpp/build/encodec.cpp/ggml/src/ -L${SRCDIR}/sources/bark.cpp/build/encodec.cpp/ -lbark -lencodec -lcommon -lggml -lgomp
// #include <gobark.h>
// #include <stdlib.h>
import "C"
import (
"fmt"
"unsafe"
"github.com/mudler/LocalAI/pkg/grpc/base"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
)
// Bark is the gRPC backend wrapper around bark.cpp text-to-speech.
// It embeds base.SingleThread, so requests are serialized.
type Bark struct {
	base.SingleThread
	threads int // thread count passed to the C tts() call
}
// Load initializes the bark.cpp model from opts.ModelFile and records the
// thread count used by subsequent TTS calls. The C side reports only a
// non-zero status on failure, so no further error detail is available.
func (sd *Bark) Load(opts *pb.ModelOptions) error {
	sd.threads = int(opts.Threads)

	modelFile := C.CString(opts.ModelFile)
	defer C.free(unsafe.Pointer(modelFile))

	ret := C.load_model(modelFile)
	if ret != 0 {
		// Fixed: previous message said "inference failed" even though this
		// path is model loading, which made failures misleading to debug.
		return fmt.Errorf("could not load model %q", opts.ModelFile)
	}

	return nil
}
// TTS synthesizes opts.Text to a WAV file at opts.Dst using the model
// previously initialized by Load.
func (sd *Bark) TTS(opts *pb.TTSRequest) error {
	// C strings must be freed explicitly; defer keeps that paired with
	// each allocation.
	t := C.CString(opts.Text)
	defer C.free(unsafe.Pointer(t))

	dst := C.CString(opts.Dst)
	defer C.free(unsafe.Pointer(dst))

	threads := C.int(sd.threads)

	ret := C.tts(t, threads, dst)
	if ret != 0 {
		return fmt.Errorf("inference failed")
	}

	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/bark-cpp/main.go | backend/go/bark-cpp/main.go | package main
// Note: this is started internally by LocalAI and a server is allocated for each model
import (
"flag"
grpc "github.com/mudler/LocalAI/pkg/grpc"
)
var (
addr = flag.String("addr", "localhost:50051", "the address to connect to")
)
// main starts the gRPC server hosting the Bark backend on the address
// given via -addr; LocalAI launches one such process per model.
func main() {
	flag.Parse()

	if err := grpc.StartServer(*addr, &Bark{}); err != nil {
		panic(err)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/huggingface/langchain.go | backend/go/huggingface/langchain.go | package main
// This is a wrapper to satisfy the gRPC service interface.
// It is meant to be used by the main executable that serves a specific backend type (falcon, gpt3, etc.).
import (
"fmt"
"os"
"github.com/mudler/LocalAI/pkg/grpc/base"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/mudler/LocalAI/pkg/langchain"
)
// LLM adapts the langchain HuggingFace client to the gRPC backend interface.
type LLM struct {
	base.Base
	langchain *langchain.HuggingFace // remote inference client, set by Load
	model     string                 // model name requested at Load time
}
// Load creates the HuggingFace client for opts.Model. It requires the
// HUGGINGFACEHUB_API_TOKEN environment variable to be set and fails fast
// when it is missing.
func (llm *LLM) Load(opts *pb.ModelOptions) error {
	var err error
	hfToken := os.Getenv("HUGGINGFACEHUB_API_TOKEN")
	if hfToken == "" {
		return fmt.Errorf("no huggingface token provided")
	}
	llm.langchain, err = langchain.NewHuggingFace(opts.Model, hfToken)
	llm.model = opts.Model
	return err
}
// Predict performs a single synchronous completion against the HuggingFace
// inference API and returns the generated text.
func (llm *LLM) Predict(opts *pb.PredictOptions) (string, error) {
	options := []langchain.PredictOption{
		langchain.SetModel(llm.model),
		langchain.SetMaxTokens(int(opts.Tokens)),
		langchain.SetTemperature(float64(opts.Temperature)),
		langchain.SetStopWords(opts.StopPrompts),
	}

	prediction, err := llm.langchain.PredictHuggingFace(opts.Prompt, options...)
	if err != nil {
		return "", err
	}

	return prediction.Completion, nil
}
// PredictStream runs the completion asynchronously; the (non-streaming)
// HuggingFace API yields a single result, which is sent on the results
// channel before it is closed.
func (llm *LLM) PredictStream(opts *pb.PredictOptions, results chan string) error {
	o := []langchain.PredictOption{
		langchain.SetModel(llm.model),
		langchain.SetMaxTokens(int(opts.Tokens)),
		langchain.SetTemperature(float64(opts.Temperature)),
		langchain.SetStopWords(opts.StopPrompts),
	}
	go func() {
		// Always close the channel, even on error, so the consumer
		// never blocks forever.
		defer close(results)
		res, err := llm.langchain.PredictHuggingFace(opts.Prompt, o...)
		if err != nil {
			// Fixed: the original dereferenced res.Completion even when
			// err != nil, risking a nil dereference / bogus output.
			fmt.Println("err: ", err)
			return
		}
		results <- res.Completion
	}()

	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/huggingface/main.go | backend/go/huggingface/main.go | package main
// Note: this is started internally by LocalAI and a server is allocated for each model
import (
"flag"
grpc "github.com/mudler/LocalAI/pkg/grpc"
)
var (
	// addr is the address the gRPC backend server listens on.
	addr = flag.String("addr", "localhost:50051", "the address to connect to")
)

// main parses flags and serves the HuggingFace LLM backend over gRPC.
func main() {
	flag.Parse()
	err := grpc.StartServer(*addr, &LLM{})
	if err != nil {
		panic(err)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/silero-vad/vad.go | backend/go/silero-vad/vad.go | package main
// This is a wrapper to statisfy the GRPC service interface
// It is meant to be used by the main executable that is the server for the specific backend type (falcon, gpt3, etc)
import (
"fmt"
"github.com/mudler/LocalAI/pkg/grpc/base"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/streamer45/silero-vad-go/speech"
)
// VAD wraps a silero voice-activity detector behind the LocalAI gRPC
// backend interface.
type VAD struct {
	base.SingleThread
	// detector is the silero speech detector, created in Load.
	detector *speech.Detector
}
// Load creates the silero speech detector for the model file in opts, using
// fixed detection parameters (16 kHz input, 0.5 threshold).
func (vad *VAD) Load(opts *pb.ModelOptions) error {
	detector, err := speech.NewDetector(speech.DetectorConfig{
		ModelPath:  opts.ModelFile,
		SampleRate: 16000,
		//WindowSize: 1024,
		Threshold:            0.5,
		MinSilenceDurationMs: 100,
		SpeechPadMs:          30,
	})
	if err != nil {
		return fmt.Errorf("create silero detector: %w", err)
	}
	vad.detector = detector
	return nil
}
// VAD resets the detector and runs voice-activity detection over the request
// audio, returning the detected speech segments.
func (vad *VAD) VAD(req *pb.VADRequest) (pb.VADResponse, error) {
	if err := vad.detector.Reset(); err != nil {
		return pb.VADResponse{}, fmt.Errorf("reset: %w", err)
	}
	segments, err := vad.detector.Detect(req.Audio)
	if err != nil {
		return pb.VADResponse{}, fmt.Errorf("detect: %w", err)
	}
	out := make([]*pb.VADSegment, 0, len(segments))
	for _, seg := range segments {
		out = append(out, &pb.VADSegment{
			Start: float32(seg.SpeechStartAt),
			End:   float32(seg.SpeechEndAt),
		})
	}
	return pb.VADResponse{Segments: out}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/silero-vad/main.go | backend/go/silero-vad/main.go | package main
// Note: this is started internally by LocalAI and a server is allocated for each model
import (
"flag"
grpc "github.com/mudler/LocalAI/pkg/grpc"
)
var (
	// addr is the address the gRPC backend server listens on.
	addr = flag.String("addr", "localhost:50051", "the address to connect to")
)

// main parses flags and serves the silero VAD backend over gRPC.
func main() {
	flag.Parse()
	err := grpc.StartServer(*addr, &VAD{})
	if err != nil {
		panic(err)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/local-store/debug.go | backend/go/local-store/debug.go | //go:build debug
// +build debug
package main
import (
"github.com/mudler/xlog"
)
// assert aborts the process with a stack trace when cond is false.
// This implementation is compiled in only under the "debug" build tag.
func assert(cond bool, msg string) {
	if cond {
		return
	}
	xlog.Fatal().Stack().Msg(msg)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/local-store/production.go | backend/go/local-store/production.go | //go:build !debug
// +build !debug
package main
// assert is a no-op in non-debug builds; the "debug" build tag swaps in a
// checking implementation that aborts with a stack trace.
func assert(cond bool, msg string) {
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/local-store/store.go | backend/go/local-store/store.go | package main
// This is a wrapper to statisfy the GRPC service interface
// It is meant to be used by the main executable that is the server for the specific backend type (falcon, gpt3, etc)
import (
"container/heap"
"errors"
"fmt"
"math"
"slices"
"github.com/mudler/LocalAI/pkg/grpc/base"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/mudler/xlog"
)
// Store is an in-memory vector store: keys (float vectors) and their values
// are kept in two parallel slices, sorted lexicographically by key so that
// lookups and deletions can binary-search.
type Store struct {
	base.SingleThread
	// The sorted keys
	keys [][]float32
	// The sorted values, parallel to keys
	values [][]byte
	// If for every K it holds that ||k||^2 = 1, then we can use the normalized distance functions.
	// Starts optimistic (true) and is cleared the first time a non-normalized key is inserted.
	// TODO: Should we normalize incoming keys if they are not instead?
	keysAreNormalized bool
	// The first key decides the length of the keys; -1 means "not fixed yet"
	keyLen int
}
// Pair is a key/value couple used only while sorting incoming data with Go's
// builtin sort.
// TODO: Only used for sorting using Go's builtin implementation. The interfaces are columnar because
// that's theoretically best for memory layout and cache locality, but this isn't optimized yet.
type Pair struct {
	Key   []float32
	Value []byte
}
// NewStore returns an empty Store. keyLen of -1 means "not fixed yet": the
// first inserted key decides the required key length. Keys are assumed
// normalized until a non-normalized key is observed.
func NewStore() *Store {
	s := &Store{
		keysAreNormalized: true,
		keyLen:            -1,
	}
	s.keys = make([][]float32, 0)
	s.values = make([][]byte, 0)
	return s
}
// compareSlices lexicographically compares two keys, which must be of equal length.
func compareSlices(a, b []float32) int {
	assert(len(a) == len(b), fmt.Sprintf("compareSlices: len(k1) = %d, len(k2) = %d", len(a), len(b)))
	return slices.Compare(a, b)
}
func hasKey(unsortedSlice [][]float32, target []float32) bool {
return slices.ContainsFunc(unsortedSlice, func(k []float32) bool {
return compareSlices(k, target) == 0
})
}
// findInSortedSlice binary-searches for target in a sorted slice, returning
// the index and whether it was found (slices.BinarySearchFunc semantics).
func findInSortedSlice(sortedSlice [][]float32, target []float32) (int, bool) {
	return slices.BinarySearchFunc(sortedSlice, target, compareSlices)
}
// isSortedPairs reports whether kvs is sorted ascending by key.
func isSortedPairs(kvs []Pair) bool {
	return slices.IsSortedFunc(kvs, func(a, b Pair) int {
		return compareSlices(a.Key, b.Key)
	})
}
// isSortedKeys reports whether keys is sorted ascending.
func isSortedKeys(keys [][]float32) bool {
	return slices.IsSortedFunc(keys, compareSlices)
}
// sortIntoKeySlicese extracts the float vectors from the protobuf keys and
// returns them sorted ascending. (Existing name kept for compatibility.)
func sortIntoKeySlicese(keys []*pb.StoresKey) [][]float32 {
	ks := make([][]float32, 0, len(keys))
	for _, k := range keys {
		ks = append(ks, k.Floats)
	}
	slices.SortFunc(ks, compareSlices)
	assert(len(ks) == len(keys), fmt.Sprintf("len(ks) = %d, len(keys) = %d", len(ks), len(keys)))
	assert(isSortedKeys(ks), "keys are not sorted")
	return ks
}
// Load rejects model loading: the local store has no model to load.
func (s *Store) Load(opts *pb.ModelOptions) error {
	if opts.Model == "" {
		return nil
	}
	return errors.New("not implemented")
}
// StoresSet inserts key/value pairs, keeping s.keys/s.values sorted by key.
// Incoming pairs are sorted and then merged with the existing sorted data; a
// key equal to an existing one replaces its value. The first ever key fixes
// the required key length for all later operations.
func (s *Store) StoresSet(opts *pb.StoresSetOptions) error {
	if len(opts.Keys) == 0 {
		return fmt.Errorf("no keys to add")
	}
	if len(opts.Keys) != len(opts.Values) {
		return fmt.Errorf("len(keys) = %d, len(values) = %d", len(opts.Keys), len(opts.Values))
	}
	if s.keyLen == -1 {
		s.keyLen = len(opts.Keys[0].Floats)
	} else {
		if len(opts.Keys[0].Floats) != s.keyLen {
			return fmt.Errorf("Try to add key with length %d when existing length is %d", len(opts.Keys[0].Floats), s.keyLen)
		}
	}

	kvs := make([]Pair, len(opts.Keys))
	for i, k := range opts.Keys {
		if s.keysAreNormalized && !isNormalized(k.Floats) {
			// First non-normalized key disables the fast dot-product path.
			s.keysAreNormalized = false
			// Bug fix: bound the logged sample by the key's own length; the
			// old code checked len(s.keys) and could slice k.Floats[:5] past
			// the end of a short key, panicking.
			sample := k.Floats
			if len(k.Floats) > 5 {
				sample = k.Floats[:5]
			}
			xlog.Debug("Key is not normalized", "sample", sample)
		}
		kvs[i] = Pair{
			Key:   k.Floats,
			Value: opts.Values[i].Bytes,
		}
	}

	slices.SortFunc(kvs, func(a, b Pair) int {
		return compareSlices(a.Key, b.Key)
	})
	assert(len(kvs) == len(opts.Keys), fmt.Sprintf("len(kvs) = %d, len(opts.Keys) = %d", len(kvs), len(opts.Keys)))
	assert(isSortedPairs(kvs), "keys are not sorted")

	// Standard two-pointer merge of the sorted incoming pairs with the sorted
	// store; equal keys take the incoming value (overwrite).
	l := len(kvs) + len(s.keys)
	merge_ks := make([][]float32, 0, l)
	merge_vs := make([][]byte, 0, l)
	i, j := 0, 0
	for {
		if i+j >= l {
			break
		}
		if i >= len(kvs) {
			merge_ks = append(merge_ks, s.keys[j])
			merge_vs = append(merge_vs, s.values[j])
			j++
			continue
		}
		if j >= len(s.keys) {
			merge_ks = append(merge_ks, kvs[i].Key)
			merge_vs = append(merge_vs, kvs[i].Value)
			i++
			continue
		}
		c := compareSlices(kvs[i].Key, s.keys[j])
		if c < 0 {
			merge_ks = append(merge_ks, kvs[i].Key)
			merge_vs = append(merge_vs, kvs[i].Value)
			i++
		} else if c > 0 {
			merge_ks = append(merge_ks, s.keys[j])
			merge_vs = append(merge_vs, s.values[j])
			j++
		} else {
			// Same key on both sides: keep the new value only.
			merge_ks = append(merge_ks, kvs[i].Key)
			merge_vs = append(merge_vs, kvs[i].Value)
			i++
			j++
		}
	}

	// Note: with duplicate keys len(merge_ks) < l; the debug assert below
	// only holds for disjoint inserts.
	assert(len(merge_ks) == l, fmt.Sprintf("len(merge_ks) = %d, l = %d", len(merge_ks), l))
	assert(isSortedKeys(merge_ks), "merge keys are not sorted")

	s.keys = merge_ks
	s.values = merge_vs

	return nil
}
// StoresDelete removes the given keys (and their values) from the sorted
// store. Keys not present are skipped; a debug log records any resulting
// size mismatch. The first ever key fixes the required key length.
func (s *Store) StoresDelete(opts *pb.StoresDeleteOptions) error {
	if len(opts.Keys) == 0 {
		return fmt.Errorf("no keys to delete")
	}
	// (Removed a duplicated, unreachable len(opts.Keys) == 0 check that
	// carried a copy-pasted "no keys to add" message.)
	if s.keyLen == -1 {
		s.keyLen = len(opts.Keys[0].Floats)
	} else {
		if len(opts.Keys[0].Floats) != s.keyLen {
			return fmt.Errorf("Trying to delete key with length %d when existing length is %d", len(opts.Keys[0].Floats), s.keyLen)
		}
	}

	ks := sortIntoKeySlicese(opts.Keys)
	l := len(s.keys) - len(ks)
	// Bug fix: l can go negative when more keys are requested than stored;
	// make() panics on a negative capacity, so clamp the hint.
	capHint := max(l, 0)
	merge_ks := make([][]float32, 0, capHint)
	merge_vs := make([][]byte, 0, capHint)
	tail_ks := s.keys
	tail_vs := s.values

	// ks is sorted, so each target can only be found after the previous
	// match: copy the untouched prefix and keep searching the shrinking tail.
	for _, k := range ks {
		j, found := findInSortedSlice(tail_ks, k)
		if found {
			merge_ks = append(merge_ks, tail_ks[:j]...)
			merge_vs = append(merge_vs, tail_vs[:j]...)
			tail_ks = tail_ks[j+1:]
			tail_vs = tail_vs[j+1:]
		} else {
			assert(!hasKey(s.keys, k), fmt.Sprintf("Key exists, but was not found: t=%d, %v", len(tail_ks), k))
		}
		xlog.Debug("Delete", "found", found, "tailLen", len(tail_ks), "j", j, "mergeKeysLen", len(merge_ks), "mergeValuesLen", len(merge_vs))
	}

	merge_ks = append(merge_ks, tail_ks...)
	merge_vs = append(merge_vs, tail_vs...)
	assert(len(merge_ks) <= len(s.keys), fmt.Sprintf("len(merge_ks) = %d, len(s.keys) = %d", len(merge_ks), len(s.keys)))
	s.keys = merge_ks
	s.values = merge_vs

	assert(len(s.keys) >= l, fmt.Sprintf("len(s.keys) = %d, l = %d", len(s.keys), l))
	assert(isSortedKeys(s.keys), "keys are not sorted")
	assert(func() bool {
		for _, k := range ks {
			if _, found := findInSortedSlice(s.keys, k); found {
				return false
			}
		}
		return true
	}(), "Keys to delete still present")

	if len(s.keys) != l {
		xlog.Debug("Delete: Some keys not found", "keysLen", len(s.keys), "expectedLen", l)
	}

	return nil
}
// StoresGet looks up the given keys and returns those found together with
// their values; missing keys are silently skipped (logged at debug level).
func (s *Store) StoresGet(opts *pb.StoresGetOptions) (pb.StoresGetResult, error) {
	// Bug fix: the keyLen bootstrap below indexes opts.Keys[0]; previously an
	// empty request would panic here instead of returning an error.
	if len(opts.Keys) == 0 {
		return pb.StoresGetResult{}, fmt.Errorf("no keys to get")
	}
	pbKeys := make([]*pb.StoresKey, 0, len(opts.Keys))
	pbValues := make([]*pb.StoresValue, 0, len(opts.Keys))
	ks := sortIntoKeySlicese(opts.Keys)

	if len(s.keys) == 0 {
		xlog.Debug("Get: No keys in store")
	}

	if s.keyLen == -1 {
		s.keyLen = len(opts.Keys[0].Floats)
	} else {
		if len(opts.Keys[0].Floats) != s.keyLen {
			return pb.StoresGetResult{}, fmt.Errorf("Try to get a key with length %d when existing length is %d", len(opts.Keys[0].Floats), s.keyLen)
		}
	}

	// ks is sorted, so each subsequent key can only appear after the previous
	// match; search in the shrinking tail.
	tail_k := s.keys
	tail_v := s.values
	for i, k := range ks {
		j, found := findInSortedSlice(tail_k, k)
		if found {
			pbKeys = append(pbKeys, &pb.StoresKey{
				Floats: k,
			})
			pbValues = append(pbValues, &pb.StoresValue{
				Bytes: tail_v[j],
			})
			tail_k = tail_k[j+1:]
			tail_v = tail_v[j+1:]
		} else {
			assert(!hasKey(s.keys, k), fmt.Sprintf("Key exists, but was not found: i=%d, %v", i, k))
		}
	}

	if len(pbKeys) != len(opts.Keys) {
		xlog.Debug("Get: Some keys not found", "pbKeysLen", len(pbKeys), "optsKeysLen", len(opts.Keys), "storeKeysLen", len(s.keys))
	}

	return pb.StoresGetResult{
		Keys:   pbKeys,
		Values: pbValues,
	}, nil
}
// isNormalized reports whether k has approximately unit Euclidean norm,
// tolerating 1% deviation.
func isNormalized(k []float32) bool {
	var sumSquares float64
	for _, x := range k {
		x64 := float64(x)
		sumSquares += x64 * x64
	}
	norm := math.Sqrt(sumSquares)
	return norm >= 0.99 && norm <= 1.01
}
// normalizedCosineSimilarity returns the dot product of two keys; for
// unit-norm keys this equals their cosine similarity.
// TODO: This we could replace with handwritten SIMD code
func normalizedCosineSimilarity(k1, k2 []float32) float32 {
	assert(len(k1) == len(k2), fmt.Sprintf("normalizedCosineSimilarity: len(k1) = %d, len(k2) = %d", len(k1), len(k2)))
	var dot float32
	for i := range k1 {
		dot += k1[i] * k2[i]
	}
	assert(dot >= -1.01 && dot <= 1.01, fmt.Sprintf("dot = %f", dot))
	// 2.0 * (1.0 - dot) would be the Euclidean distance
	return dot
}
// PriorityItem pairs a similarity score with its key/value for top-K search.
type PriorityItem struct {
	Similarity float32
	Key        []float32
	Value      []byte
}

// PriorityQueue implements heap.Interface as a min-heap on Similarity: the
// least similar item sits at the root so it can be evicted cheaply while the
// top K matches are retained.
type PriorityQueue []*PriorityItem

func (pq PriorityQueue) Len() int { return len(pq) }

// Less keeps the smallest similarity at the top so Pop evicts the worst match.
func (pq PriorityQueue) Less(i, j int) bool {
	return pq[i].Similarity < pq[j].Similarity
}

func (pq PriorityQueue) Swap(i, j int) {
	pq[j], pq[i] = pq[i], pq[j]
}

func (pq *PriorityQueue) Push(x any) {
	*pq = append(*pq, x.(*PriorityItem))
}

func (pq *PriorityQueue) Pop() any {
	old := *pq
	last := len(old) - 1
	item := old[last]
	*pq = old[:last]
	return item
}
// StoresFindNormalized returns the TopK entries most similar to opts.Key,
// assuming both the query and all stored keys are unit-normalized so the dot
// product alone serves as the cosine similarity.
//
// A min-heap holds at most TopK items: every key is pushed, and once the heap
// exceeds TopK the least similar item is evicted. Results are emitted in
// descending similarity order by filling the output arrays from the back.
func (s *Store) StoresFindNormalized(opts *pb.StoresFindOptions) (pb.StoresFindResult, error) {
	tk := opts.Key.Floats
	top_ks := make(PriorityQueue, 0, int(opts.TopK))
	heap.Init(&top_ks)

	for i, k := range s.keys {
		sim := normalizedCosineSimilarity(tk, k)
		heap.Push(&top_ks, &PriorityItem{
			Similarity: sim,
			Key:        k,
			Value:      s.values[i],
		})
		// Evict the current worst match once we exceed TopK.
		if top_ks.Len() > int(opts.TopK) {
			heap.Pop(&top_ks)
		}
	}

	// Pop yields ascending similarity, so fill the arrays backwards to
	// produce best-first output.
	similarities := make([]float32, top_ks.Len())
	pbKeys := make([]*pb.StoresKey, top_ks.Len())
	pbValues := make([]*pb.StoresValue, top_ks.Len())
	for i := top_ks.Len() - 1; i >= 0; i-- {
		item := heap.Pop(&top_ks).(*PriorityItem)
		similarities[i] = item.Similarity
		pbKeys[i] = &pb.StoresKey{
			Floats: item.Key,
		}
		pbValues[i] = &pb.StoresValue{
			Bytes: item.Value,
		}
	}

	return pb.StoresFindResult{
		Keys:         pbKeys,
		Values:       pbValues,
		Similarities: similarities,
	}, nil
}
// cosineSimilarity computes the full cosine similarity between k1 and k2.
// mag1 is the precomputed Euclidean norm of k1 (hoisted by the caller so it
// is not recomputed per stored key); k2's norm is computed inline.
func cosineSimilarity(k1, k2 []float32, mag1 float64) float32 {
	assert(len(k1) == len(k2), fmt.Sprintf("cosineSimilarity: len(k1) = %d, len(k2) = %d", len(k1), len(k2)))
	var dot, mag2 float64
	for i := range k1 {
		dot += float64(k1[i] * k2[i])
		mag2 += float64(k2[i] * k2[i])
	}
	sim := float32(dot / (mag1 * math.Sqrt(mag2)))
	assert(sim >= -1.01 && sim <= 1.01, fmt.Sprintf("sim = %f", sim))
	return sim
}
// StoresFindFallback returns the TopK entries most similar to opts.Key using
// the full cosine-similarity formula; used when keys are not unit-normalized.
// Same min-heap top-K strategy as StoresFindNormalized.
func (s *Store) StoresFindFallback(opts *pb.StoresFindOptions) (pb.StoresFindResult, error) {
	tk := opts.Key.Floats
	top_ks := make(PriorityQueue, 0, int(opts.TopK))
	heap.Init(&top_ks)

	// The query's norm is the same for every comparison, so compute it once.
	var mag1 float64
	for _, v := range tk {
		mag1 += float64(v * v)
	}
	mag1 = math.Sqrt(mag1)

	for i, k := range s.keys {
		dist := cosineSimilarity(tk, k, mag1)
		heap.Push(&top_ks, &PriorityItem{
			Similarity: dist,
			Key:        k,
			Value:      s.values[i],
		})
		// Evict the current worst match once we exceed TopK.
		if top_ks.Len() > int(opts.TopK) {
			heap.Pop(&top_ks)
		}
	}

	// Pop yields ascending similarity; fill backwards for best-first output.
	similarities := make([]float32, top_ks.Len())
	pbKeys := make([]*pb.StoresKey, top_ks.Len())
	pbValues := make([]*pb.StoresValue, top_ks.Len())
	for i := top_ks.Len() - 1; i >= 0; i-- {
		item := heap.Pop(&top_ks).(*PriorityItem)
		similarities[i] = item.Similarity
		pbKeys[i] = &pb.StoresKey{
			Floats: item.Key,
		}
		pbValues[i] = &pb.StoresValue{
			Bytes: item.Value,
		}
	}

	return pb.StoresFindResult{
		Keys:         pbKeys,
		Values:       pbValues,
		Similarities: similarities,
	}, nil
}
// StoresFind returns the TopK entries most similar to opts.Key, using the
// fast dot-product path when both the stored keys and the query are
// unit-normalized, and the full cosine-similarity fallback otherwise.
func (s *Store) StoresFind(opts *pb.StoresFindOptions) (pb.StoresFindResult, error) {
	tk := opts.Key.Floats
	if opts.TopK < 1 {
		return pb.StoresFindResult{}, fmt.Errorf("opts.TopK = %d, must be >= 1", opts.TopK)
	}
	// Bug fix: adopt the query's length when the store is still empty. The
	// old code compared len(tk) against keyLen (-1) before this bootstrap,
	// so any Find against an empty store always returned an error.
	if s.keyLen == -1 {
		s.keyLen = len(tk)
	} else if len(tk) != s.keyLen {
		return pb.StoresFindResult{}, fmt.Errorf("Try to find key with length %d when existing length is %d", len(tk), s.keyLen)
	}

	if s.keysAreNormalized && isNormalized(tk) {
		return s.StoresFindNormalized(opts)
	}
	if s.keysAreNormalized {
		// Bug fix: bound the logged sample by the query's own length; the old
		// check used len(s.keys) and could slice tk[:5] past the end of a
		// short query key.
		sample := tk
		if len(tk) > 5 {
			sample = tk[:5]
		}
		xlog.Debug("Trying to compare non-normalized key with normalized keys", "sample", sample)
	}
	return s.StoresFindFallback(opts)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/local-store/main.go | backend/go/local-store/main.go | package main
// Note: this is started internally by LocalAI and a server is allocated for each store
import (
"flag"
"os"
grpc "github.com/mudler/LocalAI/pkg/grpc"
"github.com/mudler/xlog"
)
var (
	// addr is the address the gRPC store server listens on.
	addr = flag.String("addr", "localhost:50051", "the address to connect to")
)

// main configures logging from the environment, parses flags and serves a
// fresh Store over gRPC.
func main() {
	level := os.Getenv("LOCALAI_LOG_LEVEL")
	format := os.Getenv("LOCALAI_LOG_FORMAT")
	xlog.SetLogger(xlog.NewLogger(xlog.LogLevel(level), format))
	flag.Parse()
	err := grpc.StartServer(*addr, NewStore())
	if err != nil {
		panic(err)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/llm/llama/llama.go | backend/go/llm/llama/llama.go | package main
// This is a wrapper to statisfy the GRPC service interface
// It is meant to be used by the main executable that is the server for the specific backend type (falcon, gpt3, etc)
import (
"fmt"
"path/filepath"
"github.com/go-skynet/go-llama.cpp"
"github.com/mudler/LocalAI/pkg/grpc/base"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
)
// LLM wraps a go-llama.cpp model (and an optional draft model used for
// speculative sampling) behind the LocalAI gRPC backend interface.
type LLM struct {
	base.SingleThread
	// llama is the main model, created in Load.
	llama *llama.LLama
	// draftModel is non-nil only when opts.DraftModel was set at Load time;
	// it enables speculative sampling in Predict/PredictStream.
	draftModel *llama.LLama
}
// Load builds the llama.cpp model options from opts and loads the main model
// plus, when opts.DraftModel is set, a draft model for speculative sampling.
func (llm *LLM) Load(opts *pb.ModelOptions) error {
	ropeFreqBase := float32(10000)
	ropeFreqScale := float32(1)
	if opts.RopeFreqBase != 0 {
		ropeFreqBase = opts.RopeFreqBase
	}
	if opts.RopeFreqScale != 0 {
		ropeFreqScale = opts.RopeFreqScale
	}
	llamaOpts := []llama.ModelOption{
		llama.WithRopeFreqBase(ropeFreqBase),
		llama.WithRopeFreqScale(ropeFreqScale),
	}
	if opts.NoMulMatQ {
		llamaOpts = append(llamaOpts, llama.SetMulMatQ(false))
	}

	// Get base path of opts.ModelFile and use the same for lora (assume the same path)
	basePath := filepath.Dir(opts.ModelFile)
	if opts.LoraAdapter != "" {
		llamaOpts = append(llamaOpts, llama.SetLoraAdapter(filepath.Join(basePath, opts.LoraAdapter)))
	}
	if opts.LoraBase != "" {
		llamaOpts = append(llamaOpts, llama.SetLoraBase(filepath.Join(basePath, opts.LoraBase)))
	}

	if opts.ContextSize != 0 {
		llamaOpts = append(llamaOpts, llama.SetContext(int(opts.ContextSize)))
	}
	if opts.F16Memory {
		llamaOpts = append(llamaOpts, llama.EnableF16Memory)
	}
	if opts.Embeddings {
		llamaOpts = append(llamaOpts, llama.EnableEmbeddings)
	}
	if opts.Reranking {
		llamaOpts = append(llamaOpts, llama.EnableReranking)
	}
	if opts.NGPULayers != 0 {
		llamaOpts = append(llamaOpts, llama.SetGPULayers(int(opts.NGPULayers)))
	}

	llamaOpts = append(llamaOpts, llama.SetMMap(opts.MMap))
	llamaOpts = append(llamaOpts, llama.SetMainGPU(opts.MainGPU))
	llamaOpts = append(llamaOpts, llama.SetTensorSplit(opts.TensorSplit))
	if opts.NBatch != 0 {
		llamaOpts = append(llamaOpts, llama.SetNBatch(int(opts.NBatch)))
	} else {
		llamaOpts = append(llamaOpts, llama.SetNBatch(512))
	}

	if opts.NUMA {
		llamaOpts = append(llamaOpts, llama.EnableNUMA)
	}

	if opts.LowVRAM {
		llamaOpts = append(llamaOpts, llama.EnabelLowVRAM)
	}

	if opts.DraftModel != "" {
		// https://github.com/ggerganov/llama.cpp/blob/71ca2fad7d6c0ef95ef9944fb3a1a843e481f314/examples/speculative/speculative.cpp#L40
		llamaOpts = append(llamaOpts, llama.SetPerplexity(true))
	}

	model, err := llama.New(opts.ModelFile, llamaOpts...)
	if err != nil {
		// Bug fix: previously a failed main-model load still went on to load
		// the draft model before finally returning the error.
		return err
	}

	if opts.DraftModel != "" {
		// opts.DraftModel is relative to opts.ModelFile, so we need to get the basepath of opts.ModelFile
		if !filepath.IsAbs(opts.DraftModel) {
			dir := filepath.Dir(opts.ModelFile)
			opts.DraftModel = filepath.Join(dir, opts.DraftModel)
		}
		draftModel, err := llama.New(opts.DraftModel, llamaOpts...)
		if err != nil {
			return err
		}
		llm.draftModel = draftModel
	}

	llm.llama = model

	return nil
}
// buildPredictOptions translates the gRPC PredictOptions message into the
// go-llama.cpp option list shared by Predict, PredictStream, Embeddings and
// TokenizeString. Zero-valued numeric fields mean "use the library default"
// and are skipped; boolean flags are appended only when set.
func buildPredictOptions(opts *pb.PredictOptions) []llama.PredictOption {
	// Defaults match llama.cpp's standard RoPE parameters.
	ropeFreqBase := float32(10000)
	ropeFreqScale := float32(1)
	if opts.RopeFreqBase != 0 {
		ropeFreqBase = opts.RopeFreqBase
	}
	if opts.RopeFreqScale != 0 {
		ropeFreqScale = opts.RopeFreqScale
	}
	predictOptions := []llama.PredictOption{
		llama.SetTemperature(opts.Temperature),
		llama.SetTopP(opts.TopP),
		llama.SetTopK(int(opts.TopK)),
		llama.SetTokens(int(opts.Tokens)),
		llama.SetThreads(int(opts.Threads)),
		llama.WithGrammar(opts.Grammar),
		llama.SetRopeFreqBase(ropeFreqBase),
		llama.SetRopeFreqScale(ropeFreqScale),
		llama.SetNegativePromptScale(opts.NegativePromptScale),
		llama.SetNegativePrompt(opts.NegativePrompt),
	}
	if opts.PromptCacheAll {
		predictOptions = append(predictOptions, llama.EnablePromptCacheAll)
	}

	if opts.PromptCacheRO {
		predictOptions = append(predictOptions, llama.EnablePromptCacheRO)
	}

	// Expected absolute path
	if opts.PromptCachePath != "" {
		predictOptions = append(predictOptions, llama.SetPathPromptCache(opts.PromptCachePath))
	}

	if opts.Mirostat != 0 {
		predictOptions = append(predictOptions, llama.SetMirostat(int(opts.Mirostat)))
	}

	if opts.MirostatETA != 0 {
		predictOptions = append(predictOptions, llama.SetMirostatETA(opts.MirostatETA))
	}

	if opts.MirostatTAU != 0 {
		predictOptions = append(predictOptions, llama.SetMirostatTAU(opts.MirostatTAU))
	}

	if opts.Debug {
		predictOptions = append(predictOptions, llama.Debug)
	}

	predictOptions = append(predictOptions, llama.SetStopWords(opts.StopPrompts...))

	if opts.PresencePenalty != 0 {
		predictOptions = append(predictOptions, llama.SetPenalty(opts.PresencePenalty))
	}

	if opts.NKeep != 0 {
		predictOptions = append(predictOptions, llama.SetNKeep(int(opts.NKeep)))
	}

	if opts.Batch != 0 {
		predictOptions = append(predictOptions, llama.SetBatch(int(opts.Batch)))
	}

	if opts.F16KV {
		predictOptions = append(predictOptions, llama.EnableF16KV)
	}

	if opts.IgnoreEOS {
		predictOptions = append(predictOptions, llama.IgnoreEOS)
	}

	if opts.Seed != 0 {
		predictOptions = append(predictOptions, llama.SetSeed(int(opts.Seed)))
	}

	if opts.NDraft != 0 {
		predictOptions = append(predictOptions, llama.SetNDraft(int(opts.NDraft)))
	}
	//predictOptions = append(predictOptions, llama.SetLogitBias(c.Seed))

	// These are always forwarded, even at their zero values.
	predictOptions = append(predictOptions, llama.SetFrequencyPenalty(opts.FrequencyPenalty))
	predictOptions = append(predictOptions, llama.SetMlock(opts.MLock))
	predictOptions = append(predictOptions, llama.SetMemoryMap(opts.MMap))
	predictOptions = append(predictOptions, llama.SetPredictionMainGPU(opts.MainGPU))
	predictOptions = append(predictOptions, llama.SetPredictionTensorSplit(opts.TensorSplit))
	predictOptions = append(predictOptions, llama.SetTailFreeSamplingZ(opts.TailFreeSamplingZ))
	predictOptions = append(predictOptions, llama.SetTypicalP(opts.TypicalP))
	return predictOptions
}
// Predict produces a completion for the prompt, using speculative sampling
// when a draft model was loaded and plain prediction otherwise.
func (llm *LLM) Predict(opts *pb.PredictOptions) (string, error) {
	po := buildPredictOptions(opts)
	if llm.draftModel == nil {
		return llm.llama.Predict(opts.Prompt, po...)
	}
	return llm.llama.SpeculativeSampling(llm.draftModel, opts.Prompt, po...)
}
// PredictStream streams generated tokens over the results channel via a
// token callback, closing the channel when prediction finishes. Errors are
// logged; the gRPC layer only sees the stream ending.
func (llm *LLM) PredictStream(opts *pb.PredictOptions, results chan string) error {
	predictOptions := buildPredictOptions(opts)
	predictOptions = append(predictOptions, llama.SetTokenCallback(func(token string) bool {
		results <- token
		return true
	}))

	go func() {
		var err error
		if llm.draftModel != nil {
			// Bug fix: use predictOptions (which carries the token callback)
			// instead of rebuilding the options without it — previously the
			// speculative-sampling path never streamed any tokens.
			_, err = llm.llama.SpeculativeSampling(llm.draftModel, opts.Prompt, predictOptions...)
		} else {
			_, err = llm.llama.Predict(opts.Prompt, predictOptions...)
		}
		if err != nil {
			fmt.Println("err: ", err)
		}
		close(results)
	}()

	return nil
}
// Embeddings computes embeddings either from pre-tokenized input
// (opts.EmbeddingTokens) or, when none are given, from the raw
// opts.Embeddings text.
func (llm *LLM) Embeddings(opts *pb.PredictOptions) ([]float32, error) {
	po := buildPredictOptions(opts)
	if len(opts.EmbeddingTokens) == 0 {
		return llm.llama.Embeddings(opts.Embeddings, po...)
	}
	tokens := make([]int, 0, len(opts.EmbeddingTokens))
	for _, t := range opts.EmbeddingTokens {
		tokens = append(tokens, int(t))
	}
	return llm.llama.TokenEmbeddings(tokens, po...)
}
// TokenizeString tokenizes the prompt, returning the token count and ids.
func (llm *LLM) TokenizeString(opts *pb.PredictOptions) (pb.TokenizationResponse, error) {
	length, tokens, err := llm.llama.TokenizeString(opts.Prompt, buildPredictOptions(opts)...)
	if err != nil {
		return pb.TokenizationResponse{}, err
	}
	resp := pb.TokenizationResponse{
		Length: length,
		Tokens: tokens,
	}
	return resp, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/llm/llama/main.go | backend/go/llm/llama/main.go | package main
// GRPC Falcon server
// Note: this is started internally by LocalAI and a server is allocated for each model
import (
"flag"
grpc "github.com/mudler/LocalAI/pkg/grpc"
)
var (
	// addr is the address the gRPC backend server listens on.
	addr = flag.String("addr", "localhost:50051", "the address to connect to")
)

// main parses flags and serves the llama.cpp backend over gRPC.
func main() {
	flag.Parse()
	err := grpc.StartServer(*addr, &LLM{})
	if err != nil {
		panic(err)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/stablediffusion-ggml/gosd.go | backend/go/stablediffusion-ggml/gosd.go | package main
import (
"fmt"
"os"
"path/filepath"
"runtime"
"strings"
"unsafe"
"github.com/mudler/LocalAI/pkg/grpc/base"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/mudler/LocalAI/pkg/utils"
)
// SDGGML exposes the stable-diffusion.cpp library (loaded via purego in
// main) behind the LocalAI gRPC backend interface.
type SDGGML struct {
	base.SingleThread
	// threads is the thread count passed at Load time.
	threads int
	// sampleMethod is declared for future use; nothing in this file sets it.
	sampleMethod string
	// cfgScale is the classifier-free-guidance scale from Load, reused on
	// every GenerateImage call.
	cfgScale float32
}
// Function pointers bound in main (via purego.RegisterLibFunc) to the
// corresponding C symbols exported by libgosd.so. They are nil until main
// has run the registration loop.
var (
	LoadModel                      func(model, model_apth string, options []uintptr, threads int32, diff int) int
	GenImage                       func(params uintptr, steps int, dst string, cfgScale float32, srcImage string, strength float32, maskImage string, refImages []uintptr, refImagesCount int) int
	TilingParamsSetEnabled         func(params uintptr, enabled bool)
	TilingParamsSetTileSizes       func(params uintptr, tileSizeX int, tileSizeY int)
	TilingParamsSetRelSizes        func(params uintptr, relSizeX float32, relSizeY float32)
	TilingParamsSetTargetOverlap   func(params uintptr, targetOverlap float32)
	ImgGenParamsNew                func() uintptr
	ImgGenParamsSetPrompts         func(params uintptr, prompt string, negativePrompt string)
	ImgGenParamsSetDimensions      func(params uintptr, width int, height int)
	ImgGenParamsSetSeed            func(params uintptr, seed int64)
	ImgGenParamsGetVaeTilingParams func(params uintptr) uintptr
)
// hasSuffix reports whether s ends with suffix. Originally copied from
// purego's internal/strings; it duplicates the standard library, so delegate
// to strings.HasSuffix (already imported by this file).
// TODO: We should upstream sending []string
func hasSuffix(s, suffix string) bool {
	return strings.HasSuffix(s, suffix)
}
// CString returns a pointer to a NUL-terminated byte representation of name,
// suitable for passing to C via purego. When name already ends in NUL its
// own backing bytes are reused without copying.
func CString(name string) *byte {
	if hasSuffix(name, "\x00") {
		return &(*(*[]byte)(unsafe.Pointer(&name)))[0]
	}
	buf := make([]byte, len(name)+1)
	copy(buf, name)
	return &buf[0]
}
// Load loads the stable-diffusion model through the C library. Backend
// options of the form "name:path" have their path resolved (and validated)
// relative to the model directory; the special option "diffusion_model"
// switches the C loader into diffusion-model mode.
func (sd *SDGGML) Load(opts *pb.ModelOptions) error {
	sd.threads = int(opts.Threads)

	modelPath := opts.ModelPath
	modelFile := opts.ModelFile

	modelPathC := modelPath

	var diffusionModel int

	var oo []string
	for _, op := range opts.Options {
		if op == "diffusion_model" {
			diffusionModel = 1
			continue
		}

		// If it's an option path, we resolve absolute path from the model path
		if strings.Contains(op, ":") && strings.Contains(op, "path") {
			data := strings.Split(op, ":")
			data[1] = filepath.Join(opts.ModelPath, data[1])
			// Options whose path escapes the model directory are dropped.
			if err := utils.VerifyPath(data[1], opts.ModelPath); err == nil {
				oo = append(oo, strings.Join(data, ":"))
			}
		} else {
			oo = append(oo, op)
		}
	}

	fmt.Fprintf(os.Stderr, "Options: %+v\n", oo)

	// At the time of writing Purego doesn't recurse into slices and convert Go strings to pointers so we need to do that
	var keepAlive []any
	options := make([]uintptr, len(oo), len(oo)+1)
	for i, op := range oo {
		bytep := CString(op)
		options[i] = uintptr(unsafe.Pointer(bytep))
		// keepAlive prevents the GC from collecting the C strings before the
		// call below returns (runtime.KeepAlive after the call).
		keepAlive = append(keepAlive, bytep)
	}

	sd.cfgScale = opts.CFGScale

	ret := LoadModel(modelFile, modelPathC, options, opts.Threads, diffusionModel)
	runtime.KeepAlive(keepAlive)
	fmt.Fprintf(os.Stderr, "LoadModel: %d\n", ret)
	if ret != 0 {
		return fmt.Errorf("could not load model")
	}

	return nil
}
// GenerateImage runs image generation through the C library. The optional
// mask image for inpainting is parsed out of the free-form EnableParameters
// string ("mask:<path>"); reference images are converted to C strings for
// purego. The C side frees the parameter object.
func (sd *SDGGML) GenerateImage(opts *pb.GenerateImageRequest) error {
	t := opts.PositivePrompt
	dst := opts.Dst
	negative := opts.NegativePrompt
	srcImage := opts.Src

	var maskImage string
	// Parse mask image from EnableParameters if provided (format "mask:<path>").
	if opts.EnableParameters != "" {
		if strings.Contains(opts.EnableParameters, "mask:") {
			parts := strings.Split(opts.EnableParameters, "mask:")
			if len(parts) > 1 {
				maskPath := strings.TrimSpace(parts[1])
				if maskPath != "" {
					maskImage = maskPath
				}
			}
		}
	}

	// At the time of writing Purego doesn't recurse into slices and convert Go strings to pointers so we need to do that
	var keepAlive []any
	refImagesCount := len(opts.RefImages)
	refImages := make([]uintptr, refImagesCount, refImagesCount+1)
	for i, ri := range opts.RefImages {
		bytep := CString(ri)
		refImages[i] = uintptr(unsafe.Pointer(bytep))
		// keepAlive prevents the GC from collecting the C strings before the
		// GenImage call returns (runtime.KeepAlive below).
		keepAlive = append(keepAlive, bytep)
	}

	// Default strength for img2img (0.75 is a good default)
	strength := float32(0.75)

	// free'd by GenImage
	p := ImgGenParamsNew()
	p_gen := p // alias retained for clarity when reading the calls below
	_ = p_gen
	ImgGenParamsSetPrompts(p, t, negative)
	ImgGenParamsSetDimensions(p, int(opts.Width), int(opts.Height))
	ImgGenParamsSetSeed(p, int64(opts.Seed))
	vaep := ImgGenParamsGetVaeTilingParams(p)
	TilingParamsSetEnabled(vaep, false)

	ret := GenImage(p, int(opts.Step), dst, sd.cfgScale, srcImage, strength, maskImage, refImages, refImagesCount)
	runtime.KeepAlive(keepAlive)
	fmt.Fprintf(os.Stderr, "GenImage: %d\n", ret)
	if ret != 0 {
		return fmt.Errorf("inference failed")
	}

	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/stablediffusion-ggml/main.go | backend/go/stablediffusion-ggml/main.go | package main
import (
"flag"
"github.com/ebitengine/purego"
grpc "github.com/mudler/LocalAI/pkg/grpc"
)
var (
	// addr is the address the gRPC backend server listens on.
	addr = flag.String("addr", "localhost:50051", "the address to connect to")
)

// LibFuncs pairs a Go function pointer with the name of the C symbol it is
// bound to via purego.
type LibFuncs struct {
	FuncPtr any
	Name    string
}

// main loads libgosd.so, binds every required C symbol to its Go function
// pointer, then serves the stable-diffusion backend over gRPC.
func main() {
	lib, err := purego.Dlopen("./libgosd.so", purego.RTLD_NOW|purego.RTLD_GLOBAL)
	if err != nil {
		panic(err)
	}

	bindings := []LibFuncs{
		{&LoadModel, "load_model"},
		{&GenImage, "gen_image"},
		{&TilingParamsSetEnabled, "sd_tiling_params_set_enabled"},
		{&TilingParamsSetTileSizes, "sd_tiling_params_set_tile_sizes"},
		{&TilingParamsSetRelSizes, "sd_tiling_params_set_rel_sizes"},
		{&TilingParamsSetTargetOverlap, "sd_tiling_params_set_target_overlap"},
		{&ImgGenParamsNew, "sd_img_gen_params_new"},
		{&ImgGenParamsSetPrompts, "sd_img_gen_params_set_prompts"},
		{&ImgGenParamsSetDimensions, "sd_img_gen_params_set_dimensions"},
		{&ImgGenParamsSetSeed, "sd_img_gen_params_set_seed"},
		{&ImgGenParamsGetVaeTilingParams, "sd_img_gen_params_get_vae_tiling_params"},
	}
	for _, b := range bindings {
		purego.RegisterLibFunc(b.FuncPtr, lib, b.Name)
	}

	flag.Parse()
	if err := grpc.StartServer(*addr, &SDGGML{}); err != nil {
		panic(err)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/whisper/gowhisper.go | backend/go/whisper/gowhisper.go | package main
import (
"fmt"
"os"
"path/filepath"
"strings"
"unsafe"
"github.com/go-audio/wav"
"github.com/mudler/LocalAI/pkg/grpc/base"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/mudler/LocalAI/pkg/utils"
)
// Function pointers bound (via purego, outside this view) to the whisper.cpp
// wrapper library. They are nil until that registration has run.
var (
	CppLoadModel                 func(modelPath string) int
	CppLoadModelVAD              func(modelPath string) int
	CppVAD                       func(pcmf32 []float32, pcmf32Size uintptr, segsOut unsafe.Pointer, segsOutLen unsafe.Pointer) int
	CppTranscribe                func(threads uint32, lang string, translate bool, diarize bool, pcmf32 []float32, pcmf32Len uintptr, segsOutLen unsafe.Pointer, prompt string) int
	CppGetSegmentText            func(i int) string
	CppGetSegmentStart           func(i int) int64
	CppGetSegmentEnd             func(i int) int64
	CppNTokens                   func(i int) int
	CppGetTokenID                func(i int, j int) int
	CppGetSegmentSpeakerTurnNext func(i int) bool
)
// Whisper exposes whisper.cpp transcription and VAD (via the Cpp* function
// pointers above) behind the LocalAI gRPC backend interface.
type Whisper struct {
	base.SingleThread
}
// Load loads either the VAD-only model or the transcription model, selected
// by the "vad_only" backend option. Unknown options are reported on stderr.
func (w *Whisper) Load(opts *pb.ModelOptions) error {
	vadOnly := false
	for _, o := range opts.Options {
		switch o {
		case "vad_only":
			vadOnly = true
		default:
			fmt.Fprintf(os.Stderr, "Unrecognized option: %v\n", o)
		}
	}

	if vadOnly {
		if CppLoadModelVAD(opts.ModelFile) != 0 {
			return fmt.Errorf("Failed to load Whisper VAD model")
		}
		return nil
	}

	if CppLoadModel(opts.ModelFile) != 0 {
		return fmt.Errorf("Failed to load Whisper transcription model")
	}
	return nil
}
// VAD runs voice-activity detection over the request audio via the C++
// wrapper. The C++ side returns a flat float array of [start, end] pairs
// (hundredths of a second) through out-parameters; this converts them to
// VADSegments in seconds.
func (w *Whisper) VAD(req *pb.VADRequest) (pb.VADResponse, error) {
	audio := req.Audio

	// We expect 0xdeadbeef to be overwritten and if we see it in a stack trace we know it wasn't
	segsPtr, segsLen := uintptr(0xdeadbeef), uintptr(0xdeadbeef)
	segsPtrPtr, segsLenPtr := unsafe.Pointer(&segsPtr), unsafe.Pointer(&segsLen)
	if ret := CppVAD(audio, uintptr(len(audio)), segsPtrPtr, segsLenPtr); ret != 0 {
		return pb.VADResponse{}, fmt.Errorf("Failed VAD")
	}

	// Happens when CPP vector has not had any elements pushed to it
	if segsPtr == 0 {
		return pb.VADResponse{
			Segments: []*pb.VADSegment{},
		}, nil
	}

	// unsafeptr warning is caused by segsPtr being on the stack and therefor being subject to stack copying AFAICT
	// however the stack shouldn't have grown between setting segsPtr and now, also the memory pointed to is allocated by C++
	segs := unsafe.Slice((*float32)(unsafe.Pointer(segsPtr)), segsLen)
	vadSegments := []*pb.VADSegment{}
	// Pairs of floats: even index = start, odd index = end, in 1/100 s units.
	for i := range len(segs) >> 1 {
		s := segs[2*i] / 100
		t := segs[2*i+1] / 100
		vadSegments = append(vadSegments, &pb.VADSegment{
			Start: s,
			End:   t,
		})
	}

	return pb.VADResponse{
		Segments: vadSegments,
	}, nil
}
// AudioTranscription converts the audio file referenced by opts.Dst to WAV,
// decodes its PCM samples, and feeds them to the native whisper transcription
// entry point. It returns the per-segment results together with the full
// whitespace-normalised transcript text.
//
// The temporary conversion directory is always removed before returning.
func (w *Whisper) AudioTranscription(opts *pb.TranscriptRequest) (pb.TranscriptResult, error) {
	dir, err := os.MkdirTemp("", "whisper")
	if err != nil {
		return pb.TranscriptResult{}, err
	}
	defer os.RemoveAll(dir)

	// Normalise the input audio to WAV so the decoder below can read it.
	convertedPath := filepath.Join(dir, "converted.wav")
	if err := utils.AudioToWav(opts.Dst, convertedPath); err != nil {
		return pb.TranscriptResult{}, err
	}

	// Open samples
	fh, err := os.Open(convertedPath)
	if err != nil {
		return pb.TranscriptResult{}, err
	}
	defer fh.Close()

	// Read samples
	d := wav.NewDecoder(fh)
	buf, err := d.FullPCMBuffer()
	if err != nil {
		return pb.TranscriptResult{}, err
	}
	data := buf.AsFloat32Buffer().Data

	// segsLen is overwritten by the C++ side with the number of segments.
	// 0xdeadbeef is a sentinel: seeing it later means it was never written.
	segsLen := uintptr(0xdeadbeef)
	segsLenPtr := unsafe.Pointer(&segsLen)

	if ret := CppTranscribe(opts.Threads, opts.Language, opts.Translate, opts.Diarize, data, uintptr(len(data)), segsLenPtr, opts.Prompt); ret != 0 {
		return pb.TranscriptResult{}, fmt.Errorf("Failed Transcribe")
	}

	segments := []*pb.TranscriptSegment{}
	// Use strings.Builder instead of string concatenation in the loop to
	// avoid quadratic allocations on long transcripts.
	var text strings.Builder
	for i := range int(segsLen) {
		s := CppGetSegmentStart(i)
		t := CppGetSegmentEnd(i)
		// Clone detaches the returned text from C-owned memory.
		txt := strings.Clone(CppGetSegmentText(i))
		tokens := make([]int32, CppNTokens(i))
		if opts.Diarize && CppGetSegmentSpeakerTurnNext(i) {
			txt += " [SPEAKER_TURN]"
		}
		for j := range tokens {
			tokens[j] = int32(CppGetTokenID(i, j))
		}
		segment := &pb.TranscriptSegment{
			Id:    int32(i),
			Text:  txt,
			Start: s, End: t,
			Tokens: tokens,
		}
		segments = append(segments, segment)
		text.WriteString(" ")
		text.WriteString(strings.TrimSpace(txt))
	}

	return pb.TranscriptResult{
		Segments: segments,
		Text:     strings.TrimSpace(text.String()),
	}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/whisper/main.go | backend/go/whisper/main.go | package main
// Note: this is started internally by LocalAI and a server is allocated for each model
import (
"flag"
"os"
"github.com/ebitengine/purego"
grpc "github.com/mudler/LocalAI/pkg/grpc"
)
var (
	// addr is the listen address of the gRPC server for this model process.
	addr = flag.String("addr", "localhost:50051", "the address to connect to")
)

// LibFuncs pairs a Go function-pointer variable with the exported symbol
// name it is bound to in the shared library.
type LibFuncs struct {
	FuncPtr any
	Name string
}
// main resolves the native whisper symbols from the shared library named by
// WHISPER_LIBRARY (falling back to ./libgowhisper-fallback.so) and serves
// the Whisper backend over gRPC on -addr.
func main() {
	// Get library name from environment variable, default to fallback
	libName := os.Getenv("WHISPER_LIBRARY")
	if libName == "" {
		libName = "./libgowhisper-fallback.so"
	}

	libHandle, err := purego.Dlopen(libName, purego.RTLD_NOW|purego.RTLD_GLOBAL)
	if err != nil {
		panic(err)
	}

	// Bind every Go function pointer to its exported C symbol.
	bindings := []LibFuncs{
		{&CppLoadModel, "load_model"},
		{&CppLoadModelVAD, "load_model_vad"},
		{&CppVAD, "vad"},
		{&CppTranscribe, "transcribe"},
		{&CppGetSegmentText, "get_segment_text"},
		{&CppGetSegmentStart, "get_segment_t0"},
		{&CppGetSegmentEnd, "get_segment_t1"},
		{&CppNTokens, "n_tokens"},
		{&CppGetTokenID, "get_token_id"},
		{&CppGetSegmentSpeakerTurnNext, "get_segment_speaker_turn_next"},
	}
	for _, binding := range bindings {
		purego.RegisterLibFunc(binding.FuncPtr, libHandle, binding.Name)
	}

	flag.Parse()
	err = grpc.StartServer(*addr, &Whisper{})
	if err != nil {
		panic(err)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/piper/piper.go | backend/go/piper/piper.go | package main
// This is a wrapper to statisfy the GRPC service interface
// It is meant to be used by the main executable that is the server for the specific backend type (falcon, gpt3, etc)
import (
"fmt"
"os"
"path/filepath"
"github.com/mudler/LocalAI/pkg/grpc/base"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
piper "github.com/mudler/go-piper"
)
// Piper is the gRPC backend wrapper around the go-piper TTS bindings.
// The wrapped PiperB instance is created by Load and used by TTS.
type Piper struct {
	base.SingleThread
	piper *PiperB
}
// Load validates that the configured model is an ONNX file and prepares the
// piper wrapper using the espeak-ng data directory from ESPEAK_NG_DATA.
func (sd *Piper) Load(opts *pb.ModelOptions) error {
	if ext := filepath.Ext(opts.ModelFile); ext != ".onnx" {
		return fmt.Errorf("unsupported model type %s (should end with .onnx)", opts.ModelFile)
	}

	// Note: the Model here is a path to a directory containing the model files
	piperInstance, err := New(os.Getenv("ESPEAK_NG_DATA"))
	sd.piper = piperInstance
	return err
}
// TTS synthesises opts.Text with the given voice model and writes the
// result to opts.Dst, delegating to the wrapper built in Load.
// Calling TTS before a successful Load will panic on the nil wrapper.
func (sd *Piper) TTS(opts *pb.TTSRequest) error {
	return sd.piper.TTS(opts.Text, opts.Model, opts.Dst)
}
// PiperB holds the state shared by piper TTS invocations: the directory
// containing the espeak-ng data assets.
type PiperB struct {
	assetDir string
}

// New returns a PiperB bound to assetDir. The directory must already exist;
// the os.Stat error is returned verbatim when it does not.
func New(assetDir string) (*PiperB, error) {
	_, err := os.Stat(assetDir)
	if err != nil {
		return nil, err
	}
	instance := &PiperB{assetDir: assetDir}
	return instance, nil
}
// TTS renders text to a WAV file at dst using the given ONNX voice model and
// the configured espeak-ng asset directory. The empty string argument is an
// optional go-piper parameter — confirm its meaning against go-piper docs.
func (s *PiperB) TTS(text, model, dst string) error {
	return piper.TextToWav(text, model, s.assetDir, "", dst)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/backend/go/piper/main.go | backend/go/piper/main.go | package main
// Note: this is started internally by LocalAI and a server is allocated for each model
import (
"flag"
grpc "github.com/mudler/LocalAI/pkg/grpc"
)
var (
	// addr is the listen address of this backend's gRPC server.
	addr = flag.String("addr", "localhost:50051", "the address to connect to")
)

// main starts the gRPC server for the Piper TTS backend on -addr.
// LocalAI launches one of these processes per model.
func main() {
	flag.Parse()
	if err := grpc.StartServer(*addr, &Piper{}); err != nil {
		panic(err)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/internal/version.go | internal/version.go | package internal
import "fmt"
// Version is the build's version string; presumably injected via -ldflags — confirm build scripts.
var Version = ""

// Commit is the build's git commit hash; presumably injected via -ldflags — confirm build scripts.
var Commit = ""

// PrintableVersion renders the version and commit as "version (commit)".
func PrintableVersion() string {
	formatted := fmt.Sprintf("%s (%s)", Version, Commit)
	return formatted
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/dependencies_manager/manager.go | core/dependencies_manager/manager.go | package main
import (
"fmt"
"os"
"path/filepath"
"github.com/mudler/LocalAI/pkg/downloader"
"github.com/mudler/LocalAI/pkg/utils"
"gopkg.in/yaml.v3"
)
// Asset describes one downloadable file in the assets YAML manifest.
type Asset struct {
	FileName string `yaml:"filename"` // destination file name under the destination path
	URL string `yaml:"url"` // source URL to download from
	SHA string `yaml:"sha"` // expected checksum used to verify the download
}
// main downloads the assets listed in a YAML manifest into a destination
// directory, verifying each file's SHA.
//
// Usage: program <assets.yaml> <dest-path>
// Any failure (bad manifest, failed download, checksum mismatch) aborts the
// program.
func main() {
	// Validate arguments up front instead of panicking with an index error.
	if len(os.Args) < 3 {
		fmt.Fprintf(os.Stderr, "usage: %s <assets.yaml> <dest-path>\n", os.Args[0])
		os.Exit(1)
	}
	assetFile := os.Args[1]
	destPath := os.Args[2]

	// read the YAML file which contains a list of assets
	// and download them in the asset path
	assets := []Asset{}
	f, err := os.ReadFile(assetFile)
	if err != nil {
		panic(err)
	}
	// unmarshal the YAML data into a struct
	if err := yaml.Unmarshal(f, &assets); err != nil {
		panic(err)
	}
	// download the assets
	for _, asset := range assets {
		uri := downloader.URI(asset.URL)
		if err := uri.DownloadFile(filepath.Join(destPath, asset.FileName), asset.SHA, 1, 1, utils.DisplayDownloadFunction); err != nil {
			panic(err)
		}
	}
	fmt.Println("Finished downloading assets")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/services/list_models.go | core/services/list_models.go | package services
import (
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/model"
)
// LooseFilePolicy controls how ListModels treats "loose" model files found
// on disk relative to configured model entries.
type LooseFilePolicy int

const (
	// LOOSE_ONLY returns only loose files, excluding configured models.
	LOOSE_ONLY LooseFilePolicy = iota
	// SKIP_IF_CONFIGURED returns configured models plus loose files that do
	// not already back a configuration.
	SKIP_IF_CONFIGURED
	// SKIP_ALWAYS returns configured models only; loose files are ignored.
	SKIP_ALWAYS
	// ALWAYS_INCLUDE returns configured models and every matching loose file.
	ALWAYS_INCLUDE
)
// ListModels returns the model names matching filter, combining configured
// models and loose files on disk according to looseFilePolicy.
func ListModels(bcl *config.ModelConfigLoader, ml *model.ModelLoader, filter config.ModelConfigFilterFn, looseFilePolicy LooseFilePolicy) ([]string, error) {
	skipMap := map[string]interface{}{}
	dataModels := []string{}

	// Start with known configurations
	for _, c := range bcl.GetModelConfigsByFilter(filter) {
		switch looseFilePolicy {
		case SKIP_IF_CONFIGURED, LOOSE_ONLY:
			// Remember the backing file so the loose-file pass skips it.
			skipMap[c.Model] = nil
		}
		if looseFilePolicy != LOOSE_ONLY {
			dataModels = append(dataModels, c.Name)
		}
	}

	// Then iterate through the loose files if requested.
	if looseFilePolicy != SKIP_ALWAYS {
		models, err := ml.ListFilesInModelPath()
		if err != nil {
			return nil, err
		}
		for _, m := range models {
			// And only adds them if they shouldn't be skipped.
			_, skipped := skipMap[m]
			if !skipped && filter(m, nil) {
				dataModels = append(dataModels, m)
			}
		}
	}

	return dataModels, nil
}
// CheckIfModelExists reports whether any model matching modelName is
// visible under the given loose-file policy.
func CheckIfModelExists(bcl *config.ModelConfigLoader, ml *model.ModelLoader, modelName string, looseFilePolicy LooseFilePolicy) (bool, error) {
	nameFilter, err := config.BuildNameFilterFn(modelName)
	if err != nil {
		return false, err
	}
	matches, err := ListModels(bcl, ml, nameFilter, looseFilePolicy)
	if err != nil {
		return false, err
	}
	return len(matches) > 0, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/services/metrics.go | core/services/metrics.go | package services
import (
"context"
"github.com/mudler/xlog"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/exporters/prometheus"
"go.opentelemetry.io/otel/metric"
metricApi "go.opentelemetry.io/otel/sdk/metric"
)
// LocalAIMetricsService exposes the OpenTelemetry meter and the histogram
// used to record API call durations.
type LocalAIMetricsService struct {
	Meter metric.Meter
	ApiTimeMetric metric.Float64Histogram
}
// ObserveAPICall records one API call duration in the histogram, tagged
// with the HTTP method and path.
func (m *LocalAIMetricsService) ObserveAPICall(method string, path string, duration float64) {
	methodAttr := attribute.String("method", method)
	pathAttr := attribute.String("path", path)
	m.ApiTimeMetric.Record(context.Background(), duration, metric.WithAttributes(methodAttr, pathAttr))
}
// NewLocalAIMetricsService bootstraps the OpenTelemetry pipeline with a
// Prometheus exporter and registers the "api_call" duration histogram.
// If it does not return an error, make sure to call Shutdown for proper cleanup.
func NewLocalAIMetricsService() (*LocalAIMetricsService, error) {
	exporter, err := prometheus.New()
	if err != nil {
		return nil, err
	}

	meter := metricApi.NewMeterProvider(metricApi.WithReader(exporter)).Meter("github.com/mudler/LocalAI")
	apiTimeMetric, err := meter.Float64Histogram("api_call", metric.WithDescription("api calls"))
	if err != nil {
		return nil, err
	}

	service := &LocalAIMetricsService{
		Meter:         meter,
		ApiTimeMetric: apiTimeMetric,
	}
	return service, nil
}
// Shutdown is a placeholder for tearing down the OpenTelemetry pipeline;
// it currently only logs a warning and returns nil.
// NOTE(review): value receiver here while ObserveAPICall uses a pointer
// receiver — consider unifying (callers using interfaces could be affected).
func (lams LocalAIMetricsService) Shutdown() error {
	// TODO: Not sure how to actually do this:
	//// setupOTelSDK bootstraps the OpenTelemetry pipeline.
	//// If it does not return an error, make sure to call shutdown for proper cleanup.
	xlog.Warn("LocalAIMetricsService Shutdown called, but OTelSDK proper shutdown not yet implemented?")
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/services/models.go | core/services/models.go | package services
import (
"context"
"encoding/json"
"errors"
"fmt"
"os"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/LocalAI/pkg/utils"
"github.com/mudler/xlog"
"gopkg.in/yaml.v2"
)
const (
processingMessage = "processing file: %s. Total: %s. Current: %s"
)
// modelHandler processes one model gallery operation: it honours
// cancellation at each phase, installs or deletes the model via
// processModelOperation, reloads the model configurations, and records
// progress/terminal status under op.ID.
func (g *GalleryService) modelHandler(op *GalleryOp[gallery.GalleryModel, gallery.ModelConfig], cl *config.ModelConfigLoader, systemState *system.SystemState) error {
	utils.ResetDownloadTimers()

	// Check if already cancelled
	if op.Context != nil {
		select {
		case <-op.Context.Done():
			g.UpdateStatus(op.ID, &GalleryOpStatus{
				Cancelled:          true,
				Processed:          true,
				Message:            "cancelled",
				GalleryElementName: op.GalleryElementName,
			})
			return op.Context.Err()
		default:
		}
	}

	g.UpdateStatus(op.ID, &GalleryOpStatus{Message: fmt.Sprintf("processing model: %s", op.GalleryElementName), Progress: 0, Cancellable: true})

	// displayDownload displays the download progress
	progressCallback := func(fileName string, current string, total string, percentage float64) {
		// Check for cancellation during progress updates
		if op.Context != nil {
			select {
			case <-op.Context.Done():
				// Cancelled: stop publishing progress updates.
				return
			default:
			}
		}
		g.UpdateStatus(op.ID, &GalleryOpStatus{Message: fmt.Sprintf(processingMessage, fileName, total, current), FileName: fileName, Progress: percentage, TotalFileSize: total, DownloadedFileSize: current, Cancellable: true})
		utils.DisplayDownloadFunction(fileName, current, total, percentage)
	}

	err := processModelOperation(op, systemState, g.modelLoader, g.appConfig.EnforcePredownloadScans, g.appConfig.AutoloadBackendGalleries, progressCallback)
	if err != nil {
		// Check if error is due to cancellation
		if op.Context != nil && errors.Is(err, op.Context.Err()) {
			g.UpdateStatus(op.ID, &GalleryOpStatus{
				Cancelled:          true,
				Processed:          true,
				Message:            "cancelled",
				GalleryElementName: op.GalleryElementName,
			})
			return err
		}
		return err
	}

	// Check for cancellation before final steps
	if op.Context != nil {
		select {
		case <-op.Context.Done():
			g.UpdateStatus(op.ID, &GalleryOpStatus{
				Cancelled:          true,
				Processed:          true,
				Message:            "cancelled",
				GalleryElementName: op.GalleryElementName,
			})
			return op.Context.Err()
		default:
		}
	}

	// Reload models
	err = cl.LoadModelConfigsFromPath(systemState.Model.ModelsPath, g.appConfig.ToConfigLoaderOptions()...)
	if err != nil {
		return err
	}

	err = cl.Preload(systemState.Model.ModelsPath)
	if err != nil {
		return err
	}

	g.UpdateStatus(op.ID,
		&GalleryOpStatus{
			Deletion:           op.Delete,
			Processed:          true,
			GalleryElementName: op.GalleryElementName,
			Message:            "completed",
			Progress:           100,
			Cancellable:        false})
	return nil
}
// installModelFromRemoteConfig fetches a model's gallery configuration from
// req.URL, merges any additional files from the request, installs the model,
// and (when automaticallyInstallBackend is set) installs the backend the
// model declares.
func installModelFromRemoteConfig(ctx context.Context, systemState *system.SystemState, modelLoader *model.ModelLoader, req gallery.GalleryModel, downloadStatus func(string, string, string, float64), enforceScan, automaticallyInstallBackend bool, backendGalleries []config.Gallery) error {
	// Named modelConfig (not "config") so the imported config package is not shadowed.
	modelConfig, err := gallery.GetGalleryConfigFromURLWithContext[gallery.ModelConfig](ctx, req.URL, systemState.Model.ModelsPath)
	if err != nil {
		return err
	}

	modelConfig.Files = append(modelConfig.Files, req.AdditionalFiles...)

	installedModel, err := gallery.InstallModel(ctx, systemState, req.Name, &modelConfig, req.Overrides, downloadStatus, enforceScan)
	if err != nil {
		return err
	}

	if automaticallyInstallBackend && installedModel.Backend != "" {
		// The model declares a backend: pull it from the backend galleries.
		if err := gallery.InstallBackendFromGallery(ctx, backendGalleries, systemState, modelLoader, installedModel.Backend, downloadStatus, false); err != nil {
			return err
		}
	}

	return nil
}
// galleryModel is the request payload for model install operations: an
// inline GalleryModel plus an optional gallery ID (empty ID means install
// from the remote configuration URL instead of a gallery).
type galleryModel struct {
	gallery.GalleryModel `yaml:",inline"` // https://github.com/go-yaml/yaml/issues/63
	ID string `json:"id"`
}
// processRequests installs every requested model, either directly from a
// remote configuration URL (empty ID) or from the configured galleries.
//
// All requests are attempted even when earlier ones fail. The returned
// error aggregates every failure via errors.Join; previously only the last
// iteration's error was returned, silently dropping earlier failures (and
// reporting success when only the final request succeeded).
func processRequests(systemState *system.SystemState, modelLoader *model.ModelLoader, enforceScan, automaticallyInstallBackend bool, galleries []config.Gallery, backendGalleries []config.Gallery, requests []galleryModel) error {
	ctx := context.Background()

	var errs []error
	for _, r := range requests {
		utils.ResetDownloadTimers()
		var err error
		if r.ID == "" {
			err = installModelFromRemoteConfig(ctx, systemState, modelLoader, r.GalleryModel, utils.DisplayDownloadFunction, enforceScan, automaticallyInstallBackend, backendGalleries)
		} else {
			err = gallery.InstallModelFromGallery(
				ctx, galleries, backendGalleries, systemState, modelLoader, r.ID, r.GalleryModel, utils.DisplayDownloadFunction, enforceScan, automaticallyInstallBackend)
		}
		if err != nil {
			errs = append(errs, err)
		}
	}
	// errors.Join returns nil when every request succeeded.
	return errors.Join(errs...)
}
// ApplyGalleryFromFile reads a YAML list of gallery model requests from the
// file at s and installs each of them.
func ApplyGalleryFromFile(systemState *system.SystemState, modelLoader *model.ModelLoader, enforceScan, automaticallyInstallBackend bool, galleries []config.Gallery, backendGalleries []config.Gallery, s string) error {
	content, err := os.ReadFile(s)
	if err != nil {
		return err
	}

	requests := []galleryModel{}
	if err := yaml.Unmarshal(content, &requests); err != nil {
		return err
	}
	return processRequests(systemState, modelLoader, enforceScan, automaticallyInstallBackend, galleries, backendGalleries, requests)
}
// ApplyGalleryFromString parses a JSON list of gallery model requests from
// s and installs each of them.
func ApplyGalleryFromString(systemState *system.SystemState, modelLoader *model.ModelLoader, enforceScan, automaticallyInstallBackend bool, galleries []config.Gallery, backendGalleries []config.Gallery, s string) error {
	requests := []galleryModel{}
	if err := json.Unmarshal([]byte(s), &requests); err != nil {
		return err
	}
	return processRequests(systemState, modelLoader, enforceScan, automaticallyInstallBackend, galleries, backendGalleries, requests)
}
// processModelOperation handles the installation or deletion of a model.
// Dispatch order: delete → pre-resolved gallery element → gallery name
// lookup → remote configuration URL from the request.
func processModelOperation(
	op *GalleryOp[gallery.GalleryModel, gallery.ModelConfig],
	systemState *system.SystemState,
	modelLoader *model.ModelLoader,
	enforcePredownloadScans bool,
	automaticallyInstallBackend bool,
	progressCallback func(string, string, string, float64),
) error {
	ctx := op.Context
	if ctx == nil {
		ctx = context.Background()
	}

	// Check for cancellation before starting
	select {
	case <-ctx.Done():
		return ctx.Err()
	default:
	}

	switch {
	case op.Delete:
		return gallery.DeleteModelFromSystem(systemState, op.GalleryElementName)
	case op.GalleryElement != nil:
		// The caller already resolved the gallery element: install it directly.
		installedModel, err := gallery.InstallModel(
			ctx, systemState, op.GalleryElement.Name,
			op.GalleryElement,
			op.Req.Overrides,
			progressCallback, enforcePredownloadScans)
		if err != nil {
			return err
		}
		if automaticallyInstallBackend && installedModel.Backend != "" {
			xlog.Debug("Installing backend", "backend", installedModel.Backend)
			if err := gallery.InstallBackendFromGallery(ctx, op.BackendGalleries, systemState, modelLoader, installedModel.Backend, progressCallback, false); err != nil {
				return err
			}
		}
		return nil
	case op.GalleryElementName != "":
		return gallery.InstallModelFromGallery(ctx, op.Galleries, op.BackendGalleries, systemState, modelLoader, op.GalleryElementName, op.Req, progressCallback, enforcePredownloadScans, automaticallyInstallBackend)
	default:
		return installModelFromRemoteConfig(ctx, systemState, modelLoader, op.Req, progressCallback, enforcePredownloadScans, automaticallyInstallBackend, op.BackendGalleries)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/services/backends_test.go | core/services/backends_test.go | package services_test
import (
"context"
"os"
"path/filepath"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/services"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"gopkg.in/yaml.v2"
)
var _ = Describe("InstallExternalBackend", func() {
var (
tempDir string
galleries []config.Gallery
ml *model.ModelLoader
systemState *system.SystemState
)
BeforeEach(func() {
var err error
tempDir, err = os.MkdirTemp("", "backends-service-test-*")
Expect(err).NotTo(HaveOccurred())
systemState, err = system.GetSystemState(system.WithBackendPath(tempDir))
Expect(err).NotTo(HaveOccurred())
ml = model.NewModelLoader(systemState)
// Setup test gallery
galleries = []config.Gallery{
{
Name: "test-gallery",
URL: "file://" + filepath.Join(tempDir, "test-gallery.yaml"),
},
}
})
AfterEach(func() {
os.RemoveAll(tempDir)
})
Context("with gallery backend name", func() {
BeforeEach(func() {
// Create a test gallery file with a test backend
testBackend := []map[string]interface{}{
{
"name": "test-backend",
"uri": "https://gist.githubusercontent.com/mudler/71d5376bc2aa168873fa519fa9f4bd56/raw/testbackend/run.sh",
},
}
data, err := yaml.Marshal(testBackend)
Expect(err).NotTo(HaveOccurred())
err = os.WriteFile(filepath.Join(tempDir, "test-gallery.yaml"), data, 0644)
Expect(err).NotTo(HaveOccurred())
})
It("should fail when name or alias is provided for gallery backend", func() {
err := services.InstallExternalBackend(
context.Background(),
galleries,
systemState,
ml,
nil,
"test-backend", // gallery name
"custom-name", // name should not be allowed
"",
)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("specifying a name or alias is not supported for gallery backends"))
})
It("should fail when backend is not found in gallery", func() {
err := services.InstallExternalBackend(
context.Background(),
galleries,
systemState,
ml,
nil,
"non-existent-backend",
"",
"",
)
Expect(err).To(HaveOccurred())
})
})
Context("with OCI image", func() {
It("should fail when name is not provided for OCI image", func() {
err := services.InstallExternalBackend(
context.Background(),
galleries,
systemState,
ml,
nil,
"oci://quay.io/mudler/tests:localai-backend-test",
"", // name is required for OCI images
"",
)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("specifying a name is required for OCI images"))
})
})
Context("with directory path", func() {
var testBackendPath string
BeforeEach(func() {
// Create a test backend directory with required files
testBackendPath = filepath.Join(tempDir, "source-backend")
err := os.MkdirAll(testBackendPath, 0750)
Expect(err).NotTo(HaveOccurred())
// Create run.sh
err = os.WriteFile(filepath.Join(testBackendPath, "run.sh"), []byte("#!/bin/bash\necho test"), 0755)
Expect(err).NotTo(HaveOccurred())
})
It("should infer name from directory path when name is not provided", func() {
// This test verifies that the function attempts to install using the directory name
// The actual installation may fail due to test environment limitations
err := services.InstallExternalBackend(
context.Background(),
galleries,
systemState,
ml,
nil,
testBackendPath,
"", // name should be inferred as "source-backend"
"",
)
// The function should at least attempt to install with the inferred name
// Even if it fails for other reasons, it shouldn't fail due to missing name
if err != nil {
Expect(err.Error()).NotTo(ContainSubstring("name is required"))
}
})
It("should use provided name when specified", func() {
err := services.InstallExternalBackend(
context.Background(),
galleries,
systemState,
ml,
nil,
testBackendPath,
"custom-backend-name",
"",
)
// The function should use the provided name
if err != nil {
Expect(err.Error()).NotTo(ContainSubstring("name is required"))
}
})
It("should support alias when provided", func() {
err := services.InstallExternalBackend(
context.Background(),
galleries,
systemState,
ml,
nil,
testBackendPath,
"custom-backend-name",
"custom-alias",
)
// The function should accept alias for directory paths
if err != nil {
Expect(err.Error()).NotTo(ContainSubstring("alias is not supported"))
}
})
})
})
// Verifies that GalleryOp carries the external-backend fields (URI, name,
// alias) used for OCI/URL/path backend installs.
var _ = Describe("GalleryOp with External Backend", func() {
	It("should have external backend fields in GalleryOp", func() {
		// Test that the GalleryOp struct has the new external backend fields
		op := services.GalleryOp[string, string]{
			ExternalURI:   "oci://example.com/backend:latest",
			ExternalName:  "test-backend",
			ExternalAlias: "test-alias",
		}
		Expect(op.ExternalURI).To(Equal("oci://example.com/backend:latest"))
		Expect(op.ExternalName).To(Equal("test-backend"))
		Expect(op.ExternalAlias).To(Equal("test-alias"))
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/services/gallery.go | core/services/gallery.go | package services
import (
"context"
"fmt"
"sync"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
)
// GalleryService serialises model/backend gallery operations (install and
// delete) received over its channels and tracks per-operation status and
// cancellation functions. The embedded mutex guards statuses and
// cancellations.
type GalleryService struct {
	appConfig *config.ApplicationConfig
	sync.Mutex
	ModelGalleryChannel chan GalleryOp[gallery.GalleryModel, gallery.ModelConfig]
	BackendGalleryChannel chan GalleryOp[gallery.GalleryBackend, any]
	modelLoader *model.ModelLoader
	statuses map[string]*GalleryOpStatus
	cancellations map[string]context.CancelFunc
}
// NewGalleryService builds a GalleryService with empty status/cancellation
// tables and unbuffered operation channels.
func NewGalleryService(appConfig *config.ApplicationConfig, ml *model.ModelLoader) *GalleryService {
	service := &GalleryService{
		appConfig:             appConfig,
		ModelGalleryChannel:   make(chan GalleryOp[gallery.GalleryModel, gallery.ModelConfig]),
		BackendGalleryChannel: make(chan GalleryOp[gallery.GalleryBackend, any]),
		modelLoader:           ml,
		statuses:              map[string]*GalleryOpStatus{},
		cancellations:         map[string]context.CancelFunc{},
	}
	return service
}
// UpdateStatus records (or replaces) the status of operation s.
func (g *GalleryService) UpdateStatus(s string, op *GalleryOpStatus) {
	g.Lock()
	defer g.Unlock()
	g.statuses[s] = op
}

// GetStatus returns the status for operation s, or nil when unknown.
func (g *GalleryService) GetStatus(s string) *GalleryOpStatus {
	g.Lock()
	defer g.Unlock()
	return g.statuses[s]
}

// GetAllStatus returns the status map.
// NOTE(review): the internal map is returned directly, so callers share it
// with the service; concurrent iteration races with UpdateStatus — consider
// returning a copy.
func (g *GalleryService) GetAllStatus() map[string]*GalleryOpStatus {
	g.Lock()
	defer g.Unlock()
	return g.statuses
}
// CancelOperation cancels an in-progress operation by its ID,
// marking its status as cancelled/processed. For queued operations that
// never started (no status yet) a terminal cancelled status is created.
func (g *GalleryService) CancelOperation(id string) error {
	g.Lock()
	defer g.Unlock()

	// Check if operation is already cancelled
	if status, ok := g.statuses[id]; ok && status.Cancelled {
		return fmt.Errorf("operation %q is already cancelled", id)
	}

	cancelFunc, exists := g.cancellations[id]
	if !exists {
		return fmt.Errorf("operation %q not found or already completed", id)
	}

	// Cancel the operation
	cancelFunc()

	// Update status to reflect cancellation
	if status, ok := g.statuses[id]; ok {
		status.Cancelled = true
		status.Processed = true
		status.Message = "cancelled"
	} else {
		// Create status for queued operations that haven't started yet
		g.statuses[id] = &GalleryOpStatus{
			Cancelled:   true,
			Processed:   true,
			Message:     "cancelled",
			Cancellable: false,
		}
	}

	// Clean up cancellation function
	delete(g.cancellations, id)

	return nil
}
// storeCancellation stores a cancellation function for an operation
// (overwriting any previous one for the same id).
func (g *GalleryService) storeCancellation(id string, cancelFunc context.CancelFunc) {
	g.Lock()
	defer g.Unlock()
	g.cancellations[id] = cancelFunc
}

// StoreCancellation is a public method to store a cancellation function for an operation
// This allows cancellation functions to be stored immediately when operations are created,
// enabling cancellation of queued operations that haven't started processing yet.
func (g *GalleryService) StoreCancellation(id string, cancelFunc context.CancelFunc) {
	g.storeCancellation(id, cancelFunc)
}

// removeCancellation removes a cancellation function when operation completes
func (g *GalleryService) removeCancellation(id string) {
	g.Lock()
	defer g.Unlock()
	delete(g.cancellations, id)
}
// Start launches the background goroutine that consumes backend and model
// gallery operations until context c is cancelled. Handler errors are
// recorded in the operation status; when OpaqueErrors is set the real error
// text is hidden from the status.
func (g *GalleryService) Start(c context.Context, cl *config.ModelConfigLoader, systemState *system.SystemState) error {
	// updates the status with an error
	var updateError func(id string, e error)
	if !g.appConfig.OpaqueErrors {
		updateError = func(id string, e error) {
			g.UpdateStatus(id, &GalleryOpStatus{Error: e, Processed: true, Message: "error: " + e.Error()})
		}
	} else {
		updateError = func(id string, _ error) {
			g.UpdateStatus(id, &GalleryOpStatus{Error: fmt.Errorf("an error occurred"), Processed: true})
		}
	}

	go func() {
		for {
			select {
			case <-c.Done():
				return
			case op := <-g.BackendGalleryChannel:
				// Create context if not provided
				if op.Context == nil {
					op.Context, op.CancelFunc = context.WithCancel(c)
					g.storeCancellation(op.ID, op.CancelFunc)
				} else if op.CancelFunc != nil {
					g.storeCancellation(op.ID, op.CancelFunc)
				}
				err := g.backendHandler(&op, systemState)
				if err != nil {
					updateError(op.ID, err)
				}
				// Always drop the cancellation entry once the op finishes.
				g.removeCancellation(op.ID)
			case op := <-g.ModelGalleryChannel:
				// Create context if not provided
				if op.Context == nil {
					op.Context, op.CancelFunc = context.WithCancel(c)
					g.storeCancellation(op.ID, op.CancelFunc)
				} else if op.CancelFunc != nil {
					g.storeCancellation(op.ID, op.CancelFunc)
				}
				err := g.modelHandler(&op, cl, systemState)
				if err != nil {
					updateError(op.ID, err)
				}
				g.removeCancellation(op.ID)
			}
		}
	}()
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/services/operation.go | core/services/operation.go | package services
import (
"context"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/xsync"
)
// GalleryOp describes one queued gallery operation (install or delete) for
// an element of request type T with install-time configuration type E.
type GalleryOp[T any, E any] struct {
	ID string
	GalleryElementName string
	Delete bool
	Req T
	// If specified, we install directly the gallery element
	GalleryElement *E
	Galleries []config.Gallery
	BackendGalleries []config.Gallery
	// Context for cancellation support
	Context context.Context
	CancelFunc context.CancelFunc
	// External backend installation parameters (for OCI/URL/path)
	// These are used when installing backends from external sources rather than galleries
	ExternalURI string // The OCI image, URL, or path
	ExternalName string // Custom name for the backend
	ExternalAlias string // Custom alias for the backend
}
// GalleryOpStatus is the externally visible progress/terminal state of a
// gallery operation, serialised to JSON by the status API.
type GalleryOpStatus struct {
	Deletion bool `json:"deletion"` // Deletion is true if the operation is a deletion
	FileName string `json:"file_name"`
	Error error `json:"error"`
	Processed bool `json:"processed"`
	Message string `json:"message"`
	Progress float64 `json:"progress"`
	TotalFileSize string `json:"file_size"`
	DownloadedFileSize string `json:"downloaded_size"`
	GalleryElementName string `json:"gallery_element_name"`
	Cancelled bool `json:"cancelled"` // Cancelled is true if the operation was cancelled
	Cancellable bool `json:"cancellable"` // Cancellable is true if the operation can be cancelled
}
// OpCache maps gallery element names to in-flight operation UUIDs and
// remembers which of those operations target backends.
type OpCache struct {
	status *xsync.SyncedMap[string, string]
	backendOps *xsync.SyncedMap[string, bool] // Tracks which operations are backend operations
	galleryService *GalleryService
}

// NewOpCache builds an empty OpCache bound to galleryService.
func NewOpCache(galleryService *GalleryService) *OpCache {
	return &OpCache{
		status: xsync.NewSyncedMap[string, string](),
		backendOps: xsync.NewSyncedMap[string, bool](),
		galleryService: galleryService,
	}
}
// Set maps element key to operation uuid value.
func (m *OpCache) Set(key string, value string) {
	m.status.Set(key, value)
}

// SetBackend sets a key-value pair and marks it as a backend operation
func (m *OpCache) SetBackend(key string, value string) {
	m.status.Set(key, value)
	m.backendOps.Set(key, true)
}

// IsBackendOp returns true if the given key is a backend operation
func (m *OpCache) IsBackendOp(key string) bool {
	return m.backendOps.Get(key)
}

// Get returns the operation uuid stored for key.
func (m *OpCache) Get(key string) string {
	return m.status.Get(key)
}

// DeleteUUID removes every key whose stored value equals uuid.
func (m *OpCache) DeleteUUID(uuid string) {
	for _, k := range m.status.Keys() {
		if m.status.Get(k) == uuid {
			m.status.Delete(k)
			m.backendOps.Delete(k) // Also clean up the backend flag
		}
	}
}

// Map returns the key → uuid mapping.
func (m *OpCache) Map() map[string]string {
	return m.status.Map()
}

// Exists reports whether key has an entry.
func (m *OpCache) Exists(key string) bool {
	return m.status.Exists(key)
}
// GetStatus returns the current processing map alongside a map of each
// entry's task type: "Installation", "Deletion", or "Waiting" (when the
// gallery service has no status for the operation yet).
func (m *OpCache) GetStatus() (map[string]string, map[string]string) {
	processingModelsData := m.Map()

	taskTypes := map[string]string{}
	for k, v := range processingModelsData {
		status := m.galleryService.GetStatus(v)
		switch {
		case status == nil:
			taskTypes[k] = "Waiting"
		case status.Deletion:
			taskTypes[k] = "Deletion"
		default:
			taskTypes[k] = "Installation"
		}
	}

	return processingModelsData, taskTypes
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/services/backends.go | core/services/backends.go | package services
import (
"context"
"errors"
"fmt"
"path/filepath"
"strings"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/pkg/downloader"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/LocalAI/pkg/utils"
"github.com/mudler/xlog"
)
// backendHandler processes one backend gallery operation: deletion,
// installation from an external source (OCI image/URL/path), or
// installation from the configured galleries. Progress and terminal status
// are published under op.ID, and cancellation is honoured at each phase.
func (g *GalleryService) backendHandler(op *GalleryOp[gallery.GalleryBackend, any], systemState *system.SystemState) error {
	utils.ResetDownloadTimers()

	// Check if already cancelled
	if op.Context != nil {
		select {
		case <-op.Context.Done():
			g.UpdateStatus(op.ID, &GalleryOpStatus{
				Cancelled:          true,
				Processed:          true,
				Message:            "cancelled",
				GalleryElementName: op.GalleryElementName,
			})
			return op.Context.Err()
		default:
		}
	}

	g.UpdateStatus(op.ID, &GalleryOpStatus{Message: fmt.Sprintf("processing backend: %s", op.GalleryElementName), Progress: 0, Cancellable: true})

	// displayDownload displays the download progress
	progressCallback := func(fileName string, current string, total string, percentage float64) {
		// Check for cancellation during progress updates
		if op.Context != nil {
			select {
			case <-op.Context.Done():
				return
			default:
			}
		}
		g.UpdateStatus(op.ID, &GalleryOpStatus{Message: fmt.Sprintf(processingMessage, fileName, total, current), FileName: fileName, Progress: percentage, TotalFileSize: total, DownloadedFileSize: current, Cancellable: true})
		utils.DisplayDownloadFunction(fileName, current, total, percentage)
	}

	ctx := op.Context
	if ctx == nil {
		ctx = context.Background()
	}

	var err error
	if op.Delete {
		// NOTE(review): deletion uses g.appConfig.SystemState while the other
		// branches use the systemState parameter — confirm these are the same.
		err = gallery.DeleteBackendFromSystem(g.appConfig.SystemState, op.GalleryElementName)
		g.modelLoader.DeleteExternalBackend(op.GalleryElementName)
	} else if op.ExternalURI != "" {
		// External backend installation (OCI image, URL, or path)
		xlog.Info("Installing external backend", "uri", op.ExternalURI, "name", op.ExternalName, "alias", op.ExternalAlias)
		err = InstallExternalBackend(ctx, g.appConfig.BackendGalleries, systemState, g.modelLoader, progressCallback, op.ExternalURI, op.ExternalName, op.ExternalAlias)
		// Update GalleryElementName for status tracking if a name was derived
		if op.ExternalName != "" {
			op.GalleryElementName = op.ExternalName
		}
	} else {
		// Standard gallery installation
		xlog.Warn("installing backend", "backend", op.GalleryElementName)
		xlog.Debug("backend galleries", "galleries", g.appConfig.BackendGalleries)
		err = gallery.InstallBackendFromGallery(ctx, g.appConfig.BackendGalleries, systemState, g.modelLoader, op.GalleryElementName, progressCallback, true)
	}
	if err != nil {
		// Check if error is due to cancellation
		if op.Context != nil && errors.Is(err, op.Context.Err()) {
			g.UpdateStatus(op.ID, &GalleryOpStatus{
				Cancelled:          true,
				Processed:          true,
				Message:            "cancelled",
				GalleryElementName: op.GalleryElementName,
			})
			return err
		}
		xlog.Error("error installing backend", "error", err, "backend", op.GalleryElementName)
		if !op.Delete {
			// If we didn't install the backend, we need to make sure we don't have a leftover directory
			// (best-effort cleanup: the returned error is intentionally ignored)
			gallery.DeleteBackendFromSystem(systemState, op.GalleryElementName)
		}
		return err
	}

	g.UpdateStatus(op.ID,
		&GalleryOpStatus{
			Deletion:           op.Delete,
			Processed:          true,
			GalleryElementName: op.GalleryElementName,
			Message:            "completed",
			Progress:           100,
			Cancellable:        false})
	return nil
}
// InstallExternalBackend installs a backend from an external source (OCI image, URL, or path).
// This method contains the logic to detect the input type and call the appropriate installation function.
// It can be used by both CLI and Web UI for installing backends from external sources.
//
// Name resolution per source kind:
//   - local directory: name defaults to the directory's base name;
//   - OCI image reference: an explicit name is required;
//   - OCI file URL: name defaults to the URL's filename without extension;
//   - anything else is treated as a gallery backend name (name/alias must be empty).
func InstallExternalBackend(ctx context.Context, galleries []config.Gallery, systemState *system.SystemState, modelLoader *model.ModelLoader, downloadStatus func(string, string, string, float64), backend, name, alias string) error {
	uri := downloader.URI(backend)

	// installDirect installs the backend under the given name, bypassing the
	// galleries. It deduplicates the identical install call that was
	// previously repeated in three switch cases.
	installDirect := func(name string) error {
		if err := gallery.InstallBackend(ctx, systemState, modelLoader, &gallery.GalleryBackend{
			Metadata: gallery.Metadata{
				Name: name,
			},
			Alias: alias,
			URI:   backend,
		}, downloadStatus); err != nil {
			return fmt.Errorf("error installing backend %s: %w", backend, err)
		}
		return nil
	}

	switch {
	case uri.LooksLikeDir():
		if name == "" { // infer it from the path
			name = filepath.Base(backend)
		}
		xlog.Info("Installing backend from path", "backend", backend, "name", name)
		return installDirect(name)
	case uri.LooksLikeOCI() && !uri.LooksLikeOCIFile():
		if name == "" {
			return fmt.Errorf("specifying a name is required for OCI images")
		}
		xlog.Info("Installing backend from OCI image", "backend", backend, "name", name)
		return installDirect(name)
	case uri.LooksLikeOCIFile():
		derivedName, err := uri.FilenameFromUrl()
		if err != nil {
			return fmt.Errorf("failed to get filename from URL: %w", err)
		}
		// strip extension if any
		derivedName = strings.TrimSuffix(derivedName, filepath.Ext(derivedName))
		// Use provided name if available, otherwise use derived name
		if name == "" {
			name = derivedName
		}
		xlog.Info("Installing backend from OCI image", "backend", backend, "name", name)
		return installDirect(name)
	default:
		// Treat as gallery backend name
		if name != "" || alias != "" {
			return fmt.Errorf("specifying a name or alias is not supported for gallery backends")
		}
		if err := gallery.InstallBackendFromGallery(ctx, galleries, systemState, modelLoader, backend, downloadStatus, true); err != nil {
			return fmt.Errorf("error installing backend %s: %w", backend, err)
		}
		return nil
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/services/agent_jobs.go | core/services/agent_jobs.go | package services
import (
"bytes"
"context"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"io"
"net"
"net/http"
"os"
"path/filepath"
"sort"
"strings"
"sync"
"text/template"
"time"
"github.com/Masterminds/sprig/v3"
"github.com/google/uuid"
"github.com/mudler/LocalAI/core/config"
mcpTools "github.com/mudler/LocalAI/core/http/endpoints/mcp"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/core/templates"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/xsync"
"github.com/mudler/cogito"
"github.com/robfig/cron/v3"
"github.com/mudler/xlog"
)
// AgentJobService manages agent tasks and job execution
type AgentJobService struct {
	// Core dependencies shared with the rest of the application.
	appConfig    *config.ApplicationConfig
	modelLoader  *model.ModelLoader
	configLoader *config.ModelConfigLoader
	evaluator    *templates.Evaluator
	// Storage (file-based with in-memory cache)
	tasks     *xsync.SyncedMap[string, schema.Task] // keyed by task ID
	jobs      *xsync.SyncedMap[string, schema.Job]  // keyed by job ID
	tasksFile string                                // Path to agent_tasks.json
	jobsFile  string                                // Path to agent_jobs.json
	// Job execution channel
	jobQueue chan JobExecution
	// Cancellation support
	cancellations *xsync.SyncedMap[string, context.CancelFunc] // keyed by job ID
	// Cron scheduler
	cronScheduler *cron.Cron
	cronEntries   *xsync.SyncedMap[string, cron.EntryID] // keyed by task ID
	// Job retention
	retentionDays int // From runtime settings, default: 30
	// Service lifecycle
	ctx    context.Context
	cancel context.CancelFunc
	// Mutex for file operations
	fileMutex sync.Mutex
}
// JobExecution represents a job to be executed
type JobExecution struct {
	Job    schema.Job         // snapshot of the job at queue time
	Task   schema.Task        // the task this job belongs to
	Ctx    context.Context    // cancellable context for this execution
	Cancel context.CancelFunc // releases Ctx; also registered in cancellations
}
// Media type identifiers used to classify job attachments and task
// multimedia sources.
const (
	JobImageType = "image"
	JobVideoType = "video"
	JobAudioType = "audio"
	JobFileType  = "file"
)
// NewAgentJobService creates a new AgentJobService instance
//
// Job retention defaults to 30 days when unset in the application config.
// Task/job persistence is disabled (empty file paths) unless
// DynamicConfigsDir is configured.
func NewAgentJobService(
	appConfig *config.ApplicationConfig,
	modelLoader *model.ModelLoader,
	configLoader *config.ModelConfigLoader,
	evaluator *templates.Evaluator,
) *AgentJobService {
	retentionDays := appConfig.AgentJobRetentionDays
	if retentionDays == 0 {
		retentionDays = 30 // Default
	}
	tasksFile := ""
	jobsFile := ""
	if appConfig.DynamicConfigsDir != "" {
		tasksFile = filepath.Join(appConfig.DynamicConfigsDir, "agent_tasks.json")
		jobsFile = filepath.Join(appConfig.DynamicConfigsDir, "agent_jobs.json")
	}
	return &AgentJobService{
		appConfig:     appConfig,
		modelLoader:   modelLoader,
		configLoader:  configLoader,
		evaluator:     evaluator,
		tasks:         xsync.NewSyncedMap[string, schema.Task](),
		jobs:          xsync.NewSyncedMap[string, schema.Job](),
		tasksFile:     tasksFile,
		jobsFile:      jobsFile,
		jobQueue:      make(chan JobExecution, 100), // Buffer for 100 jobs
		cancellations: xsync.NewSyncedMap[string, context.CancelFunc](),
		// NOTE(review): cron.New() without cron.WithSeconds() uses the
		// standard 5-field parser, so the previous "Support seconds in cron"
		// comment was inaccurate — confirm whether seconds support is needed.
		cronScheduler: cron.New(),
		cronEntries:   xsync.NewSyncedMap[string, cron.EntryID](),
		retentionDays: retentionDays,
	}
}
// LoadTasksFromFile restores persisted tasks from agent_tasks.json into the
// in-memory map and re-arms cron schedules for enabled tasks that define one.
// A missing file or disabled persistence is not an error.
func (s *AgentJobService) LoadTasksFromFile() error {
	if s.tasksFile == "" {
		// Persistence disabled; nothing to load.
		return nil
	}
	s.fileMutex.Lock()
	defer s.fileMutex.Unlock()
	if _, statErr := os.Stat(s.tasksFile); os.IsNotExist(statErr) {
		xlog.Debug("agent_tasks.json not found, starting with empty tasks")
		return nil
	}
	raw, readErr := os.ReadFile(s.tasksFile)
	if readErr != nil {
		return fmt.Errorf("failed to read tasks file: %w", readErr)
	}
	var persisted schema.TasksFile
	if parseErr := json.Unmarshal(raw, &persisted); parseErr != nil {
		return fmt.Errorf("failed to parse tasks file: %w", parseErr)
	}
	for _, t := range persisted.Tasks {
		s.tasks.Set(t.ID, t)
		// Re-arm the schedule for tasks that were cron-driven when saved.
		if t.Enabled && t.Cron != "" {
			if schedErr := s.ScheduleCronTask(t); schedErr != nil {
				xlog.Warn("Failed to schedule cron task on load", "error", schedErr, "task_id", t.ID)
			}
		}
	}
	xlog.Info("Loaded tasks from file", "count", len(persisted.Tasks))
	return nil
}
// SaveTasksToFile writes the current in-memory tasks to agent_tasks.json as
// indented JSON. A no-op when persistence is disabled.
func (s *AgentJobService) SaveTasksToFile() error {
	if s.tasksFile == "" {
		return nil
	}
	s.fileMutex.Lock()
	defer s.fileMutex.Unlock()
	payload := schema.TasksFile{Tasks: s.tasks.Values()}
	encoded, err := json.MarshalIndent(payload, "", "  ")
	if err != nil {
		return fmt.Errorf("failed to marshal tasks: %w", err)
	}
	if err := os.WriteFile(s.tasksFile, encoded, 0600); err != nil {
		return fmt.Errorf("failed to write tasks file: %w", err)
	}
	return nil
}
// LoadJobsFromFile restores persisted jobs from agent_jobs.json into the
// in-memory job map. A missing file or disabled persistence is not an error.
func (s *AgentJobService) LoadJobsFromFile() error {
	if s.jobsFile == "" {
		// Persistence disabled; nothing to load.
		return nil
	}
	s.fileMutex.Lock()
	defer s.fileMutex.Unlock()
	if _, statErr := os.Stat(s.jobsFile); os.IsNotExist(statErr) {
		xlog.Debug("agent_jobs.json not found, starting with empty jobs")
		return nil
	}
	raw, readErr := os.ReadFile(s.jobsFile)
	if readErr != nil {
		return fmt.Errorf("failed to read jobs file: %w", readErr)
	}
	var persisted schema.JobsFile
	if parseErr := json.Unmarshal(raw, &persisted); parseErr != nil {
		return fmt.Errorf("failed to parse jobs file: %w", parseErr)
	}
	for _, j := range persisted.Jobs {
		s.jobs.Set(j.ID, j)
	}
	xlog.Info("Loaded jobs from file", "count", len(persisted.Jobs))
	return nil
}
// SaveJobsToFile writes the current in-memory jobs (with a refreshed
// LastCleanup timestamp) to agent_jobs.json as indented JSON. A no-op when
// persistence is disabled.
func (s *AgentJobService) SaveJobsToFile() error {
	if s.jobsFile == "" {
		return nil
	}
	s.fileMutex.Lock()
	defer s.fileMutex.Unlock()
	payload := schema.JobsFile{
		Jobs:        s.jobs.Values(),
		LastCleanup: time.Now(),
	}
	encoded, err := json.MarshalIndent(payload, "", "  ")
	if err != nil {
		return fmt.Errorf("failed to marshal jobs: %w", err)
	}
	if err := os.WriteFile(s.jobsFile, encoded, 0600); err != nil {
		return fmt.Errorf("failed to write jobs file: %w", err)
	}
	return nil
}
// CreateTask validates and stores a new task, assigns it a UUID, schedules a
// cron entry when the task is enabled and defines a cron expression, and
// persists the task list. Returns the new task's ID.
func (s *AgentJobService) CreateTask(task schema.Task) (string, error) {
	if task.Name == "" {
		return "", fmt.Errorf("task name is required")
	}
	if task.Model == "" {
		return "", fmt.Errorf("task model is required")
	}
	if task.Prompt == "" {
		return "", fmt.Errorf("task prompt is required")
	}
	// Generate UUID
	id := uuid.New().String()
	task.ID = id
	now := time.Now()
	task.CreatedAt = now
	task.UpdatedAt = now
	// NOTE(review): because Enabled is a bool, "unset" and "explicitly
	// disabled" are indistinguishable here, so every new task ends up
	// enabled — confirm this is intended.
	if !task.Enabled {
		task.Enabled = true // Default to enabled
	}
	// Store task
	s.tasks.Set(id, task)
	// Schedule cron if enabled and has cron expression
	if task.Enabled && task.Cron != "" {
		if err := s.ScheduleCronTask(task); err != nil {
			xlog.Warn("Failed to schedule cron task", "error", err, "task_id", id)
			// Don't fail task creation if cron scheduling fails
		}
	}
	// Save to file
	if err := s.SaveTasksToFile(); err != nil {
		xlog.Error("Failed to save tasks to file", "error", err)
		// Don't fail task creation if file save fails
	}
	return id, nil
}
// UpdateTask replaces the stored task with the given ID, preserving its
// original ID and creation time, refreshing its cron schedule, and
// persisting the task list.
func (s *AgentJobService) UpdateTask(id string, task schema.Task) error {
	if !s.tasks.Exists(id) {
		return fmt.Errorf("task not found: %s", id)
	}
	prev := s.tasks.Get(id)
	// Keep immutable fields from the stored record.
	task.ID = id
	task.CreatedAt = prev.CreatedAt
	task.UpdatedAt = time.Now()
	// Drop any schedule belonging to the previous revision.
	if prev.Cron != "" {
		s.UnscheduleCronTask(id)
	}
	s.tasks.Set(id, task)
	// Re-arm the schedule for the new revision, if applicable.
	if task.Enabled && task.Cron != "" {
		if err := s.ScheduleCronTask(task); err != nil {
			xlog.Warn("Failed to schedule cron task", "error", err, "task_id", id)
		}
	}
	if err := s.SaveTasksToFile(); err != nil {
		xlog.Error("Failed to save tasks to file", "error", err)
	}
	return nil
}
// DeleteTask removes a task, unschedules any cron entry it owns, and
// persists the updated task list.
func (s *AgentJobService) DeleteTask(id string) error {
	if !s.tasks.Exists(id) {
		return fmt.Errorf("task not found: %s", id)
	}
	s.UnscheduleCronTask(id)
	s.tasks.Delete(id)
	if saveErr := s.SaveTasksToFile(); saveErr != nil {
		xlog.Error("Failed to save tasks to file", "error", saveErr)
	}
	return nil
}
// GetTask returns the task with the given ID, or an error if unknown.
func (s *AgentJobService) GetTask(id string) (*schema.Task, error) {
	stored := s.tasks.Get(id)
	if stored.ID == "" {
		// The zero value signals a missing entry in the synced map.
		return nil, fmt.Errorf("task not found: %s", id)
	}
	return &stored, nil
}
// ListTasks returns all tasks sorted newest-first by creation time, breaking
// ties by name for stable output.
func (s *AgentJobService) ListTasks() []schema.Task {
	tasks := s.tasks.Values()
	sort.Slice(tasks, func(a, b int) bool {
		ta, tb := tasks[a], tasks[b]
		if !ta.CreatedAt.Equal(tb.CreatedAt) {
			return ta.CreatedAt.After(tb.CreatedAt)
		}
		return ta.Name < tb.Name
	})
	return tasks
}
// buildPrompt renders templateStr as a text/template with params as the
// template data, returning the rendered prompt.
func (s *AgentJobService) buildPrompt(templateStr string, params map[string]string) (string, error) {
	parsed, err := template.New("prompt").Parse(templateStr)
	if err != nil {
		return "", fmt.Errorf("failed to parse prompt template: %w", err)
	}
	var rendered bytes.Buffer
	if err = parsed.Execute(&rendered, params); err != nil {
		return "", fmt.Errorf("failed to execute prompt template: %w", err)
	}
	return rendered.String(), nil
}
// ExecuteJob creates a job for the given task, queues it for execution, and
// returns the new job ID.
//
// params are substituted into the task's prompt template; triggeredBy records
// the origin of the run (e.g. "cron"). multimedia can be nil for backward
// compatibility. When triggered by cron, attachments are fetched from the
// task's configured multimedia sources; any non-empty job-level multimedia
// slice then overrides the corresponding task-level content.
//
// Returns an error if the task does not exist, is disabled, or the job queue
// is full.
func (s *AgentJobService) ExecuteJob(taskID string, params map[string]string, triggeredBy string, multimedia *schema.MultimediaAttachment) (string, error) {
	task := s.tasks.Get(taskID)
	if task.ID == "" {
		return "", fmt.Errorf("task not found: %s", taskID)
	}
	if !task.Enabled {
		return "", fmt.Errorf("task is disabled: %s", taskID)
	}
	// Create job
	jobID := uuid.New().String()
	now := time.Now()
	job := schema.Job{
		ID:          jobID,
		TaskID:      taskID,
		Status:      schema.JobStatusPending,
		Parameters:  params,
		CreatedAt:   now,
		TriggeredBy: triggeredBy,
	}
	// Handle multimedia: merge task-level (for cron) and job-level (for manual execution)
	if triggeredBy == "cron" && len(task.MultimediaSources) > 0 {
		// Fetch multimedia from task sources
		job.Images = []string{}
		job.Videos = []string{}
		job.Audios = []string{}
		job.Files = []string{}
		for _, source := range task.MultimediaSources {
			// Fetch content from URL with custom headers; a failing source is
			// skipped rather than failing the whole job.
			dataURI, err := s.fetchMultimediaFromURL(source.URL, source.Headers, source.Type)
			if err != nil {
				xlog.Warn("Failed to fetch multimedia from task source", "error", err, "url", source.URL, "type", source.Type)
				continue
			}
			// Add to appropriate slice based on type
			switch source.Type {
			case JobImageType:
				job.Images = append(job.Images, dataURI)
			case JobVideoType:
				job.Videos = append(job.Videos, dataURI)
			case JobAudioType:
				job.Audios = append(job.Audios, dataURI)
			case JobFileType:
				job.Files = append(job.Files, dataURI)
			}
		}
	}
	// Override with job-level multimedia if provided (manual execution takes precedence)
	if multimedia != nil {
		if len(multimedia.Images) > 0 {
			job.Images = multimedia.Images
		}
		if len(multimedia.Videos) > 0 {
			job.Videos = multimedia.Videos
		}
		if len(multimedia.Audios) > 0 {
			job.Audios = multimedia.Audios
		}
		if len(multimedia.Files) > 0 {
			job.Files = multimedia.Files
		}
	}
	// Store job
	s.jobs.Set(jobID, job)
	// Save to file (async, don't block)
	go func() {
		if err := s.SaveJobsToFile(); err != nil {
			xlog.Error("Failed to save jobs to file", "error", err)
		}
	}()
	// Create context for cancellation
	ctx, cancel := context.WithCancel(context.Background())
	s.cancellations.Set(jobID, cancel)
	// Queue job
	select {
	case s.jobQueue <- JobExecution{
		Job:    job,
		Task:   task,
		Ctx:    ctx,
		Cancel: cancel,
	}:
	default:
		// Queue is full: release the cancellation handle registered above
		// (previously both the context and its cancel func were leaked in
		// s.cancellations forever) and mark the job failed.
		cancel()
		s.cancellations.Delete(jobID)
		job.Status = schema.JobStatusFailed
		job.Error = "job queue is full"
		s.jobs.Set(jobID, job)
		return "", fmt.Errorf("job queue is full")
	}
	return jobID, nil
}
// GetJob returns the job with the given ID, or an error if unknown.
func (s *AgentJobService) GetJob(id string) (*schema.Job, error) {
	stored := s.jobs.Get(id)
	if stored.ID == "" {
		// The zero value signals a missing entry in the synced map.
		return nil, fmt.Errorf("job not found: %s", id)
	}
	return &stored, nil
}
// ListJobs returns jobs sorted by creation time (newest first), optionally
// filtered by task ID and/or status (pass nil to skip a filter). A positive
// limit caps the number of results; limit <= 0 means no cap.
func (s *AgentJobService) ListJobs(taskID *string, status *schema.JobStatus, limit int) []schema.Job {
	filtered := []schema.Job{}
	for _, job := range s.jobs.Values() {
		if taskID != nil && job.TaskID != *taskID {
			continue
		}
		if status != nil && job.Status != *status {
			continue
		}
		filtered = append(filtered, job)
	}
	// Sort by CreatedAt descending (newest first). sort.SliceStable replaces
	// the previous hand-rolled O(n²) swap loop and matches ListTasks' use of
	// the sort package.
	sort.SliceStable(filtered, func(i, j int) bool {
		return filtered[i].CreatedAt.After(filtered[j].CreatedAt)
	})
	// Apply limit
	if limit > 0 && limit < len(filtered) {
		filtered = filtered[:limit]
	}
	return filtered
}
// CancelJob cancels a pending or running job: it fires the job's stored
// cancel function (if still registered), marks the job cancelled with a
// completion time, and persists asynchronously.
func (s *AgentJobService) CancelJob(id string) error {
	job := s.jobs.Get(id)
	if job.ID == "" {
		return fmt.Errorf("job not found: %s", id)
	}
	// Only jobs that have not reached a terminal state can be cancelled.
	switch job.Status {
	case schema.JobStatusPending, schema.JobStatusRunning:
		// cancellable
	default:
		return fmt.Errorf("job cannot be cancelled: status is %s", job.Status)
	}
	// Fire the cancellation, if a handle is still registered.
	if s.cancellations.Exists(id) {
		s.cancellations.Get(id)()
		s.cancellations.Delete(id)
	}
	// Record the terminal state.
	completedAt := time.Now()
	job.Status = schema.JobStatusCancelled
	job.CompletedAt = &completedAt
	s.jobs.Set(id, job)
	// Persist asynchronously.
	go func() {
		if err := s.SaveJobsToFile(); err != nil {
			xlog.Error("Failed to save jobs to file", "error", err)
		}
	}()
	return nil
}
// DeleteJob removes a job from the in-memory store and persists the change.
func (s *AgentJobService) DeleteJob(id string) error {
	if !s.jobs.Exists(id) {
		return fmt.Errorf("job not found: %s", id)
	}
	s.jobs.Delete(id)
	if saveErr := s.SaveJobsToFile(); saveErr != nil {
		xlog.Error("Failed to save jobs to file", "error", saveErr)
	}
	return nil
}
// multimediaContent is a minimal cogito.Multimedia implementation wrapping a
// URL (or data URI) together with the media kind it was declared as.
type multimediaContent struct {
	url       string // data URI or http(s) URL, passed through to cogito
	mediaType string // one of the Job*Type constants (image/video/audio/file)
}

// URL returns the wrapped URL or data URI (satisfies cogito.Multimedia).
func (mu multimediaContent) URL() string {
	return mu.url
}
// fetchMultimediaFromURL downloads content from url with the given custom
// headers and returns it as a base64 data URI. The MIME type is taken from
// the server's Content-Type header when present, otherwise from a default
// derived from mediaType.
func (s *AgentJobService) fetchMultimediaFromURL(url string, headers map[string]string, mediaType string) (string, error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return "", fmt.Errorf("failed to create request: %w", err)
	}
	for k, v := range headers {
		req.Header.Set(k, v)
	}
	// Bounded client so a stuck source cannot hang the caller forever.
	httpClient := &http.Client{Timeout: 30 * time.Second}
	resp, err := httpClient.Do(req)
	if err != nil {
		return "", fmt.Errorf("failed to fetch URL: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return "", fmt.Errorf("HTTP error: %d", resp.StatusCode)
	}
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", fmt.Errorf("failed to read response: %w", err)
	}
	// Prefer the server-reported Content-Type; fall back to the default for
	// the declared media type.
	mime := s.getMimeTypeForMediaType(mediaType)
	if ct := resp.Header.Get("Content-Type"); ct != "" {
		mime = ct
	}
	return fmt.Sprintf("data:%s;base64,%s", mime, base64.StdEncoding.EncodeToString(body)), nil
}
// getMimeTypeForMediaType maps a Job*Type media identifier to a default MIME
// type, falling back to the generic binary type for anything unrecognized.
func (s *AgentJobService) getMimeTypeForMediaType(mediaType string) string {
	switch mediaType {
	case JobImageType:
		return "image/png"
	case JobVideoType:
		return "video/mp4"
	case JobAudioType:
		return "audio/mpeg"
	default:
		// JobFileType and unknown kinds share the generic binary type.
		return "application/octet-stream"
	}
}
// convertToMultimediaContent turns a slice of strings (data URIs, URLs, or
// bare base64 payloads) into cogito.Multimedia values. Empty entries are
// skipped; bare base64 payloads are wrapped in a data URI with a default MIME
// type for the given media kind. The error return is always nil (kept for
// interface stability).
func (s *AgentJobService) convertToMultimediaContent(items []string, mediaType string) ([]cogito.Multimedia, error) {
	out := make([]cogito.Multimedia, 0, len(items))
	for _, entry := range items {
		switch {
		case entry == "":
			// Skip blanks.
		case strings.HasPrefix(entry, "data:"),
			strings.HasPrefix(entry, "http://"),
			strings.HasPrefix(entry, "https://"):
			// Already a data URI, or a URL cogito can fetch itself.
			out = append(out, multimediaContent{url: entry, mediaType: mediaType})
		default:
			// Assume a bare base64 payload: add the data URI prefix.
			out = append(out, multimediaContent{
				url:       fmt.Sprintf("data:%s;base64,%s", s.getMimeTypeForMediaType(mediaType), entry),
				mediaType: mediaType,
			})
		}
	}
	return out, nil
}
// executeJobInternal runs a queued job to completion using cogito. It loads
// the task's model configuration, opens the model's MCP sessions, renders the
// prompt template with the job parameters, attaches any multimedia, executes
// tools, asks the model for a final answer, and records the result together
// with execution traces on the job.
//
// Job state transitions (running -> completed/failed) are written to the
// in-memory job map as they happen; the jobs file is persisted asynchronously
// only on successful completion (matching previous behavior), and webhooks
// are dispatched in the background afterwards.
//
// NOTE(review): ctx is the last parameter; Go convention is ctx-first. Left
// unchanged so the worker call site keeps compiling.
func (s *AgentJobService) executeJobInternal(job schema.Job, task schema.Task, ctx context.Context) error {
	// Mark the job as running.
	now := time.Now()
	job.Status = schema.JobStatusRunning
	job.StartedAt = &now
	s.jobs.Set(job.ID, job)

	// fail records a terminal failed state (message plus completion time) on
	// the job and returns retErr. It replaces the five-line bookkeeping
	// sequence previously duplicated at every error exit.
	fail := func(jobErr string, retErr error) error {
		job.Status = schema.JobStatusFailed
		job.Error = jobErr
		completedAt := time.Now()
		job.CompletedAt = &completedAt
		s.jobs.Set(job.ID, job)
		return retErr
	}

	// record appends a trace and pushes the updated job to the store; used by
	// the cogito callbacks below.
	record := func(trace schema.JobTrace) {
		job.Traces = append(job.Traces, trace)
		s.jobs.Set(job.ID, job)
	}

	// Load model config.
	modelConfig, err := s.configLoader.LoadModelConfigFileByNameDefaultOptions(task.Model, s.appConfig)
	if err != nil {
		return fail(fmt.Sprintf("failed to load model config: %v", err), fmt.Errorf("failed to load model config: %w", err))
	}

	// Agent jobs require MCP tooling; reject models without MCP servers.
	if modelConfig.MCP.Servers == "" && modelConfig.MCP.Stdio == "" {
		return fail("no MCP servers configured for model", fmt.Errorf("no MCP servers configured for model: %s", task.Model))
	}

	// Get MCP config from model config.
	remote, stdio, err := modelConfig.MCP.MCPConfigFromYAML()
	if err != nil {
		return fail(fmt.Sprintf("failed to get MCP config: %v", err), fmt.Errorf("failed to get MCP config: %w", err))
	}

	// Get MCP sessions.
	sessions, err := mcpTools.SessionsFromMCPConfig(modelConfig.Name, remote, stdio)
	if err != nil {
		return fail(fmt.Sprintf("failed to get MCP sessions: %v", err), fmt.Errorf("failed to get MCP sessions: %w", err))
	}
	if len(sessions) == 0 {
		return fail("no working MCP servers found", fmt.Errorf("no working MCP servers found"))
	}

	// Render the prompt template with the job parameters.
	prompt, err := s.buildPrompt(task.Prompt, job.Parameters)
	if err != nil {
		return fail(fmt.Sprintf("failed to build prompt: %v", err), fmt.Errorf("failed to build prompt: %w", err))
	}

	// Collect all multimedia attachments. Conversion failures are logged and
	// skipped rather than failing the whole job (best-effort, as before).
	multimediaItems := []cogito.Multimedia{}
	for _, group := range []struct {
		items []string
		kind  string
	}{
		{job.Images, JobImageType},
		{job.Videos, JobVideoType},
		{job.Audios, JobAudioType},
		{job.Files, JobFileType},
	} {
		if len(group.items) == 0 {
			continue
		}
		converted, convErr := s.convertToMultimediaContent(group.items, group.kind)
		if convErr != nil {
			xlog.Warn("Failed to convert multimedia", "error", convErr, "type", group.kind, "job_id", job.ID)
			continue
		}
		multimediaItems = append(multimediaItems, converted...)
	}

	fragment := cogito.NewEmptyFragment().AddMessage("user", prompt, multimediaItems...)

	// The job talks to the local API over loopback on the configured port.
	_, port, err := net.SplitHostPort(s.appConfig.APIAddress)
	if err != nil {
		return fail(fmt.Sprintf("failed to parse API address: %v", err), fmt.Errorf("failed to parse API address: %w", err))
	}
	apiKey := ""
	if len(s.appConfig.ApiKeys) > 0 {
		apiKey = s.appConfig.ApiKeys[0]
	}
	defaultLLM := cogito.NewOpenAILLM(modelConfig.Name, apiKey, "http://127.0.0.1:"+port)

	// Traces are appended by the callbacks as execution progresses.
	job.Traces = []schema.JobTrace{}

	// Build cogito options.
	cogitoOpts := modelConfig.BuildCogitoOptions()
	cogitoOpts = append(
		cogitoOpts,
		cogito.WithContext(ctx),
		cogito.WithMCPs(sessions...),
		cogito.WithStatusCallback(func(status string) {
			xlog.Debug("Status", "job_id", job.ID, "model", modelConfig.Name, "status", status)
			record(schema.JobTrace{
				Type:      "status",
				Content:   status,
				Timestamp: time.Now(),
			})
		}),
		cogito.WithReasoningCallback(func(reasoning string) {
			xlog.Debug("Reasoning", "job_id", job.ID, "model", modelConfig.Name, "reasoning", reasoning)
			record(schema.JobTrace{
				Type:      "reasoning",
				Content:   reasoning,
				Timestamp: time.Now(),
			})
		}),
		cogito.WithToolCallBack(func(t *cogito.ToolChoice, state *cogito.SessionState) cogito.ToolCallDecision {
			xlog.Debug("Tool call", "job_id", job.ID, "model", modelConfig.Name, "tool", t.Name, "reasoning", t.Reasoning, "arguments", t.Arguments)
			arguments := make(map[string]interface{})
			if t.Arguments != nil {
				arguments = t.Arguments
			}
			record(schema.JobTrace{
				Type:      "tool_call",
				Content:   t.Reasoning,
				Timestamp: time.Now(),
				ToolName:  t.Name,
				Arguments: arguments,
			})
			// All tool calls are auto-approved for agent jobs.
			return cogito.ToolCallDecision{
				Approved: true,
			}
		}),
		cogito.WithToolCallResultCallback(func(t cogito.ToolStatus) {
			xlog.Debug("Tool call result", "job_id", job.ID, "model", modelConfig.Name, "tool", t.Name, "result", t.Result, "tool_arguments", t.ToolArguments)
			// Convert ToolArguments to a map via a JSON round-trip and attach
			// the result alongside them.
			arguments := make(map[string]interface{})
			if toolArgsBytes, err := json.Marshal(t.ToolArguments); err == nil {
				var toolArgsMap map[string]interface{}
				if err := json.Unmarshal(toolArgsBytes, &toolArgsMap); err == nil {
					arguments = toolArgsMap
				}
			}
			arguments["result"] = t.Result
			record(schema.JobTrace{
				Type:      "tool_result",
				Content:   t.Result,
				Timestamp: time.Now(),
				ToolName:  t.Name,
				Arguments: arguments,
			})
		}),
	)

	// Execute tools; cogito.ErrNoToolSelected is not a failure.
	f, err := cogito.ExecuteTools(defaultLLM, fragment, cogitoOpts...)
	if err != nil && !errors.Is(err, cogito.ErrNoToolSelected) {
		return fail(fmt.Sprintf("failed to execute tools: %v", err), fmt.Errorf("failed to execute tools: %w", err))
	}

	// Get the final response from the model.
	f, err = defaultLLM.Ask(ctx, f)
	if err != nil {
		return fail(fmt.Sprintf("failed to get response: %v", err), fmt.Errorf("failed to get response: %w", err))
	}

	// Extract traces from fragment.Status after execution: it carries complete
	// information about tool calls and results, so the callback-recorded
	// tool_call/tool_result traces are replaced with Status data (status and
	// reasoning traces from the callbacks are kept).
	if f.Status != nil {
		filteredTraces := []schema.JobTrace{}
		for _, trace := range job.Traces {
			if trace.Type != "tool_call" && trace.Type != "tool_result" {
				filteredTraces = append(filteredTraces, trace)
			}
		}
		job.Traces = filteredTraces
		// Tool calls from Status.ToolsCalled, decoded via a JSON round-trip
		// since the entries are opaque here; undecodable entries are skipped.
		for _, toolCallInterface := range f.Status.ToolsCalled {
			toolCallBytes, err := json.Marshal(toolCallInterface)
			if err != nil {
				continue
			}
			var toolCallData map[string]interface{}
			if err := json.Unmarshal(toolCallBytes, &toolCallData); err != nil {
				continue
			}
			arguments := make(map[string]interface{})
			if args, ok := toolCallData["arguments"].(map[string]interface{}); ok {
				arguments = args
			}
			reasoning := ""
			if r, ok := toolCallData["reasoning"].(string); ok {
				reasoning = r
			}
			name := ""
			if n, ok := toolCallData["name"].(string); ok {
				name = n
			}
			job.Traces = append(job.Traces, schema.JobTrace{
				Type:      "tool_call",
				Content:   reasoning,
				Timestamp: time.Now(),
				ToolName:  name,
				Arguments: arguments,
			})
		}
		// Tool results from Status.ToolResults.
		for _, toolResult := range f.Status.ToolResults {
			arguments := make(map[string]interface{})
			if toolArgsBytes, err := json.Marshal(toolResult.ToolArguments); err == nil {
				var toolArgsMap map[string]interface{}
				if err := json.Unmarshal(toolArgsBytes, &toolArgsMap); err == nil {
					arguments = toolArgsMap
				}
			}
			arguments["result"] = toolResult.Result
			job.Traces = append(job.Traces, schema.JobTrace{
				Type:      "tool_result",
				Content:   toolResult.Result,
				Timestamp: time.Now(),
				ToolName:  toolResult.Name,
				Arguments: arguments,
			})
		}
	}

	// Record successful completion.
	completedAt := time.Now()
	job.Status = schema.JobStatusCompleted
	job.Result = f.LastMessage().Content
	job.CompletedAt = &completedAt
	s.jobs.Set(job.ID, job)

	// Persist asynchronously and fire webhooks without blocking the worker.
	go func() {
		if err := s.SaveJobsToFile(); err != nil {
			xlog.Error("Failed to save jobs to file", "error", err)
		}
	}()
	go s.sendWebhooks(job, task)

	return nil
}
// worker processes jobs from the queue until ctx is cancelled. It is meant to
// run on its own goroutine; each dequeued job is executed synchronously.
func (s *AgentJobService) worker(ctx context.Context) {
	for {
		select {
		case <-ctx.Done():
			return
		case exec := <-s.jobQueue:
			// Check if job was cancelled before execution
			select {
			case <-exec.Ctx.Done():
				// Mark the job cancelled without running it.
				job := exec.Job
				now := time.Now()
				job.Status = schema.JobStatusCancelled
				job.CompletedAt = &now
				s.jobs.Set(job.ID, job)
				s.cancellations.Delete(job.ID)
				continue
			default:
			}
			// Execute job
			err := s.executeJobInternal(exec.Job, exec.Task, exec.Ctx)
			if err != nil {
				xlog.Error("Job execution failed", "error", err, "job_id", exec.Job.ID)
			}
			// Clean up cancellation
			s.cancellations.Delete(exec.Job.ID)
		}
	}
}
// ScheduleCronTask registers the task's cron expression with the shared
// scheduler so that ExecuteJob runs on each tick with the task's configured
// cron parameters. The resulting entry ID is tracked in cronEntries so the
// task can be unscheduled later. A task without a cron expression is a no-op.
func (s *AgentJobService) ScheduleCronTask(task schema.Task) error {
	if task.Cron == "" {
		return nil // No cron expression
	}
	// The expression is handed to the scheduler verbatim.
	// NOTE(review): cron.New() in the constructor uses the standard 5-field
	// parser; no 6-field/seconds conversion happens here despite what an
	// older comment claimed — confirm intended.
	cronExpr := task.Cron
	// Use cron parameters if provided, otherwise use empty map
	cronParams := task.CronParameters
	if cronParams == nil {
		cronParams = map[string]string{}
	}
	entryID, err := s.cronScheduler.AddFunc(cronExpr, func() {
		// Create job for cron execution with configured parameters
		// Multimedia will be fetched from task sources in ExecuteJob
		_, err := s.ExecuteJob(task.ID, cronParams, "cron", nil)
		if err != nil {
			xlog.Error("Failed to execute cron job", "error", err, "task_id", task.ID)
		}
	})
	if err != nil {
		return fmt.Errorf("failed to parse cron expression: %w", err)
	}
	s.cronEntries.Set(task.ID, entryID)
	xlog.Info("Scheduled cron task", "task_id", task.ID, "cron", cronExpr)
	return nil
}
// UnscheduleCronTask removes the task's entry from the cron scheduler and
// drops its tracked entry ID. A no-op for tasks with no registered entry.
func (s *AgentJobService) UnscheduleCronTask(taskID string) {
	if !s.cronEntries.Exists(taskID) {
		return
	}
	entry := s.cronEntries.Get(taskID)
	s.cronScheduler.Remove(entry)
	s.cronEntries.Delete(taskID)
	xlog.Info("Unscheduled cron task", "task_id", taskID)
}
// sendWebhooks sends webhook notifications to all webhooks configured on the
// task, concurrently, then records the aggregate delivery outcome on the job
// and persists it asynchronously.
//
// FIX: the previous implementation incremented a shared successCount from
// multiple goroutines without synchronization — a data race in which
// increments could be lost. The success count is now derived after all
// goroutines have finished, as total webhooks minus collected failures.
func (s *AgentJobService) sendWebhooks(job schema.Job, task schema.Task) {
	// Collect all webhook configs from new format
	webhookConfigs := task.Webhooks
	if len(webhookConfigs) == 0 {
		return // No webhooks configured
	}
	xlog.Info("Sending webhooks", "job_id", job.ID, "webhook_count", len(webhookConfigs))
	// Send all webhooks concurrently; failures are funneled through a buffered
	// channel sized so no sender can block.
	var wg sync.WaitGroup
	errors := make(chan webhookError, len(webhookConfigs))
	for _, webhookConfig := range webhookConfigs {
		wg.Add(1)
		go func(config schema.WebhookConfig) {
			defer wg.Done()
			if err := s.sendWebhook(job, task, config); err != nil {
				errors <- webhookError{
					URL:   config.URL,
					Error: err.Error(),
				}
			}
		}(webhookConfig)
	}
	wg.Wait()
	close(errors)
	// Collect errors (safe now: all senders have finished).
	var webhookErrors []string
	for err := range errors {
		webhookErrors = append(webhookErrors, fmt.Sprintf("%s: %s", err.URL, err.Error))
	}
	// Every webhook either failed (and reported an error) or succeeded.
	successCount := len(webhookConfigs) - len(webhookErrors)
	// Re-fetch the job so we update the latest persisted state.
	job = s.jobs.Get(job.ID)
	if job.ID == "" {
		return
	}
	now := time.Now()
	if len(webhookErrors) == 0 {
		// All webhooks succeeded
		job.WebhookSent = true
		job.WebhookSentAt = &now
		job.WebhookError = ""
	} else if successCount > 0 {
		// Some succeeded, some failed
		job.WebhookSent = true
		job.WebhookSentAt = &now
		job.WebhookError = fmt.Sprintf("Some webhooks failed (%d/%d succeeded): %s", successCount, len(webhookConfigs), strings.Join(webhookErrors, "; "))
	} else {
		// All failed
		job.WebhookSent = false
		job.WebhookError = fmt.Sprintf("All webhooks failed: %s", strings.Join(webhookErrors, "; "))
	}
	s.jobs.Set(job.ID, job)
	// Save to file (async)
	go func() {
		if err := s.SaveJobsToFile(); err != nil {
			xlog.Error("Failed to save jobs to file", "error", err)
		}
	}()
}
// webhookError represents a webhook delivery error, pairing the destination
// URL with the failure message so aggregate reports can name which endpoint
// failed.
type webhookError struct {
	// URL is the webhook endpoint whose delivery failed.
	URL string
	// Error is the human-readable delivery failure message.
	Error string
}
// sendWebhook sends a single webhook notification
// Returns an error if the webhook delivery failed
func (s *AgentJobService) sendWebhook(job schema.Job, task schema.Task, webhookConfig schema.WebhookConfig) error {
// Build payload
payload, err := s.buildWebhookPayload(job, task, webhookConfig)
if err != nil {
xlog.Error("Failed to build webhook payload", "error", err, "job_id", job.ID, "webhook_url", webhookConfig.URL)
return fmt.Errorf("failed to build payload: %w", err)
}
xlog.Debug("Sending webhook", "job_id", job.ID, "webhook_url", webhookConfig.URL, "payload", string(payload))
// Determine HTTP method (default to POST)
method := webhookConfig.Method
if method == "" {
method = "POST"
}
// Create HTTP request
req, err := http.NewRequest(method, webhookConfig.URL, bytes.NewBuffer(payload))
if err != nil {
xlog.Error("Failed to create webhook request", "error", err, "job_id", job.ID, "webhook_url", webhookConfig.URL)
return fmt.Errorf("failed to create request: %w", err)
}
// Set headers
req.Header.Set("Content-Type", "application/json")
for key, value := range webhookConfig.Headers {
req.Header.Set(key, value)
}
// Execute with retry
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | true |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/services/backend_monitor.go | core/services/backend_monitor.go | package services
import (
"context"
"fmt"
"strings"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/xlog"
gopsutil "github.com/shirou/gopsutil/v3/process"
)
// BackendMonitorService samples resource usage and status of loaded model
// backends via the model loader and per-model configuration.
type BackendMonitorService struct {
	// modelConfigLoader resolves model names to their configuration.
	modelConfigLoader *config.ModelConfigLoader
	// modelLoader tracks which backends are loaded and their gRPC endpoints.
	modelLoader *model.ModelLoader
	options *config.ApplicationConfig // Taking options in case we need to inspect ExternalGRPCBackends, though that's out of scope for now, hence the name.
}
// NewBackendMonitorService wires up a BackendMonitorService from its
// dependencies: the model loader, the per-model config loader, and the
// application configuration.
func NewBackendMonitorService(modelLoader *model.ModelLoader, configLoader *config.ModelConfigLoader, appConfig *config.ApplicationConfig) *BackendMonitorService {
	svc := &BackendMonitorService{
		modelConfigLoader: configLoader,
		modelLoader:       modelLoader,
		options:           appConfig,
	}
	return svc
}
// SampleLocalBackendProcess resolves the gRPC backend process serving the
// given model and samples its memory and CPU usage via gopsutil.
//
// FIX: the local variable previously named `config` shadowed the imported
// `config` package within this function; renamed to `cfg`. Also replaced the
// fmt.Sprintf suffix append with plain concatenation.
func (bms *BackendMonitorService) SampleLocalBackendProcess(model string) (*schema.BackendMonitorResponse, error) {
	cfg, exists := bms.modelConfigLoader.GetModelConfig(model)
	var backend string
	if exists {
		backend = cfg.Model
	} else {
		// Last ditch effort: use it raw, see if a backend happens to match.
		backend = model
	}
	if !strings.HasSuffix(backend, ".bin") {
		backend += ".bin"
	}
	pid, err := bms.modelLoader.GetGRPCPID(backend)
	if err != nil {
		xlog.Error("failed to find GRPC pid", "error", err, "model", model)
		return nil, err
	}
	// Name is slightly frightening but this does _not_ create a new process, rather it looks up an existing process by PID.
	backendProcess, err := gopsutil.NewProcess(int32(pid))
	if err != nil {
		xlog.Error("error getting process info", "error", err, "model", model, "pid", pid)
		return nil, err
	}
	memInfo, err := backendProcess.MemoryInfo()
	if err != nil {
		xlog.Error("error getting memory info", "error", err, "model", model, "pid", pid)
		return nil, err
	}
	memPercent, err := backendProcess.MemoryPercent()
	if err != nil {
		xlog.Error("error getting memory percent", "error", err, "model", model, "pid", pid)
		return nil, err
	}
	cpuPercent, err := backendProcess.CPUPercent()
	if err != nil {
		xlog.Error("error getting cpu percent", "error", err, "model", model, "pid", pid)
		return nil, err
	}
	return &schema.BackendMonitorResponse{
		MemoryInfo:    memInfo,
		MemoryPercent: memPercent,
		CPUPercent:    cpuPercent,
	}, nil
}
// CheckAndSample returns the gRPC status of a loaded backend; when the RPC
// fails it falls back to sampling the local process via gopsutil and reports
// an ERROR state with the sampled memory figures.
//
// FIX: switched from a value receiver to a pointer receiver so the type's
// method set is consistent with SampleLocalBackendProcess (mixed receiver
// kinds on one type are a Go code-review smell).
func (bms *BackendMonitorService) CheckAndSample(modelName string) (*proto.StatusResponse, error) {
	modelAddr := bms.modelLoader.CheckIsLoaded(modelName)
	if modelAddr == nil {
		return nil, fmt.Errorf("backend %s is not currently loaded", modelName)
	}
	status, rpcErr := modelAddr.GRPC(false, nil).Status(context.TODO())
	if rpcErr != nil {
		xlog.Warn("backend experienced an error retrieving status info", "backend", modelName, "error", rpcErr)
		// Best-effort fallback: sample the process directly.
		val, slbErr := bms.SampleLocalBackendProcess(modelName)
		if slbErr != nil {
			return nil, fmt.Errorf("backend %s experienced an error retrieving status info via rpc: %s, then failed local node process sample: %s", modelName, rpcErr.Error(), slbErr.Error())
		}
		return &proto.StatusResponse{
			State: proto.StatusResponse_ERROR,
			Memory: &proto.MemoryUsageData{
				Total: val.MemoryInfo.VMS,
				Breakdown: map[string]uint64{
					"gopsutil-RSS": val.MemoryInfo.RSS,
				},
			},
		}, nil
	}
	return status, nil
}
// ShutdownModel stops the backend serving modelName, delegating to the model
// loader.
//
// FIX: pointer receiver for consistency with SampleLocalBackendProcess
// (avoids mixed receiver kinds on BackendMonitorService).
func (bms *BackendMonitorService) ShutdownModel(modelName string) error {
	return bms.modelLoader.ShutdownModel(modelName)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.