repo stringlengths 6 47 | file_url stringlengths 77 269 | file_path stringlengths 5 186 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-07 08:35:43 2026-01-07 08:55:24 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/services/services_suite_test.go | core/services/services_suite_test.go | package services_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
func TestServices(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "LocalAI services test")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/services/agent_jobs_test.go | core/services/agent_jobs_test.go | package services_test
import (
"context"
"os"
"time"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/core/services"
"github.com/mudler/LocalAI/core/templates"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("AgentJobService", func() {
var (
service *services.AgentJobService
tempDir string
appConfig *config.ApplicationConfig
modelLoader *model.ModelLoader
configLoader *config.ModelConfigLoader
evaluator *templates.Evaluator
)
BeforeEach(func() {
var err error
tempDir, err = os.MkdirTemp("", "agent_jobs_test")
Expect(err).NotTo(HaveOccurred())
systemState := &system.SystemState{}
systemState.Model.ModelsPath = tempDir
appConfig = config.NewApplicationConfig(
config.WithDynamicConfigDir(tempDir),
config.WithContext(context.Background()),
)
appConfig.SystemState = systemState
appConfig.APIAddress = "127.0.0.1:8080"
appConfig.AgentJobRetentionDays = 30
modelLoader = model.NewModelLoader(systemState)
configLoader = config.NewModelConfigLoader(tempDir)
evaluator = templates.NewEvaluator(tempDir)
service = services.NewAgentJobService(
appConfig,
modelLoader,
configLoader,
evaluator,
)
})
AfterEach(func() {
os.RemoveAll(tempDir)
})
Describe("Task CRUD operations", func() {
It("should create a task", func() {
task := schema.Task{
Name: "Test Task",
Description: "Test Description",
Model: "test-model",
Prompt: "Hello {{.name}}",
Enabled: true,
}
id, err := service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
Expect(id).NotTo(BeEmpty())
retrieved, err := service.GetTask(id)
Expect(err).NotTo(HaveOccurred())
Expect(retrieved.Name).To(Equal("Test Task"))
Expect(retrieved.Description).To(Equal("Test Description"))
Expect(retrieved.Model).To(Equal("test-model"))
Expect(retrieved.Prompt).To(Equal("Hello {{.name}}"))
})
It("should update a task", func() {
task := schema.Task{
Name: "Original Task",
Model: "test-model",
Prompt: "Original prompt",
}
id, err := service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
updatedTask := schema.Task{
Name: "Updated Task",
Model: "test-model",
Prompt: "Updated prompt",
}
err = service.UpdateTask(id, updatedTask)
Expect(err).NotTo(HaveOccurred())
retrieved, err := service.GetTask(id)
Expect(err).NotTo(HaveOccurred())
Expect(retrieved.Name).To(Equal("Updated Task"))
Expect(retrieved.Prompt).To(Equal("Updated prompt"))
})
It("should delete a task", func() {
task := schema.Task{
Name: "Task to Delete",
Model: "test-model",
Prompt: "Prompt",
}
id, err := service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
err = service.DeleteTask(id)
Expect(err).NotTo(HaveOccurred())
_, err = service.GetTask(id)
Expect(err).To(HaveOccurred())
})
It("should list all tasks", func() {
task1 := schema.Task{Name: "Task 1", Model: "test-model", Prompt: "Prompt 1"}
task2 := schema.Task{Name: "Task 2", Model: "test-model", Prompt: "Prompt 2"}
_, err := service.CreateTask(task1)
Expect(err).NotTo(HaveOccurred())
_, err = service.CreateTask(task2)
Expect(err).NotTo(HaveOccurred())
tasks := service.ListTasks()
Expect(len(tasks)).To(BeNumerically(">=", 2))
})
})
Describe("Job operations", func() {
var taskID string
BeforeEach(func() {
task := schema.Task{
Name: "Test Task",
Model: "test-model",
Prompt: "Hello {{.name}}",
Enabled: true,
}
var err error
taskID, err = service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
})
It("should create and queue a job", func() {
params := map[string]string{"name": "World"}
jobID, err := service.ExecuteJob(taskID, params, "test", nil)
Expect(err).NotTo(HaveOccurred())
Expect(jobID).NotTo(BeEmpty())
job, err := service.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
Expect(job.TaskID).To(Equal(taskID))
Expect(job.Status).To(Equal(schema.JobStatusPending))
Expect(job.Parameters).To(Equal(params))
})
It("should list jobs with filters", func() {
params := map[string]string{}
jobID1, err := service.ExecuteJob(taskID, params, "test", nil)
Expect(err).NotTo(HaveOccurred())
time.Sleep(10 * time.Millisecond) // Ensure different timestamps
jobID2, err := service.ExecuteJob(taskID, params, "test", nil)
Expect(err).NotTo(HaveOccurred())
allJobs := service.ListJobs(nil, nil, 0)
Expect(len(allJobs)).To(BeNumerically(">=", 2))
filteredJobs := service.ListJobs(&taskID, nil, 0)
Expect(len(filteredJobs)).To(BeNumerically(">=", 2))
status := schema.JobStatusPending
pendingJobs := service.ListJobs(nil, &status, 0)
Expect(len(pendingJobs)).To(BeNumerically(">=", 2))
// Verify both jobs are in the list
jobIDs := make(map[string]bool)
for _, job := range pendingJobs {
jobIDs[job.ID] = true
}
Expect(jobIDs[jobID1]).To(BeTrue())
Expect(jobIDs[jobID2]).To(BeTrue())
})
It("should cancel a pending job", func() {
params := map[string]string{}
jobID, err := service.ExecuteJob(taskID, params, "test", nil)
Expect(err).NotTo(HaveOccurred())
err = service.CancelJob(jobID)
Expect(err).NotTo(HaveOccurred())
job, err := service.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
Expect(job.Status).To(Equal(schema.JobStatusCancelled))
})
It("should delete a job", func() {
params := map[string]string{}
jobID, err := service.ExecuteJob(taskID, params, "test", nil)
Expect(err).NotTo(HaveOccurred())
err = service.DeleteJob(jobID)
Expect(err).NotTo(HaveOccurred())
_, err = service.GetJob(jobID)
Expect(err).To(HaveOccurred())
})
})
Describe("File operations", func() {
It("should save and load tasks from file", func() {
task := schema.Task{
Name: "Persistent Task",
Model: "test-model",
Prompt: "Test prompt",
}
id, err := service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
// Create a new service instance to test loading
newService := services.NewAgentJobService(
appConfig,
modelLoader,
configLoader,
evaluator,
)
err = newService.LoadTasksFromFile()
Expect(err).NotTo(HaveOccurred())
retrieved, err := newService.GetTask(id)
Expect(err).NotTo(HaveOccurred())
Expect(retrieved.Name).To(Equal("Persistent Task"))
})
It("should save and load jobs from file", func() {
task := schema.Task{
Name: "Test Task",
Model: "test-model",
Prompt: "Test prompt",
Enabled: true,
}
taskID, err := service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
params := map[string]string{}
jobID, err := service.ExecuteJob(taskID, params, "test", nil)
Expect(err).NotTo(HaveOccurred())
service.SaveJobsToFile()
// Create a new service instance to test loading
newService := services.NewAgentJobService(
appConfig,
modelLoader,
configLoader,
evaluator,
)
err = newService.LoadJobsFromFile()
Expect(err).NotTo(HaveOccurred())
retrieved, err := newService.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
Expect(retrieved.TaskID).To(Equal(taskID))
})
})
Describe("Prompt templating", func() {
It("should build prompt from template with parameters", func() {
task := schema.Task{
Name: "Template Task",
Model: "test-model",
Prompt: "Hello {{.name}}, you are {{.role}}",
}
id, err := service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
// We can't directly test buildPrompt as it's private, but we can test via ExecuteJob
// which uses it internally. However, without a real model, the job will fail.
// So we'll just verify the task was created correctly.
Expect(id).NotTo(BeEmpty())
})
})
Describe("Job cleanup", func() {
It("should cleanup old jobs", func() {
task := schema.Task{
Name: "Test Task",
Model: "test-model",
Prompt: "Test prompt",
Enabled: true,
}
taskID, err := service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
params := map[string]string{}
jobID, err := service.ExecuteJob(taskID, params, "test", nil)
Expect(err).NotTo(HaveOccurred())
// Manually set job creation time to be old
job, err := service.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
// Modify the job's CreatedAt to be 31 days ago
oldTime := time.Now().AddDate(0, 0, -31)
job.CreatedAt = oldTime
// We can't directly modify jobs in the service, so we'll test cleanup differently
// by setting retention to 0 and creating a new job
// Test that cleanup runs without error
err = service.CleanupOldJobs()
Expect(err).NotTo(HaveOccurred())
})
})
Describe("Multimedia support", func() {
Describe("Task multimedia sources", func() {
It("should create a task with multimedia sources", func() {
task := schema.Task{
Name: "Multimedia Task",
Model: "test-model",
Prompt: "Analyze this image",
MultimediaSources: []schema.MultimediaSourceConfig{
{
Type: "image",
URL: "https://example.com/image.png",
Headers: map[string]string{"Authorization": "Bearer token123"},
},
{
Type: "video",
URL: "https://example.com/video.mp4",
},
},
}
id, err := service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
Expect(id).NotTo(BeEmpty())
retrieved, err := service.GetTask(id)
Expect(err).NotTo(HaveOccurred())
Expect(retrieved.MultimediaSources).To(HaveLen(2))
Expect(retrieved.MultimediaSources[0].Type).To(Equal("image"))
Expect(retrieved.MultimediaSources[0].URL).To(Equal("https://example.com/image.png"))
Expect(retrieved.MultimediaSources[0].Headers["Authorization"]).To(Equal("Bearer token123"))
Expect(retrieved.MultimediaSources[1].Type).To(Equal("video"))
})
It("should save and load tasks with multimedia sources from file", func() {
task := schema.Task{
Name: "Persistent Multimedia Task",
Model: "test-model",
Prompt: "Test prompt",
MultimediaSources: []schema.MultimediaSourceConfig{
{
Type: "audio",
URL: "https://example.com/audio.mp3",
Headers: map[string]string{
"X-Custom-Header": "value",
},
},
},
}
id, err := service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
// Create a new service instance to test loading
newService := services.NewAgentJobService(
appConfig,
modelLoader,
configLoader,
evaluator,
)
err = newService.LoadTasksFromFile()
Expect(err).NotTo(HaveOccurred())
retrieved, err := newService.GetTask(id)
Expect(err).NotTo(HaveOccurred())
Expect(retrieved.Name).To(Equal("Persistent Multimedia Task"))
Expect(retrieved.MultimediaSources).To(HaveLen(1))
Expect(retrieved.MultimediaSources[0].Type).To(Equal("audio"))
Expect(retrieved.MultimediaSources[0].URL).To(Equal("https://example.com/audio.mp3"))
Expect(retrieved.MultimediaSources[0].Headers["X-Custom-Header"]).To(Equal("value"))
})
})
Describe("Job multimedia", func() {
var taskID string
BeforeEach(func() {
task := schema.Task{
Name: "Test Task",
Model: "test-model",
Prompt: "Hello {{.name}}",
Enabled: true,
}
var err error
taskID, err = service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
})
It("should create a job with multimedia content", func() {
params := map[string]string{"name": "World"}
multimedia := &schema.MultimediaAttachment{
Images: []string{"https://example.com/image1.png", "data:image/png;base64,iVBORw0KG"},
Videos: []string{"https://example.com/video.mp4"},
Audios: []string{"data:audio/mpeg;base64,SUQzBAAAAA"},
Files: []string{"https://example.com/file.pdf"},
}
jobID, err := service.ExecuteJob(taskID, params, "test", multimedia)
Expect(err).NotTo(HaveOccurred())
Expect(jobID).NotTo(BeEmpty())
job, err := service.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
Expect(job.TaskID).To(Equal(taskID))
Expect(job.Images).To(HaveLen(2))
Expect(job.Images[0]).To(Equal("https://example.com/image1.png"))
Expect(job.Images[1]).To(Equal("data:image/png;base64,iVBORw0KG"))
Expect(job.Videos).To(HaveLen(1))
Expect(job.Videos[0]).To(Equal("https://example.com/video.mp4"))
Expect(job.Audios).To(HaveLen(1))
Expect(job.Audios[0]).To(Equal("data:audio/mpeg;base64,SUQzBAAAAA"))
Expect(job.Files).To(HaveLen(1))
Expect(job.Files[0]).To(Equal("https://example.com/file.pdf"))
})
It("should create a job with partial multimedia (only images)", func() {
params := map[string]string{}
multimedia := &schema.MultimediaAttachment{
Images: []string{"https://example.com/image.png"},
}
jobID, err := service.ExecuteJob(taskID, params, "test", multimedia)
Expect(err).NotTo(HaveOccurred())
job, err := service.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
Expect(job.Images).To(HaveLen(1))
Expect(job.Videos).To(BeEmpty())
Expect(job.Audios).To(BeEmpty())
Expect(job.Files).To(BeEmpty())
})
It("should create a job without multimedia (nil)", func() {
params := map[string]string{"name": "Test"}
jobID, err := service.ExecuteJob(taskID, params, "test", nil)
Expect(err).NotTo(HaveOccurred())
job, err := service.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
Expect(job.Images).To(BeEmpty())
Expect(job.Videos).To(BeEmpty())
Expect(job.Audios).To(BeEmpty())
Expect(job.Files).To(BeEmpty())
})
It("should save and load jobs with multimedia from file", func() {
params := map[string]string{}
multimedia := &schema.MultimediaAttachment{
Images: []string{"https://example.com/image.png"},
Videos: []string{"https://example.com/video.mp4"},
}
jobID, err := service.ExecuteJob(taskID, params, "test", multimedia)
Expect(err).NotTo(HaveOccurred())
// Wait a bit for async save to complete
time.Sleep(50 * time.Millisecond)
// Ensure directory exists before saving
err = os.MkdirAll(tempDir, 0755)
Expect(err).NotTo(HaveOccurred())
err = service.SaveJobsToFile()
Expect(err).NotTo(HaveOccurred())
// Create a new service instance to test loading
newService := services.NewAgentJobService(
appConfig,
modelLoader,
configLoader,
evaluator,
)
err = newService.LoadJobsFromFile()
Expect(err).NotTo(HaveOccurred())
retrieved, err := newService.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
Expect(retrieved.TaskID).To(Equal(taskID))
Expect(retrieved.Images).To(HaveLen(1))
Expect(retrieved.Images[0]).To(Equal("https://example.com/image.png"))
Expect(retrieved.Videos).To(HaveLen(1))
Expect(retrieved.Videos[0]).To(Equal("https://example.com/video.mp4"))
})
})
Describe("Multimedia format handling", func() {
var taskID string
BeforeEach(func() {
task := schema.Task{
Name: "Test Task",
Model: "test-model",
Prompt: "Test prompt",
Enabled: true,
}
var err error
taskID, err = service.CreateTask(task)
Expect(err).NotTo(HaveOccurred())
})
It("should handle URLs correctly", func() {
multimedia := &schema.MultimediaAttachment{
Images: []string{"https://example.com/image.png"},
Videos: []string{"http://example.com/video.mp4"},
}
jobID, err := service.ExecuteJob(taskID, map[string]string{}, "test", multimedia)
Expect(err).NotTo(HaveOccurred())
job, err := service.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
Expect(job.Images[0]).To(Equal("https://example.com/image.png"))
Expect(job.Videos[0]).To(Equal("http://example.com/video.mp4"))
})
It("should handle data URIs correctly", func() {
multimedia := &schema.MultimediaAttachment{
Images: []string{"data:image/png;base64,iVBORw0KG"},
Videos: []string{"data:video/mp4;base64,AAAAIGZ0eXBpc29t"},
}
jobID, err := service.ExecuteJob(taskID, map[string]string{}, "test", multimedia)
Expect(err).NotTo(HaveOccurred())
job, err := service.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
Expect(job.Images[0]).To(Equal("data:image/png;base64,iVBORw0KG"))
Expect(job.Videos[0]).To(Equal("data:video/mp4;base64,AAAAIGZ0eXBpc29t"))
})
It("should handle base64 strings (will be converted during execution)", func() {
// Base64 strings without data URI prefix should be stored as-is
// They will be converted to data URIs during execution
multimedia := &schema.MultimediaAttachment{
Images: []string{"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="},
}
jobID, err := service.ExecuteJob(taskID, map[string]string{}, "test", multimedia)
Expect(err).NotTo(HaveOccurred())
job, err := service.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
// The base64 string is stored as-is in the job
Expect(job.Images[0]).To(Equal("iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="))
})
It("should handle empty multimedia arrays", func() {
multimedia := &schema.MultimediaAttachment{
Images: []string{""},
}
jobID, err := service.ExecuteJob(taskID, map[string]string{}, "test", multimedia)
Expect(err).NotTo(HaveOccurred())
job, err := service.GetJob(jobID)
Expect(err).NotTo(HaveOccurred())
// Empty strings are stored in the job but will be filtered during execution
// The job stores what was provided, filtering happens in convertToMultimediaContent
Expect(job.Images).To(HaveLen(1))
Expect(job.Images[0]).To(Equal(""))
Expect(job.Videos).To(BeEmpty())
})
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/gallery_test.go | core/gallery/gallery_test.go | package gallery_test
import (
"os"
"path/filepath"
"github.com/mudler/LocalAI/core/config"
. "github.com/mudler/LocalAI/core/gallery"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"gopkg.in/yaml.v2"
)
var _ = Describe("Gallery", func() {
var tempDir string
BeforeEach(func() {
var err error
tempDir, err = os.MkdirTemp("", "gallery-test-*")
Expect(err).NotTo(HaveOccurred())
})
AfterEach(func() {
os.RemoveAll(tempDir)
})
Describe("ReadConfigFile", func() {
It("should read and unmarshal a valid YAML file", func() {
testConfig := map[string]interface{}{
"name": "test-model",
"description": "A test model",
"license": "MIT",
}
yamlData, err := yaml.Marshal(testConfig)
Expect(err).NotTo(HaveOccurred())
filePath := filepath.Join(tempDir, "test.yaml")
err = os.WriteFile(filePath, yamlData, 0644)
Expect(err).NotTo(HaveOccurred())
var result map[string]interface{}
config, err := ReadConfigFile[map[string]interface{}](filePath)
Expect(err).NotTo(HaveOccurred())
Expect(config).NotTo(BeNil())
result = *config
Expect(result["name"]).To(Equal("test-model"))
Expect(result["description"]).To(Equal("A test model"))
Expect(result["license"]).To(Equal("MIT"))
})
It("should return error when file does not exist", func() {
_, err := ReadConfigFile[map[string]interface{}]("nonexistent.yaml")
Expect(err).To(HaveOccurred())
})
It("should return error when YAML is invalid", func() {
filePath := filepath.Join(tempDir, "invalid.yaml")
err := os.WriteFile(filePath, []byte("invalid: yaml: content: [unclosed"), 0644)
Expect(err).NotTo(HaveOccurred())
_, err = ReadConfigFile[map[string]interface{}](filePath)
Expect(err).To(HaveOccurred())
})
})
Describe("GalleryElements Search", func() {
var elements GalleryElements[*GalleryModel]
BeforeEach(func() {
elements = GalleryElements[*GalleryModel]{
{
Metadata: Metadata{
Name: "bert-embeddings",
Description: "BERT model for embeddings",
Tags: []string{"embeddings", "bert", "nlp"},
License: "Apache-2.0",
Gallery: config.Gallery{
Name: "huggingface",
},
},
},
{
Metadata: Metadata{
Name: "gpt-2",
Description: "GPT-2 language model",
Tags: []string{"gpt", "language-model"},
License: "MIT",
Gallery: config.Gallery{
Name: "openai",
},
},
},
{
Metadata: Metadata{
Name: "llama-7b",
Description: "LLaMA 7B model",
Tags: []string{"llama", "llm"},
License: "LLaMA",
Gallery: config.Gallery{
Name: "meta",
},
},
},
}
})
It("should find elements by exact name match", func() {
results := elements.Search("bert-embeddings")
Expect(results).To(HaveLen(1))
Expect(results[0].GetName()).To(Equal("bert-embeddings"))
})
It("should find elements by partial name match", func() {
results := elements.Search("bert")
Expect(results).To(HaveLen(1))
Expect(results[0].GetName()).To(Equal("bert-embeddings"))
})
It("should find elements by description", func() {
results := elements.Search("embeddings")
Expect(results).To(HaveLen(1))
Expect(results[0].GetName()).To(Equal("bert-embeddings"))
})
It("should find elements by gallery name", func() {
results := elements.Search("huggingface")
Expect(results).To(HaveLen(1))
Expect(results[0].GetGallery().Name).To(Equal("huggingface"))
})
It("should find elements by tags", func() {
results := elements.Search("nlp")
Expect(results).To(HaveLen(1))
Expect(results[0].GetName()).To(Equal("bert-embeddings"))
})
It("should be case insensitive", func() {
results := elements.Search("BERT")
Expect(results).To(HaveLen(1))
Expect(results[0].GetName()).To(Equal("bert-embeddings"))
})
It("should find multiple elements", func() {
results := elements.Search("gpt")
Expect(results).To(HaveLen(1))
Expect(results[0].GetName()).To(Equal("gpt-2"))
})
It("should return empty results for no matches", func() {
results := elements.Search("nonexistent")
Expect(results).To(HaveLen(0))
})
It("should use fuzzy matching", func() {
results := elements.Search("bert-emb")
Expect(results).To(HaveLen(1))
Expect(results[0].GetName()).To(Equal("bert-embeddings"))
})
})
Describe("GalleryElements SortByName", func() {
var elements GalleryElements[*GalleryModel]
BeforeEach(func() {
elements = GalleryElements[*GalleryModel]{
{Metadata: Metadata{Name: "zebra"}},
{Metadata: Metadata{Name: "alpha"}},
{Metadata: Metadata{Name: "beta"}},
}
})
It("should sort ascending", func() {
sorted := elements.SortByName("asc")
Expect(sorted).To(HaveLen(3))
Expect(sorted[0].GetName()).To(Equal("alpha"))
Expect(sorted[1].GetName()).To(Equal("beta"))
Expect(sorted[2].GetName()).To(Equal("zebra"))
})
It("should sort descending", func() {
sorted := elements.SortByName("desc")
Expect(sorted).To(HaveLen(3))
Expect(sorted[0].GetName()).To(Equal("zebra"))
Expect(sorted[1].GetName()).To(Equal("beta"))
Expect(sorted[2].GetName()).To(Equal("alpha"))
})
It("should be case insensitive", func() {
elements = GalleryElements[*GalleryModel]{
{Metadata: Metadata{Name: "Zebra"}},
{Metadata: Metadata{Name: "alpha"}},
{Metadata: Metadata{Name: "Beta"}},
}
sorted := elements.SortByName("asc")
Expect(sorted[0].GetName()).To(Equal("alpha"))
Expect(sorted[1].GetName()).To(Equal("Beta"))
Expect(sorted[2].GetName()).To(Equal("Zebra"))
})
})
Describe("GalleryElements SortByRepository", func() {
var elements GalleryElements[*GalleryModel]
BeforeEach(func() {
elements = GalleryElements[*GalleryModel]{
{
Metadata: Metadata{
Gallery: config.Gallery{Name: "zebra-repo"},
},
},
{
Metadata: Metadata{
Gallery: config.Gallery{Name: "alpha-repo"},
},
},
{
Metadata: Metadata{
Gallery: config.Gallery{Name: "beta-repo"},
},
},
}
})
It("should sort ascending", func() {
sorted := elements.SortByRepository("asc")
Expect(sorted).To(HaveLen(3))
Expect(sorted[0].GetGallery().Name).To(Equal("alpha-repo"))
Expect(sorted[1].GetGallery().Name).To(Equal("beta-repo"))
Expect(sorted[2].GetGallery().Name).To(Equal("zebra-repo"))
})
It("should sort descending", func() {
sorted := elements.SortByRepository("desc")
Expect(sorted).To(HaveLen(3))
Expect(sorted[0].GetGallery().Name).To(Equal("zebra-repo"))
Expect(sorted[1].GetGallery().Name).To(Equal("beta-repo"))
Expect(sorted[2].GetGallery().Name).To(Equal("alpha-repo"))
})
})
Describe("GalleryElements SortByLicense", func() {
var elements GalleryElements[*GalleryModel]
BeforeEach(func() {
elements = GalleryElements[*GalleryModel]{
{Metadata: Metadata{License: "MIT"}},
{Metadata: Metadata{License: "Apache-2.0"}},
{Metadata: Metadata{License: ""}},
{Metadata: Metadata{License: "GPL-3.0"}},
}
})
It("should sort ascending with empty licenses at end", func() {
sorted := elements.SortByLicense("asc")
Expect(sorted).To(HaveLen(4))
Expect(sorted[0].GetLicense()).To(Equal("Apache-2.0"))
Expect(sorted[1].GetLicense()).To(Equal("GPL-3.0"))
Expect(sorted[2].GetLicense()).To(Equal("MIT"))
Expect(sorted[3].GetLicense()).To(Equal(""))
})
It("should sort descending with empty licenses at beginning", func() {
sorted := elements.SortByLicense("desc")
Expect(sorted).To(HaveLen(4))
Expect(sorted[0].GetLicense()).To(Equal(""))
Expect(sorted[1].GetLicense()).To(Equal("MIT"))
Expect(sorted[2].GetLicense()).To(Equal("GPL-3.0"))
Expect(sorted[3].GetLicense()).To(Equal("Apache-2.0"))
})
It("should handle all empty licenses", func() {
elements = GalleryElements[*GalleryModel]{
{Metadata: Metadata{License: ""}},
{Metadata: Metadata{License: ""}},
}
sorted := elements.SortByLicense("asc")
Expect(sorted).To(HaveLen(2))
})
})
Describe("GalleryElements SortByInstalled", func() {
var elements GalleryElements[*GalleryModel]
BeforeEach(func() {
elements = GalleryElements[*GalleryModel]{
{Metadata: Metadata{Name: "installed-2", Installed: true}},
{Metadata: Metadata{Name: "not-installed-1", Installed: false}},
{Metadata: Metadata{Name: "installed-1", Installed: true}},
{Metadata: Metadata{Name: "not-installed-2", Installed: false}},
}
})
It("should sort ascending with installed first, then by name", func() {
sorted := elements.SortByInstalled("asc")
Expect(sorted).To(HaveLen(4))
Expect(sorted[0].GetInstalled()).To(BeTrue())
Expect(sorted[0].GetName()).To(Equal("installed-1"))
Expect(sorted[1].GetInstalled()).To(BeTrue())
Expect(sorted[1].GetName()).To(Equal("installed-2"))
Expect(sorted[2].GetInstalled()).To(BeFalse())
Expect(sorted[2].GetName()).To(Equal("not-installed-1"))
Expect(sorted[3].GetInstalled()).To(BeFalse())
Expect(sorted[3].GetName()).To(Equal("not-installed-2"))
})
It("should sort descending with not-installed first, then by name", func() {
sorted := elements.SortByInstalled("desc")
Expect(sorted).To(HaveLen(4))
Expect(sorted[0].GetInstalled()).To(BeFalse())
Expect(sorted[0].GetName()).To(Equal("not-installed-2"))
Expect(sorted[1].GetInstalled()).To(BeFalse())
Expect(sorted[1].GetName()).To(Equal("not-installed-1"))
Expect(sorted[2].GetInstalled()).To(BeTrue())
Expect(sorted[2].GetName()).To(Equal("installed-2"))
Expect(sorted[3].GetInstalled()).To(BeTrue())
Expect(sorted[3].GetName()).To(Equal("installed-1"))
})
})
Describe("GalleryElements FindByName", func() {
var elements GalleryElements[*GalleryModel]
BeforeEach(func() {
elements = GalleryElements[*GalleryModel]{
{Metadata: Metadata{Name: "bert-embeddings"}},
{Metadata: Metadata{Name: "gpt-2"}},
{Metadata: Metadata{Name: "llama-7b"}},
}
})
It("should find element by exact name", func() {
result := elements.FindByName("bert-embeddings")
Expect(result).NotTo(BeNil())
Expect(result.GetName()).To(Equal("bert-embeddings"))
})
It("should be case insensitive", func() {
result := elements.FindByName("BERT-EMBEDDINGS")
Expect(result).NotTo(BeNil())
Expect(result.GetName()).To(Equal("bert-embeddings"))
})
It("should return zero value when not found", func() {
result := elements.FindByName("nonexistent")
Expect(result).To(BeNil())
})
})
Describe("GalleryElements Paginate", func() {
var elements GalleryElements[*GalleryModel]
BeforeEach(func() {
elements = GalleryElements[*GalleryModel]{
{Metadata: Metadata{Name: "model-1"}},
{Metadata: Metadata{Name: "model-2"}},
{Metadata: Metadata{Name: "model-3"}},
{Metadata: Metadata{Name: "model-4"}},
{Metadata: Metadata{Name: "model-5"}},
}
})
It("should return first page", func() {
page := elements.Paginate(1, 2)
Expect(page).To(HaveLen(2))
Expect(page[0].GetName()).To(Equal("model-1"))
Expect(page[1].GetName()).To(Equal("model-2"))
})
It("should return second page", func() {
page := elements.Paginate(2, 2)
Expect(page).To(HaveLen(2))
Expect(page[0].GetName()).To(Equal("model-3"))
Expect(page[1].GetName()).To(Equal("model-4"))
})
It("should return partial last page", func() {
page := elements.Paginate(3, 2)
Expect(page).To(HaveLen(1))
Expect(page[0].GetName()).To(Equal("model-5"))
})
It("should handle page beyond range", func() {
page := elements.Paginate(10, 2)
Expect(page).To(HaveLen(0))
})
It("should handle empty elements", func() {
empty := GalleryElements[*GalleryModel]{}
page := empty.Paginate(1, 10)
Expect(page).To(HaveLen(0))
})
})
Describe("FindGalleryElement", func() {
var models []*GalleryModel
BeforeEach(func() {
models = []*GalleryModel{
{
Metadata: Metadata{
Name: "bert-embeddings",
Gallery: config.Gallery{
Name: "huggingface",
},
},
},
{
Metadata: Metadata{
Name: "gpt-2",
Gallery: config.Gallery{
Name: "openai",
},
},
},
}
})
It("should find element by name without @ notation", func() {
result := FindGalleryElement(models, "bert-embeddings")
Expect(result).NotTo(BeNil())
Expect(result.GetName()).To(Equal("bert-embeddings"))
})
It("should find element by name with @ notation", func() {
result := FindGalleryElement(models, "huggingface@bert-embeddings")
Expect(result).NotTo(BeNil())
Expect(result.GetName()).To(Equal("bert-embeddings"))
Expect(result.GetGallery().Name).To(Equal("huggingface"))
})
It("should be case insensitive", func() {
result := FindGalleryElement(models, "BERT-EMBEDDINGS")
Expect(result).NotTo(BeNil())
Expect(result.GetName()).To(Equal("bert-embeddings"))
})
It("should handle path separators in name", func() {
// Path separators are replaced with __, so bert/embeddings becomes bert__embeddings
// This test verifies the replacement happens, but won't match unless model name has __
modelsWithPath := []*GalleryModel{
{
Metadata: Metadata{
Name: "bert__embeddings",
Gallery: config.Gallery{
Name: "huggingface",
},
},
},
}
result := FindGalleryElement(modelsWithPath, "bert/embeddings")
Expect(result).NotTo(BeNil())
Expect(result.GetName()).To(Equal("bert__embeddings"))
})
It("should return zero value when not found", func() {
result := FindGalleryElement(models, "nonexistent")
Expect(result).To(BeNil())
})
It("should return zero value when gallery@name not found", func() {
result := FindGalleryElement(models, "nonexistent@model")
Expect(result).To(BeNil())
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/models_types.go | core/gallery/models_types.go | package gallery
import (
"fmt"
"github.com/mudler/LocalAI/core/config"
)
// GalleryModel is the struct used to represent a model in the gallery returned by the endpoint.
// It is used to install the model by resolving the URL and downloading the files.
// The other fields are used to override the configuration of the model.
type GalleryModel struct {
Metadata `json:",inline" yaml:",inline"`
// config_file is read in the situation where URL is blank - and therefore this is a base config.
ConfigFile map[string]interface{} `json:"config_file,omitempty" yaml:"config_file,omitempty"`
// Overrides are used to override the configuration of the model located at URL
Overrides map[string]interface{} `json:"overrides,omitempty" yaml:"overrides,omitempty"`
}
func (m *GalleryModel) GetInstalled() bool {
return m.Installed
}
func (m *GalleryModel) GetLicense() string {
return m.License
}
func (m *GalleryModel) SetGallery(gallery config.Gallery) {
m.Gallery = gallery
}
func (m *GalleryModel) SetInstalled(installed bool) {
m.Installed = installed
}
func (m *GalleryModel) GetName() string {
return m.Name
}
func (m *GalleryModel) GetGallery() config.Gallery {
return m.Gallery
}
func (m GalleryModel) ID() string {
return fmt.Sprintf("%s@%s", m.Gallery.Name, m.Name)
}
func (m *GalleryModel) GetTags() []string {
return m.Tags
}
func (m *GalleryModel) GetDescription() string {
return m.Description
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/models.go | core/gallery/models.go | package gallery
import (
"context"
"errors"
"fmt"
"os"
"path/filepath"
"slices"
"strings"
"dario.cat/mergo"
lconfig "github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/downloader"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/LocalAI/pkg/utils"
"github.com/mudler/xlog"
"gopkg.in/yaml.v3"
)
/*
description: |
foo
license: ""
urls:
-
-
name: "bar"
config_file: |
# Note, name will be injected. or generated by the alias wanted by the user
threads: 14
files:
- filename: ""
sha: ""
uri: ""
prompt_templates:
- name: ""
content: ""
*/
// ModelConfig is the model configuration which contains all the model details
// This configuration is read from the gallery endpoint and is used to download and install the model
// It is the internal structure, separated from the request
type ModelConfig struct {
Description string `yaml:"description"`
Icon string `yaml:"icon"`
License string `yaml:"license"`
URLs []string `yaml:"urls"`
Name string `yaml:"name"`
ConfigFile string `yaml:"config_file"`
Files []File `yaml:"files"`
PromptTemplates []PromptTemplate `yaml:"prompt_templates"`
}
type File struct {
Filename string `yaml:"filename" json:"filename"`
SHA256 string `yaml:"sha256" json:"sha256"`
URI string `yaml:"uri" json:"uri"`
}
type PromptTemplate struct {
Name string `yaml:"name"`
Content string `yaml:"content"`
}
// Installs a model from the gallery
func InstallModelFromGallery(
ctx context.Context,
modelGalleries, backendGalleries []lconfig.Gallery,
systemState *system.SystemState,
modelLoader *model.ModelLoader,
name string, req GalleryModel, downloadStatus func(string, string, string, float64), enforceScan, automaticallyInstallBackend bool) error {
applyModel := func(model *GalleryModel) error {
name = strings.ReplaceAll(name, string(os.PathSeparator), "__")
var config ModelConfig
if len(model.URL) > 0 {
var err error
config, err = GetGalleryConfigFromURLWithContext[ModelConfig](ctx, model.URL, systemState.Model.ModelsPath)
if err != nil {
return err
}
config.Description = model.Description
config.License = model.License
} else if len(model.ConfigFile) > 0 {
// TODO: is this worse than using the override method with a blank cfg yaml?
reYamlConfig, err := yaml.Marshal(model.ConfigFile)
if err != nil {
return err
}
config = ModelConfig{
ConfigFile: string(reYamlConfig),
Description: model.Description,
License: model.License,
URLs: model.URLs,
Name: model.Name,
Files: make([]File, 0), // Real values get added below, must be blank
// Prompt Template Skipped for now - I expect in this mode that they will be delivered as files.
}
} else {
return fmt.Errorf("invalid gallery model %+v", model)
}
installName := model.Name
if req.Name != "" {
installName = req.Name
}
// Copy the model configuration from the request schema
config.URLs = append(config.URLs, model.URLs...)
config.Icon = model.Icon
config.Files = append(config.Files, req.AdditionalFiles...)
config.Files = append(config.Files, model.AdditionalFiles...)
// TODO model.Overrides could be merged with user overrides (not defined yet)
if req.Overrides != nil {
if err := mergo.Merge(&model.Overrides, req.Overrides, mergo.WithOverride); err != nil {
return err
}
}
installedModel, err := InstallModel(ctx, systemState, installName, &config, model.Overrides, downloadStatus, enforceScan)
if err != nil {
return err
}
xlog.Debug("Installed model", "model", installedModel.Name)
if automaticallyInstallBackend && installedModel.Backend != "" {
xlog.Debug("Installing backend", "backend", installedModel.Backend)
if err := InstallBackendFromGallery(ctx, backendGalleries, systemState, modelLoader, installedModel.Backend, downloadStatus, false); err != nil {
return err
}
}
return nil
}
models, err := AvailableGalleryModels(modelGalleries, systemState)
if err != nil {
return err
}
model := FindGalleryElement(models, name)
if model == nil {
return fmt.Errorf("no model found with name %q", name)
}
return applyModel(model)
}
func InstallModel(ctx context.Context, systemState *system.SystemState, nameOverride string, config *ModelConfig, configOverrides map[string]interface{}, downloadStatus func(string, string, string, float64), enforceScan bool) (*lconfig.ModelConfig, error) {
basePath := systemState.Model.ModelsPath
// Create base path if it doesn't exist
err := os.MkdirAll(basePath, 0750)
if err != nil {
return nil, fmt.Errorf("failed to create base path: %v", err)
}
if len(configOverrides) > 0 {
xlog.Debug("Config overrides", "overrides", configOverrides)
}
// Download files and verify their SHA
for i, file := range config.Files {
// Check for cancellation before each file
select {
case <-ctx.Done():
return nil, ctx.Err()
default:
}
xlog.Debug("Checking file exists and matches SHA", "filename", file.Filename)
if err := utils.VerifyPath(file.Filename, basePath); err != nil {
return nil, err
}
// Create file path
filePath := filepath.Join(basePath, file.Filename)
if enforceScan {
scanResults, err := downloader.HuggingFaceScan(downloader.URI(file.URI))
if err != nil && errors.Is(err, downloader.ErrUnsafeFilesFound) {
xlog.Error("Contains unsafe file(s)!", "model", config.Name, "clamAV", scanResults.ClamAVInfectedFiles, "pickles", scanResults.DangerousPickles)
return nil, err
}
}
uri := downloader.URI(file.URI)
if err := uri.DownloadFileWithContext(ctx, filePath, file.SHA256, i, len(config.Files), downloadStatus); err != nil {
return nil, err
}
}
// Write prompt template contents to separate files
for _, template := range config.PromptTemplates {
if err := utils.VerifyPath(template.Name+".tmpl", basePath); err != nil {
return nil, err
}
// Create file path
filePath := filepath.Join(basePath, template.Name+".tmpl")
// Create parent directory
err := os.MkdirAll(filepath.Dir(filePath), 0750)
if err != nil {
return nil, fmt.Errorf("failed to create parent directory for prompt template %q: %v", template.Name, err)
}
// Create and write file content
err = os.WriteFile(filePath, []byte(template.Content), 0600)
if err != nil {
return nil, fmt.Errorf("failed to write prompt template %q: %v", template.Name, err)
}
xlog.Debug("Prompt template written", "template", template.Name)
}
name := config.Name
if nameOverride != "" {
name = nameOverride
}
if err := utils.VerifyPath(name+".yaml", basePath); err != nil {
return nil, err
}
modelConfig := lconfig.ModelConfig{}
// write config file
if len(configOverrides) != 0 || len(config.ConfigFile) != 0 {
configFilePath := filepath.Join(basePath, name+".yaml")
// Read and update config file as map[string]interface{}
configMap := make(map[string]interface{})
err = yaml.Unmarshal([]byte(config.ConfigFile), &configMap)
if err != nil {
return nil, fmt.Errorf("failed to unmarshal config YAML: %v", err)
}
configMap["name"] = name
if configOverrides != nil {
if err := mergo.Merge(&configMap, configOverrides, mergo.WithOverride); err != nil {
return nil, err
}
}
// Write updated config file
updatedConfigYAML, err := yaml.Marshal(configMap)
if err != nil {
return nil, fmt.Errorf("failed to marshal updated config YAML: %v", err)
}
err = yaml.Unmarshal(updatedConfigYAML, &modelConfig)
if err != nil {
return nil, fmt.Errorf("failed to unmarshal updated config YAML: %v", err)
}
if valid, err := modelConfig.Validate(); !valid {
return nil, fmt.Errorf("failed to validate updated config YAML: %v", err)
}
err = os.WriteFile(configFilePath, updatedConfigYAML, 0600)
if err != nil {
return nil, fmt.Errorf("failed to write updated config file: %v", err)
}
xlog.Debug("Written config file", "file", configFilePath)
}
// Save the model gallery file for further reference
modelFile := filepath.Join(basePath, galleryFileName(name))
data, err := yaml.Marshal(config)
if err != nil {
return nil, err
}
xlog.Debug("Written gallery file", "file", modelFile)
return &modelConfig, os.WriteFile(modelFile, data, 0600)
}
func galleryFileName(name string) string {
return "._gallery_" + name + ".yaml"
}
func GetLocalModelConfiguration(basePath string, name string) (*ModelConfig, error) {
name = strings.ReplaceAll(name, string(os.PathSeparator), "__")
galleryFile := filepath.Join(basePath, galleryFileName(name))
return ReadConfigFile[ModelConfig](galleryFile)
}
func listModelFiles(systemState *system.SystemState, name string) ([]string, error) {
configFile := filepath.Join(systemState.Model.ModelsPath, fmt.Sprintf("%s.yaml", name))
if err := utils.VerifyPath(configFile, systemState.Model.ModelsPath); err != nil {
return nil, fmt.Errorf("failed to verify path %s: %w", configFile, err)
}
// os.PathSeparator is not allowed in model names. Replace them with "__" to avoid conflicts with file paths.
name = strings.ReplaceAll(name, string(os.PathSeparator), "__")
galleryFile := filepath.Join(systemState.Model.ModelsPath, galleryFileName(name))
if err := utils.VerifyPath(galleryFile, systemState.Model.ModelsPath); err != nil {
return nil, fmt.Errorf("failed to verify path %s: %w", galleryFile, err)
}
additionalFiles := []string{}
allFiles := []string{}
// Galleryname is the name of the model in this case
dat, err := os.ReadFile(configFile)
if err == nil {
modelConfig := &lconfig.ModelConfig{}
err = yaml.Unmarshal(dat, &modelConfig)
if err != nil {
return nil, err
}
if modelConfig.Model != "" {
additionalFiles = append(additionalFiles, modelConfig.ModelFileName())
}
if modelConfig.MMProj != "" {
additionalFiles = append(additionalFiles, modelConfig.MMProjFileName())
}
}
// read the model config
galleryconfig, err := ReadConfigFile[ModelConfig](galleryFile)
if err == nil && galleryconfig != nil {
for _, f := range galleryconfig.Files {
fullPath := filepath.Join(systemState.Model.ModelsPath, f.Filename)
if err := utils.VerifyPath(fullPath, systemState.Model.ModelsPath); err != nil {
return allFiles, fmt.Errorf("failed to verify path %s: %w", fullPath, err)
}
allFiles = append(allFiles, fullPath)
}
} else {
xlog.Error("failed to read gallery file", "error", err, "file", configFile)
}
for _, f := range additionalFiles {
fullPath := filepath.Join(filepath.Join(systemState.Model.ModelsPath, f))
if err := utils.VerifyPath(fullPath, systemState.Model.ModelsPath); err != nil {
return allFiles, fmt.Errorf("failed to verify path %s: %w", fullPath, err)
}
allFiles = append(allFiles, fullPath)
}
allFiles = append(allFiles, galleryFile)
// skip duplicates
allFiles = utils.Unique(allFiles)
return allFiles, nil
}
func DeleteModelFromSystem(systemState *system.SystemState, name string) error {
configFile := filepath.Join(systemState.Model.ModelsPath, fmt.Sprintf("%s.yaml", name))
filesToRemove, err := listModelFiles(systemState, name)
if err != nil {
return err
}
allOtherFiles := []string{}
// Get all files of all other models
fi, err := os.ReadDir(systemState.Model.ModelsPath)
if err != nil {
return err
}
for _, f := range fi {
if f.IsDir() {
continue
}
if strings.HasPrefix(f.Name(), "._gallery_") {
continue
}
if !strings.HasSuffix(f.Name(), ".yaml") && !strings.HasSuffix(f.Name(), ".yml") {
continue
}
if f.Name() == fmt.Sprintf("%s.yaml", name) || f.Name() == fmt.Sprintf("%s.yml", name) {
continue
}
name := strings.TrimSuffix(f.Name(), ".yaml")
name = strings.TrimSuffix(name, ".yml")
xlog.Debug("Checking file", "file", f.Name())
files, err := listModelFiles(systemState, name)
if err != nil {
xlog.Debug("failed to list files for model", "error", err, "model", f.Name())
continue
}
allOtherFiles = append(allOtherFiles, files...)
}
xlog.Debug("Files to remove", "files", filesToRemove)
xlog.Debug("All other files", "files", allOtherFiles)
// Removing files
for _, f := range filesToRemove {
if slices.Contains(allOtherFiles, f) {
xlog.Debug("Skipping file because it is part of another model", "file", f)
continue
}
if e := os.Remove(f); e != nil {
xlog.Error("failed to remove file", "error", e, "file", f)
}
}
return os.Remove(configFile)
}
// This is ***NEVER*** going to be perfect or finished.
// This is a BEST EFFORT function to surface known-vulnerable models to users.
func SafetyScanGalleryModels(galleries []lconfig.Gallery, systemState *system.SystemState) error {
galleryModels, err := AvailableGalleryModels(galleries, systemState)
if err != nil {
return err
}
for _, gM := range galleryModels {
if gM.Installed {
err = errors.Join(err, SafetyScanGalleryModel(gM))
}
}
return err
}
func SafetyScanGalleryModel(galleryModel *GalleryModel) error {
for _, file := range galleryModel.AdditionalFiles {
scanResults, err := downloader.HuggingFaceScan(downloader.URI(file.URI))
if err != nil && errors.Is(err, downloader.ErrUnsafeFilesFound) {
xlog.Error("Contains unsafe file(s)!", "model", galleryModel.Name, "clamAV", scanResults.ClamAVInfectedFiles, "pickles", scanResults.DangerousPickles)
return err
}
}
return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/metadata_type.go | core/gallery/metadata_type.go | package gallery
import "github.com/mudler/LocalAI/core/config"
type Metadata struct {
URL string `json:"url,omitempty" yaml:"url,omitempty"`
Name string `json:"name,omitempty" yaml:"name,omitempty"`
Description string `json:"description,omitempty" yaml:"description,omitempty"`
License string `json:"license,omitempty" yaml:"license,omitempty"`
URLs []string `json:"urls,omitempty" yaml:"urls,omitempty"`
Icon string `json:"icon,omitempty" yaml:"icon,omitempty"`
Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"`
// AdditionalFiles are used to add additional files to the model
AdditionalFiles []File `json:"files,omitempty" yaml:"files,omitempty"`
// Gallery is a reference to the gallery which contains the model
Gallery config.Gallery `json:"gallery,omitempty" yaml:"gallery,omitempty"`
// Installed is used to indicate if the model is installed or not
Installed bool `json:"installed,omitempty" yaml:"installed,omitempty"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/backends_test.go | core/gallery/backends_test.go | package gallery
import (
"context"
"encoding/json"
"os"
"path/filepath"
"runtime"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"gopkg.in/yaml.v2"
)
const (
testImage = "quay.io/mudler/tests:localai-backend-test"
)
var _ = Describe("Runtime capability-based backend selection", func() {
var tempDir string
BeforeEach(func() {
var err error
tempDir, err = os.MkdirTemp("", "gallery-caps-*")
Expect(err).NotTo(HaveOccurred())
})
AfterEach(func() {
os.RemoveAll(tempDir)
})
It("ListSystemBackends prefers optimal alias candidate", func() {
// Arrange two installed backends sharing the same alias
must := func(err error) { Expect(err).NotTo(HaveOccurred()) }
cpuDir := filepath.Join(tempDir, "cpu-llama-cpp")
must(os.MkdirAll(cpuDir, 0o750))
cpuMeta := &BackendMetadata{Alias: "llama-cpp", Name: "cpu-llama-cpp"}
b, _ := json.Marshal(cpuMeta)
must(os.WriteFile(filepath.Join(cpuDir, "metadata.json"), b, 0o644))
must(os.WriteFile(filepath.Join(cpuDir, "run.sh"), []byte(""), 0o755))
cudaDir := filepath.Join(tempDir, "cuda12-llama-cpp")
must(os.MkdirAll(cudaDir, 0o750))
cudaMeta := &BackendMetadata{Alias: "llama-cpp", Name: "cuda12-llama-cpp"}
b, _ = json.Marshal(cudaMeta)
must(os.WriteFile(filepath.Join(cudaDir, "metadata.json"), b, 0o644))
must(os.WriteFile(filepath.Join(cudaDir, "run.sh"), []byte(""), 0o755))
// Default system: alias should point to CPU
sysDefault, err := system.GetSystemState(
system.WithBackendPath(tempDir),
)
must(err)
sysDefault.GPUVendor = "" // force default selection
backs, err := ListSystemBackends(sysDefault)
must(err)
aliasBack, ok := backs.Get("llama-cpp")
Expect(ok).To(BeTrue())
Expect(aliasBack.RunFile).To(Equal(filepath.Join(cpuDir, "run.sh")))
// concrete entries remain
_, ok = backs.Get("cpu-llama-cpp")
Expect(ok).To(BeTrue())
_, ok = backs.Get("cuda12-llama-cpp")
Expect(ok).To(BeTrue())
// NVIDIA system: alias should point to CUDA
// Force capability to nvidia to make the test deterministic on platforms like darwin/arm64 (which default to metal)
os.Setenv("LOCALAI_FORCE_META_BACKEND_CAPABILITY", "nvidia")
defer os.Unsetenv("LOCALAI_FORCE_META_BACKEND_CAPABILITY")
sysNvidia, err := system.GetSystemState(
system.WithBackendPath(tempDir),
)
must(err)
sysNvidia.GPUVendor = "nvidia"
sysNvidia.VRAM = 8 * 1024 * 1024 * 1024
backs, err = ListSystemBackends(sysNvidia)
must(err)
aliasBack, ok = backs.Get("llama-cpp")
Expect(ok).To(BeTrue())
Expect(aliasBack.RunFile).To(Equal(filepath.Join(cudaDir, "run.sh")))
})
})
var _ = Describe("Gallery Backends", func() {
var (
tempDir string
galleries []config.Gallery
ml *model.ModelLoader
systemState *system.SystemState
)
BeforeEach(func() {
var err error
tempDir, err = os.MkdirTemp("", "gallery-test-*")
Expect(err).NotTo(HaveOccurred())
// Setup test galleries
galleries = []config.Gallery{
{
Name: "test-gallery",
URL: "https://gist.githubusercontent.com/mudler/71d5376bc2aa168873fa519fa9f4bd56/raw/0557f9c640c159fa8e4eab29e8d98df6a3d6e80f/backend-gallery.yaml",
},
}
systemState, err = system.GetSystemState(system.WithBackendPath(tempDir))
Expect(err).NotTo(HaveOccurred())
ml = model.NewModelLoader(systemState)
})
AfterEach(func() {
os.RemoveAll(tempDir)
})
Describe("InstallBackendFromGallery", func() {
It("should return error when backend is not found", func() {
err := InstallBackendFromGallery(context.TODO(), galleries, systemState, ml, "non-existent", nil, true)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("no backend found with name \"non-existent\""))
})
It("should install backend from gallery", func() {
err := InstallBackendFromGallery(context.TODO(), galleries, systemState, ml, "test-backend", nil, true)
Expect(err).ToNot(HaveOccurred())
Expect(filepath.Join(tempDir, "test-backend", "run.sh")).To(BeARegularFile())
})
})
Describe("Meta Backends", func() {
It("should identify meta backends correctly", func() {
metaBackend := &GalleryBackend{
Metadata: Metadata{
Name: "meta-backend",
},
CapabilitiesMap: map[string]string{
"nvidia": "nvidia-backend",
"amd": "amd-backend",
"intel": "intel-backend",
},
}
Expect(metaBackend.IsMeta()).To(BeTrue())
regularBackend := &GalleryBackend{
Metadata: Metadata{
Name: "regular-backend",
},
URI: testImage,
}
Expect(regularBackend.IsMeta()).To(BeFalse())
emptyMetaBackend := &GalleryBackend{
Metadata: Metadata{
Name: "empty-meta-backend",
},
CapabilitiesMap: map[string]string{},
}
Expect(emptyMetaBackend.IsMeta()).To(BeFalse())
nilMetaBackend := &GalleryBackend{
Metadata: Metadata{
Name: "nil-meta-backend",
},
CapabilitiesMap: nil,
}
Expect(nilMetaBackend.IsMeta()).To(BeFalse())
})
It("should find best backend from meta based on system capabilities", func() {
metaBackend := &GalleryBackend{
Metadata: Metadata{
Name: "meta-backend",
},
CapabilitiesMap: map[string]string{
"nvidia": "nvidia-backend",
"amd": "amd-backend",
"intel": "intel-backend",
"metal": "metal-backend",
"default": "default-backend",
},
}
nvidiaBackend := &GalleryBackend{
Metadata: Metadata{
Name: "nvidia-backend",
},
URI: testImage,
}
amdBackend := &GalleryBackend{
Metadata: Metadata{
Name: "amd-backend",
},
URI: testImage,
}
metalBackend := &GalleryBackend{
Metadata: Metadata{
Name: "metal-backend",
},
URI: testImage,
}
defaultBackend := &GalleryBackend{
Metadata: Metadata{
Name: "default-backend",
},
URI: testImage,
}
backends := GalleryElements[*GalleryBackend]{nvidiaBackend, amdBackend, metalBackend, defaultBackend}
if runtime.GOOS == "darwin" && runtime.GOARCH == "arm64" {
metal := &system.SystemState{}
bestBackend := metaBackend.FindBestBackendFromMeta(metal, backends)
Expect(bestBackend).To(Equal(metalBackend))
} else {
// Test with NVIDIA system state
nvidiaSystemState := &system.SystemState{GPUVendor: "nvidia", VRAM: 1000000000000}
bestBackend := metaBackend.FindBestBackendFromMeta(nvidiaSystemState, backends)
Expect(bestBackend).To(Equal(nvidiaBackend))
// Test with AMD system state
amdSystemState := &system.SystemState{GPUVendor: "amd", VRAM: 1000000000000}
bestBackend = metaBackend.FindBestBackendFromMeta(amdSystemState, backends)
Expect(bestBackend).To(Equal(amdBackend))
// Test with default system state (not enough VRAM)
defaultSystemState := &system.SystemState{GPUVendor: "amd"}
bestBackend = metaBackend.FindBestBackendFromMeta(defaultSystemState, backends)
Expect(bestBackend).To(Equal(defaultBackend))
// Test with default system state
defaultSystemState = &system.SystemState{GPUVendor: "default"}
bestBackend = metaBackend.FindBestBackendFromMeta(defaultSystemState, backends)
Expect(bestBackend).To(Equal(defaultBackend))
backends = GalleryElements[*GalleryBackend]{nvidiaBackend, amdBackend, metalBackend}
// Test with unsupported GPU vendor
unsupportedSystemState := &system.SystemState{GPUVendor: "unsupported"}
bestBackend = metaBackend.FindBestBackendFromMeta(unsupportedSystemState, backends)
Expect(bestBackend).To(BeNil())
}
})
It("should handle meta backend deletion correctly", func() {
if runtime.GOOS == "darwin" && runtime.GOARCH == "arm64" {
Skip("Skipping test on darwin/arm64")
}
metaBackend := &GalleryBackend{
Metadata: Metadata{
Name: "meta-backend",
},
CapabilitiesMap: map[string]string{
"nvidia": "nvidia-backend",
"amd": "amd-backend",
"intel": "intel-backend",
},
}
nvidiaBackend := &GalleryBackend{
Metadata: Metadata{
Name: "nvidia-backend",
},
URI: testImage,
}
amdBackend := &GalleryBackend{
Metadata: Metadata{
Name: "amd-backend",
},
URI: testImage,
}
gallery := config.Gallery{
Name: "test-gallery",
URL: "file://" + filepath.Join(tempDir, "backend-gallery.yaml"),
}
galleryBackend := GalleryBackends{amdBackend, nvidiaBackend, metaBackend}
dat, err := yaml.Marshal(galleryBackend)
Expect(err).NotTo(HaveOccurred())
err = os.WriteFile(filepath.Join(tempDir, "backend-gallery.yaml"), dat, 0644)
Expect(err).NotTo(HaveOccurred())
// Test with NVIDIA system state
nvidiaSystemState := &system.SystemState{
GPUVendor: "nvidia",
VRAM: 1000000000000,
Backend: system.Backend{BackendsPath: tempDir},
}
err = InstallBackendFromGallery(context.TODO(), []config.Gallery{gallery}, nvidiaSystemState, ml, "meta-backend", nil, true)
Expect(err).NotTo(HaveOccurred())
metaBackendPath := filepath.Join(tempDir, "meta-backend")
Expect(metaBackendPath).To(BeADirectory())
concreteBackendPath := filepath.Join(tempDir, "nvidia-backend")
Expect(concreteBackendPath).To(BeADirectory())
systemState, err := system.GetSystemState(
system.WithBackendPath(tempDir),
)
Expect(err).NotTo(HaveOccurred())
allBackends, err := ListSystemBackends(systemState)
Expect(err).NotTo(HaveOccurred())
Expect(allBackends).To(HaveKey("meta-backend"))
Expect(allBackends).To(HaveKey("nvidia-backend"))
// Delete meta backend by name
err = DeleteBackendFromSystem(systemState, "meta-backend")
Expect(err).NotTo(HaveOccurred())
// Verify meta backend directory is deleted
Expect(metaBackendPath).NotTo(BeADirectory())
// Verify concrete backend directory is deleted
Expect(concreteBackendPath).NotTo(BeADirectory())
})
It("should handle meta backend deletion correctly with aliases", func() {
if runtime.GOOS == "darwin" && runtime.GOARCH == "arm64" {
Skip("Skipping test on darwin/arm64")
}
metaBackend := &GalleryBackend{
Metadata: Metadata{
Name: "meta-backend",
},
Alias: "backend-alias",
CapabilitiesMap: map[string]string{
"nvidia": "nvidia-backend",
"amd": "amd-backend",
"intel": "intel-backend",
},
}
nvidiaBackend := &GalleryBackend{
Metadata: Metadata{
Name: "nvidia-backend",
},
Alias: "backend-alias",
URI: testImage,
}
amdBackend := &GalleryBackend{
Metadata: Metadata{
Name: "amd-backend",
},
Alias: "backend-alias",
URI: testImage,
}
gallery := config.Gallery{
Name: "test-gallery",
URL: "file://" + filepath.Join(tempDir, "backend-gallery.yaml"),
}
galleryBackend := GalleryBackends{amdBackend, nvidiaBackend, metaBackend}
dat, err := yaml.Marshal(galleryBackend)
Expect(err).NotTo(HaveOccurred())
err = os.WriteFile(filepath.Join(tempDir, "backend-gallery.yaml"), dat, 0644)
Expect(err).NotTo(HaveOccurred())
// Test with NVIDIA system state
nvidiaSystemState := &system.SystemState{
GPUVendor: "nvidia",
VRAM: 1000000000000,
Backend: system.Backend{BackendsPath: tempDir},
}
err = InstallBackendFromGallery(context.TODO(), []config.Gallery{gallery}, nvidiaSystemState, ml, "meta-backend", nil, true)
Expect(err).NotTo(HaveOccurred())
metaBackendPath := filepath.Join(tempDir, "meta-backend")
Expect(metaBackendPath).To(BeADirectory())
concreteBackendPath := filepath.Join(tempDir, "nvidia-backend")
Expect(concreteBackendPath).To(BeADirectory())
systemState, err := system.GetSystemState(
system.WithBackendPath(tempDir),
)
Expect(err).NotTo(HaveOccurred())
allBackends, err := ListSystemBackends(systemState)
Expect(err).NotTo(HaveOccurred())
Expect(allBackends).To(HaveKey("meta-backend"))
Expect(allBackends).To(HaveKey("nvidia-backend"))
mback, exists := allBackends.Get("meta-backend")
Expect(exists).To(BeTrue())
Expect(mback.IsMeta).To(BeTrue())
Expect(mback.Metadata.MetaBackendFor).To(Equal("nvidia-backend"))
// Delete meta backend by name
err = DeleteBackendFromSystem(systemState, "meta-backend")
Expect(err).NotTo(HaveOccurred())
// Verify meta backend directory is deleted
Expect(metaBackendPath).NotTo(BeADirectory())
// Verify concrete backend directory is deleted
Expect(concreteBackendPath).NotTo(BeADirectory())
})
It("should handle meta backend deletion correctly with aliases pointing to the same backend", func() {
if runtime.GOOS == "darwin" && runtime.GOARCH == "arm64" {
Skip("Skipping test on darwin/arm64")
}
metaBackend := &GalleryBackend{
Metadata: Metadata{
Name: "meta-backend",
},
Alias: "meta-backend",
CapabilitiesMap: map[string]string{
"nvidia": "nvidia-backend",
"amd": "amd-backend",
"intel": "intel-backend",
},
}
nvidiaBackend := &GalleryBackend{
Metadata: Metadata{
Name: "nvidia-backend",
},
Alias: "meta-backend",
URI: testImage,
}
amdBackend := &GalleryBackend{
Metadata: Metadata{
Name: "amd-backend",
},
Alias: "meta-backend",
URI: testImage,
}
gallery := config.Gallery{
Name: "test-gallery",
URL: "file://" + filepath.Join(tempDir, "backend-gallery.yaml"),
}
galleryBackend := GalleryBackends{amdBackend, nvidiaBackend, metaBackend}
dat, err := yaml.Marshal(galleryBackend)
Expect(err).NotTo(HaveOccurred())
err = os.WriteFile(filepath.Join(tempDir, "backend-gallery.yaml"), dat, 0644)
Expect(err).NotTo(HaveOccurred())
// Test with NVIDIA system state
nvidiaSystemState := &system.SystemState{
GPUVendor: "nvidia",
VRAM: 1000000000000,
Backend: system.Backend{BackendsPath: tempDir},
}
err = InstallBackendFromGallery(context.TODO(), []config.Gallery{gallery}, nvidiaSystemState, ml, "meta-backend", nil, true)
Expect(err).NotTo(HaveOccurred())
metaBackendPath := filepath.Join(tempDir, "meta-backend")
Expect(metaBackendPath).To(BeADirectory())
concreteBackendPath := filepath.Join(tempDir, "nvidia-backend")
Expect(concreteBackendPath).To(BeADirectory())
systemState, err := system.GetSystemState(
system.WithBackendPath(tempDir),
)
Expect(err).NotTo(HaveOccurred())
allBackends, err := ListSystemBackends(systemState)
Expect(err).NotTo(HaveOccurred())
Expect(allBackends).To(HaveKey("meta-backend"))
Expect(allBackends).To(HaveKey("nvidia-backend"))
mback, exists := allBackends.Get("meta-backend")
Expect(exists).To(BeTrue())
Expect(mback.RunFile).To(Equal(filepath.Join(tempDir, "nvidia-backend", "run.sh")))
// Delete meta backend by name
err = DeleteBackendFromSystem(systemState, "meta-backend")
Expect(err).NotTo(HaveOccurred())
// Verify meta backend directory is deleted
Expect(metaBackendPath).NotTo(BeADirectory())
// Verify concrete backend directory is deleted
Expect(concreteBackendPath).NotTo(BeADirectory())
})
It("should list meta backends correctly in system backends", func() {
// Create a meta backend directory with metadata
metaBackendPath := filepath.Join(tempDir, "meta-backend")
err := os.MkdirAll(metaBackendPath, 0750)
Expect(err).NotTo(HaveOccurred())
// Create metadata file pointing to concrete backend
metadata := &BackendMetadata{
MetaBackendFor: "concrete-backend",
Name: "meta-backend",
InstalledAt: "2023-01-01T00:00:00Z",
}
metadataData, err := json.Marshal(metadata)
Expect(err).NotTo(HaveOccurred())
err = os.WriteFile(filepath.Join(metaBackendPath, "metadata.json"), metadataData, 0644)
Expect(err).NotTo(HaveOccurred())
// Create the concrete backend directory with run.sh
concreteBackendPath := filepath.Join(tempDir, "concrete-backend")
err = os.MkdirAll(concreteBackendPath, 0750)
Expect(err).NotTo(HaveOccurred())
err = os.WriteFile(filepath.Join(concreteBackendPath, "metadata.json"), []byte("{}"), 0755)
Expect(err).NotTo(HaveOccurred())
err = os.WriteFile(filepath.Join(concreteBackendPath, "run.sh"), []byte(""), 0755)
Expect(err).NotTo(HaveOccurred())
// List system backends
systemState, err := system.GetSystemState(
system.WithBackendPath(tempDir),
)
Expect(err).NotTo(HaveOccurred())
backends, err := ListSystemBackends(systemState)
Expect(err).NotTo(HaveOccurred())
metaBackend, exists := backends.Get("meta-backend")
concreteBackendRunFile := filepath.Join(tempDir, "concrete-backend", "run.sh")
// Should include both the meta backend name and concrete backend name
Expect(exists).To(BeTrue())
Expect(backends.Exists("concrete-backend")).To(BeTrue())
// meta-backend should be empty
Expect(metaBackend.IsMeta).To(BeTrue())
Expect(metaBackend.RunFile).To(Equal(concreteBackendRunFile))
// concrete-backend should point to its own run.sh
concreteBackend, exists := backends.Get("concrete-backend")
Expect(exists).To(BeTrue())
Expect(concreteBackend.RunFile).To(Equal(concreteBackendRunFile))
})
})
// Specs for InstallBackend. The second spec was renamed: it previously
// duplicated the name "should overwrite existing backend" although it
// installs into a pre-existing directory that holds no metadata yet.
Describe("InstallBackend", func() {
    It("should create base path if it doesn't exist", func() {
        newPath := filepath.Join(tempDir, "new-path")
        backend := GalleryBackend{
            Metadata: Metadata{
                Name: "test-backend",
            },
            URI: "test-uri",
        }
        systemState, err := system.GetSystemState(
            system.WithBackendPath(newPath),
        )
        Expect(err).NotTo(HaveOccurred())
        err = InstallBackend(context.TODO(), systemState, ml, &backend, nil)
        Expect(newPath).To(BeADirectory())
        Expect(err).To(HaveOccurred()) // Will fail due to invalid URI, but path should be created
    })
    It("should overwrite existing backend", func() {
        if runtime.GOOS == "darwin" && runtime.GOARCH == "arm64" {
            Skip("Skipping test on darwin/arm64")
        }
        newPath := filepath.Join(tempDir, "test-backend")
        // Create a dummy backend directory with stale content that the
        // install must replace.
        err := os.MkdirAll(newPath, 0750)
        Expect(err).NotTo(HaveOccurred())
        err = os.WriteFile(filepath.Join(newPath, "metadata.json"), []byte("foo"), 0644)
        Expect(err).NotTo(HaveOccurred())
        err = os.WriteFile(filepath.Join(newPath, "run.sh"), []byte(""), 0644)
        Expect(err).NotTo(HaveOccurred())
        backend := GalleryBackend{
            Metadata: Metadata{
                Name: "test-backend",
            },
            URI:   "quay.io/mudler/tests:localai-backend-test",
            Alias: "test-alias",
        }
        systemState, err := system.GetSystemState(
            system.WithBackendPath(tempDir),
        )
        Expect(err).NotTo(HaveOccurred())
        err = InstallBackend(context.TODO(), systemState, ml, &backend, nil)
        Expect(err).ToNot(HaveOccurred())
        Expect(filepath.Join(tempDir, "test-backend", "metadata.json")).To(BeARegularFile())
        dat, err := os.ReadFile(filepath.Join(tempDir, "test-backend", "metadata.json"))
        Expect(err).ToNot(HaveOccurred())
        // The stale "foo" content must have been replaced.
        Expect(string(dat)).ToNot(Equal("foo"))
    })
    It("should install into an existing backend directory without metadata", func() {
        if runtime.GOOS == "darwin" && runtime.GOARCH == "arm64" {
            Skip("Skipping test on darwin/arm64")
        }
        newPath := filepath.Join(tempDir, "test-backend")
        // Create a dummy backend directory (empty: no metadata, no run file)
        err := os.MkdirAll(newPath, 0750)
        Expect(err).NotTo(HaveOccurred())
        backend := GalleryBackend{
            Metadata: Metadata{
                Name: "test-backend",
            },
            URI:   "quay.io/mudler/tests:localai-backend-test",
            Alias: "test-alias",
        }
        systemState, err := system.GetSystemState(
            system.WithBackendPath(tempDir),
        )
        Expect(err).NotTo(HaveOccurred())
        Expect(filepath.Join(tempDir, "test-backend", "metadata.json")).ToNot(BeARegularFile())
        err = InstallBackend(context.TODO(), systemState, ml, &backend, nil)
        Expect(err).ToNot(HaveOccurred())
        Expect(filepath.Join(tempDir, "test-backend", "metadata.json")).To(BeARegularFile())
    })
    It("should create alias file when specified", func() {
        if runtime.GOOS == "darwin" && runtime.GOARCH == "arm64" {
            Skip("Skipping test on darwin/arm64")
        }
        backend := GalleryBackend{
            Metadata: Metadata{
                Name: "test-backend",
            },
            URI:   "quay.io/mudler/tests:localai-backend-test",
            Alias: "test-alias",
        }
        systemState, err := system.GetSystemState(
            system.WithBackendPath(tempDir),
        )
        Expect(err).NotTo(HaveOccurred())
        err = InstallBackend(context.TODO(), systemState, ml, &backend, nil)
        Expect(err).ToNot(HaveOccurred())
        Expect(filepath.Join(tempDir, "test-backend", "metadata.json")).To(BeARegularFile())
        // Read and verify metadata
        metadataData, err := os.ReadFile(filepath.Join(tempDir, "test-backend", "metadata.json"))
        Expect(err).ToNot(HaveOccurred())
        var metadata BackendMetadata
        err = json.Unmarshal(metadataData, &metadata)
        Expect(err).ToNot(HaveOccurred())
        Expect(metadata.Alias).To(Equal("test-alias"))
        Expect(metadata.Name).To(Equal("test-backend"))
        Expect(filepath.Join(tempDir, "test-backend", "run.sh")).To(BeARegularFile())
        // Check that the alias was recognized
        backends, err := ListSystemBackends(systemState)
        Expect(err).ToNot(HaveOccurred())
        aliasBackend, exists := backends.Get("test-alias")
        Expect(exists).To(BeTrue())
        Expect(aliasBackend.RunFile).To(Equal(filepath.Join(tempDir, "test-backend", "run.sh")))
        testB, exists := backends.Get("test-backend")
        Expect(exists).To(BeTrue())
        Expect(testB.RunFile).To(Equal(filepath.Join(tempDir, "test-backend", "run.sh")))
    })
})
// Specs for DeleteBackendFromSystem. The second spec was renamed: it
// asserts that an error IS returned for an unknown backend, while the
// previous name ("should not error ...") claimed the opposite.
Describe("DeleteBackendFromSystem", func() {
    It("should delete backend directory", func() {
        backendName := "test-backend"
        backendPath := filepath.Join(tempDir, backendName)
        // Create a dummy backend directory with the minimal file layout.
        err := os.MkdirAll(backendPath, 0750)
        Expect(err).NotTo(HaveOccurred())
        err = os.WriteFile(filepath.Join(backendPath, "metadata.json"), []byte("{}"), 0644)
        Expect(err).NotTo(HaveOccurred())
        err = os.WriteFile(filepath.Join(backendPath, "run.sh"), []byte(""), 0644)
        Expect(err).NotTo(HaveOccurred())
        systemState, err := system.GetSystemState(
            system.WithBackendPath(tempDir),
        )
        Expect(err).NotTo(HaveOccurred())
        err = DeleteBackendFromSystem(systemState, backendName)
        Expect(err).NotTo(HaveOccurred())
        Expect(backendPath).NotTo(BeADirectory())
    })
    It("should error when backend doesn't exist", func() {
        systemState, err := system.GetSystemState(
            system.WithBackendPath(tempDir),
        )
        Expect(err).NotTo(HaveOccurred())
        err = DeleteBackendFromSystem(systemState, "non-existent")
        Expect(err).To(HaveOccurred())
    })
})
// Specs for ListSystemBackends: plain listing, alias resolution via
// metadata.json, and the error path for a missing backends directory.
Describe("ListSystemBackends", func() {
    It("should list backends without aliases", func() {
        // Create some dummy backend directories
        backendNames := []string{"backend1", "backend2", "backend3"}
        for _, name := range backendNames {
            err := os.MkdirAll(filepath.Join(tempDir, name), 0750)
            Expect(err).NotTo(HaveOccurred())
            err = os.WriteFile(filepath.Join(tempDir, name, "metadata.json"), []byte("{}"), 0755)
            Expect(err).NotTo(HaveOccurred())
            // run.sh is what marks a directory as a runnable backend.
            err = os.WriteFile(filepath.Join(tempDir, name, "run.sh"), []byte(""), 0755)
            Expect(err).NotTo(HaveOccurred())
        }
        systemState, err := system.GetSystemState(
            system.WithBackendPath(tempDir),
        )
        Expect(err).NotTo(HaveOccurred())
        backends, err := ListSystemBackends(systemState)
        Expect(err).NotTo(HaveOccurred())
        Expect(backends).To(HaveLen(len(backendNames)))
        for _, name := range backendNames {
            backend, exists := backends.Get(name)
            Expect(exists).To(BeTrue())
            Expect(backend.RunFile).To(Equal(filepath.Join(tempDir, name, "run.sh")))
        }
    })
    It("should handle backends with aliases", func() {
        backendName := "backend1"
        alias := "alias1"
        backendPath := filepath.Join(tempDir, backendName)
        // Create backend directory
        err := os.MkdirAll(backendPath, 0750)
        Expect(err).NotTo(HaveOccurred())
        // Create metadata file with alias
        metadata := &BackendMetadata{
            Alias:       alias,
            Name:        backendName,
            InstalledAt: "2023-01-01T00:00:00Z",
        }
        metadataData, err := json.Marshal(metadata)
        Expect(err).NotTo(HaveOccurred())
        err = os.WriteFile(filepath.Join(backendPath, "metadata.json"), metadataData, 0644)
        Expect(err).NotTo(HaveOccurred())
        err = os.WriteFile(filepath.Join(backendPath, "run.sh"), []byte(""), 0755)
        Expect(err).NotTo(HaveOccurred())
        systemState, err := system.GetSystemState(
            system.WithBackendPath(tempDir),
        )
        Expect(err).NotTo(HaveOccurred())
        backends, err := ListSystemBackends(systemState)
        Expect(err).NotTo(HaveOccurred())
        // The alias entry must resolve to the concrete backend's run file.
        backend, exists := backends.Get(alias)
        Expect(exists).To(BeTrue())
        Expect(backend.RunFile).To(Equal(filepath.Join(tempDir, backendName, "run.sh")))
    })
    It("should return error when base path doesn't exist", func() {
        systemState, err := system.GetSystemState(
            system.WithBackendPath("foobardir"),
        )
        Expect(err).NotTo(HaveOccurred())
        _, err = ListSystemBackends(systemState)
        Expect(err).To(HaveOccurred())
    })
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/gallery.go | core/gallery/gallery.go | package gallery
import (
"context"
"fmt"
"os"
"path/filepath"
"sort"
"strings"
"time"
"github.com/lithammer/fuzzysearch/fuzzy"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/downloader"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/LocalAI/pkg/xsync"
"github.com/mudler/xlog"
"gopkg.in/yaml.v2"
)
// GetGalleryConfigFromURL fetches the YAML document at url (resolved via
// basePath for local references) and decodes it into a value of type T.
// Failures are logged; the zero value of T accompanies any error.
func GetGalleryConfigFromURL[T any](url string, basePath string) (T, error) {
    var cfg T
    readErr := downloader.URI(url).ReadWithCallback(basePath, func(_ string, raw []byte) error {
        return yaml.Unmarshal(raw, &cfg)
    })
    if readErr != nil {
        xlog.Error("failed to get gallery config for url", "error", readErr, "url", url)
        return cfg, readErr
    }
    return cfg, nil
}
// GetGalleryConfigFromURLWithContext behaves like GetGalleryConfigFromURL
// but threads ctx through the download so the fetch can be cancelled.
func GetGalleryConfigFromURLWithContext[T any](ctx context.Context, url string, basePath string) (T, error) {
    var cfg T
    readErr := downloader.URI(url).ReadWithAuthorizationAndCallback(ctx, basePath, "", func(_ string, raw []byte) error {
        return yaml.Unmarshal(raw, &cfg)
    })
    if readErr != nil {
        xlog.Error("failed to get gallery config for url", "error", readErr, "url", url)
        return cfg, readErr
    }
    return cfg, nil
}
// ReadConfigFile reads the YAML file at filePath and unmarshals it into a
// freshly allocated value of type T.
//
// Errors are wrapped with %w (instead of the previous %v) so callers can
// inspect the underlying cause with errors.Is/errors.As, e.g. to detect
// fs.ErrNotExist.
func ReadConfigFile[T any](filePath string) (*T, error) {
    // Read the YAML file
    yamlFile, err := os.ReadFile(filePath)
    if err != nil {
        return nil, fmt.Errorf("failed to read YAML file: %w", err)
    }
    // Unmarshal YAML data into a Config struct
    var config T
    if err := yaml.Unmarshal(yamlFile, &config); err != nil {
        return nil, fmt.Errorf("failed to unmarshal YAML: %w", err)
    }
    return &config, nil
}
// GalleryElement is the common contract shared by gallery models and
// gallery backends: identity, description, tags, license, installed
// state, and the gallery the element belongs to.
type GalleryElement interface {
    SetGallery(gallery config.Gallery)
    SetInstalled(installed bool)
    GetName() string
    GetDescription() string
    GetTags() []string
    GetInstalled() bool
    GetLicense() string
    GetGallery() config.Gallery
}

// GalleryElements is a list of gallery elements with search, sort and
// pagination helpers.
type GalleryElements[T GalleryElement] []T
// Search returns the elements whose name, gallery name, description or
// tags match term, case-insensitively, via fuzzy or substring matching.
func (gm GalleryElements[T]) Search(term string) GalleryElements[T] {
    var matches GalleryElements[T]
    needle := strings.ToLower(term)
    for _, el := range gm {
        name := strings.ToLower(el.GetName())
        galleryName := strings.ToLower(el.GetGallery().Name)
        switch {
        case fuzzy.Match(needle, name),
            fuzzy.Match(needle, galleryName),
            strings.Contains(name, needle),
            strings.Contains(strings.ToLower(el.GetDescription()), needle),
            strings.Contains(galleryName, needle),
            strings.Contains(strings.ToLower(strings.Join(el.GetTags(), ",")), needle):
            matches = append(matches, el)
        }
    }
    return matches
}
// SortByName sorts the elements in place by case-insensitive name;
// "asc" sorts ascending, anything else descending. Returns gm.
func (gm GalleryElements[T]) SortByName(sortOrder string) GalleryElements[T] {
    asc := sortOrder == "asc"
    sort.Slice(gm, func(i, j int) bool {
        a := strings.ToLower(gm[i].GetName())
        b := strings.ToLower(gm[j].GetName())
        if asc {
            return a < b
        }
        return a > b
    })
    return gm
}
// SortByRepository sorts the elements in place by case-insensitive
// gallery (repository) name; "asc" ascending, anything else descending.
func (gm GalleryElements[T]) SortByRepository(sortOrder string) GalleryElements[T] {
    asc := sortOrder == "asc"
    sort.Slice(gm, func(i, j int) bool {
        a := strings.ToLower(gm[i].GetGallery().Name)
        b := strings.ToLower(gm[j].GetGallery().Name)
        if asc {
            return a < b
        }
        return a > b
    })
    return gm
}
// SortByLicense sorts the elements by case-insensitive license name.
// Elements with an empty license sort after all others in ascending
// order and before them in descending order.
//
// Fix: the previous implementation negated a non-strict comparison for
// "desc" (!(a < b) == a >= b), so two equal licenses reported
// less(i,j) == less(j,i) == true — an invalid ordering for sort.Slice,
// whose less function must be a strict ordering.
func (gm GalleryElements[T]) SortByLicense(sortOrder string) GalleryElements[T] {
    desc := sortOrder == "desc"
    sort.Slice(gm, func(i, j int) bool {
        li := gm[i].GetLicense()
        lj := gm[j].GetLicense()
        switch {
        case li == "" && lj == "":
            return false // equal: strictly not-less in both directions
        case li == "":
            return desc // empty license last for asc, first for desc
        case lj == "":
            return !desc
        }
        a, b := strings.ToLower(li), strings.ToLower(lj)
        if desc {
            return a > b // strict: equal values compare false both ways
        }
        return a < b
    })
    return gm
}
// SortByInstalled sorts installed elements first (ascending) or last
// (descending), breaking ties by case-insensitive name.
//
// Fix: the previous implementation returned !result for "desc", which
// made two fully equal elements compare less-than in both directions —
// sort.Slice requires a strict ordering.
func (gm GalleryElements[T]) SortByInstalled(sortOrder string) GalleryElements[T] {
    desc := sortOrder == "desc"
    sort.Slice(gm, func(i, j int) bool {
        // Primary key: installed status (installed first when ascending).
        if gm[i].GetInstalled() != gm[j].GetInstalled() {
            if desc {
                return !gm[i].GetInstalled()
            }
            return gm[i].GetInstalled()
        }
        // Secondary key: case-insensitive name.
        a := strings.ToLower(gm[i].GetName())
        b := strings.ToLower(gm[j].GetName())
        if desc {
            return a > b
        }
        return a < b
    })
    return gm
}
// FindByName returns the first element whose name equals name
// (case-insensitive), or the zero value of T when none matches.
func (gm GalleryElements[T]) FindByName(name string) T {
    for i := range gm {
        if strings.EqualFold(gm[i].GetName(), name) {
            return gm[i]
        }
    }
    var none T
    return none
}
// Paginate returns the 1-based page pageNum with itemsNum elements per
// page; out-of-range pages yield an empty (possibly shorter) slice.
//
// Fix: pageNum <= 0 or itemsNum < 0 previously produced a negative slice
// index and panicked; such inputs now return an empty result.
func (gm GalleryElements[T]) Paginate(pageNum int, itemsNum int) GalleryElements[T] {
    if pageNum < 1 || itemsNum < 1 {
        return GalleryElements[T]{}
    }
    start := (pageNum - 1) * itemsNum
    if start > len(gm) {
        start = len(gm)
    }
    end := start + itemsNum
    if end > len(gm) {
        end = len(gm)
    }
    return gm[start:end]
}
// FindGalleryElement looks up an element by name, case-insensitively.
// Plain names match the element name; "<gallery>@<element>" names match
// the gallery-qualified ID. OS path separators in name are normalized to
// "__" first. Returns the zero value of T when nothing matches.
//
// Cleanup: strings.EqualFold is already case-insensitive, so the
// previous ToLower wrapping of both arguments was redundant.
func FindGalleryElement[T GalleryElement](models []T, name string) T {
    var model T
    name = strings.ReplaceAll(name, string(os.PathSeparator), "__")
    if !strings.Contains(name, "@") {
        for _, m := range models {
            if strings.EqualFold(m.GetName(), name) {
                model = m
                break
            }
        }
    } else {
        for _, m := range models {
            if strings.EqualFold(name, fmt.Sprintf("%s@%s", m.GetGallery().Name, m.GetName())) {
                model = m
                break
            }
        }
    }
    return model
}
// AvailableGalleryModels lists the models exposed by the given galleries.
// Galleries are remote YAML indexes (e.g. hosted on GitHub); each entry
// may carry overrides for a model's settings. A model counts as installed
// when a "<name>.yaml" config file exists in the models path.
func AvailableGalleryModels(galleries []config.Gallery, systemState *system.SystemState) (GalleryElements[*GalleryModel], error) {
    modelsPath := systemState.Model.ModelsPath
    isInstalled := func(model *GalleryModel) bool {
        _, statErr := os.Stat(filepath.Join(modelsPath, fmt.Sprintf("%s.yaml", model.GetName())))
        return statErr == nil
    }
    var all []*GalleryModel
    for _, gallery := range galleries {
        elements, err := getGalleryElements(gallery, modelsPath, isInstalled)
        if err != nil {
            return nil, err
        }
        all = append(all, elements...)
    }
    return all, nil
}
// AvailableBackends lists the backends exposed by the given galleries,
// marking as installed those already present among the system backends.
func AvailableBackends(galleries []config.Gallery, systemState *system.SystemState) (GalleryElements[*GalleryBackend], error) {
    installed, err := ListSystemBackends(systemState)
    if err != nil {
        return nil, err
    }
    isInstalled := func(b *GalleryBackend) bool {
        return installed.Exists(b.GetName())
    }
    var all []*GalleryBackend
    for _, gallery := range galleries {
        elements, err := getGalleryElements(gallery, systemState.Backend.BackendsPath, isInstalled)
        if err != nil {
            return nil, err
        }
        all = append(all, elements...)
    }
    return all, nil
}
// findGalleryURLFromReferenceURL dereferences a ".ref" gallery URL: the
// file at url contains the file name of the real gallery index, which is
// resolved relative to the directory portion of url.
func findGalleryURLFromReferenceURL(url string, basePath string) (string, error) {
    var resolved string
    readErr := downloader.URI(url).ReadWithCallback(basePath, func(u string, data []byte) error {
        resolved = string(data)
        if len(resolved) == 0 {
            return fmt.Errorf("invalid reference file at url %s: %s", u, data)
        }
        // Keep everything up to (and including) the last "/" of the
        // reference URL and append the referenced file name.
        cut := strings.LastIndex(u, "/")
        resolved = u[:cut+1] + resolved
        return nil
    })
    return resolved, readErr
}
// galleryCacheEntry holds the raw YAML of a gallery index together with
// the time it was fetched, for in-memory caching.
type galleryCacheEntry struct {
    yamlEntry   []byte    // raw gallery index as downloaded
    lastUpdated time.Time // fetch time, used for expiry
}

// hasExpired reports whether the entry is older than one hour.
func (entry galleryCacheEntry) hasExpired() bool {
    return entry.lastUpdated.Before(time.Now().Add(-1 * time.Hour))
}

// galleryCache caches gallery indexes keyed by "<name>-<url>".
var galleryCache = xsync.NewSyncedMap[string, galleryCacheEntry]()
// getGalleryElements returns the elements of a gallery index, served from
// a one-hour in-memory cache when possible, otherwise downloaded. Each
// element is stamped with its gallery and with the installed state
// reported by isInstalledCallback.
func getGalleryElements[T GalleryElement](gallery config.Gallery, basePath string, isInstalledCallback func(T) bool) ([]T, error) {
    var models []T = []T{}

    // ".ref" URLs are indirections: the referenced file holds the real URL.
    if strings.HasSuffix(gallery.URL, ".ref") {
        var err error
        gallery.URL, err = findGalleryURLFromReferenceURL(gallery.URL, basePath)
        if err != nil {
            return models, err
        }
    }

    cacheKey := fmt.Sprintf("%s-%s", gallery.Name, gallery.URL)
    if galleryCache.Exists(cacheKey) {
        entry := galleryCache.Get(cacheKey)
        // refresh if last updated is more than 1 hour ago
        if !entry.hasExpired() {
            err := yaml.Unmarshal(entry.yamlEntry, &models)
            if err != nil {
                return models, err
            }
        } else {
            galleryCache.Delete(cacheKey)
        }
    }

    uri := downloader.URI(gallery.URL)

    // models is still empty on a cache miss or an expired entry: fetch.
    if len(models) == 0 {
        err := uri.ReadWithCallback(basePath, func(url string, d []byte) error {
            // Cache the raw YAML before decoding; a decode error below is
            // surfaced to the caller.
            galleryCache.Set(cacheKey, galleryCacheEntry{
                yamlEntry:   d,
                lastUpdated: time.Now(),
            })
            return yaml.Unmarshal(d, &models)
        })
        if err != nil {
            if yamlErr, ok := err.(*yaml.TypeError); ok {
                xlog.Debug("YAML errors", "errors", strings.Join(yamlErr.Errors, "\n"), "models", models)
            }
            return models, fmt.Errorf("failed to read gallery elements: %w", err)
        }
    }

    // Add gallery to models
    for _, model := range models {
        model.SetGallery(gallery)
        model.SetInstalled(isInstalledCallback(model))
    }
    return models, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/backend_types.go | core/gallery/backend_types.go | package gallery
import (
"fmt"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/xlog"
)
// BackendMetadata represents the metadata stored in a JSON file for each installed backend
type BackendMetadata struct {
    // Alias is an optional alternative name for the backend
    Alias string `json:"alias,omitempty"`
    // MetaBackendFor points to the concrete backend if this is a meta backend
    MetaBackendFor string `json:"meta_backend_for,omitempty"`
    // Name is the original name from the gallery
    Name string `json:"name,omitempty"`
    // GalleryURL is the URL of the gallery this backend came from
    GalleryURL string `json:"gallery_url,omitempty"`
    // InstalledAt is the timestamp when the backend was installed
    InstalledAt string `json:"installed_at,omitempty"`
}

// GalleryBackend describes a backend entry in a gallery. A backend with a
// CapabilitiesMap and no URI is a "meta" backend that resolves to one of
// the concrete backends named in the map.
type GalleryBackend struct {
    Metadata `json:",inline" yaml:",inline"`
    // Alias is an optional alternative name the backend is registered under.
    Alias string `json:"alias,omitempty" yaml:"alias,omitempty"`
    // URI is the source (e.g. an OCI image or directory) of the backend payload.
    URI string `json:"uri,omitempty" yaml:"uri,omitempty"`
    // Mirrors are alternative download sources tried when URI fails.
    Mirrors []string `json:"mirrors,omitempty" yaml:"mirrors,omitempty"`
    // CapabilitiesMap maps a system capability name to a concrete backend name.
    CapabilitiesMap map[string]string `json:"capabilities,omitempty" yaml:"capabilities,omitempty"`
}
// FindBestBackendFromMeta resolves a meta backend to the concrete backend
// matching the detected system capability. It returns nil when systemState
// is nil or no entry of the capabilities map applies.
func (backend *GalleryBackend) FindBestBackendFromMeta(systemState *system.SystemState, backends GalleryElements[*GalleryBackend]) *GalleryBackend {
    if systemState == nil {
        return nil
    }
    // Detect the capability once instead of recomputing it for every use
    // (it was previously evaluated up to three times per call).
    // NOTE(review): assumes systemState.Capability is deterministic for a
    // given map — confirm.
    capability := systemState.Capability(backend.CapabilitiesMap)
    realBackend := backend.CapabilitiesMap[capability]
    if realBackend == "" {
        xlog.Debug("No backend found for reported capability", "backend", backend.Name, "reportedCapability", capability)
        return nil
    }
    xlog.Debug("Found backend for reported capability", "backend", backend.Name, "reportedCapability", capability)
    return backends.FindByName(realBackend)
}
// GetInstalled reports whether the backend is installed on this system.
func (m *GalleryBackend) GetInstalled() bool {
    return m.Installed
}

// GetLicense returns the backend's license string.
func (m *GalleryBackend) GetLicense() string {
    return m.License
}

// GalleryBackends is a plain list of gallery backends.
type GalleryBackends []*GalleryBackend

// SetGallery records the gallery this backend was loaded from.
func (m *GalleryBackend) SetGallery(gallery config.Gallery) {
    m.Gallery = gallery
}

// IsMeta reports whether this is a meta backend: it has a capabilities
// map but no URI of its own.
func (m *GalleryBackend) IsMeta() bool {
    return len(m.CapabilitiesMap) > 0 && m.URI == ""
}

// SetInstalled records the installed state of the backend.
func (m *GalleryBackend) SetInstalled(installed bool) {
    m.Installed = installed
}

// GetName returns the backend's gallery name.
func (m *GalleryBackend) GetName() string {
    return m.Name
}

// GetGallery returns the gallery this backend belongs to.
func (m *GalleryBackend) GetGallery() config.Gallery {
    return m.Gallery
}

// GetDescription returns the backend's description.
func (m *GalleryBackend) GetDescription() string {
    return m.Description
}

// GetTags returns the backend's tags.
func (m *GalleryBackend) GetTags() []string {
    return m.Tags
}

// ID returns the gallery-qualified identifier "<gallery>@<name>".
func (m GalleryBackend) ID() string {
    return fmt.Sprintf("%s@%s", m.Gallery.Name, m.Name)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/backends.go | core/gallery/backends.go | // Package gallery provides installation and registration utilities for LocalAI backends,
// including meta-backend resolution based on system capabilities.
package gallery
import (
"context"
"encoding/json"
"errors"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/downloader"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/xlog"
cp "github.com/otiai10/copy"
)
const (
metadataFile = "metadata.json"
runFile = "run.sh"
)
// backendCandidate represents an installed concrete backend option for a given alias
type backendCandidate struct {
    name    string // backend directory name
    runFile string // full path to the backend's run.sh
}
// readBackendMetadata reads the metadata JSON file for a backend
// directory. It returns (nil, nil) when no metadata file exists, for
// backward compatibility with backends installed before metadata was
// introduced. Errors are wrapped with %w so callers can unwrap the cause.
func readBackendMetadata(backendPath string) (*BackendMetadata, error) {
    metadataPath := filepath.Join(backendPath, metadataFile)

    // If metadata file doesn't exist, return nil (for backward compatibility)
    if _, err := os.Stat(metadataPath); os.IsNotExist(err) {
        return nil, nil
    }

    data, err := os.ReadFile(metadataPath)
    if err != nil {
        return nil, fmt.Errorf("failed to read metadata file %q: %w", metadataPath, err)
    }

    var metadata BackendMetadata
    if err := json.Unmarshal(data, &metadata); err != nil {
        return nil, fmt.Errorf("failed to unmarshal metadata file %q: %w", metadataPath, err)
    }

    return &metadata, nil
}
// writeBackendMetadata writes the metadata JSON file for a backend
// directory, pretty-printed with two-space indentation. Errors are
// wrapped with %w so callers can unwrap the cause.
func writeBackendMetadata(backendPath string, metadata *BackendMetadata) error {
    metadataPath := filepath.Join(backendPath, metadataFile)

    data, err := json.MarshalIndent(metadata, "", "  ")
    if err != nil {
        return fmt.Errorf("failed to marshal metadata: %w", err)
    }

    if err := os.WriteFile(metadataPath, data, 0644); err != nil {
        return fmt.Errorf("failed to write metadata file %q: %w", metadataPath, err)
    }

    return nil
}
// InstallBackendFromGallery installs a backend from the gallery.
// Unless force is set, the call is a no-op when the backend is already
// installed. Meta backends (capability maps without a URI) are resolved
// to the best concrete backend for the current system; the concrete
// backend is installed and a directory named after the meta backend is
// created with metadata pointing at it, so the pair can be removed later.
// downloadStatus is forwarded to the downloader for progress reporting.
func InstallBackendFromGallery(ctx context.Context, galleries []config.Gallery, systemState *system.SystemState, modelLoader *model.ModelLoader, name string, downloadStatus func(string, string, string, float64), force bool) error {
    if !force {
        // check if we already have the backend installed
        backends, err := ListSystemBackends(systemState)
        if err != nil {
            return err
        }
        if backends.Exists(name) {
            return nil
        }
    }
    if name == "" {
        return fmt.Errorf("backend name is empty")
    }

    xlog.Debug("Installing backend from gallery", "galleries", galleries, "name", name)

    backends, err := AvailableBackends(galleries, systemState)
    if err != nil {
        return err
    }

    backend := FindGalleryElement(backends, name)
    if backend == nil {
        return fmt.Errorf("no backend found with name %q", name)
    }

    if backend.IsMeta() {
        xlog.Debug("Backend is a meta backend", "systemState", systemState, "name", name)

        // Then, let's try to find the best backend based on the capabilities map
        bestBackend := backend.FindBestBackendFromMeta(systemState, backends)
        if bestBackend == nil {
            return fmt.Errorf("no backend found with capabilities %q", backend.CapabilitiesMap)
        }

        xlog.Debug("Installing backend from meta backend", "name", name, "bestBackend", bestBackend.Name)

        // Then, let's install the best backend
        if err := InstallBackend(ctx, systemState, modelLoader, bestBackend, downloadStatus); err != nil {
            return err
        }

        // we need now to create a path for the meta backend, with the alias to the installed ones so it can be used to remove it
        metaBackendPath := filepath.Join(systemState.Backend.BackendsPath, name)
        if err := os.MkdirAll(metaBackendPath, 0750); err != nil {
            return fmt.Errorf("failed to create meta backend path %q: %v", metaBackendPath, err)
        }

        // Create metadata for the meta backend
        metaMetadata := &BackendMetadata{
            MetaBackendFor: bestBackend.Name,
            Name:           name,
            GalleryURL:     backend.Gallery.URL,
            InstalledAt:    time.Now().Format(time.RFC3339),
        }
        if err := writeBackendMetadata(metaBackendPath, metaMetadata); err != nil {
            return fmt.Errorf("failed to write metadata for meta backend %q: %v", name, err)
        }

        return nil
    }

    return InstallBackend(ctx, systemState, modelLoader, backend, downloadStatus)
}
// InstallBackend installs a concrete (non-meta) backend into the system
// backends path. The payload is copied when config.URI is a local
// directory, otherwise downloaded, falling back to the configured mirrors
// on failure. On success a metadata file is written and all system
// backends are (re-)registered on the model loader.
//
// Fixes: the failure message for creating the per-backend directory was a
// copy-paste of "failed to create base path"; the local run-file variable
// shadowed the package-level `runFile` constant.
func InstallBackend(ctx context.Context, systemState *system.SystemState, modelLoader *model.ModelLoader, config *GalleryBackend, downloadStatus func(string, string, string, float64)) error {
    // Create base path if it doesn't exist
    err := os.MkdirAll(systemState.Backend.BackendsPath, 0750)
    if err != nil {
        return fmt.Errorf("failed to create base path: %v", err)
    }

    if config.IsMeta() {
        return fmt.Errorf("meta backends cannot be installed directly")
    }

    name := config.Name
    backendPath := filepath.Join(systemState.Backend.BackendsPath, name)
    err = os.MkdirAll(backendPath, 0750)
    if err != nil {
        return fmt.Errorf("failed to create backend path %q: %v", backendPath, err)
    }

    uri := downloader.URI(config.URI)
    // Check if it is a directory
    if uri.LooksLikeDir() {
        // It is a directory, we just copy it over in the backend folder
        if err := cp.Copy(config.URI, backendPath); err != nil {
            return fmt.Errorf("failed copying: %w", err)
        }
    } else {
        xlog.Debug("Downloading backend", "uri", config.URI, "backendPath", backendPath)
        if err := uri.DownloadFileWithContext(ctx, backendPath, "", 1, 1, downloadStatus); err != nil {
            success := false
            // Try to download from mirrors
            for _, mirror := range config.Mirrors {
                // Check for cancellation before trying next mirror
                select {
                case <-ctx.Done():
                    return ctx.Err()
                default:
                }
                if err := downloader.URI(mirror).DownloadFileWithContext(ctx, backendPath, "", 1, 1, downloadStatus); err == nil {
                    success = true
                    xlog.Debug("Downloaded backend", "uri", config.URI, "backendPath", backendPath)
                    break
                }
            }
            if !success {
                xlog.Error("Failed to download backend", "uri", config.URI, "backendPath", backendPath, "error", err)
                return fmt.Errorf("failed to download backend %q: %v", config.URI, err)
            }
        } else {
            xlog.Debug("Downloaded backend", "uri", config.URI, "backendPath", backendPath)
        }
    }

    // sanity check - check if runfile is present
    runFilePath := filepath.Join(backendPath, runFile)
    if _, err := os.Stat(runFilePath); os.IsNotExist(err) {
        xlog.Error("Run file not found", "runFile", runFilePath)
        return fmt.Errorf("not a valid backend: run file not found %q", runFilePath)
    }

    // Create metadata for the backend
    metadata := &BackendMetadata{
        Name:        name,
        GalleryURL:  config.Gallery.URL,
        InstalledAt: time.Now().Format(time.RFC3339),
    }
    if config.Alias != "" {
        metadata.Alias = config.Alias
    }

    if err := writeBackendMetadata(backendPath, metadata); err != nil {
        return fmt.Errorf("failed to write metadata for backend %q: %v", name, err)
    }

    return RegisterBackends(systemState, modelLoader)
}
// DeleteBackendFromSystem removes an installed (non-system) backend by
// name. The name may be the backend's directory name or an alias declared
// in a backend's metadata. When the target is a meta backend, the concrete
// backend it points to is removed as well.
func DeleteBackendFromSystem(systemState *system.SystemState, name string) error {
    backends, err := ListSystemBackends(systemState)
    if err != nil {
        return err
    }

    backend, ok := backends.Get(name)
    if !ok {
        return fmt.Errorf("backend %q not found", name)
    }

    // System-provided backends are read-only and must never be deleted.
    if backend.IsSystem {
        return fmt.Errorf("system backend %q cannot be deleted", name)
    }

    backendDirectory := filepath.Join(systemState.Backend.BackendsPath, name)

    // check if the backend dir exists
    if _, err := os.Stat(backendDirectory); os.IsNotExist(err) {
        // if doesn't exist, it might be an alias, so we need to check if we have a matching alias in
        // all the backends in the basePath
        backends, err := os.ReadDir(systemState.Backend.BackendsPath)
        if err != nil {
            return err
        }
        foundBackend := false
        for _, backend := range backends {
            if backend.IsDir() {
                metadata, err := readBackendMetadata(filepath.Join(systemState.Backend.BackendsPath, backend.Name()))
                if err != nil {
                    return err
                }
                if metadata != nil && metadata.Alias == name {
                    backendDirectory = filepath.Join(systemState.Backend.BackendsPath, backend.Name())
                    foundBackend = true
                    break
                }
            }
        }

        // No directory and no alias matched: report an error.
        // NOTE(review): a previous comment here claimed "return successfully
        // (idempotent behavior)", but the code returns an error — callers
        // relying on idempotent deletes must handle it.
        if !foundBackend {
            return fmt.Errorf("no backend found with name %q", name)
        }
    }

    // If it's a meta backend, delete also associated backend
    metadata, err := readBackendMetadata(backendDirectory)
    if err != nil {
        return err
    }

    if metadata != nil && metadata.MetaBackendFor != "" {
        metaBackendDirectory := filepath.Join(systemState.Backend.BackendsPath, metadata.MetaBackendFor)
        xlog.Debug("Deleting meta backend", "backendDirectory", metaBackendDirectory)
        if _, err := os.Stat(metaBackendDirectory); os.IsNotExist(err) {
            return fmt.Errorf("meta backend %q not found", metadata.MetaBackendFor)
        }
        os.RemoveAll(metaBackendDirectory)
    }

    return os.RemoveAll(backendDirectory)
}
// SystemBackend describes a backend as seen on the local system.
type SystemBackend struct {
    Name     string           // registration name (directory name, alias, or meta name)
    RunFile  string           // full path to the executable run.sh
    IsMeta   bool             // true when this entry points at another backend's run file
    IsSystem bool             // true for read-only system-provided backends
    Metadata *BackendMetadata // nil for system backends
}

// SystemBackends indexes installed backends by name (including aliases
// and meta names).
type SystemBackends map[string]SystemBackend

// Exists reports whether a backend is registered under name.
func (b SystemBackends) Exists(name string) bool {
    _, ok := b[name]
    return ok
}

// Get returns the backend registered under name, if any.
func (b SystemBackends) Get(name string) (SystemBackend, bool) {
    backend, ok := b[name]
    return backend, ok
}

// GetAll returns all registered backends in unspecified order.
func (b SystemBackends) GetAll() []SystemBackend {
    backends := make([]SystemBackend, 0)
    for _, backend := range b {
        backends = append(backends, backend)
    }
    return backends
}
// ListSystemBackends returns every backend available on the system:
// read-only system backends, user-installed backends, meta-backend
// indirections, and alias entries. When several installed backends share
// an alias, the alias resolves to the candidate preferred by the system's
// capability tokens, falling back to the first runnable one.
func ListSystemBackends(systemState *system.SystemState) (SystemBackends, error) {
    // Gather backends from system and user paths, then resolve alias conflicts by capability.
    backends := make(SystemBackends)

    // System-provided backends
    if systemBackends, err := os.ReadDir(systemState.Backend.BackendsSystemPath); err == nil {
        for _, systemBackend := range systemBackends {
            if systemBackend.IsDir() {
                run := filepath.Join(systemState.Backend.BackendsSystemPath, systemBackend.Name(), runFile)
                // Only directories containing a run file count as backends.
                if _, err := os.Stat(run); err == nil {
                    backends[systemBackend.Name()] = SystemBackend{
                        Name:     systemBackend.Name(),
                        RunFile:  run,
                        IsMeta:   false,
                        IsSystem: true,
                        Metadata: nil,
                    }
                }
            }
        }
    } else if !errors.Is(err, os.ErrNotExist) {
        xlog.Warn("Failed to read system backends, proceeding with user-managed backends", "error", err)
    } else if errors.Is(err, os.ErrNotExist) {
        xlog.Debug("No system backends found")
    }

    // User-managed backends and alias collection
    entries, err := os.ReadDir(systemState.Backend.BackendsPath)
    if err != nil {
        return nil, err
    }

    // aliasGroups collects, per alias, every installed directory claiming it.
    aliasGroups := make(map[string][]backendCandidate)
    // metaMap remembers each directory's metadata for the alias resolution below.
    metaMap := make(map[string]*BackendMetadata)

    for _, e := range entries {
        if !e.IsDir() {
            continue
        }
        dir := e.Name()
        run := filepath.Join(systemState.Backend.BackendsPath, dir, runFile)

        // Missing or empty metadata falls back to a synthetic record named
        // after the directory (pre-metadata installs).
        var metadata *BackendMetadata
        metadataPath := filepath.Join(systemState.Backend.BackendsPath, dir, metadataFile)
        if _, err := os.Stat(metadataPath); os.IsNotExist(err) {
            metadata = &BackendMetadata{Name: dir}
        } else {
            m, rerr := readBackendMetadata(filepath.Join(systemState.Backend.BackendsPath, dir))
            if rerr != nil {
                return nil, rerr
            }
            if m == nil {
                metadata = &BackendMetadata{Name: dir}
            } else {
                metadata = m
            }
        }
        metaMap[dir] = metadata

        // Concrete backend entry
        if _, err := os.Stat(run); err == nil {
            backends[dir] = SystemBackend{
                Name:     dir,
                RunFile:  run,
                IsMeta:   false,
                Metadata: metadata,
            }
        }

        // Alias candidates
        if metadata.Alias != "" {
            aliasGroups[metadata.Alias] = append(aliasGroups[metadata.Alias], backendCandidate{name: dir, runFile: run})
        }

        // Meta backends indirection
        if metadata.MetaBackendFor != "" {
            backends[metadata.Name] = SystemBackend{
                Name:     metadata.Name,
                RunFile:  filepath.Join(systemState.Backend.BackendsPath, metadata.MetaBackendFor, runFile),
                IsMeta:   true,
                Metadata: metadata,
            }
        }
    }

    // Resolve aliases using system capability preferences
    tokens := systemState.BackendPreferenceTokens()
    for alias, cands := range aliasGroups {
        chosen := backendCandidate{}
        // Try preference tokens in order: the first candidate whose
        // directory name contains a token wins.
        for _, t := range tokens {
            for _, c := range cands {
                if strings.Contains(strings.ToLower(c.name), t) && c.runFile != "" {
                    chosen = c
                    break
                }
            }
            if chosen.runFile != "" {
                break
            }
        }
        // Fallback: first runnable
        if chosen.runFile == "" {
            for _, c := range cands {
                if c.runFile != "" {
                    chosen = c
                    break
                }
            }
        }
        // No runnable candidate at all: skip the alias entirely.
        if chosen.runFile == "" {
            continue
        }
        md := metaMap[chosen.name]
        backends[alias] = SystemBackend{
            Name:     alias,
            RunFile:  chosen.runFile,
            IsMeta:   false,
            Metadata: md,
        }
    }

    return backends, nil
}
// RegisterBackends makes every backend known on the system available as
// an external backend on the model loader.
func RegisterBackends(systemState *system.SystemState, modelLoader *model.ModelLoader) error {
    installed, err := ListSystemBackends(systemState)
    if err != nil {
        return err
    }
    for _, b := range installed.GetAll() {
        xlog.Debug("Registering backend", "name", b.Name, "runFile", b.RunFile)
        modelLoader.SetExternalBackend(b.Name, b.RunFile)
    }
    return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/gallery_suite_test.go | core/gallery/gallery_suite_test.go | package gallery_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestGallery is the Ginkgo entry point for the gallery package's specs.
func TestGallery(t *testing.T) {
    RegisterFailHandler(Fail)
    RunSpecs(t, "Gallery test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/request_test.go | core/gallery/request_test.go | package gallery_test
import (
. "github.com/mudler/LocalAI/core/gallery"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs for gallery request parsing: verifies that a "github:<repo>/<file>@<branch>"
// reference is fetched and decoded into a ModelConfig.
var _ = Describe("Gallery API tests", func() {
    Context("requests", func() {
        It("parses github with a branch", func() {
            req := GalleryModel{
                Metadata: Metadata{
                    URL: "github:go-skynet/model-gallery/gpt4all-j.yaml@main",
                },
            }
            // NOTE(review): performs a network fetch of the referenced
            // gallery file — confirm this is intended in CI.
            e, err := GetGalleryConfigFromURL[ModelConfig](req.URL, "")
            Expect(err).ToNot(HaveOccurred())
            Expect(e.Name).To(Equal("gpt4all-j"))
        })
    })
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/models_test.go | core/gallery/models_test.go | package gallery_test
import (
"context"
"errors"
"os"
"path/filepath"
"github.com/mudler/LocalAI/core/config"
. "github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/pkg/system"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"gopkg.in/yaml.v3"
)
const bertEmbeddingsURL = `https://gist.githubusercontent.com/mudler/0a080b166b87640e8644b09c2aee6e3b/raw/f0e8c26bb72edc16d9fbafbfd6638072126ff225/bert-embeddings-gallery.yaml`
var _ = Describe("Model test", func() {
BeforeEach(func() {
if os.Getenv("FIXTURES") == "" {
Skip("FIXTURES env var not set, skipping model tests")
}
})
Context("Downloading", func() {
It("applies model correctly", func() {
tempdir, err := os.MkdirTemp("", "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tempdir)
c, err := ReadConfigFile[ModelConfig](filepath.Join(os.Getenv("FIXTURES"), "gallery_simple.yaml"))
Expect(err).ToNot(HaveOccurred())
systemState, err := system.GetSystemState(
system.WithModelPath(tempdir),
)
Expect(err).ToNot(HaveOccurred())
_, err = InstallModel(context.TODO(), systemState, "", c, map[string]interface{}{}, func(string, string, string, float64) {}, true)
Expect(err).ToNot(HaveOccurred())
for _, f := range []string{"cerebras", "cerebras-completion.tmpl", "cerebras-chat.tmpl", "cerebras.yaml"} {
_, err = os.Stat(filepath.Join(tempdir, f))
Expect(err).ToNot(HaveOccurred())
}
content := map[string]interface{}{}
dat, err := os.ReadFile(filepath.Join(tempdir, "cerebras.yaml"))
Expect(err).ToNot(HaveOccurred())
err = yaml.Unmarshal(dat, content)
Expect(err).ToNot(HaveOccurred())
Expect(content["context_size"]).To(Equal(1024))
})
It("applies model from gallery correctly", func() {
tempdir, err := os.MkdirTemp("", "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tempdir)
gallery := []GalleryModel{{
Metadata: Metadata{
Name: "bert",
URL: bertEmbeddingsURL,
},
}}
out, err := yaml.Marshal(gallery)
Expect(err).ToNot(HaveOccurred())
galleryFilePath := filepath.Join(tempdir, "gallery_simple.yaml")
err = os.WriteFile(galleryFilePath, out, 0600)
Expect(err).ToNot(HaveOccurred())
Expect(filepath.IsAbs(galleryFilePath)).To(BeTrue(), galleryFilePath)
galleries := []config.Gallery{
{
Name: "test",
URL: "file://" + galleryFilePath,
},
}
systemState, err := system.GetSystemState(
system.WithModelPath(tempdir),
)
Expect(err).ToNot(HaveOccurred())
models, err := AvailableGalleryModels(galleries, systemState)
Expect(err).ToNot(HaveOccurred())
Expect(len(models)).To(Equal(1))
Expect(models[0].Name).To(Equal("bert"))
Expect(models[0].URL).To(Equal(bertEmbeddingsURL))
Expect(models[0].Installed).To(BeFalse())
err = InstallModelFromGallery(context.TODO(), galleries, []config.Gallery{}, systemState, nil, "test@bert", GalleryModel{}, func(s1, s2, s3 string, f float64) {}, true, true)
Expect(err).ToNot(HaveOccurred())
dat, err := os.ReadFile(filepath.Join(tempdir, "bert.yaml"))
Expect(err).ToNot(HaveOccurred())
content := map[string]interface{}{}
err = yaml.Unmarshal(dat, &content)
Expect(err).ToNot(HaveOccurred())
Expect(content["usage"]).To(ContainSubstring("You can test this model with curl like this"))
models, err = AvailableGalleryModels(galleries, systemState)
Expect(err).ToNot(HaveOccurred())
Expect(len(models)).To(Equal(1))
Expect(models[0].Installed).To(BeTrue())
// delete
err = DeleteModelFromSystem(systemState, "bert")
Expect(err).ToNot(HaveOccurred())
models, err = AvailableGalleryModels(galleries, systemState)
Expect(err).ToNot(HaveOccurred())
Expect(len(models)).To(Equal(1))
Expect(models[0].Installed).To(BeFalse())
_, err = os.Stat(filepath.Join(tempdir, "bert.yaml"))
Expect(err).To(HaveOccurred())
Expect(errors.Is(err, os.ErrNotExist)).To(BeTrue())
})
It("renames model correctly", func() {
tempdir, err := os.MkdirTemp("", "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tempdir)
c, err := ReadConfigFile[ModelConfig](filepath.Join(os.Getenv("FIXTURES"), "gallery_simple.yaml"))
Expect(err).ToNot(HaveOccurred())
systemState, err := system.GetSystemState(
system.WithModelPath(tempdir),
)
Expect(err).ToNot(HaveOccurred())
_, err = InstallModel(context.TODO(), systemState, "foo", c, map[string]interface{}{}, func(string, string, string, float64) {}, true)
Expect(err).ToNot(HaveOccurred())
for _, f := range []string{"cerebras", "cerebras-completion.tmpl", "cerebras-chat.tmpl", "foo.yaml"} {
_, err = os.Stat(filepath.Join(tempdir, f))
Expect(err).ToNot(HaveOccurred())
}
})
It("overrides parameters", func() {
tempdir, err := os.MkdirTemp("", "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tempdir)
c, err := ReadConfigFile[ModelConfig](filepath.Join(os.Getenv("FIXTURES"), "gallery_simple.yaml"))
Expect(err).ToNot(HaveOccurred())
systemState, err := system.GetSystemState(
system.WithModelPath(tempdir),
)
Expect(err).ToNot(HaveOccurred())
_, err = InstallModel(context.TODO(), systemState, "foo", c, map[string]interface{}{"backend": "foo"}, func(string, string, string, float64) {}, true)
Expect(err).ToNot(HaveOccurred())
for _, f := range []string{"cerebras", "cerebras-completion.tmpl", "cerebras-chat.tmpl", "foo.yaml"} {
_, err = os.Stat(filepath.Join(tempdir, f))
Expect(err).ToNot(HaveOccurred())
}
content := map[string]interface{}{}
dat, err := os.ReadFile(filepath.Join(tempdir, "foo.yaml"))
Expect(err).ToNot(HaveOccurred())
err = yaml.Unmarshal(dat, content)
Expect(err).ToNot(HaveOccurred())
Expect(content["backend"]).To(Equal("foo"))
})
It("catches path traversals", func() {
tempdir, err := os.MkdirTemp("", "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tempdir)
c, err := ReadConfigFile[ModelConfig](filepath.Join(os.Getenv("FIXTURES"), "gallery_simple.yaml"))
Expect(err).ToNot(HaveOccurred())
systemState, err := system.GetSystemState(
system.WithModelPath(tempdir),
)
Expect(err).ToNot(HaveOccurred())
_, err = InstallModel(context.TODO(), systemState, "../../../foo", c, map[string]interface{}{}, func(string, string, string, float64) {}, true)
Expect(err).To(HaveOccurred())
})
It("handles nil configOverrides without panic", func() {
tempdir, err := os.MkdirTemp("", "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tempdir)
c, err := ReadConfigFile[ModelConfig](filepath.Join(os.Getenv("FIXTURES"), "gallery_simple.yaml"))
Expect(err).ToNot(HaveOccurred())
systemState, err := system.GetSystemState(
system.WithModelPath(tempdir),
)
Expect(err).ToNot(HaveOccurred())
_, err = InstallModel(context.TODO(), systemState, "test-model", c, nil, func(string, string, string, float64) {}, true)
Expect(err).ToNot(HaveOccurred())
for _, f := range []string{"cerebras", "cerebras-completion.tmpl", "cerebras-chat.tmpl", "test-model.yaml"} {
_, err = os.Stat(filepath.Join(tempdir, f))
Expect(err).ToNot(HaveOccurred())
}
})
It("does not delete shared model files when one config is deleted", func() {
tempdir, err := os.MkdirTemp("", "test")
Expect(err).ToNot(HaveOccurred())
defer os.RemoveAll(tempdir)
systemState, err := system.GetSystemState(
system.WithModelPath(tempdir),
)
Expect(err).ToNot(HaveOccurred())
// Create a shared model file
sharedModelFile := filepath.Join(tempdir, "shared_model.bin")
err = os.WriteFile(sharedModelFile, []byte("fake model content"), 0600)
Expect(err).ToNot(HaveOccurred())
// Create first model configuration
config1 := `name: model1
model: shared_model.bin`
err = os.WriteFile(filepath.Join(tempdir, "model1.yaml"), []byte(config1), 0600)
Expect(err).ToNot(HaveOccurred())
// Create first model's gallery file
galleryConfig1 := ModelConfig{
Name: "model1",
Files: []File{
{Filename: "shared_model.bin"},
},
}
galleryData1, err := yaml.Marshal(galleryConfig1)
Expect(err).ToNot(HaveOccurred())
err = os.WriteFile(filepath.Join(tempdir, "._gallery_model1.yaml"), galleryData1, 0600)
Expect(err).ToNot(HaveOccurred())
// Create second model configuration sharing the same model file
config2 := `name: model2
model: shared_model.bin`
err = os.WriteFile(filepath.Join(tempdir, "model2.yaml"), []byte(config2), 0600)
Expect(err).ToNot(HaveOccurred())
// Create second model's gallery file
galleryConfig2 := ModelConfig{
Name: "model2",
Files: []File{
{Filename: "shared_model.bin"},
},
}
galleryData2, err := yaml.Marshal(galleryConfig2)
Expect(err).ToNot(HaveOccurred())
err = os.WriteFile(filepath.Join(tempdir, "._gallery_model2.yaml"), galleryData2, 0600)
Expect(err).ToNot(HaveOccurred())
// Verify both configurations exist
_, err = os.Stat(filepath.Join(tempdir, "model1.yaml"))
Expect(err).ToNot(HaveOccurred())
_, err = os.Stat(filepath.Join(tempdir, "model2.yaml"))
Expect(err).ToNot(HaveOccurred())
// Verify the shared model file exists
_, err = os.Stat(sharedModelFile)
Expect(err).ToNot(HaveOccurred())
// Delete the first model
err = DeleteModelFromSystem(systemState, "model1")
Expect(err).ToNot(HaveOccurred())
// Verify the first configuration is deleted
_, err = os.Stat(filepath.Join(tempdir, "model1.yaml"))
Expect(err).To(HaveOccurred())
Expect(errors.Is(err, os.ErrNotExist)).To(BeTrue())
// Verify the shared model file still exists (not deleted because model2 still uses it)
_, err = os.Stat(sharedModelFile)
Expect(err).ToNot(HaveOccurred(), "shared model file should not be deleted when used by other configs")
// Verify the second configuration still exists
_, err = os.Stat(filepath.Join(tempdir, "model2.yaml"))
Expect(err).ToNot(HaveOccurred())
// Now delete the second model
err = DeleteModelFromSystem(systemState, "model2")
Expect(err).ToNot(HaveOccurred())
// Verify the second configuration is deleted
_, err = os.Stat(filepath.Join(tempdir, "model2.yaml"))
Expect(err).To(HaveOccurred())
Expect(errors.Is(err, os.ErrNotExist)).To(BeTrue())
// Verify the shared model file is now deleted (no more references)
_, err = os.Stat(sharedModelFile)
Expect(err).To(HaveOccurred(), "shared model file should be deleted when no configs reference it")
Expect(errors.Is(err, os.ErrNotExist)).To(BeTrue())
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/mlx_test.go | core/gallery/importers/mlx_test.go | package importers_test
import (
"encoding/json"
"github.com/mudler/LocalAI/core/gallery/importers"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("MLXImporter", func() {
var importer *importers.MLXImporter
BeforeEach(func() {
importer = &importers.MLXImporter{}
})
Context("Match", func() {
It("should match when URI contains mlx-community/", func() {
details := importers.Details{
URI: "https://huggingface.co/mlx-community/test-model",
}
result := importer.Match(details)
Expect(result).To(BeTrue())
})
It("should match when backend preference is mlx", func() {
preferences := json.RawMessage(`{"backend": "mlx"}`)
details := importers.Details{
URI: "https://example.com/model",
Preferences: preferences,
}
result := importer.Match(details)
Expect(result).To(BeTrue())
})
It("should match when backend preference is mlx-vlm", func() {
preferences := json.RawMessage(`{"backend": "mlx-vlm"}`)
details := importers.Details{
URI: "https://example.com/model",
Preferences: preferences,
}
result := importer.Match(details)
Expect(result).To(BeTrue())
})
It("should not match when URI does not contain mlx-community/ and no backend preference", func() {
details := importers.Details{
URI: "https://huggingface.co/other-org/test-model",
}
result := importer.Match(details)
Expect(result).To(BeFalse())
})
It("should not match when backend preference is different", func() {
preferences := json.RawMessage(`{"backend": "llama-cpp"}`)
details := importers.Details{
URI: "https://example.com/model",
Preferences: preferences,
}
result := importer.Match(details)
Expect(result).To(BeFalse())
})
It("should return false when JSON preferences are invalid", func() {
preferences := json.RawMessage(`invalid json`)
details := importers.Details{
URI: "https://huggingface.co/mlx-community/test-model",
Preferences: preferences,
}
// Invalid JSON causes Match to return false early
result := importer.Match(details)
Expect(result).To(BeFalse())
})
})
Context("Import", func() {
It("should import model config with default name and description", func() {
details := importers.Details{
URI: "https://huggingface.co/mlx-community/test-model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("test-model"))
Expect(modelConfig.Description).To(Equal("Imported from https://huggingface.co/mlx-community/test-model"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: mlx"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("model: https://huggingface.co/mlx-community/test-model"))
})
It("should import model config with custom name and description from preferences", func() {
preferences := json.RawMessage(`{"name": "custom-mlx-model", "description": "Custom MLX description"}`)
details := importers.Details{
URI: "https://huggingface.co/mlx-community/test-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("custom-mlx-model"))
Expect(modelConfig.Description).To(Equal("Custom MLX description"))
})
It("should use custom backend from preferences", func() {
preferences := json.RawMessage(`{"backend": "mlx-vlm"}`)
details := importers.Details{
URI: "https://huggingface.co/mlx-community/test-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: mlx-vlm"))
})
It("should handle invalid JSON preferences", func() {
preferences := json.RawMessage(`invalid json`)
details := importers.Details{
URI: "https://huggingface.co/mlx-community/test-model",
Preferences: preferences,
}
_, err := importer.Import(details)
Expect(err).To(HaveOccurred())
})
It("should extract filename correctly from URI with path", func() {
details := importers.Details{
URI: "https://huggingface.co/mlx-community/path/to/model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("model"))
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/diffuser_test.go | core/gallery/importers/diffuser_test.go | package importers_test
import (
"encoding/json"
"github.com/mudler/LocalAI/core/gallery/importers"
. "github.com/mudler/LocalAI/core/gallery/importers"
hfapi "github.com/mudler/LocalAI/pkg/huggingface-api"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("DiffuserImporter", func() {
var importer *DiffuserImporter
BeforeEach(func() {
importer = &DiffuserImporter{}
})
Context("Match", func() {
It("should match when backend preference is diffusers", func() {
preferences := json.RawMessage(`{"backend": "diffusers"}`)
details := Details{
URI: "https://example.com/model",
Preferences: preferences,
}
result := importer.Match(details)
Expect(result).To(BeTrue())
})
It("should match when HuggingFace details contain model_index.json", func() {
hfDetails := &hfapi.ModelDetails{
Files: []hfapi.ModelFile{
{Path: "model_index.json"},
},
}
details := Details{
URI: "https://huggingface.co/test/model",
HuggingFace: hfDetails,
}
result := importer.Match(details)
Expect(result).To(BeTrue())
})
It("should match when HuggingFace details contain scheduler config", func() {
hfDetails := &hfapi.ModelDetails{
Files: []hfapi.ModelFile{
{Path: "scheduler/scheduler_config.json"},
},
}
details := Details{
URI: "https://huggingface.co/test/model",
HuggingFace: hfDetails,
}
result := importer.Match(details)
Expect(result).To(BeTrue())
})
It("should not match when URI has no diffuser files and no backend preference", func() {
details := Details{
URI: "https://example.com/model.bin",
}
result := importer.Match(details)
Expect(result).To(BeFalse())
})
It("should not match when backend preference is different", func() {
preferences := json.RawMessage(`{"backend": "llama-cpp"}`)
details := Details{
URI: "https://example.com/model",
Preferences: preferences,
}
result := importer.Match(details)
Expect(result).To(BeFalse())
})
It("should return false when JSON preferences are invalid", func() {
preferences := json.RawMessage(`invalid json`)
details := Details{
URI: "https://example.com/model",
Preferences: preferences,
}
result := importer.Match(details)
Expect(result).To(BeFalse())
})
})
Context("Import", func() {
It("should import model config with default name and description", func() {
details := Details{
URI: "https://huggingface.co/test/my-diffuser-model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("my-diffuser-model"))
Expect(modelConfig.Description).To(Equal("Imported from https://huggingface.co/test/my-diffuser-model"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: diffusers"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("model: https://huggingface.co/test/my-diffuser-model"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("pipeline_type: StableDiffusionPipeline"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("enable_parameters: negative_prompt,num_inference_steps"))
})
It("should import model config with custom name and description from preferences", func() {
preferences := json.RawMessage(`{"name": "custom-diffuser", "description": "Custom diffuser model"}`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("custom-diffuser"))
Expect(modelConfig.Description).To(Equal("Custom diffuser model"))
})
It("should use custom pipeline_type from preferences", func() {
preferences := json.RawMessage(`{"pipeline_type": "StableDiffusion3Pipeline"}`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("pipeline_type: StableDiffusion3Pipeline"))
})
It("should use default pipeline_type when not specified", func() {
details := Details{
URI: "https://huggingface.co/test/my-model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("pipeline_type: StableDiffusionPipeline"))
})
It("should use custom scheduler_type from preferences", func() {
preferences := json.RawMessage(`{"scheduler_type": "k_dpmpp_2m"}`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("scheduler_type: k_dpmpp_2m"))
})
It("should use cuda setting from preferences", func() {
preferences := json.RawMessage(`{"cuda": true}`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("cuda: true"))
})
It("should use custom enable_parameters from preferences", func() {
preferences := json.RawMessage(`{"enable_parameters": "num_inference_steps,guidance_scale"}`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("enable_parameters: num_inference_steps,guidance_scale"))
})
It("should use custom backend from preferences", func() {
preferences := json.RawMessage(`{"backend": "diffusers"}`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: diffusers"))
})
It("should handle invalid JSON preferences", func() {
preferences := json.RawMessage(`invalid json`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
_, err := importer.Import(details)
Expect(err).To(HaveOccurred())
})
It("should extract filename correctly from URI with path", func() {
details := importers.Details{
URI: "https://huggingface.co/test/path/to/model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("model"))
})
It("should include known_usecases as image in config", func() {
details := Details{
URI: "https://huggingface.co/test/my-model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("known_usecases:"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("- image"))
})
It("should include diffusers configuration in config", func() {
details := Details{
URI: "https://huggingface.co/test/my-model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("diffusers:"))
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/mlx.go | core/gallery/importers/mlx.go | package importers
import (
"encoding/json"
"path/filepath"
"strings"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/core/schema"
"go.yaml.in/yaml/v2"
)
var _ Importer = &MLXImporter{}
type MLXImporter struct{}
// Match reports whether the MLX importer should handle the given model.
// It matches when the user explicitly prefers the "mlx" or "mlx-vlm"
// backend, or when the URI points at the HuggingFace mlx-community
// organization. Malformed preference JSON makes Match return false,
// even for mlx-community URIs (covered by the importer tests).
func (i *MLXImporter) Match(details Details) bool {
	preferences, err := details.Preferences.MarshalJSON()
	if err != nil {
		return false
	}

	preferencesMap := make(map[string]any)
	if err := json.Unmarshal(preferences, &preferencesMap); err != nil {
		return false
	}

	// An explicit backend preference wins. The condition is parenthesized on
	// purpose: the previous form `ok && b == "mlx" || b == "mlx-vlm"` relied
	// on && binding tighter than || and was only accidentally correct
	// (b is "" when !ok) — linters flag that pattern as error-prone.
	if b, ok := preferencesMap["backend"].(string); ok && (b == "mlx" || b == "mlx-vlm") {
		return true
	}

	// All https://huggingface.co/mlx-community/* repositories are MLX models.
	return strings.Contains(details.URI, "mlx-community/")
}
// Import builds a gallery.ModelConfig for an MLX model from the given
// details. Preferences may override the model name, description and
// backend; the name defaults to the last path element of the URI and the
// backend to "mlx". Returns an error when the preference JSON is invalid
// or the resulting config cannot be serialized.
func (i *MLXImporter) Import(details Details) (gallery.ModelConfig, error) {
	raw, err := details.Preferences.MarshalJSON()
	if err != nil {
		return gallery.ModelConfig{}, err
	}

	prefs := map[string]any{}
	if err := json.Unmarshal(raw, &prefs); err != nil {
		return gallery.ModelConfig{}, err
	}

	// Resolve overridable fields, falling back to URI-derived defaults.
	name, hasName := prefs["name"].(string)
	if !hasName {
		name = filepath.Base(details.URI)
	}

	description, hasDescription := prefs["description"].(string)
	if !hasDescription {
		description = "Imported from " + details.URI
	}

	backend := "mlx"
	if override, ok := prefs["backend"].(string); ok {
		backend = override
	}

	cfg := config.ModelConfig{
		Name:                name,
		Description:         description,
		KnownUsecaseStrings: []string{"chat"},
		Backend:             backend,
		PredictionOptions: schema.PredictionOptions{
			BasicModelRequest: schema.BasicModelRequest{
				Model: details.URI,
			},
		},
		TemplateConfig: config.TemplateConfig{
			// Let the backend render prompts via the model's own chat template.
			UseTokenizerTemplate: true,
		},
	}

	serialized, err := yaml.Marshal(cfg)
	if err != nil {
		return gallery.ModelConfig{}, err
	}

	return gallery.ModelConfig{
		Name:        name,
		Description: description,
		ConfigFile:  string(serialized),
	}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/transformers_test.go | core/gallery/importers/transformers_test.go | package importers_test
import (
"encoding/json"
"github.com/mudler/LocalAI/core/gallery/importers"
. "github.com/mudler/LocalAI/core/gallery/importers"
hfapi "github.com/mudler/LocalAI/pkg/huggingface-api"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("TransformersImporter", func() {
var importer *TransformersImporter
BeforeEach(func() {
importer = &TransformersImporter{}
})
Context("Match", func() {
It("should match when backend preference is transformers", func() {
preferences := json.RawMessage(`{"backend": "transformers"}`)
details := Details{
URI: "https://example.com/model",
Preferences: preferences,
}
result := importer.Match(details)
Expect(result).To(BeTrue())
})
It("should match when HuggingFace details contain tokenizer.json", func() {
hfDetails := &hfapi.ModelDetails{
Files: []hfapi.ModelFile{
{Path: "tokenizer.json"},
},
}
details := Details{
URI: "https://huggingface.co/test/model",
HuggingFace: hfDetails,
}
result := importer.Match(details)
Expect(result).To(BeTrue())
})
It("should match when HuggingFace details contain tokenizer_config.json", func() {
hfDetails := &hfapi.ModelDetails{
Files: []hfapi.ModelFile{
{Path: "tokenizer_config.json"},
},
}
details := Details{
URI: "https://huggingface.co/test/model",
HuggingFace: hfDetails,
}
result := importer.Match(details)
Expect(result).To(BeTrue())
})
It("should not match when URI has no tokenizer files and no backend preference", func() {
details := Details{
URI: "https://example.com/model.bin",
}
result := importer.Match(details)
Expect(result).To(BeFalse())
})
It("should not match when backend preference is different", func() {
preferences := json.RawMessage(`{"backend": "llama-cpp"}`)
details := Details{
URI: "https://example.com/model",
Preferences: preferences,
}
result := importer.Match(details)
Expect(result).To(BeFalse())
})
It("should return false when JSON preferences are invalid", func() {
preferences := json.RawMessage(`invalid json`)
details := Details{
URI: "https://example.com/model",
Preferences: preferences,
}
result := importer.Match(details)
Expect(result).To(BeFalse())
})
})
Context("Import", func() {
It("should import model config with default name and description", func() {
details := Details{
URI: "https://huggingface.co/test/my-model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("my-model"))
Expect(modelConfig.Description).To(Equal("Imported from https://huggingface.co/test/my-model"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: transformers"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("model: https://huggingface.co/test/my-model"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("type: AutoModelForCausalLM"))
})
It("should import model config with custom name and description from preferences", func() {
preferences := json.RawMessage(`{"name": "custom-model", "description": "Custom description"}`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("custom-model"))
Expect(modelConfig.Description).To(Equal("Custom description"))
})
It("should use custom model type from preferences", func() {
preferences := json.RawMessage(`{"type": "SentenceTransformer"}`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("type: SentenceTransformer"))
})
It("should use default model type when not specified", func() {
details := Details{
URI: "https://huggingface.co/test/my-model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("type: AutoModelForCausalLM"))
})
It("should use custom backend from preferences", func() {
preferences := json.RawMessage(`{"backend": "transformers"}`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: transformers"))
})
It("should use quantization from preferences", func() {
preferences := json.RawMessage(`{"quantization": "int8"}`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("quantization: int8"))
})
It("should handle invalid JSON preferences", func() {
preferences := json.RawMessage(`invalid json`)
details := Details{
URI: "https://huggingface.co/test/my-model",
Preferences: preferences,
}
_, err := importer.Import(details)
Expect(err).To(HaveOccurred())
})
It("should extract filename correctly from URI with path", func() {
details := importers.Details{
URI: "https://huggingface.co/test/path/to/model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("model"))
})
It("should include use_tokenizer_template in config", func() {
details := Details{
URI: "https://huggingface.co/test/my-model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("use_tokenizer_template: true"))
})
It("should include known_usecases in config", func() {
details := Details{
URI: "https://huggingface.co/test/my-model",
}
modelConfig, err := importer.Import(details)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("known_usecases:"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("- chat"))
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/transformers.go | core/gallery/importers/transformers.go | package importers
import (
"encoding/json"
"path/filepath"
"strings"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/core/schema"
"go.yaml.in/yaml/v2"
)
var _ Importer = &TransformersImporter{}
type TransformersImporter struct{}
// Match reports whether the transformers importer should handle these
// details. It matches an explicit "transformers" backend preference, or a
// HuggingFace repository whose file listing includes tokenizer.json or
// tokenizer_config.json. Malformed preference JSON makes Match return false.
func (i *TransformersImporter) Match(details Details) bool {
	raw, err := details.Preferences.MarshalJSON()
	if err != nil {
		return false
	}

	prefs := map[string]any{}
	if err := json.Unmarshal(raw, &prefs); err != nil {
		return false
	}

	if backend, ok := prefs["backend"].(string); ok && backend == "transformers" {
		return true
	}

	if details.HuggingFace == nil {
		return false
	}

	// A tokenizer file in the repo is a strong signal for a transformers model.
	for _, file := range details.HuggingFace.Files {
		switch {
		case strings.Contains(file.Path, "tokenizer.json"),
			strings.Contains(file.Path, "tokenizer_config.json"):
			return true
		}
	}

	return false
}
// Import builds a gallery.ModelConfig that runs the model through the
// "transformers" backend. Preferences may override the name, description,
// backend, model type (default AutoModelForCausalLM) and quantization
// (default empty). Returns an error when the preference JSON is invalid or
// the resulting config cannot be serialized to YAML.
func (i *TransformersImporter) Import(details Details) (gallery.ModelConfig, error) {
	preferences, err := details.Preferences.MarshalJSON()
	if err != nil {
		return gallery.ModelConfig{}, err
	}

	preferencesMap := make(map[string]any)
	if err := json.Unmarshal(preferences, &preferencesMap); err != nil {
		return gallery.ModelConfig{}, err
	}

	name, ok := preferencesMap["name"].(string)
	if !ok {
		// Default to the last path element of the URI.
		name = filepath.Base(details.URI)
	}

	description, ok := preferencesMap["description"].(string)
	if !ok {
		description = "Imported from " + details.URI
	}

	backend := "transformers"
	if b, ok := preferencesMap["backend"].(string); ok {
		backend = b
	}

	modelType, ok := preferencesMap["type"].(string)
	if !ok {
		modelType = "AutoModelForCausalLM"
	}

	// The failed type assertion already yields "", so no fallback branch
	// is needed (the previous `if !ok { quantization = "" }` was redundant).
	quantization, _ := preferencesMap["quantization"].(string)

	modelConfig := config.ModelConfig{
		Name:                name,
		Description:         description,
		KnownUsecaseStrings: []string{"chat"},
		Backend:             backend,
		PredictionOptions: schema.PredictionOptions{
			BasicModelRequest: schema.BasicModelRequest{
				Model: details.URI,
			},
		},
		TemplateConfig: config.TemplateConfig{
			// Let the backend render prompts via the model's own chat template.
			UseTokenizerTemplate: true,
		},
	}
	modelConfig.ModelType = modelType
	modelConfig.Quantization = quantization

	data, err := yaml.Marshal(modelConfig)
	if err != nil {
		return gallery.ModelConfig{}, err
	}

	return gallery.ModelConfig{
		Name:        name,
		Description: description,
		ConfigFile:  string(data),
	}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/importers.go | core/gallery/importers/importers.go | package importers
import (
"encoding/json"
"fmt"
"os"
"strings"
"github.com/mudler/xlog"
"gopkg.in/yaml.v3"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/pkg/downloader"
hfapi "github.com/mudler/LocalAI/pkg/huggingface-api"
)
var defaultImporters = []Importer{
&LlamaCPPImporter{},
&MLXImporter{},
&VLLMImporter{},
&TransformersImporter{},
&DiffuserImporter{},
}
type Details struct {
HuggingFace *hfapi.ModelDetails
URI string
Preferences json.RawMessage
}
type Importer interface {
Match(details Details) bool
Import(details Details) (gallery.ModelConfig, error)
}
// hasYAMLExtension reports whether the URI names a YAML model definition
// (i.e. it ends in ".yaml" or ".yml").
func hasYAMLExtension(uri string) bool {
	for _, ext := range []string{".yaml", ".yml"} {
		if strings.HasSuffix(uri, ext) {
			return true
		}
	}
	return false
}
// DiscoverModelConfig resolves a model URI into a gallery model
// configuration. It first probes the HuggingFace API (for URIs that look
// like a HF repository), then handles local or remote YAML model
// definitions directly, and finally delegates to the first registered
// importer that both matches the URI and imports it successfully.
//
// It returns an error when the YAML cannot be read/decoded, when every
// matching importer fails to import, or when no importer matches at all.
func DiscoverModelConfig(uri string, preferences json.RawMessage) (gallery.ModelConfig, error) {
	hf := hfapi.NewClient()

	// Normalize the different HuggingFace URI spellings to a bare repo ID.
	hfrepoID := strings.ReplaceAll(uri, "huggingface://", "")
	hfrepoID = strings.ReplaceAll(hfrepoID, "hf://", "")
	hfrepoID = strings.ReplaceAll(hfrepoID, "https://huggingface.co/", "")

	hfDetails, err := hf.GetModelDetails(hfrepoID)
	if err != nil {
		// Not fatal: the URI may simply not point to a HF repository.
		xlog.Debug("Failed to get model details, maybe not a HF repository", "uri", uri, "hfrepoID", hfrepoID)
	} else {
		xlog.Debug("Got model details", "uri", uri)
		xlog.Debug("Model details", "details", hfDetails)
	}

	// Handle local config files ("/my-model.yaml" or "file://my-model.yaml").
	localURI := strings.TrimPrefix(uri, downloader.LocalPrefix)

	// If a YAML file exists locally, or the URI is a URL ending in
	// .yaml/.yml, read the model definition directly.
	if _, statErr := os.Stat(localURI); hasYAMLExtension(localURI) && (statErr == nil || downloader.URI(localURI).LooksLikeURL()) {
		var modelYAML []byte
		if downloader.URI(localURI).LooksLikeURL() {
			err := downloader.URI(localURI).ReadWithCallback(localURI, func(url string, i []byte) error {
				modelYAML = i
				return nil
			})
			if err != nil {
				xlog.Error("error reading model definition", "error", err, "filepath", localURI)
				return gallery.ModelConfig{}, err
			}
		} else {
			modelYAML, err = os.ReadFile(localURI)
			if err != nil {
				xlog.Error("error reading model definition", "error", err, "filepath", localURI)
				return gallery.ModelConfig{}, err
			}
		}

		var localCfg config.ModelConfig
		if err := yaml.Unmarshal(modelYAML, &localCfg); err != nil {
			return gallery.ModelConfig{}, err
		}
		// Re-marshal so the stored config file is normalized YAML.
		configFile, err := yaml.Marshal(localCfg)
		return gallery.ModelConfig{
			Description: localCfg.Description,
			Name:        localCfg.Name,
			ConfigFile:  string(configFile),
		}, err
	}

	details := Details{
		HuggingFace: hfDetails,
		URI:         uri,
		Preferences: preferences,
	}

	// Ask each importer in registration order; the first one that matches
	// AND imports successfully wins. Previously, a matching importer whose
	// Import failed (with no later importer matching) caused an empty
	// config to be returned with a nil error; now the failure is surfaced.
	var lastErr error
	for _, importer := range defaultImporters {
		if !importer.Match(details) {
			continue
		}
		cfg, importErr := importer.Import(details)
		if importErr != nil {
			lastErr = importErr
			continue
		}
		return cfg, nil
	}
	if lastErr != nil {
		return gallery.ModelConfig{}, fmt.Errorf("importing %s: %w", uri, lastErr)
	}
	return gallery.ModelConfig{}, fmt.Errorf("no importer matched for %s", uri)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/diffuser.go | core/gallery/importers/diffuser.go | package importers
import (
"encoding/json"
"path/filepath"
"strings"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/core/schema"
"gopkg.in/yaml.v3"
)
var _ Importer = &DiffuserImporter{}
type DiffuserImporter struct{}
// Match reports whether the model described by details should be handled by
// the diffusers backend: either the user explicitly requested it via the
// "backend" preference, or the HuggingFace repository contains files that
// are characteristic of a diffusers pipeline layout.
func (i *DiffuserImporter) Match(details Details) bool {
	preferences, err := details.Preferences.MarshalJSON()
	if err != nil {
		return false
	}
	preferencesMap := make(map[string]any)
	// Tolerate an empty preferences payload (consistent with
	// LlamaCPPImporter): previously an empty payload made Match bail out
	// before the HuggingFace file checks could run.
	if len(preferences) > 0 {
		if err := json.Unmarshal(preferences, &preferencesMap); err != nil {
			return false
		}
	}
	if b, ok := preferencesMap["backend"].(string); ok && b == "diffusers" {
		return true
	}
	if details.HuggingFace != nil {
		for _, file := range details.HuggingFace.Files {
			// model_index.json / scheduler_config.json mark a diffusers
			// pipeline repository.
			if strings.Contains(file.Path, "model_index.json") ||
				strings.Contains(file.Path, "scheduler/scheduler_config.json") {
				return true
			}
		}
	}
	return false
}
// Import builds a gallery model configuration for the diffusers backend
// from the given details. Optional preferences override the defaults:
//   - name:              model name (default: base name of the URI)
//   - description:       description (default: "Imported from <URI>")
//   - backend:           backend name (default: "diffusers")
//   - pipeline_type:     diffusers pipeline class (default: "StableDiffusionPipeline")
//   - scheduler_type:    scheduler class (default: none)
//   - enable_parameters: comma-separated request parameters to enable
//     (default: "negative_prompt,num_inference_steps")
//   - cuda:              whether to enable CUDA (default: false)
func (i *DiffuserImporter) Import(details Details) (gallery.ModelConfig, error) {
	preferences, err := details.Preferences.MarshalJSON()
	if err != nil {
		return gallery.ModelConfig{}, err
	}
	preferencesMap := make(map[string]any)
	// Tolerate an empty preferences payload (consistent with
	// LlamaCPPImporter): empty simply means "use all defaults".
	if len(preferences) > 0 {
		if err := json.Unmarshal(preferences, &preferencesMap); err != nil {
			return gallery.ModelConfig{}, err
		}
	}
	name, ok := preferencesMap["name"].(string)
	if !ok {
		name = filepath.Base(details.URI)
	}
	description, ok := preferencesMap["description"].(string)
	if !ok {
		description = "Imported from " + details.URI
	}
	backend := "diffusers"
	if b, ok := preferencesMap["backend"].(string); ok {
		backend = b
	}
	pipelineType, ok := preferencesMap["pipeline_type"].(string)
	if !ok {
		pipelineType = "StableDiffusionPipeline"
	}
	// The zero value is already "", so a failed assertion needs no fallback.
	schedulerType, _ := preferencesMap["scheduler_type"].(string)
	enableParameters, ok := preferencesMap["enable_parameters"].(string)
	if !ok {
		enableParameters = "negative_prompt,num_inference_steps"
	}
	cuda := false
	if cudaVal, ok := preferencesMap["cuda"].(bool); ok {
		cuda = cudaVal
	}

	modelConfig := config.ModelConfig{
		Name:                name,
		Description:         description,
		KnownUsecaseStrings: []string{"image"},
		Backend:             backend,
		PredictionOptions: schema.PredictionOptions{
			BasicModelRequest: schema.BasicModelRequest{
				Model: details.URI,
			},
		},
		Diffusers: config.Diffusers{
			PipelineType:     pipelineType,
			SchedulerType:    schedulerType,
			EnableParameters: enableParameters,
			CUDA:             cuda,
		},
	}

	data, err := yaml.Marshal(modelConfig)
	if err != nil {
		return gallery.ModelConfig{}, err
	}
	return gallery.ModelConfig{
		Name:        name,
		Description: description,
		ConfigFile:  string(data),
	}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/vllm_test.go | core/gallery/importers/vllm_test.go | package importers_test
import (
"encoding/json"
"github.com/mudler/LocalAI/core/gallery/importers"
. "github.com/mudler/LocalAI/core/gallery/importers"
hfapi "github.com/mudler/LocalAI/pkg/huggingface-api"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs for VLLMImporter covering Match (backend preference and
// HuggingFace tokenizer-file detection) and Import (the generated gallery
// configuration, including preference overrides and error handling).
var _ = Describe("VLLMImporter", func() {
	var importer *VLLMImporter

	BeforeEach(func() {
		// Fresh importer per spec; VLLMImporter carries no state.
		importer = &VLLMImporter{}
	})

	Context("Match", func() {
		It("should match when backend preference is vllm", func() {
			preferences := json.RawMessage(`{"backend": "vllm"}`)
			details := Details{
				URI:         "https://example.com/model",
				Preferences: preferences,
			}
			result := importer.Match(details)
			Expect(result).To(BeTrue())
		})

		It("should match when HuggingFace details contain tokenizer.json", func() {
			hfDetails := &hfapi.ModelDetails{
				Files: []hfapi.ModelFile{
					{Path: "tokenizer.json"},
				},
			}
			details := Details{
				URI:         "https://huggingface.co/test/model",
				HuggingFace: hfDetails,
			}
			result := importer.Match(details)
			Expect(result).To(BeTrue())
		})

		It("should match when HuggingFace details contain tokenizer_config.json", func() {
			hfDetails := &hfapi.ModelDetails{
				Files: []hfapi.ModelFile{
					{Path: "tokenizer_config.json"},
				},
			}
			details := Details{
				URI:         "https://huggingface.co/test/model",
				HuggingFace: hfDetails,
			}
			result := importer.Match(details)
			Expect(result).To(BeTrue())
		})

		It("should not match when URI has no tokenizer files and no backend preference", func() {
			details := Details{
				URI: "https://example.com/model.bin",
			}
			result := importer.Match(details)
			Expect(result).To(BeFalse())
		})

		It("should not match when backend preference is different", func() {
			preferences := json.RawMessage(`{"backend": "llama-cpp"}`)
			details := Details{
				URI:         "https://example.com/model",
				Preferences: preferences,
			}
			result := importer.Match(details)
			Expect(result).To(BeFalse())
		})

		It("should return false when JSON preferences are invalid", func() {
			// Malformed preferences must fail closed rather than match.
			preferences := json.RawMessage(`invalid json`)
			details := Details{
				URI:         "https://example.com/model",
				Preferences: preferences,
			}
			result := importer.Match(details)
			Expect(result).To(BeFalse())
		})
	})

	Context("Import", func() {
		It("should import model config with default name and description", func() {
			details := Details{
				URI: "https://huggingface.co/test/my-model",
			}
			modelConfig, err := importer.Import(details)
			Expect(err).ToNot(HaveOccurred())
			// Defaults: name from the URI base, description from the URI.
			Expect(modelConfig.Name).To(Equal("my-model"))
			Expect(modelConfig.Description).To(Equal("Imported from https://huggingface.co/test/my-model"))
			Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: vllm"))
			Expect(modelConfig.ConfigFile).To(ContainSubstring("model: https://huggingface.co/test/my-model"))
		})

		It("should import model config with custom name and description from preferences", func() {
			preferences := json.RawMessage(`{"name": "custom-model", "description": "Custom description"}`)
			details := Details{
				URI:         "https://huggingface.co/test/my-model",
				Preferences: preferences,
			}
			modelConfig, err := importer.Import(details)
			Expect(err).ToNot(HaveOccurred())
			Expect(modelConfig.Name).To(Equal("custom-model"))
			Expect(modelConfig.Description).To(Equal("Custom description"))
		})

		It("should use custom backend from preferences", func() {
			preferences := json.RawMessage(`{"backend": "vllm"}`)
			details := Details{
				URI:         "https://huggingface.co/test/my-model",
				Preferences: preferences,
			}
			modelConfig, err := importer.Import(details)
			Expect(err).ToNot(HaveOccurred())
			Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: vllm"))
		})

		It("should handle invalid JSON preferences", func() {
			preferences := json.RawMessage(`invalid json`)
			details := Details{
				URI:         "https://huggingface.co/test/my-model",
				Preferences: preferences,
			}
			_, err := importer.Import(details)
			Expect(err).To(HaveOccurred())
		})

		It("should extract filename correctly from URI with path", func() {
			details := importers.Details{
				URI: "https://huggingface.co/test/path/to/model",
			}
			modelConfig, err := importer.Import(details)
			Expect(err).ToNot(HaveOccurred())
			Expect(modelConfig.Name).To(Equal("model"))
		})

		It("should include use_tokenizer_template in config", func() {
			details := Details{
				URI: "https://huggingface.co/test/my-model",
			}
			modelConfig, err := importer.Import(details)
			Expect(err).ToNot(HaveOccurred())
			Expect(modelConfig.ConfigFile).To(ContainSubstring("use_tokenizer_template: true"))
		})

		It("should include known_usecases in config", func() {
			details := Details{
				URI: "https://huggingface.co/test/my-model",
			}
			modelConfig, err := importer.Import(details)
			Expect(err).ToNot(HaveOccurred())
			Expect(modelConfig.ConfigFile).To(ContainSubstring("known_usecases:"))
			Expect(modelConfig.ConfigFile).To(ContainSubstring("- chat"))
		})
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/llama-cpp_test.go | core/gallery/importers/llama-cpp_test.go | package importers_test
import (
"encoding/json"
"fmt"
"github.com/mudler/LocalAI/core/gallery/importers"
. "github.com/mudler/LocalAI/core/gallery/importers"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs for LlamaCPPImporter covering Match (.gguf suffix and backend
// preference) and Import (generated config plus download file entries).
var _ = Describe("LlamaCPPImporter", func() {
	var importer *LlamaCPPImporter

	BeforeEach(func() {
		// Fresh importer per spec; LlamaCPPImporter carries no state.
		importer = &LlamaCPPImporter{}
	})

	Context("Match", func() {
		It("should match when URI ends with .gguf", func() {
			details := Details{
				URI: "https://example.com/model.gguf",
			}
			result := importer.Match(details)
			Expect(result).To(BeTrue())
		})

		It("should match when backend preference is llama-cpp", func() {
			preferences := json.RawMessage(`{"backend": "llama-cpp"}`)
			details := Details{
				URI:         "https://example.com/model",
				Preferences: preferences,
			}
			result := importer.Match(details)
			Expect(result).To(BeTrue())
		})

		It("should not match when URI does not end with .gguf and no backend preference", func() {
			details := Details{
				URI: "https://example.com/model.bin",
			}
			result := importer.Match(details)
			Expect(result).To(BeFalse())
		})

		It("should not match when backend preference is different", func() {
			preferences := json.RawMessage(`{"backend": "mlx"}`)
			details := Details{
				URI:         "https://example.com/model",
				Preferences: preferences,
			}
			result := importer.Match(details)
			Expect(result).To(BeFalse())
		})

		It("should return false when JSON preferences are invalid", func() {
			preferences := json.RawMessage(`invalid json`)
			details := Details{
				URI:         "https://example.com/model.gguf",
				Preferences: preferences,
			}
			// Invalid JSON causes Match to return false early
			result := importer.Match(details)
			Expect(result).To(BeFalse())
		})
	})

	Context("Import", func() {
		It("should import model config with default name and description", func() {
			details := Details{
				URI: "https://example.com/my-model.gguf",
			}
			modelConfig, err := importer.Import(details)
			Expect(err).ToNot(HaveOccurred())
			// Defaults: name from the URI base, description from the URI,
			// and a single download entry for the .gguf itself.
			Expect(modelConfig.Name).To(Equal("my-model.gguf"))
			Expect(modelConfig.Description).To(Equal("Imported from https://example.com/my-model.gguf"))
			Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: llama-cpp"))
			Expect(len(modelConfig.Files)).To(Equal(1), fmt.Sprintf("Model config: %+v", modelConfig))
			Expect(modelConfig.Files[0].URI).To(Equal("https://example.com/my-model.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
			Expect(modelConfig.Files[0].Filename).To(Equal("my-model.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
		})

		It("should import model config with custom name and description from preferences", func() {
			preferences := json.RawMessage(`{"name": "custom-model", "description": "Custom description"}`)
			details := Details{
				URI:         "https://example.com/my-model.gguf",
				Preferences: preferences,
			}
			modelConfig, err := importer.Import(details)
			Expect(err).ToNot(HaveOccurred())
			Expect(modelConfig.Name).To(Equal("custom-model"))
			Expect(modelConfig.Description).To(Equal("Custom description"))
			Expect(len(modelConfig.Files)).To(Equal(1), fmt.Sprintf("Model config: %+v", modelConfig))
			Expect(modelConfig.Files[0].URI).To(Equal("https://example.com/my-model.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
			Expect(modelConfig.Files[0].Filename).To(Equal("my-model.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
		})

		It("should handle invalid JSON preferences", func() {
			preferences := json.RawMessage(`invalid json`)
			details := Details{
				URI:         "https://example.com/my-model.gguf",
				Preferences: preferences,
			}
			_, err := importer.Import(details)
			Expect(err).To(HaveOccurred())
		})

		It("should extract filename correctly from URI with path", func() {
			details := importers.Details{
				URI: "https://example.com/path/to/model.gguf",
			}
			modelConfig, err := importer.Import(details)
			Expect(err).ToNot(HaveOccurred())
			Expect(len(modelConfig.Files)).To(Equal(1), fmt.Sprintf("Model config: %+v", modelConfig))
			Expect(modelConfig.Files[0].URI).To(Equal("https://example.com/path/to/model.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
			Expect(modelConfig.Files[0].Filename).To(Equal("model.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
		})
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/importers_suite_test.go | core/gallery/importers/importers_suite_test.go | package importers_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestImporters is the standard Go test entry point that hands control to
// the Ginkgo runner for this package's specs.
func TestImporters(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Importers test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/llama-cpp.go | core/gallery/importers/llama-cpp.go | package importers
import (
"encoding/json"
"path/filepath"
"slices"
"strings"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/pkg/downloader"
"github.com/mudler/LocalAI/pkg/functions"
"github.com/mudler/xlog"
"go.yaml.in/yaml/v2"
)
var _ Importer = &LlamaCPPImporter{}
type LlamaCPPImporter struct{}
// Match reports whether the model described by details should be handled by
// the llama-cpp backend: the user asked for it explicitly via the "backend"
// preference, the URI points at a .gguf file or an OCI image, or the
// HuggingFace repository contains at least one .gguf file.
func (i *LlamaCPPImporter) Match(details Details) bool {
	raw, err := details.Preferences.MarshalJSON()
	if err != nil {
		xlog.Error("failed to marshal preferences", "error", err)
		return false
	}
	prefs := map[string]any{}
	// Empty preferences are valid; only decode when there is a payload.
	if len(raw) > 0 {
		if err := json.Unmarshal(raw, &prefs); err != nil {
			xlog.Error("failed to unmarshal preferences", "error", err)
			return false
		}
	}

	switch {
	case prefs["backend"] == "llama-cpp":
		return true
	case strings.HasSuffix(details.URI, ".gguf"):
		return true
	case downloader.URI(details.URI).LooksLikeOCI():
		return true
	}

	if details.HuggingFace == nil {
		return false
	}
	for _, f := range details.HuggingFace.Files {
		if strings.HasSuffix(f.Path, ".gguf") {
			return true
		}
	}
	return false
}
// Import builds a gallery model configuration for the llama-cpp backend.
//
// Optional preferences (JSON object) recognized:
//   - name:                 model name (default: base name of the URI)
//   - description:          description (default: "Imported from <URI>")
//   - quantizations:        comma-separated preferred GGUF quants (default "q4_k_m")
//   - mmproj_quantizations: comma-separated preferred mmproj quants (default "fp16")
//   - embeddings:           "true"/"yes" to mark the model as an embeddings model
//
// The download file entries are derived from the URI: an OCI/Ollama
// reference, a direct .gguf URL, a local .gguf path, or (for HuggingFace
// repositories) the repo file listing filtered by the preferred quants.
func (i *LlamaCPPImporter) Import(details Details) (gallery.ModelConfig, error) {
	xlog.Debug("llama.cpp importer matched", "uri", details.URI)
	preferences, err := details.Preferences.MarshalJSON()
	if err != nil {
		return gallery.ModelConfig{}, err
	}
	preferencesMap := make(map[string]any)
	// Empty preferences are valid and mean "use all defaults".
	if len(preferences) > 0 {
		err = json.Unmarshal(preferences, &preferencesMap)
		if err != nil {
			return gallery.ModelConfig{}, err
		}
	}
	name, ok := preferencesMap["name"].(string)
	if !ok {
		name = filepath.Base(details.URI)
	}
	description, ok := preferencesMap["description"].(string)
	if !ok {
		description = "Imported from " + details.URI
	}
	preferedQuantizations, _ := preferencesMap["quantizations"].(string)
	quants := []string{"q4_k_m"}
	if preferedQuantizations != "" {
		quants = strings.Split(preferedQuantizations, ",")
	}
	mmprojQuants, _ := preferencesMap["mmproj_quantizations"].(string)
	mmprojQuantsList := []string{"fp16"}
	if mmprojQuants != "" {
		mmprojQuantsList = strings.Split(mmprojQuants, ",")
	}
	embeddings, _ := preferencesMap["embeddings"].(string)
	modelConfig := config.ModelConfig{
		Name:                name,
		Description:         description,
		KnownUsecaseStrings: []string{"chat"},
		Options:             []string{"use_jinja:true"},
		Backend:             "llama-cpp",
		TemplateConfig: config.TemplateConfig{
			UseTokenizerTemplate: true,
		},
		FunctionsConfig: functions.FunctionsConfig{
			GrammarConfig: functions.GrammarConfig{
				NoGrammar: true,
			},
		},
	}
	// NOTE(review): && binds tighter than ||, so this evaluates as
	// (embeddings != "" && lower == "true") || (lower == "yes");
	// the `embeddings != ""` guard is redundant but harmless.
	if embeddings != "" && strings.ToLower(embeddings) == "true" || strings.ToLower(embeddings) == "yes" {
		trueV := true
		modelConfig.Embeddings = &trueV
	}
	cfg := gallery.ModelConfig{
		Name:        name,
		Description: description,
	}
	uri := downloader.URI(details.URI)
	switch {
	case uri.LooksLikeOCI():
		// OCI/Ollama references: derive a filesystem-safe local filename
		// from the image reference by flattening separators.
		ociName := strings.TrimPrefix(string(uri), downloader.OCIPrefix)
		ociName = strings.TrimPrefix(ociName, downloader.OllamaPrefix)
		ociName = strings.ReplaceAll(ociName, "/", "__")
		ociName = strings.ReplaceAll(ociName, ":", "__")
		cfg.Files = append(cfg.Files, gallery.File{
			URI:      details.URI,
			Filename: ociName,
		})
		modelConfig.PredictionOptions = schema.PredictionOptions{
			BasicModelRequest: schema.BasicModelRequest{
				Model: ociName,
			},
		}
	case uri.LooksLikeURL() && strings.HasSuffix(details.URI, ".gguf"):
		// Extract filename from URL
		fileName, e := uri.FilenameFromUrl()
		if e != nil {
			return gallery.ModelConfig{}, e
		}
		cfg.Files = append(cfg.Files, gallery.File{
			URI:      details.URI,
			Filename: fileName,
		})
		modelConfig.PredictionOptions = schema.PredictionOptions{
			BasicModelRequest: schema.BasicModelRequest{
				Model: fileName,
			},
		}
	case strings.HasSuffix(details.URI, ".gguf"):
		// Local .gguf path: use its base name directly.
		cfg.Files = append(cfg.Files, gallery.File{
			URI:      details.URI,
			Filename: filepath.Base(details.URI),
		})
		modelConfig.PredictionOptions = schema.PredictionOptions{
			BasicModelRequest: schema.BasicModelRequest{
				Model: filepath.Base(details.URI),
			},
		}
	case details.HuggingFace != nil:
		// We want to:
		// Get first the chosen quants that match filenames
		// OR the first mmproj/gguf file found
		var lastMMProjFile *gallery.File
		var lastGGUFFile *gallery.File
		foundPreferedQuant := false
		foundPreferedMMprojQuant := false
		for _, file := range details.HuggingFace.Files {
			// Get the mmproj prefered quants
			if strings.Contains(strings.ToLower(file.Path), "mmproj") {
				// Remember the most recent mmproj file as a fallback.
				lastMMProjFile = &gallery.File{
					URI:      file.URL,
					Filename: filepath.Join("llama-cpp", "mmproj", filepath.Base(file.Path)),
					SHA256:   file.SHA256,
				}
				if slices.ContainsFunc(mmprojQuantsList, func(quant string) bool {
					return strings.Contains(strings.ToLower(file.Path), strings.ToLower(quant))
				}) {
					cfg.Files = append(cfg.Files, *lastMMProjFile)
					foundPreferedMMprojQuant = true
				}
			} else if strings.HasSuffix(strings.ToLower(file.Path), "gguf") {
				// Remember the most recent weights file as a fallback.
				lastGGUFFile = &gallery.File{
					URI:      file.URL,
					Filename: filepath.Join("llama-cpp", "models", filepath.Base(file.Path)),
					SHA256:   file.SHA256,
				}
				// get the files of the prefered quants
				if slices.ContainsFunc(quants, func(quant string) bool {
					return strings.Contains(strings.ToLower(file.Path), strings.ToLower(quant))
				}) {
					foundPreferedQuant = true
					cfg.Files = append(cfg.Files, *lastGGUFFile)
				}
			}
		}
		// Make sure to add at least one file if not already present (which is the latest one)
		if lastMMProjFile != nil && !foundPreferedMMprojQuant {
			if !slices.ContainsFunc(cfg.Files, func(f gallery.File) bool {
				return f.Filename == lastMMProjFile.Filename
			}) {
				cfg.Files = append(cfg.Files, *lastMMProjFile)
			}
		}
		if lastGGUFFile != nil && !foundPreferedQuant {
			if !slices.ContainsFunc(cfg.Files, func(f gallery.File) bool {
				return f.Filename == lastGGUFFile.Filename
			}) {
				cfg.Files = append(cfg.Files, *lastGGUFFile)
			}
		}
		// Find first mmproj file and configure it in the config file
		for _, file := range cfg.Files {
			if !strings.Contains(strings.ToLower(file.Filename), "mmproj") {
				continue
			}
			modelConfig.MMProj = file.Filename
			break
		}
		// Find first non-mmproj file and configure it in the config file
		for _, file := range cfg.Files {
			if strings.Contains(strings.ToLower(file.Filename), "mmproj") {
				continue
			}
			modelConfig.PredictionOptions = schema.PredictionOptions{
				BasicModelRequest: schema.BasicModelRequest{
					Model: file.Filename,
				},
			}
			break
		}
	}
	data, err := yaml.Marshal(modelConfig)
	if err != nil {
		return gallery.ModelConfig{}, err
	}
	cfg.ConfigFile = string(data)
	return cfg, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/vllm.go | core/gallery/importers/vllm.go | package importers
import (
"encoding/json"
"path/filepath"
"strings"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/core/schema"
"go.yaml.in/yaml/v2"
)
var _ Importer = &VLLMImporter{}
type VLLMImporter struct{}
// Match reports whether the model described by details should be handled by
// the vllm backend: either the user explicitly requested it via the
// "backend" preference, or the HuggingFace repository exposes tokenizer
// files (a transformers-style layout vllm can load).
func (i *VLLMImporter) Match(details Details) bool {
	preferences, err := details.Preferences.MarshalJSON()
	if err != nil {
		return false
	}
	preferencesMap := make(map[string]any)
	// Tolerate an empty preferences payload (consistent with
	// LlamaCPPImporter): previously an empty payload made Match bail out
	// before the HuggingFace file checks could run.
	if len(preferences) > 0 {
		if err := json.Unmarshal(preferences, &preferencesMap); err != nil {
			return false
		}
	}
	if b, ok := preferencesMap["backend"].(string); ok && b == "vllm" {
		return true
	}
	if details.HuggingFace != nil {
		for _, file := range details.HuggingFace.Files {
			if strings.Contains(file.Path, "tokenizer.json") ||
				strings.Contains(file.Path, "tokenizer_config.json") {
				return true
			}
		}
	}
	return false
}
// Import builds a gallery model configuration for the vllm backend from the
// given details. Optional preferences override the defaults:
//   - name:        model name (default: base name of the URI)
//   - description: description (default: "Imported from <URI>")
//   - backend:     backend name (default: "vllm")
//
// It returns an error only when non-empty preferences cannot be decoded or
// the resulting configuration cannot be serialized to YAML.
func (i *VLLMImporter) Import(details Details) (gallery.ModelConfig, error) {
	preferences, err := details.Preferences.MarshalJSON()
	if err != nil {
		return gallery.ModelConfig{}, err
	}
	preferencesMap := make(map[string]any)
	// Tolerate an empty preferences payload (consistent with
	// LlamaCPPImporter): empty simply means "use all defaults".
	if len(preferences) > 0 {
		if err := json.Unmarshal(preferences, &preferencesMap); err != nil {
			return gallery.ModelConfig{}, err
		}
	}
	name, ok := preferencesMap["name"].(string)
	if !ok {
		name = filepath.Base(details.URI)
	}
	description, ok := preferencesMap["description"].(string)
	if !ok {
		description = "Imported from " + details.URI
	}
	backend := "vllm"
	if b, ok := preferencesMap["backend"].(string); ok {
		backend = b
	}

	modelConfig := config.ModelConfig{
		Name:                name,
		Description:         description,
		KnownUsecaseStrings: []string{"chat"},
		Backend:             backend,
		PredictionOptions: schema.PredictionOptions{
			BasicModelRequest: schema.BasicModelRequest{
				Model: details.URI,
			},
		},
		TemplateConfig: config.TemplateConfig{
			UseTokenizerTemplate: true,
		},
	}

	data, err := yaml.Marshal(modelConfig)
	if err != nil {
		return gallery.ModelConfig{}, err
	}
	return gallery.ModelConfig{
		Name:        name,
		Description: description,
		ConfigFile:  string(data),
	}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/gallery/importers/importers_test.go | core/gallery/importers/importers_test.go | package importers_test
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"github.com/mudler/LocalAI/core/gallery/importers"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("DiscoverModelConfig", func() {
Context("With only a repository URI", func() {
It("should discover and import using LlamaCPPImporter", func() {
uri := "https://huggingface.co/mudler/LocalAI-functioncall-qwen2.5-7b-v0.5-Q4_K_M-GGUF"
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred(), fmt.Sprintf("Error: %v", err))
Expect(modelConfig.Name).To(Equal("LocalAI-functioncall-qwen2.5-7b-v0.5-Q4_K_M-GGUF"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Description).To(Equal("Imported from https://huggingface.co/mudler/LocalAI-functioncall-qwen2.5-7b-v0.5-Q4_K_M-GGUF"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: llama-cpp"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(len(modelConfig.Files)).To(Equal(1), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[0].Filename).To(Equal("llama-cpp/models/localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[0].URI).To(Equal("https://huggingface.co/mudler/LocalAI-functioncall-qwen2.5-7b-v0.5-Q4_K_M-GGUF/resolve/main/localai-functioncall-qwen2.5-7b-v0.5-q4_k_m.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[0].SHA256).To(Equal("4e7b7fe1d54b881f1ef90799219dc6cc285d29db24f559c8998d1addb35713d4"), fmt.Sprintf("Model config: %+v", modelConfig))
})
It("should discover and import using LlamaCPPImporter", func() {
uri := "https://huggingface.co/Qwen/Qwen3-VL-2B-Instruct-GGUF"
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred(), fmt.Sprintf("Error: %v", err))
Expect(modelConfig.Name).To(Equal("Qwen3-VL-2B-Instruct-GGUF"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Description).To(Equal("Imported from https://huggingface.co/Qwen/Qwen3-VL-2B-Instruct-GGUF"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: llama-cpp"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.ConfigFile).To(ContainSubstring("mmproj: llama-cpp/mmproj/mmproj-Qwen3VL-2B-Instruct-Q8_0.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.ConfigFile).To(ContainSubstring("model: llama-cpp/models/Qwen3VL-2B-Instruct-Q4_K_M.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(len(modelConfig.Files)).To(Equal(2), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[0].Filename).To(Equal("llama-cpp/models/Qwen3VL-2B-Instruct-Q4_K_M.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[0].URI).To(Equal("https://huggingface.co/Qwen/Qwen3-VL-2B-Instruct-GGUF/resolve/main/Qwen3VL-2B-Instruct-Q4_K_M.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[0].SHA256).ToNot(BeEmpty(), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[1].Filename).To(Equal("llama-cpp/mmproj/mmproj-Qwen3VL-2B-Instruct-Q8_0.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[1].URI).To(Equal("https://huggingface.co/Qwen/Qwen3-VL-2B-Instruct-GGUF/resolve/main/mmproj-Qwen3VL-2B-Instruct-Q8_0.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[1].SHA256).ToNot(BeEmpty(), fmt.Sprintf("Model config: %+v", modelConfig))
})
It("should discover and import using LlamaCPPImporter", func() {
uri := "https://huggingface.co/Qwen/Qwen3-VL-2B-Instruct-GGUF"
preferences := json.RawMessage(`{ "quantizations": "Q8_0", "mmproj_quantizations": "f16" }`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred(), fmt.Sprintf("Error: %v", err))
Expect(modelConfig.Name).To(Equal("Qwen3-VL-2B-Instruct-GGUF"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Description).To(Equal("Imported from https://huggingface.co/Qwen/Qwen3-VL-2B-Instruct-GGUF"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: llama-cpp"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.ConfigFile).To(ContainSubstring("mmproj: llama-cpp/mmproj/mmproj-Qwen3VL-2B-Instruct-F16.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.ConfigFile).To(ContainSubstring("model: llama-cpp/models/Qwen3VL-2B-Instruct-Q8_0.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(len(modelConfig.Files)).To(Equal(2), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[0].Filename).To(Equal("llama-cpp/models/Qwen3VL-2B-Instruct-Q8_0.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[0].URI).To(Equal("https://huggingface.co/Qwen/Qwen3-VL-2B-Instruct-GGUF/resolve/main/Qwen3VL-2B-Instruct-Q8_0.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[0].SHA256).ToNot(BeEmpty(), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[1].Filename).To(Equal("llama-cpp/mmproj/mmproj-Qwen3VL-2B-Instruct-F16.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[1].URI).To(Equal("https://huggingface.co/Qwen/Qwen3-VL-2B-Instruct-GGUF/resolve/main/mmproj-Qwen3VL-2B-Instruct-F16.gguf"), fmt.Sprintf("Model config: %+v", modelConfig))
Expect(modelConfig.Files[1].SHA256).ToNot(BeEmpty(), fmt.Sprintf("Model config: %+v", modelConfig))
})
})
Context("with .gguf URI", func() {
It("should discover and import using LlamaCPPImporter", func() {
uri := "https://example.com/my-model.gguf"
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("my-model.gguf"))
Expect(modelConfig.Description).To(Equal("Imported from https://example.com/my-model.gguf"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: llama-cpp"))
})
It("should use custom preferences when provided", func() {
uri := "https://example.com/my-model.gguf"
preferences := json.RawMessage(`{"name": "custom-name", "description": "Custom description"}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("custom-name"))
Expect(modelConfig.Description).To(Equal("Custom description"))
})
})
Context("with mlx-community URI", func() {
It("should discover and import using MLXImporter", func() {
uri := "https://huggingface.co/mlx-community/test-model"
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("test-model"))
Expect(modelConfig.Description).To(Equal("Imported from https://huggingface.co/mlx-community/test-model"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: mlx"))
})
It("should use custom preferences when provided", func() {
uri := "https://huggingface.co/mlx-community/test-model"
preferences := json.RawMessage(`{"name": "custom-mlx", "description": "Custom MLX description"}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("custom-mlx"))
Expect(modelConfig.Description).To(Equal("Custom MLX description"))
})
})
Context("with backend preference", func() {
It("should use llama-cpp backend when specified", func() {
uri := "https://example.com/model"
preferences := json.RawMessage(`{"backend": "llama-cpp"}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: llama-cpp"))
})
It("should use mlx backend when specified", func() {
uri := "https://example.com/model"
preferences := json.RawMessage(`{"backend": "mlx"}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: mlx"))
})
It("should use mlx-vlm backend when specified", func() {
uri := "https://example.com/model"
preferences := json.RawMessage(`{"backend": "mlx-vlm"}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: mlx-vlm"))
})
})
Context("with HuggingFace URI formats", func() {
It("should handle huggingface:// prefix", func() {
uri := "huggingface://mlx-community/test-model"
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("test-model"))
})
It("should handle hf:// prefix", func() {
uri := "hf://mlx-community/test-model"
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("test-model"))
})
It("should handle https://huggingface.co/ prefix", func() {
uri := "https://huggingface.co/mlx-community/test-model"
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("test-model"))
})
})
Context("with invalid or non-matching URI", func() {
It("should return error when no importer matches", func() {
uri := "https://example.com/unknown-model.bin"
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
// When no importer matches, the function returns empty config and error
// The exact behavior depends on implementation, but typically an error is returned
Expect(modelConfig.Name).To(BeEmpty())
Expect(err).To(HaveOccurred())
})
})
Context("with invalid JSON preferences", func() {
It("should return error when JSON is invalid even if URI matches", func() {
uri := "https://example.com/model.gguf"
preferences := json.RawMessage(`invalid json`)
// Even though Match() returns true for .gguf extension,
// Import() will fail when trying to unmarshal invalid JSON preferences
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).To(HaveOccurred())
Expect(modelConfig.Name).To(BeEmpty())
})
})
Context("with local YAML config files", func() {
var tempDir string
BeforeEach(func() {
var err error
tempDir, err = os.MkdirTemp("", "importers-test-*")
Expect(err).ToNot(HaveOccurred())
})
AfterEach(func() {
os.RemoveAll(tempDir)
})
It("should read local YAML file with file:// prefix", func() {
yamlContent := `name: test-model
backend: llama-cpp
description: Test model from local YAML
parameters:
model: /path/to/model.gguf
temperature: 0.7
`
yamlFile := filepath.Join(tempDir, "test-model.yaml")
err := os.WriteFile(yamlFile, []byte(yamlContent), 0644)
Expect(err).ToNot(HaveOccurred())
uri := "file://" + yamlFile
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("test-model"))
Expect(modelConfig.Description).To(Equal("Test model from local YAML"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: llama-cpp"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("name: test-model"))
})
It("should read local YAML file without file:// prefix (direct path)", func() {
yamlContent := `name: direct-path-model
backend: mlx
description: Test model from direct path
parameters:
model: /path/to/model.safetensors
`
yamlFile := filepath.Join(tempDir, "direct-model.yaml")
err := os.WriteFile(yamlFile, []byte(yamlContent), 0644)
Expect(err).ToNot(HaveOccurred())
uri := yamlFile
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("direct-path-model"))
Expect(modelConfig.Description).To(Equal("Test model from direct path"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: mlx"))
})
It("should read local YAML file with .yml extension", func() {
yamlContent := `name: yml-extension-model
backend: transformers
description: Test model with .yml extension
parameters:
model: /path/to/model
`
yamlFile := filepath.Join(tempDir, "test-model.yml")
err := os.WriteFile(yamlFile, []byte(yamlContent), 0644)
Expect(err).ToNot(HaveOccurred())
uri := "file://" + yamlFile
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
Expect(modelConfig.Name).To(Equal("yml-extension-model"))
Expect(modelConfig.Description).To(Equal("Test model with .yml extension"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: transformers"))
})
It("should ignore preferences when reading YAML files directly", func() {
yamlContent := `name: yaml-model
backend: llama-cpp
description: Original description
parameters:
model: /path/to/model.gguf
`
yamlFile := filepath.Join(tempDir, "prefs-test.yaml")
err := os.WriteFile(yamlFile, []byte(yamlContent), 0644)
Expect(err).ToNot(HaveOccurred())
uri := "file://" + yamlFile
// Preferences should be ignored when reading YAML directly
preferences := json.RawMessage(`{"name": "custom-name", "description": "Custom description", "backend": "mlx"}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).ToNot(HaveOccurred())
// Should use values from YAML file, not preferences
Expect(modelConfig.Name).To(Equal("yaml-model"))
Expect(modelConfig.Description).To(Equal("Original description"))
Expect(modelConfig.ConfigFile).To(ContainSubstring("backend: llama-cpp"))
})
It("should return error when local YAML file doesn't exist", func() {
nonExistentFile := filepath.Join(tempDir, "nonexistent.yaml")
uri := "file://" + nonExistentFile
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).To(HaveOccurred())
Expect(modelConfig.Name).To(BeEmpty())
})
It("should return error when YAML file is invalid/malformed", func() {
invalidYaml := `name: invalid-model
backend: llama-cpp
invalid: yaml: content: [unclosed bracket
`
yamlFile := filepath.Join(tempDir, "invalid.yaml")
err := os.WriteFile(yamlFile, []byte(invalidYaml), 0644)
Expect(err).ToNot(HaveOccurred())
uri := "file://" + yamlFile
preferences := json.RawMessage(`{}`)
modelConfig, err := importers.DiscoverModelConfig(uri, preferences)
Expect(err).To(HaveOccurred())
Expect(modelConfig.Name).To(BeEmpty())
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/explorer/database_test.go | core/explorer/database_test.go | package explorer_test
import (
"os"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/mudler/LocalAI/core/explorer"
)
var _ = Describe("Database", func() {
var (
dbPath string
db *explorer.Database
err error
)
BeforeEach(func() {
// Create a temporary file path for the database
dbPath = "test_db.json"
db, err = explorer.NewDatabase(dbPath)
Expect(err).To(BeNil())
})
AfterEach(func() {
// Clean up the temporary database file
os.Remove(dbPath)
})
Context("when managing tokens", func() {
It("should add and retrieve a token", func() {
token := "token123"
t := explorer.TokenData{Name: "TokenName", Description: "A test token"}
err = db.Set(token, t)
Expect(err).To(BeNil())
retrievedToken, exists := db.Get(token)
Expect(exists).To(BeTrue())
Expect(retrievedToken).To(Equal(t))
})
It("should delete a token", func() {
token := "token123"
t := explorer.TokenData{Name: "TokenName", Description: "A test token"}
err = db.Set(token, t)
Expect(err).To(BeNil())
err = db.Delete(token)
Expect(err).To(BeNil())
_, exists := db.Get(token)
Expect(exists).To(BeFalse())
})
It("should persist data to disk", func() {
token := "token123"
t := explorer.TokenData{Name: "TokenName", Description: "A test token"}
err = db.Set(token, t)
Expect(err).To(BeNil())
// Recreate the database object to simulate reloading from disk
db, err = explorer.NewDatabase(dbPath)
Expect(err).To(BeNil())
retrievedToken, exists := db.Get(token)
Expect(exists).To(BeTrue())
Expect(retrievedToken).To(Equal(t))
// Check the token list
tokenList := db.TokenList()
Expect(tokenList).To(ContainElement(token))
})
})
Context("when loading an empty or non-existent file", func() {
It("should start with an empty database", func() {
dbPath = "empty_db.json"
db, err = explorer.NewDatabase(dbPath)
Expect(err).To(BeNil())
_, exists := db.Get("nonexistent")
Expect(exists).To(BeFalse())
// Clean up
os.Remove(dbPath)
})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/explorer/database.go | core/explorer/database.go | package explorer
// A simple JSON database for storing and retrieving p2p network tokens and a name and description.
import (
"encoding/json"
"os"
"sort"
"sync"
"github.com/gofrs/flock"
)
// Database is a simple JSON database for storing and retrieving p2p network tokens and a name and description.
type Database struct {
	path  string               // location of the JSON file on disk
	data  map[string]TokenData // in-memory view of the file, keyed by token
	flock *flock.Flock         // inter-process file lock on path + ".lock"
	sync.Mutex                 // guards data within this process
}
// TokenData is a p2p network token with a name and description.
type TokenData struct {
	Name        string `json:"name"`
	Description string `json:"description"`
	// Clusters and Failures carry no json tags, so they serialize under
	// their Go field names.
	Clusters []ClusterData // clusters discovered on this token's network
	Failures int           // discovery failure count (maintained by DiscoveryServer)
}
// ClusterData describes one cluster observed on a p2p network.
type ClusterData struct {
	Workers   []string // IDs of the online worker nodes
	Type      string   // "worker" or "federated"
	NetworkID string   // network identifier prefix of the ledger key, if any
}
// NewDatabase creates a new Database with the given path.
// The database is returned even when the initial load fails, together
// with the load error.
func NewDatabase(path string) (*Database, error) {
	db := &Database{
		path:  path,
		data:  map[string]TokenData{},
		flock: flock.New(path + ".lock"),
	}
	err := db.load()
	return db, err
}
// Get retrieves a Token from the Database by its token.
// The on-disk state is re-read first so writes from other processes are
// observed; a load failure is ignored here (the signature has no error
// return) and the current in-memory view is used instead.
func (db *Database) Get(token string) (TokenData, bool) {
	db.flock.Lock() // we are making sure that the file is not being written to
	defer db.flock.Unlock()
	db.Lock() // we are making sure that is safe if called by another instance in the same process
	defer db.Unlock()
	db.load()
	t, ok := db.data[token]
	return t, ok
}
// Set stores a Token in the Database by its token and persists the
// database to disk.
//
// The file is re-read before mutating so entries written by other
// processes are preserved. Previously the load error was silently
// discarded, which could let a transient read failure wipe concurrent
// writers' data on the subsequent save; now the error aborts the write.
func (db *Database) Set(token string, t TokenData) error {
	db.flock.Lock() // inter-process exclusion on the backing file
	defer db.flock.Unlock()
	db.Lock() // in-process exclusion
	defer db.Unlock()
	if err := db.load(); err != nil {
		return err
	}
	db.data[token] = t
	return db.save()
}
// Delete removes a Token from the Database by its token and persists the
// change to disk.
//
// As in Set, the on-disk state is re-read first; a load failure now aborts
// the operation instead of being silently ignored, so a transient read
// error cannot cause other processes' entries to be dropped on save.
func (db *Database) Delete(token string) error {
	db.flock.Lock() // inter-process exclusion on the backing file
	defer db.flock.Unlock()
	db.Lock() // in-process exclusion
	defer db.Unlock()
	if err := db.load(); err != nil {
		return err
	}
	delete(db.data, token)
	return db.save()
}
// TokenList returns all known tokens, sorted lexicographically.
func (db *Database) TokenList() []string {
	db.flock.Lock() // inter-process exclusion on the backing file
	defer db.flock.Unlock()
	db.Lock() // in-process exclusion
	defer db.Unlock()
	db.load() // best-effort refresh; on failure the in-memory view is used

	tokens := make([]string, 0, len(db.data))
	for k := range db.data {
		tokens = append(tokens, k)
	}
	// sort by token; sort.Strings replaces the hand-rolled sort.Slice comparator
	sort.Strings(tokens)
	return tokens
}
// load reads the Database from disk, replacing the in-memory state.
// A missing or empty file yields an empty database.
func (db *Database) load() error {
	if _, err := os.Stat(db.path); os.IsNotExist(err) {
		return nil
	}

	f, err := os.ReadFile(db.path)
	if err != nil {
		return err
	}
	if len(f) == 0 {
		// Treat an empty file as an empty database instead of a JSON error.
		db.data = make(map[string]TokenData)
		return nil
	}

	// Decode into a fresh map: json.Unmarshal merges into a non-nil map,
	// so reusing db.data would let entries deleted on disk by another
	// process linger in memory.
	data := make(map[string]TokenData)
	if err := json.Unmarshal(f, &data); err != nil {
		return err
	}
	db.data = data
	return nil
}
// save writes the Database to disk as JSON.
//
// The Close error is propagated (when encoding succeeded): the OS may
// flush buffered writes on Close, so a deferred, ignored Close could
// silently lose data.
func (db *Database) save() error {
	f, err := os.Create(db.path)
	if err != nil {
		return err
	}
	if err := json.NewEncoder(f).Encode(db.data); err != nil {
		f.Close()
		return err
	}
	return f.Close()
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/explorer/discovery.go | core/explorer/discovery.go | package explorer
import (
"context"
"fmt"
"strings"
"sync"
"time"
"github.com/mudler/xlog"
"github.com/mudler/LocalAI/core/p2p"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/edgevpn/pkg/blockchain"
)
// DiscoveryServer keeps the Database in sync with the state of the p2p
// networks it knows about by periodically probing each token.
type DiscoveryServer struct {
	sync.Mutex                   // guards database updates performed by the probe loop
	database       *Database     // backing store of tokens and their cluster data
	connectionTime time.Duration // timeout for a single network probe
	errorThreshold int           // failure count after which a token is dropped
}
// NewDiscoveryServer creates a new DiscoveryServer with the given Database.
// it keeps the db state in sync with the network state
// Zero values for dur and failureThreshold select the defaults
// (50s connection time, 3 failures).
func NewDiscoveryServer(db *Database, dur time.Duration, failureThreshold int) *DiscoveryServer {
	connectionTime := dur
	if connectionTime == 0 {
		connectionTime = 50 * time.Second
	}

	threshold := failureThreshold
	if threshold == 0 {
		threshold = 3
	}

	return &DiscoveryServer{
		database:       db,
		connectionTime: connectionTime,
		errorThreshold: threshold,
	}
}
// Network groups the cluster data discovered for a single network.
// NOTE(review): not referenced elsewhere in this file — presumably consumed
// by callers in other packages; confirm before removing.
type Network struct {
	Clusters []ClusterData
}
// runBackground probes every known network token once, recording the
// discovered cluster data in the database, then prunes tokens that
// exceeded the failure threshold.
func (s *DiscoveryServer) runBackground() {
	tokens := s.database.TokenList()
	if len(tokens) == 0 {
		time.Sleep(5 * time.Second) // avoid busy loop
		return
	}

	// Probes run sequentially on purpose: each opens its own p2p connection.
	for _, token := range tokens {
		s.probeToken(token)
	}

	s.deleteFailedConnections()
}

// probeToken connects to the p2p network identified by token, collects its
// cluster data and stores it in the database; any failure — or a network
// without online workers — increments the token's failure counter.
//
// Extracted from runBackground so the timeout context's cancel runs at the
// end of each probe instead of being deferred inside a loop, which kept
// every context alive until runBackground returned.
func (s *DiscoveryServer) probeToken(token string) {
	c, cancel := context.WithTimeout(context.Background(), s.connectionTime)
	defer cancel()

	n, err := p2p.NewNode(token)
	if err != nil {
		xlog.Error("Failed to create node", "error", err)
		s.failedToken(token)
		return
	}

	if err := n.Start(c); err != nil {
		xlog.Error("Failed to start node", "error", err)
		s.failedToken(token)
		return
	}

	ledger, err := n.Ledger()
	if err != nil {
		xlog.Error("Failed to start ledger", "error", err)
		s.failedToken(token)
		return
	}

	networkData := make(chan ClusterData)

	// get the network data - it takes the whole timeout
	// as we might not be connected to the network yet,
	// and few attempts would have to be made before bailing out
	go s.retrieveNetworkData(c, ledger, networkData)

	hasWorkers := false
	ledgerK := []ClusterData{}
	for key := range networkData {
		ledgerK = append(ledgerK, key)
		if len(key.Workers) > 0 {
			hasWorkers = true
		}
	}

	xlog.Debug("Network clusters", "network", token, "count", len(ledgerK))
	for _, k := range ledgerK {
		xlog.Debug("Clusterdata", "network", token, "cluster", k)
	}

	if !hasWorkers {
		s.failedToken(token)
		return
	}

	// At least one worker is online: record the clusters and reset failures.
	s.Lock()
	defer s.Unlock()
	data, _ := s.database.Get(token)
	data.Clusters = ledgerK
	data.Failures = 0
	s.database.Set(token, data)
}
// failedToken bumps the failure counter recorded for token.
func (s *DiscoveryServer) failedToken(token string) {
	s.Lock()
	defer s.Unlock()

	entry, _ := s.database.Get(token)
	entry.Failures++
	s.database.Set(token, entry)
}
// deleteFailedConnections removes every token whose failure counter went
// past the configured threshold.
func (s *DiscoveryServer) deleteFailedConnections() {
	s.Lock()
	defer s.Unlock()

	for _, token := range s.database.TokenList() {
		entry, _ := s.database.Get(token)
		if entry.Failures > s.errorThreshold {
			xlog.Info("Token has been removed from the database", "token", token)
			s.database.Delete(token)
		}
	}
}
// retrieveNetworkData polls the ledger's last block every five seconds
// until the context expires, collecting each worker/federated cluster that
// has at least one online node. The accumulated clusters are flushed to
// networkData when the context is done, then the channel is closed so the
// consumer's range loop terminates. Later snapshots of the same ledger key
// overwrite earlier ones, so only the freshest view per cluster is emitted.
func (s *DiscoveryServer) retrieveNetworkData(c context.Context, ledger *blockchain.Ledger, networkData chan ClusterData) {
	clusters := map[string]ClusterData{}

	defer func() {
		for _, n := range clusters {
			networkData <- n
		}
		close(networkData)
	}()

	for {
		select {
		case <-c.Done():
			return
		default:
			time.Sleep(5 * time.Second)

			data := ledger.LastBlock().Storage
		LEDGER:
			for d := range data {
				toScanForWorkers := false
				cd := ClusterData{}

				// Ledger keys are either the bare worker/federated ID, or
				// "<networkID>_<ID>" for named networks.
				isWorkerCluster := d == p2p.WorkerID || (strings.Contains(d, "_") && strings.Contains(d, p2p.WorkerID))
				isFederatedCluster := d == p2p.FederatedID || (strings.Contains(d, "_") && strings.Contains(d, p2p.FederatedID))
				switch {
				case isWorkerCluster:
					toScanForWorkers = true
					cd.Type = "worker"
				case isFederatedCluster:
					toScanForWorkers = true
					cd.Type = "federated"
				}

				if strings.Contains(d, "_") {
					cd.NetworkID = strings.Split(d, "_")[0]
				}

				if !toScanForWorkers {
					continue LEDGER
				}

				// Collect the IDs of the online nodes under this key;
				// entries that fail to unmarshal are skipped.
				atLeastOneWorker := false
			DATA:
				for _, v := range data[d] {
					nd := &schema.NodeData{}
					if err := v.Unmarshal(nd); err != nil {
						continue DATA
					}
					if nd.IsOnline() {
						atLeastOneWorker = true
						(&cd).Workers = append(cd.Workers, nd.ID)
					}
				}

				// Only clusters with at least one online worker are kept.
				if atLeastOneWorker {
					clusters[d] = cd
				}
			}
		}
	}
}
// Start the discovery server. This is meant to be run in to a goroutine.
// It keeps collecting network data until ctx is cancelled; with
// keepRunning unset, a single collection pass is performed.
func (s *DiscoveryServer) Start(ctx context.Context, keepRunning bool) error {
	for {
		// Bail out as soon as the context is cancelled.
		select {
		case <-ctx.Done():
			return fmt.Errorf("context cancelled")
		default:
		}

		// Collect data
		s.runBackground()

		if !keepRunning {
			return nil
		}
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/explorer/explorer_suite_test.go | core/explorer/explorer_suite_test.go | package explorer_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestExplorer is the Go test entry point: it routes Gomega failures into
// Ginkgo and runs the explorer spec suite.
func TestExplorer(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Explorer test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/application/p2p.go | core/application/p2p.go | package application
import (
"context"
"fmt"
"net"
"slices"
"time"
"github.com/google/uuid"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/core/p2p"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/core/services"
"github.com/mudler/edgevpn/pkg/node"
"github.com/mudler/xlog"
)
// StopP2P cancels the running P2P stack, if any.
//
// p2pCancel/p2pCtx are guarded with the same mutex RestartP2P uses for
// those fields; without it, a concurrent Stop/Restart pair races on them.
func (a *Application) StopP2P() error {
	a.p2pMutex.Lock()
	defer a.p2pMutex.Unlock()

	if a.p2pCancel != nil {
		a.p2pCancel()
		a.p2pCancel = nil
		a.p2pCtx = nil
		// Wait a bit for shutdown to complete
		time.Sleep(200 * time.Millisecond)
	}
	return nil
}
// StartP2P brings up the P2P stack from the current ApplicationConfig.
// In federated mode it exposes the local API port as a federated service
// and starts the background model-list sync; in all cases it attaches a
// worker-network service discoverer that reports online tunnel addresses
// through TunnelCallback. The created context/cancel pair is stored on the
// Application so StopP2P/RestartP2P can tear the stack down later.
func (a *Application) StartP2P() error {
	// we need a p2p token
	if a.applicationConfig.P2PToken == "" {
		return fmt.Errorf("P2P token is not set")
	}

	networkID := a.applicationConfig.P2PNetworkID

	ctx, cancel := context.WithCancel(a.ApplicationConfig().Context)
	a.p2pCtx = ctx
	a.p2pCancel = cancel

	var n *node.Node

	// Here we are avoiding creating multiple nodes:
	// - if the federated mode is enabled, we create a federated node and expose a service
	// - exposing a service creates a node with specific options, and we don't want to create another node

	// If the federated mode is enabled, we expose a service to the local instance running
	// at r.Address
	if a.applicationConfig.Federated {
		_, port, err := net.SplitHostPort(a.applicationConfig.APIAddress)
		if err != nil {
			return err
		}

		// Here a new node is created and started
		// and a service is exposed by the node
		// (note: this local "node" variable shadows the imported node package)
		node, err := p2p.ExposeService(ctx, "localhost", port, a.applicationConfig.P2PToken, p2p.NetworkID(networkID, p2p.FederatedID))
		if err != nil {
			return err
		}

		if err := p2p.ServiceDiscoverer(ctx, node, a.applicationConfig.P2PToken, p2p.NetworkID(networkID, p2p.FederatedID), nil, false); err != nil {
			return err
		}

		n = node

		// start node sync in the background
		if err := a.p2pSync(ctx, node); err != nil {
			return err
		}
	}

	// If a node wasn't created previously, create it
	if n == nil {
		node, err := p2p.NewNode(a.applicationConfig.P2PToken)
		if err != nil {
			return err
		}

		err = node.Start(ctx)
		if err != nil {
			return fmt.Errorf("starting new node: %w", err)
		}

		n = node
	}

	// Attach a ServiceDiscoverer to the p2p node
	xlog.Info("Starting P2P server discovery...")
	if err := p2p.ServiceDiscoverer(ctx, n, a.applicationConfig.P2PToken, p2p.NetworkID(networkID, p2p.WorkerID), func(serviceID string, node schema.NodeData) {
		// On every discovery event, recompute the full list of online
		// tunnel addresses and hand it to the configured callback.
		var tunnelAddresses []string
		for _, v := range p2p.GetAvailableNodes(p2p.NetworkID(networkID, p2p.WorkerID)) {
			if v.IsOnline() {
				tunnelAddresses = append(tunnelAddresses, v.TunnelAddress)
			} else {
				xlog.Info("Node is offline", "node", v.ID)
			}
		}

		if a.applicationConfig.TunnelCallback != nil {
			a.applicationConfig.TunnelCallback(tunnelAddresses)
		}
	}, true); err != nil {
		return err
	}

	return nil
}
// RestartP2P restarts the P2P stack with current ApplicationConfig settings:
// it stops any running stack, then relaunches StartP2P in the background.
//
// Fixes over the previous version: the dead "address" local (computed but
// never used) is gone, and no context is created here — StartP2P creates
// and stores its own context/cancel pair, so the one previously built here
// was immediately overwritten and leaked.
func (a *Application) RestartP2P() error {
	a.p2pMutex.Lock()
	defer a.p2pMutex.Unlock()

	// Stop existing P2P if running
	if a.p2pCancel != nil {
		a.p2pCancel()
		a.p2pCancel = nil
		a.p2pCtx = nil
		// Wait a bit for shutdown to complete
		time.Sleep(200 * time.Millisecond)
	}

	appConfig := a.ApplicationConfig()

	// Start P2P only if a token is set
	if appConfig.P2PToken == "" {
		return fmt.Errorf("P2P token is not set")
	}

	// Start P2P stack in a goroutine; StartP2P stores its own
	// context/cancel on the Application for later teardown.
	go func() {
		if err := a.StartP2P(); err != nil {
			xlog.Error("Failed to start P2P stack", "error", err)
		}
	}()

	xlog.Info("P2P stack restarted with new settings")
	return nil
}
// syncState reconciles the local model list with the "shared_state/models"
// key on the p2p ledger: models we have that the ledger lacks are
// announced, and ledger models missing locally are scheduled for
// installation through the gallery service.
func syncState(ctx context.Context, n *node.Node, app *Application) error {
	xlog.Debug("[p2p-sync] Syncing state")

	whatWeHave := []string{}
	for _, model := range app.ModelConfigLoader().GetAllModelsConfigs() {
		whatWeHave = append(whatWeHave, model.Name)
	}

	ledger, _ := n.Ledger()

	currentData := ledger.CurrentData()
	xlog.Debug("[p2p-sync] Current data", "data", currentData)

	data, exists := ledger.GetKey("shared_state", "models")
	if !exists {
		// First writer: publish our model list and stop here. The previous
		// code fell through and unmarshalled the zero-value entry, which
		// only produced a spurious warning — with no ledger list there is
		// nothing remote to install.
		ledger.AnnounceUpdate(ctx, time.Minute, "shared_state", "models", whatWeHave)
		xlog.Debug("No models found in the ledger, announced our models", "models", whatWeHave)
		return nil
	}

	models := []string{}
	if err := data.Unmarshal(&models); err != nil {
		xlog.Warn("error unmarshalling models", "error", err)
		return nil
	}

	xlog.Debug("[p2p-sync] Models comparison", "ourModels", whatWeHave, "ledgerModels", models)

	// Announce any model we have that the ledger doesn't know about yet.
	whatIsNotThere := []string{}
	for _, model := range whatWeHave {
		if !slices.Contains(models, model) {
			whatIsNotThere = append(whatIsNotThere, model)
		}
	}

	if len(whatIsNotThere) > 0 {
		xlog.Debug("[p2p-sync] Announcing our models", "models", append(models, whatIsNotThere...))
		ledger.AnnounceUpdate(
			ctx,
			1*time.Minute,
			"shared_state",
			"models",
			append(models, whatIsNotThere...),
		)
	}

	// Install every ledger model that is not present in this instance.
	for _, model := range models {
		if slices.Contains(whatWeHave, model) {
			xlog.Debug("[p2p-sync] Model is already present in this instance", "model", model)
			continue
		}

		xlog.Info("[p2p-sync] Installing model which is not present in this instance", "model", model)
		uuid, err := uuid.NewUUID()
		if err != nil {
			xlog.Error("error generating UUID", "error", err)
			continue
		}

		app.GalleryService().ModelGalleryChannel <- services.GalleryOp[gallery.GalleryModel, gallery.ModelConfig]{
			ID:                 uuid.String(),
			GalleryElementName: model,
			Galleries:          app.ApplicationConfig().Galleries,
			BackendGalleries:   app.ApplicationConfig().BackendGalleries,
		}
	}

	return nil
}
// p2pSync spawns a background loop that reconciles the local model list
// with the shared ledger state once per minute until ctx is cancelled.
func (a *Application) p2pSync(ctx context.Context, n *node.Node) error {
	go func() {
		for {
			select {
			case <-ctx.Done():
				return
			case <-time.After(time.Minute):
				if syncErr := syncState(ctx, n, a); syncErr != nil {
					xlog.Error("error syncing state", "error", syncErr)
				}
			}
		}
	}()
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/application/config_file_watcher.go | core/application/config_file_watcher.go | package application
import (
"encoding/json"
"fmt"
"os"
"path"
"path/filepath"
"time"
"dario.cat/mergo"
"github.com/fsnotify/fsnotify"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/xlog"
)
// fileHandler processes the (possibly empty) content of one dynamic config
// file and applies it to the live ApplicationConfig.
type fileHandler func(fileContent []byte, appConfig *config.ApplicationConfig) error

// configFileHandler watches the dynamic config directory and dispatches
// each file's content to the handler registered for its filename.
type configFileHandler struct {
	handlers map[string]fileHandler // registered handlers, keyed by base filename

	watcher *fsnotify.Watcher // set by Watch; nil until then

	appConfig *config.ApplicationConfig
}
// TODO: This should be a singleton eventually so other parts of the code can register config file handlers,
// then we can export it to other packages
// newConfigFileHandler builds the handler table for the built-in dynamic
// config files and runs each handler once so startup state reflects any
// files already present on disk.
func newConfigFileHandler(appConfig *config.ApplicationConfig) configFileHandler {
	c := configFileHandler{
		handlers:  map[string]fileHandler{},
		appConfig: appConfig,
	}

	register := func(filename string, handler fileHandler) {
		if err := c.Register(filename, handler, true); err != nil {
			xlog.Error("unable to register config file handler", "error", err, "file", filename)
		}
	}

	register("api_keys.json", readApiKeysJson(*appConfig))
	register("external_backends.json", readExternalBackendsJson(*appConfig))
	register("runtime_settings.json", readRuntimeSettingsJson(*appConfig))

	// Note: agent_tasks.json and agent_jobs.json are handled by AgentJobService directly
	// The service watches and reloads these files internally

	return c
}
// Register associates handler with filename; when runNow is set the
// handler is invoked immediately with the file's current content.
// Registering the same filename twice is an error.
func (c *configFileHandler) Register(filename string, handler fileHandler, runNow bool) error {
	if _, alreadyRegistered := c.handlers[filename]; alreadyRegistered {
		return fmt.Errorf("handler already registered for file %s", filename)
	}

	c.handlers[filename] = handler
	if runNow {
		c.callHandler(filename, handler)
	}
	return nil
}
// callHandler reads filename from the dynamic config directory and feeds
// its content to handler. The handler runs even when the read failed: it
// receives nil content, which handlers treat as "reset to startup config".
func (c *configFileHandler) callHandler(filename string, handler fileHandler) {
	target := filepath.Join(c.appConfig.DynamicConfigsDir, filepath.Clean(filename))
	xlog.Debug("reading file for dynamic config update", "filename", target)

	content, err := os.ReadFile(target)
	if err != nil && !os.IsNotExist(err) {
		xlog.Error("could not read file", "error", err, "filename", target)
	}

	if err = handler(content, c.appConfig); err != nil {
		xlog.Error("WatchConfigDirectory goroutine failed to update options", "error", err)
	}
}
// Watch starts observing the dynamic config directory for changes,
// dispatching registered handlers on write/create/remove events. When a
// poll interval is configured, handlers are additionally invoked on a
// timer (for filesystems where fsnotify is unreliable).
func (c *configFileHandler) Watch() error {
	configWatcher, err := fsnotify.NewWatcher()
	if err != nil {
		return err
	}
	c.watcher = configWatcher

	if c.appConfig.DynamicConfigsDirPollInterval > 0 {
		xlog.Debug("Poll interval set, falling back to polling for configuration changes")
		ticker := time.NewTicker(c.appConfig.DynamicConfigsDirPollInterval)
		// NOTE(review): this polling goroutine (and its ticker) has no stop
		// signal and runs for the process lifetime — wire it to a context if
		// graceful shutdown is introduced.
		go func() {
			for {
				<-ticker.C
				for file, handler := range c.handlers {
					xlog.Debug("polling config file", "file", file)
					c.callHandler(file, handler)
				}
			}
		}()
	}

	// Start listening for events.
	go func() {
		for {
			select {
			case event, ok := <-c.watcher.Events:
				if !ok {
					return
				}
				if event.Has(fsnotify.Write | fsnotify.Create | fsnotify.Remove) {
					handler, ok := c.handlers[path.Base(event.Name)]
					if !ok {
						continue
					}
					c.callHandler(filepath.Base(event.Name), handler)
				}
			case err, ok := <-c.watcher.Errors:
				if !ok {
					// Channel closed (watcher shut down): previously this
					// branch logged a nil error before checking ok.
					return
				}
				xlog.Error("config watcher error received", "error", err)
			}
		}
	}()

	// Add a path.
	err = c.watcher.Add(c.appConfig.DynamicConfigsDir)
	if err != nil {
		return fmt.Errorf("unable to create a watcher on the configuration directory: %+v", err)
	}

	return nil
}
// TODO: When we institute graceful shutdown, this should be called
// Stop closes the filesystem watcher. It is a no-op when Watch was never
// called (previously this dereferenced a nil watcher and panicked).
func (c *configFileHandler) Stop() error {
	if c.watcher == nil {
		return nil
	}
	return c.watcher.Close()
}
// readApiKeysJson returns a handler that merges API keys from the dynamic
// api_keys.json file with the keys configured at startup. Empty/missing
// file content resets the key list back to the startup set.
func readApiKeysJson(startupAppConfig config.ApplicationConfig) fileHandler {
	return func(fileContent []byte, appConfig *config.ApplicationConfig) error {
		xlog.Debug("processing api keys runtime update", "numKeys", len(startupAppConfig.ApiKeys))

		if len(fileContent) == 0 {
			xlog.Debug("no API keys discovered from dynamic config file")
			appConfig.ApiKeys = startupAppConfig.ApiKeys
			xlog.Debug("total api keys after processing", "numKeys", len(appConfig.ApiKeys))
			return nil
		}

		// Parse JSON content from the file
		var fileKeys []string
		if err := json.Unmarshal(fileContent, &fileKeys); err != nil {
			return err
		}
		xlog.Debug("discovered API keys from api keys dynamic config file", "numKeys", len(fileKeys))

		appConfig.ApiKeys = append(startupAppConfig.ApiKeys, fileKeys...)
		xlog.Debug("total api keys after processing", "numKeys", len(appConfig.ApiKeys))
		return nil
	}
}
// readExternalBackendsJson returns a handler that merges external GRPC
// backend definitions from external_backends.json on top of the backends
// configured at startup. Empty/missing content resets to the startup set;
// on a parse error the live config is left untouched.
func readExternalBackendsJson(startupAppConfig config.ApplicationConfig) fileHandler {
	return func(fileContent []byte, appConfig *config.ApplicationConfig) error {
		xlog.Debug("processing external_backends.json")

		if len(fileContent) > 0 {
			// Parse JSON content from the file
			var fileBackends map[string]string
			if err := json.Unmarshal(fileContent, &fileBackends); err != nil {
				return err
			}

			appConfig.ExternalGRPCBackends = startupAppConfig.ExternalGRPCBackends
			if err := mergo.Merge(&appConfig.ExternalGRPCBackends, &fileBackends); err != nil {
				return err
			}
		} else {
			appConfig.ExternalGRPCBackends = startupAppConfig.ExternalGRPCBackends
		}

		xlog.Debug("external backends loaded from external_backends.json")
		return nil
	}
}
func readRuntimeSettingsJson(startupAppConfig config.ApplicationConfig) fileHandler {
handler := func(fileContent []byte, appConfig *config.ApplicationConfig) error {
xlog.Debug("processing runtime_settings.json")
// Determine if settings came from env vars by comparing with startup config
// startupAppConfig contains the original values set from env vars at startup.
// If current values match startup values, they came from env vars (or defaults).
// We apply file settings only if current values match startup values (meaning not from env vars).
envWatchdogIdle := appConfig.WatchDogIdle == startupAppConfig.WatchDogIdle
envWatchdogBusy := appConfig.WatchDogBusy == startupAppConfig.WatchDogBusy
envWatchdogIdleTimeout := appConfig.WatchDogIdleTimeout == startupAppConfig.WatchDogIdleTimeout
envWatchdogBusyTimeout := appConfig.WatchDogBusyTimeout == startupAppConfig.WatchDogBusyTimeout
envSingleBackend := appConfig.SingleBackend == startupAppConfig.SingleBackend
envMaxActiveBackends := appConfig.MaxActiveBackends == startupAppConfig.MaxActiveBackends
envParallelRequests := appConfig.ParallelBackendRequests == startupAppConfig.ParallelBackendRequests
envMemoryReclaimerEnabled := appConfig.MemoryReclaimerEnabled == startupAppConfig.MemoryReclaimerEnabled
envMemoryReclaimerThreshold := appConfig.MemoryReclaimerThreshold == startupAppConfig.MemoryReclaimerThreshold
envThreads := appConfig.Threads == startupAppConfig.Threads
envContextSize := appConfig.ContextSize == startupAppConfig.ContextSize
envF16 := appConfig.F16 == startupAppConfig.F16
envDebug := appConfig.Debug == startupAppConfig.Debug
envCORS := appConfig.CORS == startupAppConfig.CORS
envCSRF := appConfig.CSRF == startupAppConfig.CSRF
envCORSAllowOrigins := appConfig.CORSAllowOrigins == startupAppConfig.CORSAllowOrigins
envP2PToken := appConfig.P2PToken == startupAppConfig.P2PToken
envP2PNetworkID := appConfig.P2PNetworkID == startupAppConfig.P2PNetworkID
envFederated := appConfig.Federated == startupAppConfig.Federated
envAutoloadGalleries := appConfig.AutoloadGalleries == startupAppConfig.AutoloadGalleries
envAutoloadBackendGalleries := appConfig.AutoloadBackendGalleries == startupAppConfig.AutoloadBackendGalleries
envAgentJobRetentionDays := appConfig.AgentJobRetentionDays == startupAppConfig.AgentJobRetentionDays
envForceEvictionWhenBusy := appConfig.ForceEvictionWhenBusy == startupAppConfig.ForceEvictionWhenBusy
envLRUEvictionMaxRetries := appConfig.LRUEvictionMaxRetries == startupAppConfig.LRUEvictionMaxRetries
envLRUEvictionRetryInterval := appConfig.LRUEvictionRetryInterval == startupAppConfig.LRUEvictionRetryInterval
if len(fileContent) > 0 {
var settings config.RuntimeSettings
err := json.Unmarshal(fileContent, &settings)
if err != nil {
return err
}
// Apply file settings only if they don't match startup values (i.e., not from env vars)
if settings.WatchdogIdleEnabled != nil && !envWatchdogIdle {
appConfig.WatchDogIdle = *settings.WatchdogIdleEnabled
if appConfig.WatchDogIdle {
appConfig.WatchDog = true
}
}
if settings.WatchdogBusyEnabled != nil && !envWatchdogBusy {
appConfig.WatchDogBusy = *settings.WatchdogBusyEnabled
if appConfig.WatchDogBusy {
appConfig.WatchDog = true
}
}
if settings.WatchdogIdleTimeout != nil && !envWatchdogIdleTimeout {
dur, err := time.ParseDuration(*settings.WatchdogIdleTimeout)
if err == nil {
appConfig.WatchDogIdleTimeout = dur
} else {
xlog.Warn("invalid watchdog idle timeout in runtime_settings.json", "error", err, "timeout", *settings.WatchdogIdleTimeout)
}
}
if settings.WatchdogBusyTimeout != nil && !envWatchdogBusyTimeout {
dur, err := time.ParseDuration(*settings.WatchdogBusyTimeout)
if err == nil {
appConfig.WatchDogBusyTimeout = dur
} else {
xlog.Warn("invalid watchdog busy timeout in runtime_settings.json", "error", err, "timeout", *settings.WatchdogBusyTimeout)
}
}
// Handle MaxActiveBackends (new) and SingleBackend (deprecated)
if settings.MaxActiveBackends != nil && !envMaxActiveBackends {
appConfig.MaxActiveBackends = *settings.MaxActiveBackends
// For backward compatibility, also set SingleBackend if MaxActiveBackends == 1
appConfig.SingleBackend = (*settings.MaxActiveBackends == 1)
} else if settings.SingleBackend != nil && !envSingleBackend {
// Legacy: SingleBackend maps to MaxActiveBackends = 1
appConfig.SingleBackend = *settings.SingleBackend
if *settings.SingleBackend {
appConfig.MaxActiveBackends = 1
} else {
appConfig.MaxActiveBackends = 0
}
}
if settings.ParallelBackendRequests != nil && !envParallelRequests {
appConfig.ParallelBackendRequests = *settings.ParallelBackendRequests
}
if settings.MemoryReclaimerEnabled != nil && !envMemoryReclaimerEnabled {
appConfig.MemoryReclaimerEnabled = *settings.MemoryReclaimerEnabled
if appConfig.MemoryReclaimerEnabled {
appConfig.WatchDog = true // Memory reclaimer requires watchdog
}
}
if settings.MemoryReclaimerThreshold != nil && !envMemoryReclaimerThreshold {
appConfig.MemoryReclaimerThreshold = *settings.MemoryReclaimerThreshold
}
if settings.ForceEvictionWhenBusy != nil && !envForceEvictionWhenBusy {
appConfig.ForceEvictionWhenBusy = *settings.ForceEvictionWhenBusy
}
if settings.LRUEvictionMaxRetries != nil && !envLRUEvictionMaxRetries {
appConfig.LRUEvictionMaxRetries = *settings.LRUEvictionMaxRetries
}
if settings.LRUEvictionRetryInterval != nil && !envLRUEvictionRetryInterval {
dur, err := time.ParseDuration(*settings.LRUEvictionRetryInterval)
if err == nil {
appConfig.LRUEvictionRetryInterval = dur
} else {
xlog.Warn("invalid LRU eviction retry interval in runtime_settings.json", "error", err, "interval", *settings.LRUEvictionRetryInterval)
}
}
if settings.Threads != nil && !envThreads {
appConfig.Threads = *settings.Threads
}
if settings.ContextSize != nil && !envContextSize {
appConfig.ContextSize = *settings.ContextSize
}
if settings.F16 != nil && !envF16 {
appConfig.F16 = *settings.F16
}
if settings.Debug != nil && !envDebug {
appConfig.Debug = *settings.Debug
}
if settings.CORS != nil && !envCORS {
appConfig.CORS = *settings.CORS
}
if settings.CSRF != nil && !envCSRF {
appConfig.CSRF = *settings.CSRF
}
if settings.CORSAllowOrigins != nil && !envCORSAllowOrigins {
appConfig.CORSAllowOrigins = *settings.CORSAllowOrigins
}
if settings.P2PToken != nil && !envP2PToken {
appConfig.P2PToken = *settings.P2PToken
}
if settings.P2PNetworkID != nil && !envP2PNetworkID {
appConfig.P2PNetworkID = *settings.P2PNetworkID
}
if settings.Federated != nil && !envFederated {
appConfig.Federated = *settings.Federated
}
if settings.Galleries != nil {
appConfig.Galleries = *settings.Galleries
}
if settings.BackendGalleries != nil {
appConfig.BackendGalleries = *settings.BackendGalleries
}
if settings.AutoloadGalleries != nil && !envAutoloadGalleries {
appConfig.AutoloadGalleries = *settings.AutoloadGalleries
}
if settings.AutoloadBackendGalleries != nil && !envAutoloadBackendGalleries {
appConfig.AutoloadBackendGalleries = *settings.AutoloadBackendGalleries
}
if settings.ApiKeys != nil {
// API keys from env vars (startup) should be kept, runtime settings keys replace all runtime keys
// If runtime_settings.json specifies ApiKeys (even if empty), it replaces all runtime keys
// Start with env keys, then add runtime_settings.json keys (which may be empty to clear them)
envKeys := startupAppConfig.ApiKeys
runtimeKeys := *settings.ApiKeys
// Replace all runtime keys with what's in runtime_settings.json
appConfig.ApiKeys = append(envKeys, runtimeKeys...)
}
if settings.AgentJobRetentionDays != nil && !envAgentJobRetentionDays {
appConfig.AgentJobRetentionDays = *settings.AgentJobRetentionDays
}
// If watchdog is enabled via file but not via env, ensure WatchDog flag is set
if !envWatchdogIdle && !envWatchdogBusy {
if settings.WatchdogEnabled != nil && *settings.WatchdogEnabled {
appConfig.WatchDog = true
}
}
}
xlog.Debug("runtime settings loaded from runtime_settings.json")
return nil
}
return handler
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/application/watchdog.go | core/application/watchdog.go | package application
import (
"time"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/xlog"
)
// StopWatchdog signals the watchdog shutdown-handler goroutine (if any) to
// stop and clears the stop channel so a later StartWatchdog can create a
// fresh one. It is safe to call when no watchdog is running.
func (a *Application) StopWatchdog() error {
	// Guard watchdogStop with the same mutex used by StartWatchdog and
	// RestartWatchdog; without it, concurrent stop/start racing on the
	// channel field could double-close or leak a channel.
	a.watchdogMutex.Lock()
	defer a.watchdogMutex.Unlock()
	if a.watchdogStop != nil {
		close(a.watchdogStop)
		a.watchdogStop = nil
	}
	return nil
}
// startWatchdog starts the watchdog with current ApplicationConfig settings.
// This is an internal method that assumes the caller holds the watchdogMutex.
//
// A watchdog instance is created when any of the following is enabled:
// busy/idle checks (WatchDog), an LRU limit on active backends, or the
// memory reclaimer; otherwise the watchdog stays disabled.
func (a *Application) startWatchdog() error {
	appConfig := a.ApplicationConfig()

	// Get effective max active backends (considers both MaxActiveBackends and deprecated SingleBackend)
	lruLimit := appConfig.GetEffectiveMaxActiveBackends()

	// Create watchdog if enabled OR if LRU limit is set OR if memory reclaimer is enabled
	// LRU eviction requires watchdog infrastructure even without busy/idle checks
	if appConfig.WatchDog || lruLimit > 0 || appConfig.MemoryReclaimerEnabled {
		wd := model.NewWatchDog(
			model.WithProcessManager(a.modelLoader),
			model.WithBusyTimeout(appConfig.WatchDogBusyTimeout),
			model.WithIdleTimeout(appConfig.WatchDogIdleTimeout),
			model.WithWatchdogInterval(appConfig.WatchDogInterval),
			model.WithBusyCheck(appConfig.WatchDogBusy),
			model.WithIdleCheck(appConfig.WatchDogIdle),
			model.WithLRULimit(lruLimit),
			model.WithMemoryReclaimer(appConfig.MemoryReclaimerEnabled, appConfig.MemoryReclaimerThreshold),
			model.WithForceEvictionWhenBusy(appConfig.ForceEvictionWhenBusy),
		)
		a.modelLoader.SetWatchDog(wd)

		// Keep LRU eviction retry settings in sync on (re)start, mirroring
		// initializeWatchdog in startup.go. Without this call, runtime changes
		// to LRUEvictionMaxRetries/LRUEvictionRetryInterval (applied by the
		// runtime-settings handler) would never reach the ModelLoader after a
		// watchdog restart.
		a.modelLoader.SetLRUEvictionRetrySettings(
			appConfig.LRUEvictionMaxRetries,
			appConfig.LRUEvictionRetryInterval,
		)

		// Create new stop channel
		a.watchdogStop = make(chan bool, 1)

		// Start watchdog goroutine if any periodic checks are enabled
		// LRU eviction doesn't need the Run() loop - it's triggered on model load
		// But memory reclaimer needs the Run() loop for periodic checking
		if appConfig.WatchDogBusy || appConfig.WatchDogIdle || appConfig.MemoryReclaimerEnabled {
			go wd.Run()
		}

		// Setup shutdown handler: stops the watchdog either on an explicit
		// stop signal or when the application context is canceled.
		go func() {
			select {
			case <-a.watchdogStop:
				xlog.Debug("Watchdog stop signal received")
				wd.Shutdown()
			case <-appConfig.Context.Done():
				xlog.Debug("Context canceled, shutting down watchdog")
				wd.Shutdown()
			}
		}()

		xlog.Info("Watchdog started with new settings", "lruLimit", lruLimit, "busyCheck", appConfig.WatchDogBusy, "idleCheck", appConfig.WatchDogIdle, "memoryReclaimer", appConfig.MemoryReclaimerEnabled, "memoryThreshold", appConfig.MemoryReclaimerThreshold, "interval", appConfig.WatchDogInterval)
	} else {
		xlog.Info("Watchdog disabled")
	}

	return nil
}
// StartWatchdog acquires the watchdog mutex and (re)creates the watchdog
// based on the current ApplicationConfig. It is the exported, thread-safe
// entry point for the unexported startWatchdog.
func (a *Application) StartWatchdog() error {
	a.watchdogMutex.Lock()
	defer a.watchdogMutex.Unlock()
	return a.startWatchdog()
}
// RestartWatchdog restarts the watchdog with current ApplicationConfig settings.
// It signals the old shutdown-handler goroutine, shuts down the running
// watchdog instance (if any), then starts a fresh one — all under the mutex.
func (a *Application) RestartWatchdog() error {
	a.watchdogMutex.Lock()
	defer a.watchdogMutex.Unlock()

	// Signal the shutdown-handler goroutine installed by startWatchdog; on
	// receiving the close it will call Shutdown on the old watchdog.
	if a.watchdogStop != nil {
		close(a.watchdogStop)
		a.watchdogStop = nil
	}

	// Also shut down the current watchdog instance directly.
	// NOTE(review): the goroutine signaled above may call Shutdown too, so
	// this relies on Shutdown being safe to invoke twice — confirm in pkg/model.
	currentWD := a.modelLoader.GetWatchDog()
	if currentWD != nil {
		currentWD.Shutdown()
		// Wait a bit for shutdown to complete
		time.Sleep(100 * time.Millisecond)
	}

	// Start watchdog with new settings
	return a.startWatchdog()
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/application/startup.go | core/application/startup.go | package application
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"time"
"github.com/mudler/LocalAI/core/backend"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/core/services"
coreStartup "github.com/mudler/LocalAI/core/startup"
"github.com/mudler/LocalAI/internal"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/xsysinfo"
"github.com/mudler/xlog"
)
// New constructs and fully initializes a LocalAI Application: it applies the
// given options, starts the core services, prepares the required directories,
// installs models and external backends, loads model configurations, applies
// persisted runtime settings, starts the watchdog and the dynamic-config
// watcher. It returns the ready Application or the first fatal error.
func New(opts ...config.AppOption) (*Application, error) {
	options := config.NewApplicationConfig(opts...)

	// Store a copy of the startup config (from env vars, before file loading)
	// This is used to determine if settings came from env vars vs file
	startupConfigCopy := *options

	application := newApplication(options)
	application.startupConfig = &startupConfigCopy

	xlog.Info("Starting LocalAI", "threads", options.Threads, "modelsPath", options.SystemState.Model.ModelsPath)
	xlog.Info("LocalAI version", "version", internal.PrintableVersion())

	if err := application.start(); err != nil {
		return nil, err
	}

	caps, err := xsysinfo.CPUCapabilities()
	if err == nil {
		xlog.Debug("CPU capabilities", "capabilities", caps)
	}
	gpus, err := xsysinfo.GPUs()
	if err == nil {
		xlog.Debug("GPU count", "count", len(gpus))
		for _, gpu := range gpus {
			xlog.Debug("GPU", "gpu", gpu.String())
		}
	}

	// Make sure directories exists
	if options.SystemState.Model.ModelsPath == "" {
		return nil, fmt.Errorf("models path cannot be empty")
	}
	err = os.MkdirAll(options.SystemState.Model.ModelsPath, 0750)
	if err != nil {
		return nil, fmt.Errorf("unable to create ModelPath: %w", err)
	}
	if options.GeneratedContentDir != "" {
		err := os.MkdirAll(options.GeneratedContentDir, 0750)
		if err != nil {
			// Fixed misleading error text: this creates GeneratedContentDir,
			// not "ImageDir".
			return nil, fmt.Errorf("unable to create GeneratedContentDir: %w", err)
		}
	}
	if options.UploadDir != "" {
		err := os.MkdirAll(options.UploadDir, 0750)
		if err != nil {
			return nil, fmt.Errorf("unable to create UploadDir: %w", err)
		}
	}

	// Model/backend installation failures are logged but non-fatal.
	if err := coreStartup.InstallModels(options.Context, application.GalleryService(), options.Galleries, options.BackendGalleries, options.SystemState, application.ModelLoader(), options.EnforcePredownloadScans, options.AutoloadBackendGalleries, nil, options.ModelsURL...); err != nil {
		xlog.Error("error installing models", "error", err)
	}

	// Loop variable renamed so it does not shadow the imported "backend" package.
	for _, externalBackend := range options.ExternalBackends {
		if err := services.InstallExternalBackend(options.Context, options.BackendGalleries, options.SystemState, application.ModelLoader(), nil, externalBackend, "", ""); err != nil {
			xlog.Error("error installing external backend", "error", err)
		}
	}

	configLoaderOpts := options.ToConfigLoaderOptions()

	if err := application.ModelConfigLoader().LoadModelConfigsFromPath(options.SystemState.Model.ModelsPath, configLoaderOpts...); err != nil {
		xlog.Error("error loading config files", "error", err)
	}

	if err := gallery.RegisterBackends(options.SystemState, application.ModelLoader()); err != nil {
		xlog.Error("error registering external backends", "error", err)
	}

	if options.ConfigFile != "" {
		if err := application.ModelConfigLoader().LoadMultipleModelConfigsSingleFile(options.ConfigFile, configLoaderOpts...); err != nil {
			xlog.Error("error loading config file", "error", err)
		}
	}

	if err := application.ModelConfigLoader().Preload(options.SystemState.Model.ModelsPath); err != nil {
		xlog.Error("error downloading models", "error", err)
	}

	if options.PreloadJSONModels != "" {
		if err := services.ApplyGalleryFromString(options.SystemState, application.ModelLoader(), options.EnforcePredownloadScans, options.AutoloadBackendGalleries, options.Galleries, options.BackendGalleries, options.PreloadJSONModels); err != nil {
			return nil, err
		}
	}

	if options.PreloadModelsFromPath != "" {
		if err := services.ApplyGalleryFromFile(options.SystemState, application.ModelLoader(), options.EnforcePredownloadScans, options.AutoloadBackendGalleries, options.Galleries, options.BackendGalleries, options.PreloadModelsFromPath); err != nil {
			return nil, err
		}
	}

	if options.Debug {
		for _, v := range application.ModelConfigLoader().GetAllModelsConfigs() {
			xlog.Debug("Model", "name", v.Name, "config", v)
		}
	}

	// Load runtime settings from file if DynamicConfigsDir is set
	// This applies file settings with env var precedence (env vars take priority)
	// Note: startupConfigCopy was already created above, so it has the original env var values
	if options.DynamicConfigsDir != "" {
		loadRuntimeSettingsFromFile(options)
	}

	// turn off any process that was started by GRPC if the context is canceled
	go func() {
		<-options.Context.Done()
		xlog.Debug("Context canceled, shutting down")
		err := application.ModelLoader().StopAllGRPC()
		if err != nil {
			xlog.Error("error while stopping all grpc backends", "error", err)
		}
	}()

	// Initialize watchdog with current settings (after loading from file)
	initializeWatchdog(application, options)

	if options.LoadToMemory != nil && !options.SingleBackend {
		for _, m := range options.LoadToMemory {
			cfg, err := application.ModelConfigLoader().LoadModelConfigFileByNameDefaultOptions(m, options)
			if err != nil {
				return nil, err
			}

			xlog.Debug("Auto loading model into memory from file", "model", m, "file", cfg.Model)

			o := backend.ModelOptions(*cfg, options)

			if _, backendErr := application.ModelLoader().Load(o...); backendErr != nil {
				// BUG FIX: this previously returned `err`, which is always nil
				// at this point (checked right after LoadModelConfigFileByName...),
				// so a failed model load made New return (nil, nil) and the
				// failure was silently swallowed.
				return nil, backendErr
			}
		}
	}

	// Watch the configuration directory
	startWatcher(options)

	xlog.Info("core/startup process completed!")

	return application, nil
}
// startWatcher wires up the dynamic-configuration file watcher. It is a
// no-op when DynamicConfigsDir is unset, and creates the directory on
// demand when it does not exist yet. Watcher failures are logged, never fatal.
func startWatcher(options *config.ApplicationConfig) {
	dir := options.DynamicConfigsDir
	if dir == "" {
		// Nothing to watch.
		return
	}

	_, statErr := os.Stat(dir)
	switch {
	case statErr == nil:
		// Directory already present, nothing to prepare.
	case os.IsNotExist(statErr):
		// The directory was explicitly requested: create it before watching.
		if mkErr := os.MkdirAll(dir, 0700); mkErr != nil {
			xlog.Error("failed creating DynamicConfigsDir", "error", mkErr)
		}
	default:
		// Unexpected stat failure: log it and skip starting the watcher.
		xlog.Error("failed to read DynamicConfigsDir, watcher will not be started", "error", statErr)
		return
	}

	handler := newConfigFileHandler(options)
	if err := handler.Watch(); err != nil {
		xlog.Error("failed creating watcher", "error", err)
	}
}
// loadRuntimeSettingsFromFile applies settings from
// <DynamicConfigsDir>/runtime_settings.json on top of the already-built
// ApplicationConfig.
//
// Precedence: by the time this runs, options already carries values applied
// from env vars/CLI via AppOptions (in NewApplicationConfig). A file value is
// therefore only applied when the current value is still at its zero default
// (false for bools, 0 for numbers/durations). This heuristic cannot
// distinguish "env var explicitly set to false/0" from "not set at all" —
// an accepted limitation. The config file watcher handles later runtime
// changes with a more precise comparison against the stored startup config.
//
// A missing file is not an error; unreadable or malformed files are logged
// and ignored.
func loadRuntimeSettingsFromFile(options *config.ApplicationConfig) {
	settingsFile := filepath.Join(options.DynamicConfigsDir, "runtime_settings.json")
	fileContent, err := os.ReadFile(settingsFile)
	if err != nil {
		if os.IsNotExist(err) {
			// Missing file: defaults/env vars stay in effect.
			xlog.Debug("runtime_settings.json not found, using defaults")
			return
		}
		xlog.Warn("failed to read runtime_settings.json", "error", err)
		return
	}

	var settings config.RuntimeSettings
	if err := json.Unmarshal(fileContent, &settings); err != nil {
		xlog.Warn("failed to parse runtime_settings.json", "error", err)
		return
	}

	// Watchdog toggles: only applied when still at the default (false);
	// enabling either check also enables the master WatchDog switch.
	if settings.WatchdogIdleEnabled != nil {
		if !options.WatchDogIdle {
			options.WatchDogIdle = *settings.WatchdogIdleEnabled
			if options.WatchDogIdle {
				options.WatchDog = true
			}
		}
	}
	if settings.WatchdogBusyEnabled != nil {
		if !options.WatchDogBusy {
			options.WatchDogBusy = *settings.WatchdogBusyEnabled
			if options.WatchDogBusy {
				options.WatchDog = true
			}
		}
	}
	// Timeouts/intervals: only applied when still at the default (0);
	// invalid duration strings are logged and ignored.
	if settings.WatchdogIdleTimeout != nil {
		if options.WatchDogIdleTimeout == 0 {
			dur, err := time.ParseDuration(*settings.WatchdogIdleTimeout)
			if err == nil {
				options.WatchDogIdleTimeout = dur
			} else {
				xlog.Warn("invalid watchdog idle timeout in runtime_settings.json", "error", err, "timeout", *settings.WatchdogIdleTimeout)
			}
		}
	}
	if settings.WatchdogBusyTimeout != nil {
		if options.WatchDogBusyTimeout == 0 {
			dur, err := time.ParseDuration(*settings.WatchdogBusyTimeout)
			if err == nil {
				options.WatchDogBusyTimeout = dur
			} else {
				xlog.Warn("invalid watchdog busy timeout in runtime_settings.json", "error", err, "timeout", *settings.WatchdogBusyTimeout)
			}
		}
	}
	if settings.WatchdogInterval != nil {
		if options.WatchDogInterval == 0 {
			dur, err := time.ParseDuration(*settings.WatchdogInterval)
			if err == nil {
				options.WatchDogInterval = dur
			} else {
				// Fall back to the package default on a malformed interval.
				xlog.Warn("invalid watchdog interval in runtime_settings.json", "error", err, "interval", *settings.WatchdogInterval)
				options.WatchDogInterval = model.DefaultWatchdogInterval
			}
		}
	}
	// Handle MaxActiveBackends (new) and SingleBackend (deprecated)
	if settings.MaxActiveBackends != nil {
		// Only apply if current value is default (0), suggesting it wasn't set from env var
		if options.MaxActiveBackends == 0 {
			options.MaxActiveBackends = *settings.MaxActiveBackends
			// For backward compatibility, also set SingleBackend if MaxActiveBackends == 1
			options.SingleBackend = (*settings.MaxActiveBackends == 1)
		}
	} else if settings.SingleBackend != nil {
		// Legacy: SingleBackend maps to MaxActiveBackends = 1
		if !options.SingleBackend {
			options.SingleBackend = *settings.SingleBackend
			if *settings.SingleBackend {
				options.MaxActiveBackends = 1
			}
		}
	}
	if settings.ParallelBackendRequests != nil {
		if !options.ParallelBackendRequests {
			options.ParallelBackendRequests = *settings.ParallelBackendRequests
		}
	}
	if settings.MemoryReclaimerEnabled != nil {
		// Only apply if current value is default (false), suggesting it wasn't set from env var
		if !options.MemoryReclaimerEnabled {
			options.MemoryReclaimerEnabled = *settings.MemoryReclaimerEnabled
			if options.MemoryReclaimerEnabled {
				options.WatchDog = true // Memory reclaimer requires watchdog
			}
		}
	}
	if settings.MemoryReclaimerThreshold != nil {
		// Only apply if current value is default (0), suggesting it wasn't set from env var
		if options.MemoryReclaimerThreshold == 0 {
			options.MemoryReclaimerThreshold = *settings.MemoryReclaimerThreshold
		}
	}
	if settings.AgentJobRetentionDays != nil {
		// Only apply if current value is default (0), suggesting it wasn't set from env var
		if options.AgentJobRetentionDays == 0 {
			options.AgentJobRetentionDays = *settings.AgentJobRetentionDays
		}
	}
	// Master watchdog switch from the file, honored only when neither idle
	// nor busy checks were enabled above (or via env).
	if !options.WatchDogIdle && !options.WatchDogBusy {
		if settings.WatchdogEnabled != nil && *settings.WatchdogEnabled {
			options.WatchDog = true
		}
	}

	xlog.Debug("Runtime settings loaded from runtime_settings.json")
}
// initializeWatchdog initializes the watchdog with current ApplicationConfig settings.
// It is used once at startup (after runtime settings are loaded) and mirrors
// Application.startWatchdog, additionally seeding the ModelLoader with the
// LRU eviction retry settings.
func initializeWatchdog(application *Application, options *config.ApplicationConfig) {
	// Get effective max active backends (considers both MaxActiveBackends and deprecated SingleBackend)
	lruLimit := options.GetEffectiveMaxActiveBackends()

	// Create watchdog if enabled OR if LRU limit is set OR if memory reclaimer is enabled
	if options.WatchDog || lruLimit > 0 || options.MemoryReclaimerEnabled {
		wd := model.NewWatchDog(
			model.WithProcessManager(application.ModelLoader()),
			model.WithBusyTimeout(options.WatchDogBusyTimeout),
			model.WithIdleTimeout(options.WatchDogIdleTimeout),
			model.WithWatchdogInterval(options.WatchDogInterval),
			model.WithBusyCheck(options.WatchDogBusy),
			model.WithIdleCheck(options.WatchDogIdle),
			model.WithLRULimit(lruLimit),
			model.WithMemoryReclaimer(options.MemoryReclaimerEnabled, options.MemoryReclaimerThreshold),
			model.WithForceEvictionWhenBusy(options.ForceEvictionWhenBusy),
		)
		application.ModelLoader().SetWatchDog(wd)

		// Initialize ModelLoader LRU eviction retry settings
		application.ModelLoader().SetLRUEvictionRetrySettings(
			options.LRUEvictionMaxRetries,
			options.LRUEvictionRetryInterval,
		)

		// Start watchdog goroutine if any periodic checks are enabled
		// LRU eviction doesn't need the Run() loop - it's triggered on model load
		// But memory reclaimer needs the Run() loop for periodic checking
		if options.WatchDogBusy || options.WatchDogIdle || options.MemoryReclaimerEnabled {
			go wd.Run()
		}

		// Shut the watchdog down when the application context is canceled.
		go func() {
			<-options.Context.Done()
			xlog.Debug("Context canceled, shutting down")
			wd.Shutdown()
		}()
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/application/application.go | core/application/application.go | package application
import (
"context"
"sync"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/services"
"github.com/mudler/LocalAI/core/templates"
"github.com/mudler/LocalAI/pkg/model"
)
// Application bundles the core LocalAI runtime components — configuration,
// model loading, template evaluation and long-running services — together
// with the synchronization primitives that guard their lifecycles.
type Application struct {
	backendLoader      *config.ModelConfigLoader // per-model YAML config loader
	modelLoader        *model.ModelLoader        // backend process / model loader
	applicationConfig  *config.ApplicationConfig // live application configuration
	startupConfig      *config.ApplicationConfig // Stores original config from env vars (before file loading)
	templatesEvaluator *templates.Evaluator      // prompt template evaluator
	galleryService     *services.GalleryService  // set by start()
	agentJobService    *services.AgentJobService // set by start(); replaced on restart

	// watchdogMutex guards watchdogStop and watchdog start/restart.
	watchdogMutex sync.Mutex
	watchdogStop  chan bool

	// p2pMutex guards the p2p context/cancel pair below.
	p2pMutex  sync.Mutex
	p2pCtx    context.Context
	p2pCancel context.CancelFunc

	// agentJobMutex serializes restarts of agentJobService.
	agentJobMutex sync.Mutex
}
// newApplication wires together the core components of an Application
// (model config loader, model loader and template evaluator) from the
// given application configuration. Services are started later via start().
func newApplication(appConfig *config.ApplicationConfig) *Application {
	modelsPath := appConfig.SystemState.Model.ModelsPath
	app := &Application{
		applicationConfig:  appConfig,
		backendLoader:      config.NewModelConfigLoader(modelsPath),
		modelLoader:        model.NewModelLoader(appConfig.SystemState),
		templatesEvaluator: templates.NewEvaluator(modelsPath),
	}
	return app
}
// ModelConfigLoader returns the loader that manages per-model configurations.
func (a *Application) ModelConfigLoader() *config.ModelConfigLoader {
	return a.backendLoader
}

// ModelLoader returns the loader responsible for backends and models.
func (a *Application) ModelLoader() *model.ModelLoader {
	return a.modelLoader
}

// ApplicationConfig returns the live application configuration.
func (a *Application) ApplicationConfig() *config.ApplicationConfig {
	return a.applicationConfig
}

// TemplatesEvaluator returns the prompt-template evaluator.
func (a *Application) TemplatesEvaluator() *templates.Evaluator {
	return a.templatesEvaluator
}

// GalleryService returns the gallery service created in start().
func (a *Application) GalleryService() *services.GalleryService {
	return a.galleryService
}

// AgentJobService returns the agent job service created in start().
func (a *Application) AgentJobService() *services.AgentJobService {
	return a.agentJobService
}

// StartupConfig returns the original startup configuration (from env vars, before file loading)
func (a *Application) StartupConfig() *config.ApplicationConfig {
	return a.startupConfig
}
// start initializes and launches the long-running services owned by the
// Application: the gallery service and the agent job service. It returns
// the first startup error encountered.
func (a *Application) start() error {
	cfg := a.ApplicationConfig()

	// Gallery service first.
	gs := services.NewGalleryService(cfg, a.ModelLoader())
	if err := gs.Start(cfg.Context, a.ModelConfigLoader(), cfg.SystemState); err != nil {
		return err
	}
	a.galleryService = gs

	// Then the agent job service.
	js := services.NewAgentJobService(
		cfg,
		a.ModelLoader(),
		a.ModelConfigLoader(),
		a.TemplatesEvaluator(),
	)
	if err := js.Start(cfg.Context); err != nil {
		return err
	}
	a.agentJobService = js

	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/application/agent_jobs.go | core/application/agent_jobs.go | package application
import (
"time"
"github.com/mudler/LocalAI/core/services"
"github.com/mudler/xlog"
)
// RestartAgentJobService tears down the current agent job service (if any)
// and brings up a fresh instance configured from the current
// ApplicationConfig. Restarts are serialized by agentJobMutex.
func (a *Application) RestartAgentJobService() error {
	a.agentJobMutex.Lock()
	defer a.agentJobMutex.Unlock()

	// Tear down the previous instance first, if one exists.
	if svc := a.agentJobService; svc != nil {
		if stopErr := svc.Stop(); stopErr != nil {
			xlog.Warn("Error stopping agent job service", "error", stopErr)
		}
		// Give the old service a moment to finish shutting down.
		time.Sleep(200 * time.Millisecond)
	}

	// Build and start a replacement service.
	replacement := services.NewAgentJobService(
		a.ApplicationConfig(),
		a.ModelLoader(),
		a.ModelConfigLoader(),
		a.TemplatesEvaluator(),
	)
	if err := replacement.Start(a.ApplicationConfig().Context); err != nil {
		xlog.Error("Failed to start agent job service", "error", err)
		return err
	}
	a.agentJobService = replacement

	xlog.Info("Agent job service restarted")
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/token_metrics.go | core/backend/token_metrics.go | package backend
import (
"context"
"fmt"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/grpc/proto"
model "github.com/mudler/LocalAI/pkg/model"
)
// TokenMetrics loads (or reuses) the backend for the given model file and
// queries it for token-level metrics over gRPC.
//
// modelFile selects the backend model; loader/appConfig/modelConfig are used
// to build the loader options. The response comes straight from the backend.
func TokenMetrics(
	modelFile string,
	loader *model.ModelLoader,
	appConfig *config.ApplicationConfig,
	modelConfig config.ModelConfig) (*proto.MetricsResponse, error) {
	opts := ModelOptions(modelConfig, appConfig, model.WithModel(modelFile))
	// Renamed from "model" so the local does not shadow the imported package.
	mdl, err := loader.Load(opts...)
	if err != nil {
		return nil, err
	}

	if mdl == nil {
		// Fixed typo in the original message ("could not loadmodel model").
		return nil, fmt.Errorf("could not load model %q", modelFile)
	}

	return mdl.GetTokenMetrics(context.Background(), &proto.MetricsRequest{})
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/llm.go | core/backend/llm.go | package backend
import (
"context"
"encoding/json"
"regexp"
"slices"
"strings"
"sync"
"unicode/utf8"
"github.com/mudler/xlog"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/core/services"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/pkg/grpc/proto"
model "github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/utils"
)
type LLMResponse struct {
Response string // should this be []byte?
Usage TokenUsage
AudioOutput string
Logprobs *schema.Logprobs // Logprobs from the backend response
}
type TokenUsage struct {
Prompt int
Completion int
TimingPromptProcessing float64
TimingTokenGeneration float64
}
// ModelInference loads the backend for the given model configuration and
// returns a closure that performs the actual prediction when invoked.
//
// The closure streams tokens through tokenCallback when one is provided,
// accumulating partial UTF-8 sequences so callbacks always receive whole
// runes; otherwise it performs a single blocking Predict call. Parameters
// mirror the gRPC PredictOptions: prompt s, chat messages, media attachments,
// tool definitions, logprobs options and logit bias.
func ModelInference(ctx context.Context, s string, messages schema.Messages, images, videos, audios []string, loader *model.ModelLoader, c *config.ModelConfig, cl *config.ModelConfigLoader, o *config.ApplicationConfig, tokenCallback func(string, TokenUsage) bool, tools string, toolChoice string, logprobs *int, topLogprobs *int, logitBias map[string]float64) (func() (LLMResponse, error), error) {
	modelFile := c.Model

	// Check if the modelFile exists, if it doesn't try to load it from the gallery
	if o.AutoloadGalleries { // experimental
		modelNames, err := services.ListModels(cl, loader, nil, services.SKIP_ALWAYS)
		if err != nil {
			return nil, err
		}
		if !slices.Contains(modelNames, c.Name) {
			utils.ResetDownloadTimers()
			// if we failed to load the model, we try to download it
			err := gallery.InstallModelFromGallery(ctx, o.Galleries, o.BackendGalleries, o.SystemState, loader, c.Name, gallery.GalleryModel{}, utils.DisplayDownloadFunction, o.EnforcePredownloadScans, o.AutoloadBackendGalleries)
			if err != nil {
				// Installation failure is logged but non-fatal: the Load below
				// may still succeed (e.g. model already on disk).
				xlog.Error("failed to install model from gallery", "error", err, "model", modelFile)
				//return nil, err
			}
		}
	}

	opts := ModelOptions(*c, o)
	inferenceModel, err := loader.Load(opts...)
	if err != nil {
		return nil, err
	}

	var protoMessages []*proto.Message
	// if we are using the tokenizer template, we need to convert the messages to proto messages
	// unless the prompt has already been tokenized (non-chat endpoints + functions)
	if c.TemplateConfig.UseTokenizerTemplate && len(messages) > 0 {
		protoMessages = messages.ToProto()
	}

	// in GRPC, the backend is supposed to answer to 1 single token if stream is not supported
	fn := func() (LLMResponse, error) {
		opts := gRPCPredictOpts(*c, loader.ModelPath)
		opts.Prompt = s
		opts.Messages = protoMessages
		opts.UseTokenizerTemplate = c.TemplateConfig.UseTokenizerTemplate
		opts.Images = images
		opts.Videos = videos
		opts.Audios = audios
		opts.Tools = tools
		opts.ToolChoice = toolChoice
		if logprobs != nil {
			opts.Logprobs = int32(*logprobs)
		}
		if topLogprobs != nil {
			opts.TopLogprobs = int32(*topLogprobs)
		}
		if len(logitBias) > 0 {
			// Serialize logit_bias map to JSON string for proto
			logitBiasJSON, err := json.Marshal(logitBias)
			if err == nil {
				opts.LogitBias = string(logitBiasJSON)
			}
		}

		tokenUsage := TokenUsage{}

		// check the per-model feature flag for usage, since tokenCallback may have a cost.
		// Defaults to off as for now it is still experimental
		if c.FeatureFlag.Enabled("usage") {
			userTokenCallback := tokenCallback
			if userTokenCallback == nil {
				userTokenCallback = func(token string, usage TokenUsage) bool {
					return true
				}
			}

			promptInfo, pErr := inferenceModel.TokenizeString(ctx, opts)
			if pErr == nil && promptInfo.Length > 0 {
				tokenUsage.Prompt = int(promptInfo.Length)
			}

			tokenCallback = func(token string, usage TokenUsage) bool {
				tokenUsage.Completion++
				return userTokenCallback(token, tokenUsage)
			}
		}

		if tokenCallback != nil {
			if c.TemplateConfig.ReplyPrefix != "" {
				tokenCallback(c.TemplateConfig.ReplyPrefix, tokenUsage)
			}

			ss := ""

			// streamLogprobs keeps the logprobs from the latest chunk that
			// carried them (renamed from "logprobs" to stop shadowing the
			// *int parameter of the same name).
			var streamLogprobs *schema.Logprobs

			var partialRune []byte
			err := inferenceModel.PredictStream(ctx, opts, func(reply *proto.Reply) {
				msg := reply.Message
				partialRune = append(partialRune, msg...)

				tokenUsage.Prompt = int(reply.PromptTokens)
				tokenUsage.Completion = int(reply.Tokens)
				tokenUsage.TimingTokenGeneration = reply.TimingTokenGeneration
				tokenUsage.TimingPromptProcessing = reply.TimingPromptProcessing

				// Parse logprobs from reply if present (collect from last chunk that has them)
				if len(reply.Logprobs) > 0 {
					var parsedLogprobs schema.Logprobs
					if err := json.Unmarshal(reply.Logprobs, &parsedLogprobs); err == nil {
						streamLogprobs = &parsedLogprobs
					}
				}

				// Process complete runes and accumulate them
				var completeRunes []byte
				for len(partialRune) > 0 {
					r, size := utf8.DecodeRune(partialRune)
					if r == utf8.RuneError && !utf8.FullRune(partialRune) {
						// Genuinely incomplete trailing rune: wait for more bytes.
						// BUG FIX: the previous check broke on any RuneError, but
						// DecodeRune also returns RuneError (size 1) for *invalid*
						// bytes, so a single malformed byte stalled the stream
						// forever; invalid bytes are now passed through instead.
						break
					}
					completeRunes = append(completeRunes, partialRune[:size]...)
					partialRune = partialRune[size:]
				}

				// If we have complete runes, send them as a single token
				if len(completeRunes) > 0 {
					tokenCallback(string(completeRunes), tokenUsage)
					ss += string(completeRunes)
				}

				if len(msg) == 0 {
					tokenCallback("", tokenUsage)
				}
			})
			return LLMResponse{
				Response: ss,
				Usage:    tokenUsage,
				Logprobs: streamLogprobs,
			}, err
		} else {
			// TODO: Is the chicken bit the only way to get here? is that acceptable?
			reply, err := inferenceModel.Predict(ctx, opts)
			if err != nil {
				return LLMResponse{}, err
			}
			if tokenUsage.Prompt == 0 {
				tokenUsage.Prompt = int(reply.PromptTokens)
			}
			if tokenUsage.Completion == 0 {
				tokenUsage.Completion = int(reply.Tokens)
			}
			tokenUsage.TimingTokenGeneration = reply.TimingTokenGeneration
			tokenUsage.TimingPromptProcessing = reply.TimingPromptProcessing

			response := string(reply.Message)
			if c.TemplateConfig.ReplyPrefix != "" {
				response = c.TemplateConfig.ReplyPrefix + response
			}

			// Parse logprobs from reply if present (renamed to avoid shadowing
			// the *int "logprobs" parameter).
			var respLogprobs *schema.Logprobs
			if len(reply.Logprobs) > 0 {
				var parsedLogprobs schema.Logprobs
				if err := json.Unmarshal(reply.Logprobs, &parsedLogprobs); err == nil {
					respLogprobs = &parsedLogprobs
				}
			}

			return LLMResponse{
				Response: response,
				Usage:    tokenUsage,
				Logprobs: respLogprobs,
			}, err
		}
	}

	return fn, nil
}
// cutstrings caches compiled regular expressions; it is shared by both the
// Cutstrings and ExtractRegex passes in Finetune and is guarded by mu.
var cutstrings map[string]*regexp.Regexp = make(map[string]*regexp.Regexp)

// mu serializes access to the cutstrings cache across concurrent requests.
var mu sync.Mutex = sync.Mutex{}

// Finetune post-processes a raw model prediction according to the model
// configuration: it optionally echoes the input, strips substrings matching
// the Cutstrings regexes, concatenates the first match of each ExtractRegex
// (falling back to the untouched prediction when nothing matched), and trims
// the configured prefixes and suffixes.
//
// Note: an invalid regex terminates the process via xlog.Fatal, preserving
// the original behavior.
func Finetune(config config.ModelConfig, input, prediction string) string {
	if config.Echo {
		prediction = input + prediction
	}

	// compiledRegex returns the cached compiled form of pattern, compiling
	// and caching it under the lock on first use. This replaces two
	// previously duplicated copies of the same lock/compile/cache logic
	// (one of which re-read the map redundantly after inserting).
	compiledRegex := func(pattern string) *regexp.Regexp {
		mu.Lock()
		defer mu.Unlock()
		reg, ok := cutstrings[pattern]
		if !ok {
			r, err := regexp.Compile(pattern)
			if err != nil {
				xlog.Fatal("failed to compile regex", "error", err)
			}
			cutstrings[pattern] = r
			reg = r
		}
		return reg
	}

	for _, c := range config.Cutstrings {
		prediction = compiledRegex(c).ReplaceAllString(prediction, "")
	}

	// extract results from the response which can be for instance inside XML tags
	var predResult string
	for _, r := range config.ExtractRegex {
		predResult += compiledRegex(r).FindString(prediction)
	}
	if predResult != "" {
		prediction = predResult
	}

	for _, c := range config.TrimSpace {
		prediction = strings.TrimSpace(strings.TrimPrefix(prediction, c))
	}

	for _, c := range config.TrimSuffix {
		prediction = strings.TrimSpace(strings.TrimSuffix(prediction, c))
	}
	return prediction
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/llm_test.go | core/backend/llm_test.go | package backend_test
import (
. "github.com/mudler/LocalAI/core/backend"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/schema"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs for backend.Finetune: each inner Context exercises one
// post-processing stage in isolation (echo, cutstrings removal,
// extract-regex, space trimming, suffix trimming).
var _ = Describe("LLM tests", func() {
	Context("Finetune LLM output", func() {
		var (
			testConfig config.ModelConfig // fixture config, rebuilt before every spec
			input      string             // prompt passed to Finetune
			prediction string             // raw model output to post-process
			result     string             // value returned by Finetune
		)
		BeforeEach(func() {
			// Base fixture enabling every post-processing stage except echo.
			testConfig = config.ModelConfig{
				PredictionOptions: schema.PredictionOptions{
					Echo: false,
				},
				LLMConfig: config.LLMConfig{
					Cutstrings:   []string{`<.*?>`},                  // Example regex for removing XML tags
					ExtractRegex: []string{`<result>(.*?)</result>`}, // Example regex to extract from tags
					TrimSpace:    []string{" ", "\n"},
					TrimSuffix:   []string{".", "!"},
				},
			}
		})
		Context("when echo is enabled", func() {
			BeforeEach(func() {
				testConfig.Echo = true
				input = "Hello"
				prediction = "World"
			})
			It("should prepend input to prediction", func() {
				result = Finetune(testConfig, input, prediction)
				Expect(result).To(Equal("HelloWorld"))
			})
		})
		Context("when echo is disabled", func() {
			BeforeEach(func() {
				testConfig.Echo = false
				input = "Hello"
				prediction = "World"
			})
			It("should not modify the prediction with input", func() {
				result = Finetune(testConfig, input, prediction)
				Expect(result).To(Equal("World"))
			})
		})
		Context("when cutstrings regex is applied", func() {
			BeforeEach(func() {
				input = ""
				prediction = "<div>Hello</div> World"
			})
			It("should remove substrings matching cutstrings regex", func() {
				result = Finetune(testConfig, input, prediction)
				Expect(result).To(Equal("Hello World"))
			})
		})
		Context("when extract regex is applied", func() {
			BeforeEach(func() {
				input = ""
				prediction = "<response><result>42</result></response>"
			})
			It("should extract substrings matching the extract regex", func() {
				result = Finetune(testConfig, input, prediction)
				Expect(result).To(Equal("42"))
			})
		})
		Context("when trimming spaces", func() {
			BeforeEach(func() {
				input = ""
				prediction = " Hello World "
			})
			It("should trim spaces from the prediction", func() {
				result = Finetune(testConfig, input, prediction)
				Expect(result).To(Equal("Hello World"))
			})
		})
		Context("when trimming suffixes", func() {
			BeforeEach(func() {
				input = ""
				prediction = "Hello World."
			})
			It("should trim suffixes from the prediction", func() {
				result = Finetune(testConfig, input, prediction)
				Expect(result).To(Equal("Hello World"))
			})
		})
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/transcript.go | core/backend/transcript.go | package backend
import (
"context"
"fmt"
"time"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/mudler/LocalAI/pkg/model"
)
// ModelTranscription runs speech-to-text over the given audio file using the
// configured backend (defaulting to whisper) and converts the gRPC reply into
// a schema.TranscriptionResult with per-segment timings and token IDs.
func ModelTranscription(audio, language string, translate bool, diarize bool, prompt string, ml *model.ModelLoader, modelConfig config.ModelConfig, appConfig *config.ApplicationConfig) (*schema.TranscriptionResult, error) {
	// Default to the whisper backend when none is configured.
	if modelConfig.Backend == "" {
		modelConfig.Backend = model.WhisperBackend
	}

	opts := ModelOptions(modelConfig, appConfig)

	transcriptionModel, err := ml.Load(opts...)
	if err != nil {
		return nil, err
	}

	if transcriptionModel == nil {
		return nil, fmt.Errorf("could not load transcription model")
	}

	// Guard against a nil Threads pointer: ModelOptions receives the config
	// by value and only sets Threads on its own copy, so modelConfig.Threads
	// may still be unset here; dereferencing it unconditionally panicked.
	threads := uint32(1)
	if modelConfig.Threads != nil {
		threads = uint32(*modelConfig.Threads)
	}

	r, err := transcriptionModel.AudioTranscription(context.Background(), &proto.TranscriptRequest{
		Dst:       audio,
		Language:  language,
		Translate: translate,
		Diarize:   diarize,
		Threads:   threads,
		Prompt:    prompt,
	})
	if err != nil {
		return nil, err
	}
	tr := &schema.TranscriptionResult{
		Text: r.Text,
	}
	for _, s := range r.Segments {
		var tks []int
		for _, t := range s.Tokens {
			tks = append(tks, int(t))
		}
		tr.Segments = append(tr.Segments,
			schema.TranscriptionSegment{
				Text:   s.Text,
				Id:     int(s.Id),
				Start:  time.Duration(s.Start),
				End:    time.Duration(s.End),
				Tokens: tks,
			})
	}
	return tr, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/soundgeneration.go | core/backend/soundgeneration.go | package backend
import (
"context"
"fmt"
"os"
"path/filepath"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/utils"
)
// SoundGeneration loads the configured sound-generation backend and renders
// text (optionally conditioned on a source audio file) into a uniquely-named
// .wav file under the generated-content audio directory. It returns the
// output file path and the raw backend result.
func SoundGeneration(
	text string,
	duration *float32,
	temperature *float32,
	doSample *bool,
	sourceFile *string,
	sourceDivisor *int32,
	loader *model.ModelLoader,
	appConfig *config.ApplicationConfig,
	modelConfig config.ModelConfig,
) (string, *proto.Result, error) {
	opts := ModelOptions(modelConfig, appConfig)
	soundGenModel, err := loader.Load(opts...)
	if err != nil {
		return "", nil, err
	}

	if soundGenModel == nil {
		return "", nil, fmt.Errorf("could not load sound generation model")
	}

	if err := os.MkdirAll(appConfig.GeneratedContentDir, 0750); err != nil {
		return "", nil, fmt.Errorf("failed creating generated content directory: %s", err)
	}
	audioDir := filepath.Join(appConfig.GeneratedContentDir, "audio")
	if err := os.MkdirAll(audioDir, 0750); err != nil {
		return "", nil, fmt.Errorf("failed creating audio directory: %s", err)
	}

	fileName := utils.GenerateUniqueFileName(audioDir, "sound_generation", ".wav")
	filePath := filepath.Join(audioDir, fileName)

	res, err := soundGenModel.SoundGeneration(context.Background(), &proto.SoundGenerationRequest{
		Text:        text,
		Model:       modelConfig.Model,
		Dst:         filePath,
		Sample:      doSample,
		Duration:    duration,
		Temperature: temperature,
		Src:         sourceFile,
		SrcDivisor:  sourceDivisor,
	})
	// Check the transport error BEFORE touching res: on a failed RPC res can
	// be nil, and the previous ordering dereferenced it unconditionally.
	if err != nil {
		return "", nil, err
	}
	// Surface an application-level failure reported by the backend.
	if !res.Success {
		return "", nil, fmt.Errorf("error during sound generation: %s", res.Message)
	}

	return filePath, res, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/tts.go | core/backend/tts.go | package backend
import (
"context"
"fmt"
"os"
"path/filepath"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/utils"
)
// ModelTTS synthesizes speech for text using the configured TTS backend and
// writes the result to a uniquely-named .wav file under the generated-content
// audio directory. It returns the output file path together with the raw
// backend result.
//
// voice is a backend-specific voice identifier (may be empty); language is a
// hint forwarded verbatim to the backend.
func ModelTTS(
	text,
	voice,
	language string,
	loader *model.ModelLoader,
	appConfig *config.ApplicationConfig,
	modelConfig config.ModelConfig,
) (string, *proto.Result, error) {
	opts := ModelOptions(modelConfig, appConfig)
	ttsModel, err := loader.Load(opts...)
	if err != nil {
		return "", nil, err
	}

	if ttsModel == nil {
		return "", nil, fmt.Errorf("could not load tts model %q", modelConfig.Model)
	}

	audioDir := filepath.Join(appConfig.GeneratedContentDir, "audio")
	if err := os.MkdirAll(audioDir, 0750); err != nil {
		return "", nil, fmt.Errorf("failed creating audio directory: %s", err)
	}

	fileName := utils.GenerateUniqueFileName(audioDir, "tts", ".wav")
	filePath := filepath.Join(audioDir, fileName)

	// We join the model name to the model path here. This seems to only be done for TTS and is HIGHLY suspect.
	// This should be addressed in a follow up PR soon.
	// Copying it over nearly verbatim, as TTS backends are not functional without this.
	modelPath := ""
	// Checking first that it exists and is not outside ModelPath
	// TODO: we should actually first check if the modelFile is looking like
	// a FS path
	mp := filepath.Join(loader.ModelPath, modelConfig.Model)
	if _, err := os.Stat(mp); err == nil {
		// Path exists on disk: ensure it stays inside the models directory
		// before handing it to the backend.
		if err := utils.VerifyPath(mp, appConfig.SystemState.Model.ModelsPath); err != nil {
			return "", nil, err
		}
		modelPath = mp
	} else {
		// Fall back to the raw model identifier when the joined path does not
		// exist on disk (deliberate best-effort; see the note above).
		modelPath = modelConfig.Model // skip this step if it fails?????
	}

	res, err := ttsModel.TTS(context.Background(), &proto.TTSRequest{
		Text:     text,
		Model:    modelPath,
		Voice:    voice,
		Dst:      filePath,
		Language: &language,
	})
	if err != nil {
		return "", nil, err
	}

	// return RPC error if any
	if !res.Success {
		return "", nil, fmt.Errorf("error during TTS: %s", res.Message)
	}

	return filePath, res, err
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/rerank.go | core/backend/rerank.go | package backend
import (
"context"
"fmt"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/grpc/proto"
model "github.com/mudler/LocalAI/pkg/model"
)
// Rerank scores the documents in the given request against the configured
// reranking model and returns the raw gRPC rerank result.
func Rerank(request *proto.RerankRequest, loader *model.ModelLoader, appConfig *config.ApplicationConfig, modelConfig config.ModelConfig) (*proto.RerankResult, error) {
	backend, err := loader.Load(ModelOptions(modelConfig, appConfig)...)
	if err != nil {
		return nil, err
	}
	if backend == nil {
		return nil, fmt.Errorf("could not load rerank model")
	}

	return backend.Rerank(context.Background(), request)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/vad.go | core/backend/vad.go | package backend
import (
"context"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/mudler/LocalAI/pkg/model"
)
// VAD runs voice-activity detection over the audio samples in the request and
// returns the detected speech segments (start/end pairs).
func VAD(request *schema.VADRequest,
	ctx context.Context,
	ml *model.ModelLoader,
	appConfig *config.ApplicationConfig,
	modelConfig config.ModelConfig) (*schema.VADResponse, error) {
	opts := ModelOptions(modelConfig, appConfig)
	vadModel, err := ml.Load(opts...)
	if err != nil {
		return nil, err
	}
	req := proto.VADRequest{
		Audio: request.Audio,
	}
	resp, err := vadModel.VAD(ctx, &req)
	if err != nil {
		return nil, err
	}

	// Pre-size the slice: the segment count is known, so avoid repeated
	// growth copies while converting the gRPC reply.
	segments := make([]schema.VADSegment, 0, len(resp.Segments))
	for _, s := range resp.Segments {
		segments = append(segments, schema.VADSegment{Start: s.Start, End: s.End})
	}

	return &schema.VADResponse{
		Segments: segments,
	}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/image.go | core/backend/image.go | package backend
import (
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/grpc/proto"
model "github.com/mudler/LocalAI/pkg/model"
)
// ImageGeneration prepares a deferred image-generation call: the model is
// loaded eagerly, while the actual rendering happens only when the returned
// closure is invoked.
func ImageGeneration(height, width, step, seed int, positive_prompt, negative_prompt, src, dst string, loader *model.ModelLoader, modelConfig config.ModelConfig, appConfig *config.ApplicationConfig, refImages []string) (func() error, error) {
	backend, err := loader.Load(ModelOptions(modelConfig, appConfig)...)
	if err != nil {
		return nil, err
	}

	// Build the request up front; the closure only performs the gRPC call.
	request := &proto.GenerateImageRequest{
		Height:           int32(height),
		Width:            int32(width),
		Step:             int32(step),
		Seed:             int32(seed),
		CLIPSkip:         int32(modelConfig.Diffusers.ClipSkip),
		PositivePrompt:   positive_prompt,
		NegativePrompt:   negative_prompt,
		Dst:              dst,
		Src:              src,
		EnableParameters: modelConfig.Diffusers.EnableParameters,
		RefImages:        refImages,
	}

	return func() error {
		_, err := backend.GenerateImage(appConfig.Context, request)
		return err
	}, nil
}

// ImageGenerationFunc is a test-friendly indirection to call image generation logic.
// Tests can override this variable to provide a stub implementation.
var ImageGenerationFunc = ImageGeneration
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/detection.go | core/backend/detection.go | package backend
import (
"context"
"fmt"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/mudler/LocalAI/pkg/model"
)
// Detection runs object detection on the given source file using the
// configured detection backend and returns the raw gRPC response.
func Detection(
	sourceFile string,
	loader *model.ModelLoader,
	appConfig *config.ApplicationConfig,
	modelConfig config.ModelConfig,
) (*proto.DetectResponse, error) {
	backend, err := loader.Load(ModelOptions(modelConfig, appConfig)...)
	if err != nil {
		return nil, err
	}
	if backend == nil {
		return nil, fmt.Errorf("could not load detection model")
	}

	return backend.Detect(context.Background(), &proto.DetectOptions{
		Src: sourceFile,
	})
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/backend_suite_test.go | core/backend/backend_suite_test.go | package backend_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestBackend is the standard Go test entry point; it hands control to the
// Ginkgo runner, which executes every spec registered in this package.
func TestBackend(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Backend test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/tokenize.go | core/backend/tokenize.go | package backend
import (
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/pkg/grpc"
"github.com/mudler/LocalAI/pkg/model"
)
// ModelTokenize runs the backend tokenizer over s and returns the resulting
// token IDs. The token slice in the response is never nil: an empty result is
// normalized to an empty slice.
func ModelTokenize(s string, loader *model.ModelLoader, modelConfig config.ModelConfig, appConfig *config.ApplicationConfig) (schema.TokenizeResponse, error) {
	var inferenceModel grpc.Backend
	var err error

	inferenceModel, err = loader.Load(ModelOptions(modelConfig, appConfig)...)
	if err != nil {
		return schema.TokenizeResponse{}, err
	}

	predictOptions := gRPCPredictOpts(modelConfig, loader.ModelPath)
	predictOptions.Prompt = s

	// Ask the backend to tokenize the prompt string.
	resp, err := inferenceModel.TokenizeString(appConfig.Context, predictOptions)
	if err != nil {
		return schema.TokenizeResponse{}, err
	}

	// Normalize a nil token slice to an empty one — presumably so API
	// responses encode [] rather than null; NOTE(review): confirm callers.
	if resp.Tokens == nil {
		resp.Tokens = []int32{}
	}

	return schema.TokenizeResponse{Tokens: resp.Tokens}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/video.go | core/backend/video.go | package backend
import (
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/grpc/proto"
model "github.com/mudler/LocalAI/pkg/model"
)
// VideoGeneration prepares a deferred video-generation call: the model is
// loaded eagerly, while the actual rendering happens only when the returned
// closure is invoked.
func VideoGeneration(height, width int32, prompt, negativePrompt, startImage, endImage, dst string, numFrames, fps, seed int32, cfgScale float32, step int32, loader *model.ModelLoader, modelConfig config.ModelConfig, appConfig *config.ApplicationConfig) (func() error, error) {
	backend, err := loader.Load(ModelOptions(modelConfig, appConfig)...)
	if err != nil {
		return nil, err
	}

	// Build the request up front; the closure only performs the gRPC call.
	request := &proto.GenerateVideoRequest{
		Height:         height,
		Width:          width,
		Prompt:         prompt,
		NegativePrompt: negativePrompt,
		StartImage:     startImage,
		EndImage:       endImage,
		NumFrames:      numFrames,
		Fps:            fps,
		Seed:           seed,
		CfgScale:       cfgScale,
		Step:           step,
		Dst:            dst,
	}

	return func() error {
		_, err := backend.GenerateVideo(appConfig.Context, request)
		return err
	}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/stores.go | core/backend/stores.go | package backend
import (
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/grpc"
"github.com/mudler/LocalAI/pkg/model"
)
// StoreBackend loads the key/value store backend with the given name,
// defaulting to the local store implementation when backend is empty.
func StoreBackend(sl *model.ModelLoader, appConfig *config.ApplicationConfig, storeName string, backend string) (grpc.Backend, error) {
	if backend == "" {
		backend = model.LocalStoreBackend
	}

	return sl.Load(
		model.WithBackendString(backend),
		model.WithModel(storeName),
	)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/options.go | core/backend/options.go | package backend
import (
"math/rand"
"os"
"path/filepath"
"github.com/mudler/LocalAI/core/config"
pb "github.com/mudler/LocalAI/pkg/grpc/proto"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/xlog"
)
// ModelOptions assembles the loader options for a model from its config and
// the application settings. Caller-supplied opts are appended last so they
// can override the defaults computed here.
func ModelOptions(c config.ModelConfig, so *config.ApplicationConfig, opts ...model.Option) []model.Option {
	// Fall back to the model file name when no explicit name is set.
	modelID := c.Name
	if modelID == "" {
		modelID = c.Model
	}

	// Resolve the thread count: model-level setting first, overridden by the
	// application-level setting when present. Note this mutates only the
	// local copy of c before it is handed to grpcModelOpts.
	threads := 1
	if c.Threads != nil {
		threads = *c.Threads
	}
	if so.Threads != 0 {
		threads = so.Threads
	}
	c.Threads = &threads

	res := []model.Option{
		model.WithBackendString(c.Backend),
		model.WithModel(c.Model),
		model.WithContext(so.Context),
		model.WithModelID(modelID),
		model.WithLoadGRPCLoadModelOpts(grpcModelOpts(c, so.SystemState.Model.ModelsPath)),
	}

	if so.ParallelBackendRequests {
		res = append(res, model.EnableParallelRequests)
	}
	if c.GRPC.Attempts != 0 {
		res = append(res, model.WithGRPCAttempts(c.GRPC.Attempts))
	}
	if c.GRPC.AttemptsSleepTime != 0 {
		res = append(res, model.WithGRPCAttemptsDelay(c.GRPC.AttemptsSleepTime))
	}
	for name, uri := range so.ExternalGRPCBackends {
		res = append(res, model.WithExternalBackend(name, uri))
	}

	return append(res, opts...)
}
// getSeed resolves the generation seed: the configured value when set, with
// the RAND_SEED sentinel replaced by a freshly drawn random seed.
func getSeed(c config.ModelConfig) int32 {
	seed := int32(config.RAND_SEED)
	if c.Seed != nil {
		seed = int32(*c.Seed)
	}

	if seed == config.RAND_SEED {
		return rand.Int31()
	}
	return seed
}
// grpcModelOpts translates a ModelConfig into the protobuf ModelOptions
// payload sent to the backend when loading a model. Optional (pointer)
// config fields are resolved to defaults when unset: batch 512, context
// size 4096, flash-attention "auto", all boolean toggles false, and all
// GPU layers offloaded.
func grpcModelOpts(c config.ModelConfig, modelPath string) *pb.ModelOptions {
	// Default batch size when not configured.
	b := 512
	if c.Batch != 0 {
		b = c.Batch
	}
	flashAttention := "auto"
	if c.FlashAttention != nil {
		flashAttention = *c.FlashAttention
	}
	f16 := false
	if c.F16 != nil {
		f16 = *c.F16
	}
	embeddings := false
	if c.Embeddings != nil {
		embeddings = *c.Embeddings
	}
	lowVRAM := false
	if c.LowVRAM != nil {
		lowVRAM = *c.LowVRAM
	}
	reranking := false
	if c.Reranking != nil {
		reranking = *c.Reranking
	}
	mmap := false
	if c.MMap != nil {
		mmap = *c.MMap
	}
	ctxSize := 4096
	if c.ContextSize != nil {
		ctxSize = *c.ContextSize
	}
	mmlock := false
	if c.MMlock != nil {
		mmlock = *c.MMlock
	}
	// 9999999 effectively means "offload every layer to the GPU".
	nGPULayers := 9999999
	if c.NGPULayers != nil {
		nGPULayers = *c.NGPULayers
	}
	// Forward the configured grammar trigger words to the backend.
	triggers := make([]*pb.GrammarTrigger, 0)
	for _, t := range c.FunctionsConfig.GrammarConfig.GrammarTriggers {
		triggers = append(triggers, &pb.GrammarTrigger{
			Word: t.Word,
		})
	}
	// Straight field-by-field mapping from the config to the protobuf
	// message; values computed above replace nil-able config fields.
	// NOTE: Threads is dereferenced unconditionally — callers reach this
	// only via ModelOptions, which always sets it on its local copy.
	opts := &pb.ModelOptions{
		CUDA:                 c.CUDA || c.Diffusers.CUDA,
		SchedulerType:        c.Diffusers.SchedulerType,
		GrammarTriggers:      triggers,
		PipelineType:         c.Diffusers.PipelineType,
		CFGScale:             c.CFGScale,
		LoraAdapter:          c.LoraAdapter,
		LoraScale:            c.LoraScale,
		LoraAdapters:         c.LoraAdapters,
		LoraScales:           c.LoraScales,
		F16Memory:            f16,
		LoraBase:             c.LoraBase,
		IMG2IMG:              c.Diffusers.IMG2IMG,
		CLIPModel:            c.Diffusers.ClipModel,
		CLIPSubfolder:        c.Diffusers.ClipSubFolder,
		Options:              c.Options,
		Overrides:            c.Overrides,
		CLIPSkip:             int32(c.Diffusers.ClipSkip),
		ControlNet:           c.Diffusers.ControlNet,
		ContextSize:          int32(ctxSize),
		Seed:                 getSeed(c),
		NBatch:               int32(b),
		NoMulMatQ:            c.NoMulMatQ,
		DraftModel:           c.DraftModel,
		AudioPath:            c.AudioPath,
		Quantization:         c.Quantization,
		LoadFormat:           c.LoadFormat,
		GPUMemoryUtilization: c.GPUMemoryUtilization,
		TrustRemoteCode:      c.TrustRemoteCode,
		EnforceEager:         c.EnforceEager,
		SwapSpace:            int32(c.SwapSpace),
		MaxModelLen:          int32(c.MaxModelLen),
		TensorParallelSize:   int32(c.TensorParallelSize),
		DisableLogStatus:     c.DisableLogStatus,
		DType:                c.DType,
		// LimitMMPerPrompt vLLM
		LimitImagePerPrompt: int32(c.LimitMMPerPrompt.LimitImagePerPrompt),
		LimitVideoPerPrompt: int32(c.LimitMMPerPrompt.LimitVideoPerPrompt),
		LimitAudioPerPrompt: int32(c.LimitMMPerPrompt.LimitAudioPerPrompt),
		FlashAttention:      flashAttention,
		CacheTypeKey:        c.CacheTypeK,
		CacheTypeValue:      c.CacheTypeV,
		NoKVOffload:         c.NoKVOffloading,
		YarnExtFactor:       c.YarnExtFactor,
		YarnAttnFactor:      c.YarnAttnFactor,
		YarnBetaFast:        c.YarnBetaFast,
		YarnBetaSlow:        c.YarnBetaSlow,
		NGQA:                c.NGQA,
		RMSNormEps:          c.RMSNormEps,
		MLock:               mmlock,
		RopeFreqBase:        c.RopeFreqBase,
		RopeScaling:         c.RopeScaling,
		Type:                c.ModelType,
		RopeFreqScale:       c.RopeFreqScale,
		NUMA:                c.NUMA,
		Embeddings:          embeddings,
		Reranking:           reranking,
		LowVRAM:             lowVRAM,
		NGPULayers:          int32(nGPULayers),
		MMap:                mmap,
		MainGPU:             c.MainGPU,
		Threads:             int32(*c.Threads),
		TensorSplit:         c.TensorSplit,
		// RWKV
		Tokenizer: c.Tokenizer,
	}
	// The multimodal projector path is resolved relative to the models dir.
	if c.MMProj != "" {
		opts.MMProj = filepath.Join(modelPath, c.MMProj)
	}
	return opts
}
// gRPCPredictOpts converts a ModelConfig into the protobuf PredictOptions
// payload for a prediction request, materializing the prompt-cache directory
// under modelPath when PromptCachePath is configured.
//
// NOTE(review): many pointer fields (Temperature, TopP, TopK, Maxtokens,
// Threads, F16, Debug, Mirostat*, MMlock, MMap, TFZ, TypicalP) are
// dereferenced unconditionally — this assumes the config has had defaults
// applied before reaching here; a nil field would panic. Confirm against the
// config-loading path.
func gRPCPredictOpts(c config.ModelConfig, modelPath string) *pb.PredictOptions {
	promptCachePath := ""
	if c.PromptCachePath != "" {
		// Keep the prompt cache inside the model directory; on mkdir failure
		// the cache is silently disabled (empty path) after logging.
		p := filepath.Join(modelPath, c.PromptCachePath)
		err := os.MkdirAll(filepath.Dir(p), 0750)
		if err == nil {
			promptCachePath = p
		} else {
			xlog.Error("error creating prompt cache folder", "error", err, "promptCachePath", promptCachePath)
		}
	}
	// Straight field-by-field mapping from the config to the protobuf
	// message; sampler parameters are narrowed to the wire types.
	pbOpts := &pb.PredictOptions{
		Temperature:         float32(*c.Temperature),
		TopP:                float32(*c.TopP),
		NDraft:              c.NDraft,
		TopK:                int32(*c.TopK),
		Tokens:              int32(*c.Maxtokens),
		Threads:             int32(*c.Threads),
		PromptCacheAll:      c.PromptCacheAll,
		PromptCacheRO:       c.PromptCacheRO,
		PromptCachePath:     promptCachePath,
		F16KV:               *c.F16,
		DebugMode:           *c.Debug,
		Grammar:             c.Grammar,
		NegativePromptScale: c.NegativePromptScale,
		RopeFreqBase:        c.RopeFreqBase,
		RopeFreqScale:       c.RopeFreqScale,
		NegativePrompt:      c.NegativePrompt,
		Mirostat:            int32(*c.LLMConfig.Mirostat),
		MirostatETA:         float32(*c.LLMConfig.MirostatETA),
		MirostatTAU:         float32(*c.LLMConfig.MirostatTAU),
		Debug:               *c.Debug,
		StopPrompts:         c.StopWords,
		Repeat:              int32(c.RepeatLastN),
		FrequencyPenalty:    float32(c.FrequencyPenalty),
		PresencePenalty:     float32(c.PresencePenalty),
		Penalty:             float32(c.RepeatPenalty),
		NKeep:               int32(c.Keep),
		Batch:               int32(c.Batch),
		IgnoreEOS:           c.IgnoreEOS,
		Seed:                getSeed(c),
		MLock:               *c.MMlock,
		MMap:                *c.MMap,
		MainGPU:             c.MainGPU,
		TensorSplit:         c.TensorSplit,
		TailFreeSamplingZ:   float32(*c.TFZ),
		TypicalP:            float32(*c.TypicalP),
	}
	// Logprobs and TopLogprobs are set by the caller if provided
	return pbOpts
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/backend/embeddings.go | core/backend/embeddings.go | package backend
import (
"fmt"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/grpc"
model "github.com/mudler/LocalAI/pkg/model"
)
// ModelEmbedding returns a deferred-execution function that computes the
// embedding for either the raw string s or, when non-empty, the pre-tokenized
// input. Trailing zero values are stripped from the returned vector.
func ModelEmbedding(s string, tokens []int, loader *model.ModelLoader, modelConfig config.ModelConfig, appConfig *config.ApplicationConfig) (func() ([]float32, error), error) {
	inferenceModel, err := loader.Load(ModelOptions(modelConfig, appConfig)...)
	if err != nil {
		return nil, err
	}

	var compute func() ([]float32, error)
	switch backend := inferenceModel.(type) {
	case grpc.Backend:
		compute = func() ([]float32, error) {
			predictOptions := gRPCPredictOpts(modelConfig, loader.ModelPath)
			if len(tokens) > 0 {
				// Pre-tokenized input takes precedence over the raw string.
				tokenIDs := []int32{}
				for _, t := range tokens {
					tokenIDs = append(tokenIDs, int32(t))
				}
				predictOptions.EmbeddingTokens = tokenIDs

				res, err := backend.Embeddings(appConfig.Context, predictOptions)
				if err != nil {
					return nil, err
				}
				return res.Embeddings, nil
			}
			predictOptions.Embeddings = s

			res, err := backend.Embeddings(appConfig.Context, predictOptions)
			if err != nil {
				return nil, err
			}
			return res.Embeddings, nil
		}
	default:
		compute = func() ([]float32, error) {
			return nil, fmt.Errorf("embeddings not supported by the backend")
		}
	}

	return func() ([]float32, error) {
		embeds, err := compute()
		if err != nil {
			return embeds, err
		}
		// Drop trailing zero values from the embedding vector.
		end := len(embeds)
		for end > 0 && embeds[end-1] == 0.0 {
			end--
		}
		return embeds[:end], nil
	}, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/templates/templates_suite_test.go | core/templates/templates_suite_test.go | package templates_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestTemplates is the standard Go test entry point; it hands control to the
// Ginkgo runner, which executes every spec registered in this package.
func TestTemplates(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Templates test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/templates/multimodal.go | core/templates/multimodal.go | package templates
import (
"bytes"
"text/template"
"github.com/Masterminds/sprig/v3"
)
// MultiModalOptions carries the media counts needed to assign globally
// consecutive IDs to each media item referenced by a chat message: the
// totals across the whole conversation, and how many items appear in the
// message currently being templated.
type MultiModalOptions struct {
	TotalImages int
	TotalAudios int
	TotalVideos int
	ImagesInMessage int
	AudiosInMessage int
	VideosInMessage int
}

// MultimodalContent is the per-item payload exposed to the template: the
// zero-based, conversation-wide index of an image/audio/video item.
type MultimodalContent struct {
	ID int
}

// DefaultMultiModalTemplate prepends one <__media__> marker per audio and
// image item ([vid-N] for videos) in front of the message text.
// https://github.com/ggml-org/llama.cpp/blob/be1d4a13db26750fac702ceb3af88ae4f39dc9f4/tools/mtmd/mtmd.h#L42
// from <__image__> to <__media__> https://github.com/ggml-org/llama.cpp/blob/79c137f77677b3c8ee3c60a7da033721b938399a/tools/mtmd/mtmd.cpp#L83
const DefaultMultiModalTemplate = "{{ range .Audio }}<__media__>{{end}}{{ range .Images }}<__media__>{{end}}{{ range .Video }}[vid-{{.ID}}]{{end}}{{.Text}}"
// TemplateMultiModal renders the multimodal prompt fragment for one message:
// it executes templateString (DefaultMultiModalTemplate when empty) with the
// message text and per-media-type ID lists derived from opts.
func TemplateMultiModal(templateString string, opts MultiModalOptions, text string) (string, error) {
	if templateString == "" {
		templateString = DefaultMultiModalTemplate
	}

	// compile the template
	tmpl, err := template.New("template").Funcs(sprig.FuncMap()).Parse(templateString)
	if err != nil {
		return "", err
	}

	// enumerate builds the ID list for the items of one media type in this
	// message, offset so IDs are consecutive across the whole conversation
	// (earlier messages own the lower IDs). Replaces three copy-pasted
	// loops and pre-sizes the slice since the length is known.
	enumerate := func(total, inMessage int) []MultimodalContent {
		out := make([]MultimodalContent, 0, inMessage)
		for i := 0; i < inMessage; i++ {
			out = append(out, MultimodalContent{ID: i + (total - inMessage)})
		}
		return out
	}

	result := bytes.NewBuffer(nil)
	// execute the template
	err = tmpl.Execute(result, struct {
		Audio  []MultimodalContent
		Images []MultimodalContent
		Video  []MultimodalContent
		Text   string
	}{
		Audio:  enumerate(opts.TotalAudios, opts.AudiosInMessage),
		Images: enumerate(opts.TotalImages, opts.ImagesInMessage),
		Video:  enumerate(opts.TotalVideos, opts.VideosInMessage),
		Text:   text,
	})
	return result.String(), err
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/templates/evaluator_test.go | core/templates/evaluator_test.go | package templates_test
import (
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/schema"
. "github.com/mudler/LocalAI/core/templates"
"github.com/mudler/LocalAI/pkg/functions"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// toolCallJinja is a llama3-style Jinja chat template used to exercise
// Jinja-based template evaluation in the specs below.
const toolCallJinja = `{{ '<|begin_of_text|>' }}{% if messages[0]['role'] == 'system' %}{% set system_message = messages[0]['content'] %}{% endif %}{% if system_message is defined %}{{ '<|start_header_id|>system<|end_header_id|>
' + system_message + '<|eot_id|>' }}{% endif %}{% for message in messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|start_header_id|>user<|end_header_id|>
' + content + '<|eot_id|><|start_header_id|>assistant<|end_header_id|>
' }}{% elif message['role'] == 'assistant' %}{{ content + '<|eot_id|>' }}{% endif %}{% endfor %}`

// chatML is a Go text/template chat-message template in ChatML format,
// wrapping function calls in <tool_call> and tool output in <tool_response>.
const chatML = `<|im_start|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "tool"}}tool{{else if eq .RoleName "user"}}user{{end}}
{{- if .FunctionCall }}
<tool_call>
{{- else if eq .RoleName "tool" }}
<tool_response>
{{- end }}
{{- if .Content}}
{{.Content }}
{{- end }}
{{- if .FunctionCall}}
{{toJson .FunctionCall}}
{{- end }}
{{- if .FunctionCall }}
</tool_call>
{{- else if eq .RoleName "tool" }}
</tool_response>
{{- end }}<|im_end|>`

// llama3 is a Go text/template chat-message template in llama3 header
// format, prefixing function calls and tool responses with marker lines.
const llama3 = `<|start_header_id|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "tool"}}tool{{else if eq .RoleName "user"}}user{{end}}<|end_header_id|>
{{ if .FunctionCall -}}
Function call:
{{ else if eq .RoleName "tool" -}}
Function response:
{{ end -}}
{{ if .Content -}}
{{.Content -}}
{{ else if .FunctionCall -}}
{{ toJson .FunctionCall -}}
{{ end -}}
<|eot_id|>`
// llama3TestMatch maps a scenario name to its fixture for the llama3
// chat-message template: the expected rendered string, the template config,
// the declared functions, whether function/grammar mode should be used, and
// the input messages.
var llama3TestMatch map[string]map[string]interface{} = map[string]map[string]interface{}{
	"user": {
		"expected": "<|start_header_id|>user<|end_header_id|>\n\nA long time ago in a galaxy far, far away...<|eot_id|>",
		"config": &config.ModelConfig{
			TemplateConfig: config.TemplateConfig{
				ChatMessage: llama3,
			},
		},
		"functions":   []functions.Function{},
		"shouldUseFn": false,
		"messages": []schema.Message{
			{
				Role:          "user",
				StringContent: "A long time ago in a galaxy far, far away...",
			},
		},
	},
	"assistant": {
		"expected": "<|start_header_id|>assistant<|end_header_id|>\n\nA long time ago in a galaxy far, far away...<|eot_id|>",
		"config": &config.ModelConfig{
			TemplateConfig: config.TemplateConfig{
				ChatMessage: llama3,
			},
		},
		"functions": []functions.Function{},
		"messages": []schema.Message{
			{
				Role:          "assistant",
				StringContent: "A long time ago in a galaxy far, far away...",
			},
		},
		"shouldUseFn": false,
	},
	"function_call": {
		"expected": "<|start_header_id|>assistant<|end_header_id|>\n\nFunction call:\n{\"function\":\"test\"}<|eot_id|>",
		"config": &config.ModelConfig{
			TemplateConfig: config.TemplateConfig{
				ChatMessage: llama3,
			},
		},
		"functions": []functions.Function{},
		"messages": []schema.Message{
			{
				Role:         "assistant",
				FunctionCall: map[string]string{"function": "test"},
			},
		},
		"shouldUseFn": false,
	},
	"function_response": {
		"expected": "<|start_header_id|>tool<|end_header_id|>\n\nFunction response:\nResponse from tool<|eot_id|>",
		"config": &config.ModelConfig{
			TemplateConfig: config.TemplateConfig{
				ChatMessage: llama3,
			},
		},
		"functions": []functions.Function{},
		"messages": []schema.Message{
			{
				Role:          "tool",
				StringContent: "Response from tool",
			},
		},
		"shouldUseFn": false,
	},
}
// chatMLTestMatch maps a scenario name to the rendering inputs ("config",
// "functions", "messages", "shouldUseFn") and the exact output ("expected")
// produced by the chatML chat-message template (defined elsewhere in this file).
var chatMLTestMatch map[string]map[string]interface{} = map[string]map[string]interface{}{
	"user": {
		"expected": "<|im_start|>user\nA long time ago in a galaxy far, far away...<|im_end|>",
		"config": &config.ModelConfig{
			TemplateConfig: config.TemplateConfig{
				ChatMessage: chatML,
			},
		},
		"functions":   []functions.Function{},
		"shouldUseFn": false,
		"messages": []schema.Message{
			{
				Role:          "user",
				StringContent: "A long time ago in a galaxy far, far away...",
			},
		},
	},
	"assistant": {
		"expected": "<|im_start|>assistant\nA long time ago in a galaxy far, far away...<|im_end|>",
		"config": &config.ModelConfig{
			TemplateConfig: config.TemplateConfig{
				ChatMessage: chatML,
			},
		},
		"functions": []functions.Function{},
		"messages": []schema.Message{
			{
				Role:          "assistant",
				StringContent: "A long time ago in a galaxy far, far away...",
			},
		},
		"shouldUseFn": false,
	},
	// Unlike the llama3 cases, this one exercises the function-calling path
	// (shouldUseFn is true and a function definition is supplied).
	"function_call": {
		"expected": "<|im_start|>assistant\n<tool_call>\n{\"function\":\"test\"}\n</tool_call><|im_end|>",
		"config": &config.ModelConfig{
			TemplateConfig: config.TemplateConfig{
				ChatMessage: chatML,
			},
		},
		"functions": []functions.Function{
			{
				Name:        "test",
				Description: "test",
				Parameters:  nil,
			},
		},
		"shouldUseFn": true,
		"messages": []schema.Message{
			{
				Role:         "assistant",
				FunctionCall: map[string]string{"function": "test"},
			},
		},
	},
	"function_response": {
		"expected": "<|im_start|>tool\n<tool_response>\nResponse from tool\n</tool_response><|im_end|>",
		"config": &config.ModelConfig{
			TemplateConfig: config.TemplateConfig{
				ChatMessage: chatML,
			},
		},
		"functions": []functions.Function{},
		"shouldUseFn": false,
		"messages": []schema.Message{
			{
				Role:          "tool",
				StringContent: "Response from tool",
			},
		},
	},
}
// Verifies that Evaluator.TemplateMessages renders each scenario of the two
// fixture tables above to its exact expected string.
var _ = Describe("Templates", func() {
	// registerRenderSpecs registers one spec per scenario in the given
	// fixture table, each rendering through a fresh Evaluator.
	registerRenderSpecs := func(cases map[string]map[string]interface{}) {
		var evaluator *Evaluator
		BeforeEach(func() {
			evaluator = NewEvaluator("")
		})
		for name := range cases {
			tc := cases[name]
			It("renders correctly `"+name+"`", func() {
				rendered := evaluator.TemplateMessages(
					schema.OpenAIRequest{},
					tc["messages"].([]schema.Message),
					tc["config"].(*config.ModelConfig),
					tc["functions"].([]functions.Function),
					tc["shouldUseFn"].(bool),
				)
				Expect(rendered).To(Equal(tc["expected"]), rendered)
			})
		}
	}

	Context("chat message ChatML", func() {
		registerRenderSpecs(chatMLTestMatch)
	})
	Context("chat message llama3", func() {
		registerRenderSpecs(llama3TestMatch)
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/templates/cache.go | core/templates/cache.go | package templates
import (
"bytes"
"fmt"
"os"
"path/filepath"
"sync"
"text/template"
"github.com/mudler/LocalAI/pkg/utils"
"github.com/Masterminds/sprig/v3"
)
// Keep this in sync with config.TemplateConfig. Is there a more idiomatic way to accomplish this in go?
// Technically, order doesn't _really_ matter, but the count must stay in sync, see tests/integration/reflect_test.go
type TemplateType int

// templateCache lazily parses and caches text/template templates, keyed
// first by TemplateType and then by template name (or literal template
// content). All access to templates goes through mu.
type templateCache struct {
	mu            sync.Mutex // guards templates
	templatesPath string     // directory searched for "<name>.tmpl" files
	templates     map[TemplateType]map[string]*template.Template
}
// newTemplateCache returns an empty templateCache that resolves template
// files relative to templatesPath.
func newTemplateCache(templatesPath string) *templateCache {
	return &templateCache{
		templatesPath: templatesPath,
		templates:     map[TemplateType]map[string]*template.Template{},
	}
}
// initializeTemplateMapKey ensures the inner map for the given template type
// exists before it is written to. Callers must hold tc.mu.
func (tc *templateCache) initializeTemplateMapKey(tt TemplateType) {
	if tc.templates[tt] == nil {
		tc.templates[tt] = make(map[string]*template.Template)
	}
}

// existsInModelPath reports whether a file with the given name exists inside
// the configured templates path.
func (tc *templateCache) existsInModelPath(s string) bool {
	return utils.ExistsInPath(tc.templatesPath, s)
}
// loadTemplateIfExists parses and caches the template identified by
// templateName for the given templateType. templateName is resolved as a
// "<name>.tmpl" file under the templates path when such a file exists;
// otherwise the string itself is parsed as the template content.
// Callers must hold tc.mu (see evaluateTemplate).
func (tc *templateCache) loadTemplateIfExists(templateType TemplateType, templateName string) error {
	// Check if the template was already loaded
	if _, ok := tc.templates[templateType][templateName]; ok {
		return nil
	}
	// Check if the model path exists
	// skip any error here - we run anyway if a template does not exist
	modelTemplateFile := fmt.Sprintf("%s.tmpl", templateName)
	dat := ""
	file := filepath.Join(tc.templatesPath, modelTemplateFile)
	// Security check: reject template names that would resolve outside the templates path
	if err := utils.VerifyPath(modelTemplateFile, tc.templatesPath); err != nil {
		return fmt.Errorf("template file outside path: %s", file)
	}
	// can either be a file in the system or a string with the template
	if tc.existsInModelPath(modelTemplateFile) {
		d, err := os.ReadFile(file)
		if err != nil {
			return err
		}
		dat = string(d)
	} else {
		dat = templateName
	}
	// Parse the template, with sprig helper functions available
	tmpl, err := template.New("prompt").Funcs(sprig.FuncMap()).Parse(dat)
	if err != nil {
		return err
	}
	tc.templates[templateType][templateName] = tmpl
	return nil
}
// evaluateTemplate renders the template identified by templateNameOrContent
// (either the name of a "<name>.tmpl" file under the templates path, or a
// literal template string) against in, loading and caching the parsed
// template on first use.
func (tc *templateCache) evaluateTemplate(templateType TemplateType, templateNameOrContent string, in interface{}) (string, error) {
	tc.mu.Lock()
	defer tc.mu.Unlock()

	tc.initializeTemplateMapKey(templateType)

	tmpl, cached := tc.templates[templateType][templateNameOrContent]
	if !cached {
		if loadErr := tc.loadTemplateIfExists(templateType, templateNameOrContent); loadErr != nil {
			return "", loadErr
		}
		// loadTemplateIfExists stores the parsed template on success; the
		// nil check below guards against nothing having been stored.
		tmpl = tc.templates[templateType][templateNameOrContent]
	}
	if tmpl == nil {
		return "", fmt.Errorf("failed loading a template for %s", templateNameOrContent)
	}

	var rendered bytes.Buffer
	if err := tmpl.Execute(&rendered, in); err != nil {
		return "", err
	}
	return rendered.String(), nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/templates/evaluator.go | core/templates/evaluator.go | package templates
import (
"encoding/json"
"fmt"
"strings"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/pkg/functions"
"github.com/mudler/xlog"
)
// Rather than pass an interface{} to the prompt template:
// These are the definitions of all possible variables LocalAI will currently populate for use in a prompt template file
// Please note: Not all of these are populated on every endpoint - your template should either be tested for each endpoint you map it to, or tolerant of zero values.
type PromptTemplateData struct {
	SystemPrompt         string
	SuppressSystemPrompt bool   // used by chat specifically to indicate that SystemPrompt above should be _ignored_
	Input                string // the joined, per-message rendered chat content (see TemplateMessages)
	Instruction          string
	Functions            []functions.Function
	MessageIndex         int
	ReasoningEffort      string
	Metadata             map[string]string
}
// ChatMessageTemplateData is the data handed to a per-message ChatMessage
// template; one instance is built per message in TemplateMessages.
type ChatMessageTemplateData struct {
	SystemPrompt string
	Role         string // the configured prefix for this role (config.Roles lookup); may be empty
	RoleName     string // the raw role name (user/assistant/system/tool/assistant_function_call)
	FunctionName string
	Content      string
	MessageIndex int
	Function     bool // true only on the last message when a grammar is configured
	FunctionCall interface{}
	LastMessage  bool
}
// The template kinds an Evaluator can render; used as the first-level key of
// the template cache.
const (
	ChatPromptTemplate TemplateType = iota
	ChatMessageTemplate
	CompletionPromptTemplate
	EditPromptTemplate
	FunctionsPromptTemplate
)
// Evaluator renders prompt and chat-message templates for model configs,
// backed by a lazy template cache rooted at the model path.
type Evaluator struct {
	cache *templateCache
}
// NewEvaluator returns an Evaluator whose template cache resolves template
// files relative to modelPath.
func NewEvaluator(modelPath string) *Evaluator {
	return &Evaluator{cache: newTemplateCache(modelPath)}
}
// EvaluateTemplateForPrompt renders the prompt template selected by
// templateType for the given model config. A "<model>.tmpl" file in the
// model path acts as the default template; an explicit template in the
// model's TemplateConfig takes precedence. When no template applies, the
// input text is returned unmodified.
func (e *Evaluator) EvaluateTemplateForPrompt(templateType TemplateType, config config.ModelConfig, in PromptTemplateData) (string, error) {
	templateName := ""

	// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
	if e.cache.existsInModelPath(fmt.Sprintf("%s.tmpl", config.Model)) {
		templateName = config.Model
	}

	// An explicit per-type template in the model config overrides the default.
	perType := map[TemplateType]string{
		CompletionPromptTemplate: config.TemplateConfig.Completion,
		EditPromptTemplate:       config.TemplateConfig.Edit,
		ChatPromptTemplate:       config.TemplateConfig.Chat,
		FunctionsPromptTemplate:  config.TemplateConfig.Functions,
	}
	if override := perType[templateType]; override != "" {
		templateName = override
	}

	if templateName == "" {
		// No template configured: pass the input through untouched.
		return in.Input, nil
	}
	return e.cache.evaluateTemplate(templateType, templateName, in)
}
// evaluateTemplateForChatMessage renders a single chat message through the
// ChatMessageTemplate identified by templateName.
func (e *Evaluator) evaluateTemplateForChatMessage(templateName string, messageData ChatMessageTemplateData) (string, error) {
	rendered, err := e.cache.evaluateTemplate(ChatMessageTemplate, templateName, messageData)
	return rendered, err
}
// TemplateMessages flattens a chat request into a single prompt string.
// Each message is rendered via the model's ChatMessage template when one is
// configured (falling back to role-prefix concatenation otherwise), the
// rendered messages are joined, and the result is passed through the chat
// (or functions) prompt template when configured.
func (e *Evaluator) TemplateMessages(input schema.OpenAIRequest, messages []schema.Message, config *config.ModelConfig, funcs []functions.Function, shouldUseFn bool) string {
	var predInput string
	suppressConfigSystemPrompt := false
	mess := []string{}
	for messageIndex, i := range messages {
		var content string
		role := i.Role
		// if function call, we might want to customize the role so we can display better that the "assistant called a json action"
		// if an "assistant_function_call" role is defined, we use it, otherwise we use the role that is passed by in the request
		if (i.FunctionCall != nil || i.ToolCalls != nil) && i.Role == "assistant" {
			roleFn := "assistant_function_call"
			r := config.Roles[roleFn]
			if r != "" {
				role = roleFn
			}
		}
		r := config.Roles[role]
		contentExists := i.Content != nil && i.StringContent != ""
		// Tool calls take precedence over the legacy FunctionCall field.
		fcall := i.FunctionCall
		if len(i.ToolCalls) > 0 {
			fcall = i.ToolCalls
		}
		// First attempt to populate content via a chat message specific template
		if config.TemplateConfig.ChatMessage != "" {
			chatMessageData := ChatMessageTemplateData{
				SystemPrompt: config.SystemPrompt,
				Role:         r,
				RoleName:     role,
				Content:      i.StringContent,
				FunctionCall: fcall,
				FunctionName: i.Name,
				LastMessage:  messageIndex == (len(messages) - 1),
				Function:     config.Grammar != "" && (messageIndex == (len(messages) - 1)),
				MessageIndex: messageIndex,
			}
			templatedChatMessage, err := e.evaluateTemplateForChatMessage(config.TemplateConfig.ChatMessage, chatMessageData)
			if err != nil {
				// Template failure is non-fatal: fall through to the role-prefix rendering below.
				xlog.Error("error processing message with template, skipping", "error", err, "message", chatMessageData, "template", config.TemplateConfig.ChatMessage)
			} else {
				if templatedChatMessage == "" {
					xlog.Warn("template produced blank output, skipping", "template", config.TemplateConfig.ChatMessage, "message", chatMessageData)
					continue // TODO: This continue is here intentionally to skip over the line `mess = append(mess, content)` below, and to prevent the sprintf
				}
				xlog.Debug("templated message for chat", "message", templatedChatMessage)
				content = templatedChatMessage
			}
		}
		// marshalAnyRole appends f as JSON, prefixed with the role string r.
		marshalAnyRole := func(f any) {
			j, err := json.Marshal(f)
			if err == nil {
				if contentExists {
					content += "\n" + fmt.Sprint(r, " ", string(j))
				} else {
					content = fmt.Sprint(r, " ", string(j))
				}
			}
		}
		// marshalAny appends f as JSON without a role prefix.
		marshalAny := func(f any) {
			j, err := json.Marshal(f)
			if err == nil {
				if contentExists {
					content += "\n" + string(j)
				} else {
					content = string(j)
				}
			}
		}
		// If this model doesn't have such a template, or if that template fails to return a value, template at the message level.
		if content == "" {
			if r != "" {
				if contentExists {
					content = fmt.Sprint(r, i.StringContent)
				}
				if i.FunctionCall != nil {
					marshalAnyRole(i.FunctionCall)
				}
				if i.ToolCalls != nil {
					marshalAnyRole(i.ToolCalls)
				}
			} else {
				if contentExists {
					content = fmt.Sprint(i.StringContent)
				}
				if i.FunctionCall != nil {
					marshalAny(i.FunctionCall)
				}
				if i.ToolCalls != nil {
					marshalAny(i.ToolCalls)
				}
			}
			// Special Handling: System. We care if it was printed at all, not the r branch, so check separately
			if contentExists && role == "system" {
				suppressConfigSystemPrompt = true
			}
		}
		mess = append(mess, content)
	}
	joinCharacter := "\n"
	if config.TemplateConfig.JoinChatMessagesByCharacter != nil {
		joinCharacter = *config.TemplateConfig.JoinChatMessagesByCharacter
	}
	predInput = strings.Join(mess, joinCharacter)
	xlog.Debug("Prompt (before templating)", "prompt", predInput)
	promptTemplate := ChatPromptTemplate
	if config.TemplateConfig.Functions != "" && shouldUseFn {
		promptTemplate = FunctionsPromptTemplate
	}
	templatedInput, err := e.EvaluateTemplateForPrompt(promptTemplate, *config, PromptTemplateData{
		SystemPrompt:         config.SystemPrompt,
		SuppressSystemPrompt: suppressConfigSystemPrompt,
		Input:                predInput,
		Functions:            funcs,
		ReasoningEffort:      input.ReasoningEffort,
		Metadata:             input.Metadata,
	})
	if err == nil {
		predInput = templatedInput
		xlog.Debug("Template found, input modified", "input", predInput)
	} else {
		// Prompt-template failure leaves the joined messages as the prompt.
		xlog.Debug("Template failed loading", "error", err)
	}
	return predInput
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/templates/multimodal_test.go | core/templates/multimodal_test.go | package templates_test
import (
. "github.com/mudler/LocalAI/core/templates" // Update with your module path
// Update with your module path
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs for TemplateMultiModal. Fix: four specs previously shared the exact
// same description ("should handle messages with more images correctly"),
// which made ginkgo reports and --focus targeting ambiguous; each spec now
// has a distinct description. The inputs and expectations are unchanged.
var _ = Describe("EvaluateTemplate", func() {
	Context("templating simple strings for multimodal chat", func() {
		It("should template messages correctly", func() {
			result, err := TemplateMultiModal("", MultiModalOptions{
				TotalImages:     1,
				TotalAudios:     0,
				TotalVideos:     0,
				ImagesInMessage: 1,
				AudiosInMessage: 0,
				VideosInMessage: 0,
			}, "bar")
			Expect(err).NotTo(HaveOccurred())
			Expect(result).To(Equal("<__media__>bar"))
		})
		It("should handle messages with more images correctly", func() {
			result, err := TemplateMultiModal("", MultiModalOptions{
				TotalImages:     2,
				TotalAudios:     0,
				TotalVideos:     0,
				ImagesInMessage: 2,
				AudiosInMessage: 0,
				VideosInMessage: 0,
			}, "bar")
			Expect(err).NotTo(HaveOccurred())
			Expect(result).To(Equal("<__media__><__media__>bar"))
		})
		It("should emit tags for both audio and images in the same message", func() {
			result, err := TemplateMultiModal("", MultiModalOptions{
				TotalImages:     4,
				TotalAudios:     1,
				TotalVideos:     0,
				ImagesInMessage: 2,
				AudiosInMessage: 1,
				VideosInMessage: 0,
			}, "bar")
			Expect(err).NotTo(HaveOccurred())
			Expect(result).To(Equal("<__media__><__media__><__media__>bar"))
		})
		It("should handle a message containing a subset of the total media", func() {
			result, err := TemplateMultiModal("", MultiModalOptions{
				TotalImages:     3,
				TotalAudios:     1,
				TotalVideos:     0,
				ImagesInMessage: 1,
				AudiosInMessage: 1,
				VideosInMessage: 0,
			}, "bar")
			Expect(err).NotTo(HaveOccurred())
			Expect(result).To(Equal("<__media__><__media__>bar"))
		})
		It("should return the text unchanged when there is no media", func() {
			result, err := TemplateMultiModal("", MultiModalOptions{
				TotalImages:     0,
				TotalAudios:     0,
				TotalVideos:     0,
				ImagesInMessage: 0,
				AudiosInMessage: 0,
				VideosInMessage: 0,
			}, "bar")
			Expect(err).NotTo(HaveOccurred())
			Expect(result).To(Equal("bar"))
		})
	})
	Context("templating with custom defaults", func() {
		It("should render media placeholders using the custom template", func() {
			result, err := TemplateMultiModal("{{ range .Audio }}[audio-{{ add1 .ID}}]{{end}}{{ range .Images }}[img-{{ add1 .ID}}]{{end}}{{ range .Video }}[vid-{{ add1 .ID}}]{{end}}{{.Text}}", MultiModalOptions{
				TotalImages:     1,
				TotalAudios:     0,
				TotalVideos:     0,
				ImagesInMessage: 1,
				AudiosInMessage: 0,
				VideosInMessage: 0,
			}, "bar")
			Expect(err).NotTo(HaveOccurred())
			Expect(result).To(Equal("[img-1]bar"))
		})
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/application_config.go | core/config/application_config.go | package config
import (
"context"
"encoding/json"
"regexp"
"time"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/LocalAI/pkg/xsysinfo"
"github.com/mudler/xlog"
)
// ApplicationConfig is the process-wide configuration of LocalAI. It is
// built by NewApplicationConfig from AppOption functions and shared across
// the HTTP layer, model loading and background services.
type ApplicationConfig struct {
	Context                             context.Context
	ConfigFile                          string
	SystemState                         *system.SystemState
	ExternalBackends                    []string
	UploadLimitMB, Threads, ContextSize int
	F16                                 bool
	Debug                               bool
	EnableTracing                       bool
	TracingMaxItems                     int
	GeneratedContentDir                 string
	UploadDir                           string
	DynamicConfigsDir                   string
	DynamicConfigsDirPollInterval       time.Duration
	CORS                                bool
	CSRF                                bool
	PreloadJSONModels                   string
	PreloadModelsFromPath               string
	CORSAllowOrigins                    string
	ApiKeys                             []string
	P2PToken                            string
	P2PNetworkID                        string
	Federated                           bool
	DisableWebUI                        bool
	EnforcePredownloadScans             bool
	OpaqueErrors                        bool
	UseSubtleKeyComparison              bool
	DisableApiKeyRequirementForHttpGet  bool
	DisableMetrics                      bool
	HttpGetExemptedEndpoints            []*regexp.Regexp
	DisableGalleryEndpoint              bool
	LoadToMemory                        []string

	Galleries        []Gallery
	BackendGalleries []Gallery

	ExternalGRPCBackends map[string]string

	AutoloadGalleries, AutoloadBackendGalleries bool

	SingleBackend           bool // Deprecated: use MaxActiveBackends = 1 instead
	MaxActiveBackends       int  // Maximum number of active backends (0 = unlimited, 1 = single backend mode)
	ParallelBackendRequests bool

	WatchDogIdle bool
	WatchDogBusy bool
	WatchDog     bool

	// Memory Reclaimer settings (works with GPU if available, otherwise RAM)
	MemoryReclaimerEnabled   bool    // Enable memory threshold monitoring
	MemoryReclaimerThreshold float64 // Threshold 0.0-1.0 (e.g., 0.95 = 95%)

	// Eviction settings
	ForceEvictionWhenBusy    bool          // Force eviction even when models have active API calls (default: false for safety)
	LRUEvictionMaxRetries    int           // Maximum number of retries when waiting for busy models to become idle (default: 30)
	LRUEvictionRetryInterval time.Duration // Interval between retries when waiting for busy models (default: 1s)

	ModelsURL []string

	WatchDogBusyTimeout, WatchDogIdleTimeout time.Duration
	WatchDogInterval                         time.Duration // Interval between watchdog checks

	MachineTag string

	APIAddress string

	TunnelCallback func(tunnels []string)

	DisableRuntimeSettings bool

	AgentJobRetentionDays int // Default: 30 days

	PathWithoutAuth []string
}
// AppOption mutates an ApplicationConfig; options are applied in order by
// NewApplicationConfig.
type AppOption func(*ApplicationConfig)
// NewApplicationConfig builds an ApplicationConfig with sane defaults and
// then applies the given options in order.
func NewApplicationConfig(o ...AppOption) *ApplicationConfig {
	cfg := &ApplicationConfig{
		Context:                  context.Background(),
		UploadLimitMB:            15,
		Debug:                    true,
		AgentJobRetentionDays:    30,          // Default: 30 days
		LRUEvictionMaxRetries:    30,          // Default: 30 retries
		LRUEvictionRetryInterval: time.Second, // Default: 1 second
		TracingMaxItems:          1024,
		// Paths served without authentication by default.
		PathWithoutAuth: []string{
			"/static/",
			"/generated-audio/",
			"/generated-images/",
			"/generated-videos/",
			"/favicon.svg",
			"/readyz",
			"/healthz",
		},
	}

	for _, apply := range o {
		apply(cfg)
	}

	return cfg
}
// WithModelsURL sets the list of model URLs to load.
func WithModelsURL(urls ...string) AppOption {
	return func(o *ApplicationConfig) {
		o.ModelsURL = urls
	}
}

// WithSystemState sets the shared *system.SystemState.
func WithSystemState(state *system.SystemState) AppOption {
	return func(o *ApplicationConfig) {
		o.SystemState = state
	}
}

// WithExternalBackends sets the list of external backend identifiers.
func WithExternalBackends(backends ...string) AppOption {
	return func(o *ApplicationConfig) {
		o.ExternalBackends = backends
	}
}

// WithMachineTag sets the tag identifying this machine.
func WithMachineTag(tag string) AppOption {
	return func(o *ApplicationConfig) {
		o.MachineTag = tag
	}
}

// WithCors enables or disables CORS handling.
func WithCors(b bool) AppOption {
	return func(o *ApplicationConfig) {
		o.CORS = b
	}
}

// WithP2PNetworkID sets the p2p network identifier.
func WithP2PNetworkID(s string) AppOption {
	return func(o *ApplicationConfig) {
		o.P2PNetworkID = s
	}
}

// WithCsrf enables or disables CSRF protection.
func WithCsrf(b bool) AppOption {
	return func(o *ApplicationConfig) {
		o.CSRF = b
	}
}

// WithP2PToken sets the p2p token.
func WithP2PToken(s string) AppOption {
	return func(o *ApplicationConfig) {
		o.P2PToken = s
	}
}

// EnableWatchDog turns on the watchdog.
var EnableWatchDog = func(o *ApplicationConfig) {
	o.WatchDog = true
}

// EnableTracing turns on request tracing.
var EnableTracing = func(o *ApplicationConfig) {
	o.EnableTracing = true
}

// EnableWatchDogIdleCheck enables the watchdog idle check (and the watchdog itself).
var EnableWatchDogIdleCheck = func(o *ApplicationConfig) {
	o.WatchDog = true
	o.WatchDogIdle = true
}

// DisableGalleryEndpoint disables the gallery HTTP endpoint.
var DisableGalleryEndpoint = func(o *ApplicationConfig) {
	o.DisableGalleryEndpoint = true
}

// EnableWatchDogBusyCheck enables the watchdog busy check (and the watchdog itself).
var EnableWatchDogBusyCheck = func(o *ApplicationConfig) {
	o.WatchDog = true
	o.WatchDogBusy = true
}

// DisableWebUI disables the web UI.
var DisableWebUI = func(o *ApplicationConfig) {
	o.DisableWebUI = true
}

// DisableRuntimeSettings sets the DisableRuntimeSettings flag.
var DisableRuntimeSettings = func(o *ApplicationConfig) {
	o.DisableRuntimeSettings = true
}

// SetWatchDogBusyTimeout sets the busy timeout used by the watchdog.
func SetWatchDogBusyTimeout(t time.Duration) AppOption {
	return func(o *ApplicationConfig) {
		o.WatchDogBusyTimeout = t
	}
}

// SetWatchDogIdleTimeout sets the idle timeout used by the watchdog.
func SetWatchDogIdleTimeout(t time.Duration) AppOption {
	return func(o *ApplicationConfig) {
		o.WatchDogIdleTimeout = t
	}
}
// EnableMemoryReclaimer enables memory threshold monitoring.
// When enabled, the watchdog will evict backends if memory usage exceeds the threshold.
// Works with GPU VRAM if available, otherwise uses system RAM.
var EnableMemoryReclaimer = func(o *ApplicationConfig) {
	o.MemoryReclaimerEnabled = true
	o.WatchDog = true // Memory reclaimer requires watchdog infrastructure
}

// SetMemoryReclaimerThreshold sets the memory usage threshold (0.0-1.0).
// When memory usage exceeds this threshold, backends will be evicted using LRU strategy.
// Values outside (0, 1.0] are silently ignored.
func SetMemoryReclaimerThreshold(threshold float64) AppOption {
	return func(o *ApplicationConfig) {
		if threshold > 0 && threshold <= 1.0 {
			o.MemoryReclaimerThreshold = threshold
			o.MemoryReclaimerEnabled = true
			o.WatchDog = true // Memory reclaimer requires watchdog infrastructure
		}
	}
}

// WithMemoryReclaimer configures the memory reclaimer with the given settings.
// The threshold is only applied when within (0, 1.0].
func WithMemoryReclaimer(enabled bool, threshold float64) AppOption {
	return func(o *ApplicationConfig) {
		o.MemoryReclaimerEnabled = enabled
		if threshold > 0 && threshold <= 1.0 {
			o.MemoryReclaimerThreshold = threshold
		}
		if enabled {
			o.WatchDog = true // Memory reclaimer requires watchdog infrastructure
		}
	}
}

// EnableSingleBackend forces single-backend mode.
//
// Deprecated: use SetMaxActiveBackends(1) instead. Kept for backward compatibility.
var EnableSingleBackend = func(o *ApplicationConfig) {
	o.SingleBackend = true
	o.MaxActiveBackends = 1
}

// SetMaxActiveBackends sets the maximum number of active backends.
// 0 = unlimited, 1 = single backend mode (replaces EnableSingleBackend)
func SetMaxActiveBackends(n int) AppOption {
	return func(o *ApplicationConfig) {
		o.MaxActiveBackends = n
		// For backward compatibility, also set SingleBackend if n == 1
		if n == 1 {
			o.SingleBackend = true
		}
	}
}
// GetEffectiveMaxActiveBackends returns the effective max active backends
// limit, reconciling MaxActiveBackends with the deprecated SingleBackend
// flag: a positive MaxActiveBackends wins; otherwise SingleBackend maps to
// 1; otherwise 0 means unlimited.
func (o *ApplicationConfig) GetEffectiveMaxActiveBackends() int {
	switch {
	case o.MaxActiveBackends > 0:
		return o.MaxActiveBackends
	case o.SingleBackend:
		return 1
	default:
		return 0
	}
}
// WithForceEvictionWhenBusy sets whether to force eviction even when models have active API calls
func WithForceEvictionWhenBusy(enabled bool) AppOption {
	return func(o *ApplicationConfig) {
		o.ForceEvictionWhenBusy = enabled
	}
}

// WithLRUEvictionMaxRetries sets the maximum number of retries when waiting for busy models to become idle.
// Non-positive values are ignored, keeping the default.
func WithLRUEvictionMaxRetries(maxRetries int) AppOption {
	return func(o *ApplicationConfig) {
		if maxRetries > 0 {
			o.LRUEvictionMaxRetries = maxRetries
		}
	}
}

// WithLRUEvictionRetryInterval sets the interval between retries when waiting for busy models.
// Non-positive durations are ignored, keeping the default.
func WithLRUEvictionRetryInterval(interval time.Duration) AppOption {
	return func(o *ApplicationConfig) {
		if interval > 0 {
			o.LRUEvictionRetryInterval = interval
		}
	}
}

// EnableParallelBackendRequests allows backend requests to be served in parallel.
var EnableParallelBackendRequests = func(o *ApplicationConfig) {
	o.ParallelBackendRequests = true
}

// EnableGalleriesAutoload enables automatic loading of model galleries.
var EnableGalleriesAutoload = func(o *ApplicationConfig) {
	o.AutoloadGalleries = true
}

// EnableBackendGalleriesAutoload enables automatic loading of backend galleries.
var EnableBackendGalleriesAutoload = func(o *ApplicationConfig) {
	o.AutoloadBackendGalleries = true
}

// EnableFederated enables federated mode.
var EnableFederated = func(o *ApplicationConfig) {
	o.Federated = true
}

// WithExternalBackend registers an external gRPC backend by name and URI.
func WithExternalBackend(name string, uri string) AppOption {
	return func(o *ApplicationConfig) {
		if o.ExternalGRPCBackends == nil {
			o.ExternalGRPCBackends = make(map[string]string)
		}
		o.ExternalGRPCBackends[name] = uri
	}
}
// WithCorsAllowOrigins sets the allowed CORS origins.
func WithCorsAllowOrigins(b string) AppOption {
	return func(o *ApplicationConfig) {
		o.CORSAllowOrigins = b
	}
}

// WithStringGalleries appends galleries decoded from a JSON string.
// An empty string resets the list; decode errors are logged.
// NOTE(review): on a JSON error the (possibly partially decoded) slice is
// still appended — confirm this is intentional.
func WithStringGalleries(galls string) AppOption {
	return func(o *ApplicationConfig) {
		if galls == "" {
			o.Galleries = []Gallery{}
			return
		}
		var galleries []Gallery
		if err := json.Unmarshal([]byte(galls), &galleries); err != nil {
			xlog.Error("failed loading galleries", "error", err)
		}
		o.Galleries = append(o.Galleries, galleries...)
	}
}

// WithBackendGalleries appends backend galleries decoded from a JSON string.
// An empty string resets the list; decode errors are logged.
func WithBackendGalleries(galls string) AppOption {
	return func(o *ApplicationConfig) {
		if galls == "" {
			o.BackendGalleries = []Gallery{}
			return
		}
		var galleries []Gallery
		if err := json.Unmarshal([]byte(galls), &galleries); err != nil {
			xlog.Error("failed loading galleries", "error", err)
		}
		o.BackendGalleries = append(o.BackendGalleries, galleries...)
	}
}

// WithGalleries appends the given galleries.
func WithGalleries(galleries []Gallery) AppOption {
	return func(o *ApplicationConfig) {
		o.Galleries = append(o.Galleries, galleries...)
	}
}

// WithContext sets the application context.
func WithContext(ctx context.Context) AppOption {
	return func(o *ApplicationConfig) {
		o.Context = ctx
	}
}

// WithYAMLConfigPreload sets the path of a YAML file with models to preload.
func WithYAMLConfigPreload(configFile string) AppOption {
	return func(o *ApplicationConfig) {
		o.PreloadModelsFromPath = configFile
	}
}

// WithJSONStringPreload sets a JSON string describing models to preload.
func WithJSONStringPreload(configFile string) AppOption {
	return func(o *ApplicationConfig) {
		o.PreloadJSONModels = configFile
	}
}

// WithConfigFile sets the main configuration file path.
func WithConfigFile(configFile string) AppOption {
	return func(o *ApplicationConfig) {
		o.ConfigFile = configFile
	}
}

// WithUploadLimitMB sets the upload size limit in megabytes.
func WithUploadLimitMB(limit int) AppOption {
	return func(o *ApplicationConfig) {
		o.UploadLimitMB = limit
	}
}

// WithThreads sets the default number of threads; 0 falls back to the
// number of physical CPU cores.
func WithThreads(threads int) AppOption {
	return func(o *ApplicationConfig) {
		if threads == 0 { // 0 is not allowed
			threads = xsysinfo.CPUPhysicalCores()
		}
		o.Threads = threads
	}
}

// WithContextSize sets the default model context size.
func WithContextSize(ctxSize int) AppOption {
	return func(o *ApplicationConfig) {
		o.ContextSize = ctxSize
	}
}
// WithTunnelCallback sets the callback invoked with the active tunnel list.
func WithTunnelCallback(callback func(tunnels []string)) AppOption {
	return func(o *ApplicationConfig) {
		o.TunnelCallback = callback
	}
}

// WithF16 enables or disables F16 as the default precision.
func WithF16(f16 bool) AppOption {
	return func(o *ApplicationConfig) {
		o.F16 = f16
	}
}

// WithDebug enables or disables debug mode.
func WithDebug(debug bool) AppOption {
	return func(o *ApplicationConfig) {
		o.Debug = debug
	}
}

// WithTracingMaxItems sets the maximum number of retained tracing items.
func WithTracingMaxItems(items int) AppOption {
	return func(o *ApplicationConfig) {
		o.TracingMaxItems = items
	}
}

// WithGeneratedContentDir sets the directory for generated content.
func WithGeneratedContentDir(generatedContentDir string) AppOption {
	return func(o *ApplicationConfig) {
		o.GeneratedContentDir = generatedContentDir
	}
}

// WithUploadDir sets the directory for uploads.
func WithUploadDir(uploadDir string) AppOption {
	return func(o *ApplicationConfig) {
		o.UploadDir = uploadDir
	}
}

// WithDynamicConfigDir sets the directory watched for dynamic configuration.
func WithDynamicConfigDir(dynamicConfigsDir string) AppOption {
	return func(o *ApplicationConfig) {
		o.DynamicConfigsDir = dynamicConfigsDir
	}
}

// WithDynamicConfigDirPollInterval sets how often the dynamic config dir is polled.
func WithDynamicConfigDirPollInterval(interval time.Duration) AppOption {
	return func(o *ApplicationConfig) {
		o.DynamicConfigsDirPollInterval = interval
	}
}

// WithApiKeys sets the accepted API keys.
func WithApiKeys(apiKeys []string) AppOption {
	return func(o *ApplicationConfig) {
		o.ApiKeys = apiKeys
	}
}

// WithAgentJobRetentionDays sets the agent job retention window in days.
func WithAgentJobRetentionDays(days int) AppOption {
	return func(o *ApplicationConfig) {
		o.AgentJobRetentionDays = days
	}
}

// WithEnforcedPredownloadScans enables or disables mandatory pre-download scans.
func WithEnforcedPredownloadScans(enforced bool) AppOption {
	return func(o *ApplicationConfig) {
		o.EnforcePredownloadScans = enforced
	}
}

// WithOpaqueErrors enables or disables opaque error responses.
func WithOpaqueErrors(opaque bool) AppOption {
	return func(o *ApplicationConfig) {
		o.OpaqueErrors = opaque
	}
}

// WithLoadToMemory sets the models to load into memory.
func WithLoadToMemory(models []string) AppOption {
	return func(o *ApplicationConfig) {
		o.LoadToMemory = models
	}
}

// WithSubtleKeyComparison enables or disables constant-time API key comparison.
func WithSubtleKeyComparison(subtle bool) AppOption {
	return func(o *ApplicationConfig) {
		o.UseSubtleKeyComparison = subtle
	}
}

// WithDisableApiKeyRequirementForHttpGet sets whether HTTP GET requests skip the API key requirement.
func WithDisableApiKeyRequirementForHttpGet(required bool) AppOption {
	return func(o *ApplicationConfig) {
		o.DisableApiKeyRequirementForHttpGet = required
	}
}

// WithAPIAddress sets the API listen address.
func WithAPIAddress(address string) AppOption {
	return func(o *ApplicationConfig) {
		o.APIAddress = address
	}
}

// DisableMetricsEndpoint disables the metrics endpoint.
var DisableMetricsEndpoint AppOption = func(o *ApplicationConfig) {
	o.DisableMetrics = true
}
// WithHttpGetExemptedEndpoints compiles the given regular expressions and
// uses them to exempt matching endpoints from the API-key requirement on
// HTTP GET requests. Patterns that fail to compile are logged and skipped
// rather than aborting startup.
func WithHttpGetExemptedEndpoints(endpoints []string) AppOption {
	return func(o *ApplicationConfig) {
		// Pre-size for the common case where every pattern is valid.
		o.HttpGetExemptedEndpoints = make([]*regexp.Regexp, 0, len(endpoints))
		for _, epr := range endpoints {
			r, err := regexp.Compile(epr)
			if err != nil {
				// regexp.Compile returns a non-nil Regexp whenever err is nil,
				// so the error is the only failure signal we need.
				xlog.Warn("Error while compiling HTTP Get Exemption regex, skipping this entry.", "error", err, "regex", epr)
				continue
			}
			o.HttpGetExemptedEndpoints = append(o.HttpGetExemptedEndpoints, r)
		}
	}
}
// ToConfigLoaderOptions returns a slice of ConfigLoader Option.
// Some options defined at the application level are going to be passed as defaults for
// all the configuration for the models.
// This includes for instance the context size or the number of threads.
// If a model doesn't set configs directly to the config model file
// it will use the defaults defined here.
//
// NOTE(review): this dereferences o.SystemState without a nil check —
// callers are expected to have set it (e.g. via WithSystemState); confirm.
func (o *ApplicationConfig) ToConfigLoaderOptions() []ConfigLoaderOption {
	return []ConfigLoaderOption{
		LoadOptionContextSize(o.ContextSize),
		LoadOptionDebug(o.Debug),
		LoadOptionF16(o.F16),
		LoadOptionThreads(o.Threads),
		ModelPath(o.SystemState.Model.ModelsPath),
	}
}
// ToRuntimeSettings converts ApplicationConfig to RuntimeSettings for API responses and JSON serialization.
// This provides a single source of truth - ApplicationConfig holds the live values,
// and this method creates a RuntimeSettings snapshot for external consumption.
func (o *ApplicationConfig) ToRuntimeSettings() RuntimeSettings {
	// Create local copies for pointer fields, so the snapshot does not alias
	// the live configuration.
	watchdogEnabled := o.WatchDog
	watchdogIdle := o.WatchDogIdle
	watchdogBusy := o.WatchDogBusy
	singleBackend := o.SingleBackend
	maxActiveBackends := o.MaxActiveBackends
	parallelBackendRequests := o.ParallelBackendRequests
	memoryReclaimerEnabled := o.MemoryReclaimerEnabled
	memoryReclaimerThreshold := o.MemoryReclaimerThreshold
	forceEvictionWhenBusy := o.ForceEvictionWhenBusy
	lruEvictionMaxRetries := o.LRUEvictionMaxRetries
	threads := o.Threads
	contextSize := o.ContextSize
	f16 := o.F16
	debug := o.Debug
	tracingMaxItems := o.TracingMaxItems
	enableTracing := o.EnableTracing
	cors := o.CORS
	csrf := o.CSRF
	corsAllowOrigins := o.CORSAllowOrigins
	p2pToken := o.P2PToken
	p2pNetworkID := o.P2PNetworkID
	federated := o.Federated
	galleries := o.Galleries
	backendGalleries := o.BackendGalleries
	autoloadGalleries := o.AutoloadGalleries
	autoloadBackendGalleries := o.AutoloadBackendGalleries
	apiKeys := o.ApiKeys
	agentJobRetentionDays := o.AgentJobRetentionDays
	// Format timeouts as strings; unset (zero) durations fall back to the
	// documented defaults.
	var idleTimeout, busyTimeout, watchdogInterval string
	if o.WatchDogIdleTimeout > 0 {
		idleTimeout = o.WatchDogIdleTimeout.String()
	} else {
		idleTimeout = "15m" // default
	}
	if o.WatchDogBusyTimeout > 0 {
		busyTimeout = o.WatchDogBusyTimeout.String()
	} else {
		busyTimeout = "5m" // default
	}
	if o.WatchDogInterval > 0 {
		watchdogInterval = o.WatchDogInterval.String()
	} else {
		watchdogInterval = "2s" // default
	}
	var lruEvictionRetryInterval string
	if o.LRUEvictionRetryInterval > 0 {
		lruEvictionRetryInterval = o.LRUEvictionRetryInterval.String()
	} else {
		lruEvictionRetryInterval = "1s" // default
	}
	return RuntimeSettings{
		WatchdogEnabled:          &watchdogEnabled,
		WatchdogIdleEnabled:      &watchdogIdle,
		WatchdogBusyEnabled:      &watchdogBusy,
		WatchdogIdleTimeout:      &idleTimeout,
		WatchdogBusyTimeout:      &busyTimeout,
		WatchdogInterval:         &watchdogInterval,
		SingleBackend:            &singleBackend,
		MaxActiveBackends:        &maxActiveBackends,
		ParallelBackendRequests:  &parallelBackendRequests,
		MemoryReclaimerEnabled:   &memoryReclaimerEnabled,
		MemoryReclaimerThreshold: &memoryReclaimerThreshold,
		ForceEvictionWhenBusy:    &forceEvictionWhenBusy,
		LRUEvictionMaxRetries:    &lruEvictionMaxRetries,
		LRUEvictionRetryInterval: &lruEvictionRetryInterval,
		Threads:                  &threads,
		ContextSize:              &contextSize,
		F16:                      &f16,
		Debug:                    &debug,
		TracingMaxItems:          &tracingMaxItems,
		EnableTracing:            &enableTracing,
		CORS:                     &cors,
		CSRF:                     &csrf,
		CORSAllowOrigins:         &corsAllowOrigins,
		P2PToken:                 &p2pToken,
		P2PNetworkID:             &p2pNetworkID,
		Federated:                &federated,
		Galleries:                &galleries,
		BackendGalleries:         &backendGalleries,
		AutoloadGalleries:        &autoloadGalleries,
		AutoloadBackendGalleries: &autoloadBackendGalleries,
		ApiKeys:                  &apiKeys,
		AgentJobRetentionDays:    &agentJobRetentionDays,
	}
}
// ApplyRuntimeSettings applies RuntimeSettings to ApplicationConfig.
// Only non-nil fields in RuntimeSettings are applied.
// Returns true if watchdog-related settings changed (requiring restart).
//
// Semantics worth noting:
//   - Enabling the idle watchdog, busy watchdog, or memory reclaimer
//     implicitly enables the watchdog itself (o.WatchDog = true).
//   - MaxActiveBackends takes precedence over the deprecated SingleBackend
//     flag; whichever one is applied keeps both fields consistent.
//   - Duration strings that fail time.ParseDuration are silently ignored,
//     leaving the previous value in place.
//   - ApiKeys are intentionally NOT applied here (see note at the bottom).
func (o *ApplicationConfig) ApplyRuntimeSettings(settings *RuntimeSettings) (requireRestart bool) {
	if settings == nil {
		return false
	}
	if settings.WatchdogEnabled != nil {
		o.WatchDog = *settings.WatchdogEnabled
		requireRestart = true
	}
	if settings.WatchdogIdleEnabled != nil {
		o.WatchDogIdle = *settings.WatchdogIdleEnabled
		// idle checking is a watchdog sub-feature: force-enable the watchdog
		if o.WatchDogIdle {
			o.WatchDog = true
		}
		requireRestart = true
	}
	if settings.WatchdogBusyEnabled != nil {
		o.WatchDogBusy = *settings.WatchdogBusyEnabled
		// busy checking is a watchdog sub-feature: force-enable the watchdog
		if o.WatchDogBusy {
			o.WatchDog = true
		}
		requireRestart = true
	}
	if settings.WatchdogIdleTimeout != nil {
		// invalid duration strings are ignored (previous value kept)
		if dur, err := time.ParseDuration(*settings.WatchdogIdleTimeout); err == nil {
			o.WatchDogIdleTimeout = dur
			requireRestart = true
		}
	}
	if settings.WatchdogBusyTimeout != nil {
		if dur, err := time.ParseDuration(*settings.WatchdogBusyTimeout); err == nil {
			o.WatchDogBusyTimeout = dur
			requireRestart = true
		}
	}
	if settings.WatchdogInterval != nil {
		if dur, err := time.ParseDuration(*settings.WatchdogInterval); err == nil {
			o.WatchDogInterval = dur
			requireRestart = true
		}
	}
	// MaxActiveBackends wins over the deprecated SingleBackend flag; the two
	// fields are kept mutually consistent in either branch.
	if settings.MaxActiveBackends != nil {
		o.MaxActiveBackends = *settings.MaxActiveBackends
		o.SingleBackend = (*settings.MaxActiveBackends == 1)
		requireRestart = true
	} else if settings.SingleBackend != nil {
		o.SingleBackend = *settings.SingleBackend
		if *settings.SingleBackend {
			o.MaxActiveBackends = 1
		} else {
			o.MaxActiveBackends = 0
		}
		requireRestart = true
	}
	if settings.ParallelBackendRequests != nil {
		o.ParallelBackendRequests = *settings.ParallelBackendRequests
	}
	if settings.MemoryReclaimerEnabled != nil {
		o.MemoryReclaimerEnabled = *settings.MemoryReclaimerEnabled
		// the reclaimer runs inside the watchdog: force-enable it
		if *settings.MemoryReclaimerEnabled {
			o.WatchDog = true
		}
		requireRestart = true
	}
	if settings.MemoryReclaimerThreshold != nil {
		// only values in (0, 1.0] are valid; anything else is ignored
		if *settings.MemoryReclaimerThreshold > 0 && *settings.MemoryReclaimerThreshold <= 1.0 {
			o.MemoryReclaimerThreshold = *settings.MemoryReclaimerThreshold
			requireRestart = true
		}
	}
	if settings.ForceEvictionWhenBusy != nil {
		o.ForceEvictionWhenBusy = *settings.ForceEvictionWhenBusy
		// This setting doesn't require restart, can be updated dynamically
	}
	if settings.LRUEvictionMaxRetries != nil {
		o.LRUEvictionMaxRetries = *settings.LRUEvictionMaxRetries
		// This setting doesn't require restart, can be updated dynamically
	}
	if settings.LRUEvictionRetryInterval != nil {
		if dur, err := time.ParseDuration(*settings.LRUEvictionRetryInterval); err == nil {
			o.LRUEvictionRetryInterval = dur
			// This setting doesn't require restart, can be updated dynamically
		}
	}
	// Plain value settings below: applied directly, no restart flag.
	if settings.Threads != nil {
		o.Threads = *settings.Threads
	}
	if settings.ContextSize != nil {
		o.ContextSize = *settings.ContextSize
	}
	if settings.F16 != nil {
		o.F16 = *settings.F16
	}
	if settings.Debug != nil {
		o.Debug = *settings.Debug
	}
	if settings.EnableTracing != nil {
		o.EnableTracing = *settings.EnableTracing
	}
	if settings.TracingMaxItems != nil {
		o.TracingMaxItems = *settings.TracingMaxItems
	}
	if settings.CORS != nil {
		o.CORS = *settings.CORS
	}
	if settings.CSRF != nil {
		o.CSRF = *settings.CSRF
	}
	if settings.CORSAllowOrigins != nil {
		o.CORSAllowOrigins = *settings.CORSAllowOrigins
	}
	if settings.P2PToken != nil {
		o.P2PToken = *settings.P2PToken
	}
	if settings.P2PNetworkID != nil {
		o.P2PNetworkID = *settings.P2PNetworkID
	}
	if settings.Federated != nil {
		o.Federated = *settings.Federated
	}
	if settings.Galleries != nil {
		o.Galleries = *settings.Galleries
	}
	if settings.BackendGalleries != nil {
		o.BackendGalleries = *settings.BackendGalleries
	}
	if settings.AutoloadGalleries != nil {
		o.AutoloadGalleries = *settings.AutoloadGalleries
	}
	if settings.AutoloadBackendGalleries != nil {
		o.AutoloadBackendGalleries = *settings.AutoloadBackendGalleries
	}
	if settings.AgentJobRetentionDays != nil {
		o.AgentJobRetentionDays = *settings.AgentJobRetentionDays
	}
	// Note: ApiKeys requires special handling (merging with startup keys) - handled in caller
	return requireRestart
}
// func WithMetrics(meter *metrics.Metrics) AppOption {
// return func(o *StartupOptions) {
// o.Metrics = meter
// }
// }
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/gguf.go | core/config/gguf.go | package config
import (
"github.com/mudler/LocalAI/pkg/xsysinfo"
"github.com/mudler/xlog"
gguf "github.com/gpustack/gguf-parser-go"
)
// Fallback values applied when the GGUF metadata yields no usable estimate.
const (
	// defaultContextSize is the context window used when no estimate is available.
	defaultContextSize = 1024
	// defaultNGPULayers is effectively "offload all layers" to the GPU.
	defaultNGPULayers = 99999999
)
// guessGGUFFromFile fills in missing fields of cfg (context size, GPU
// options, GPU layer offload count, model name, and template flags) from the
// metadata of an already-parsed GGUF file.
//
// defaultCtx is the server-wide default context size: when it is 0 and cfg
// has no explicit context size, the value estimated from the GGUF metadata
// is used, falling back to defaultContextSize.
func guessGGUFFromFile(cfg *ModelConfig, f *gguf.GGUFFile, defaultCtx int) {
	if defaultCtx == 0 && cfg.ContextSize == nil {
		ctxSize := f.EstimateLLaMACppRun().ContextSize
		if ctxSize > 0 {
			cSize := int(ctxSize)
			cfg.ContextSize = &cSize
		} else {
			defaultCtx = defaultContextSize
			cfg.ContextSize = &defaultCtx
		}
	}

	// GPU options: request GPU usage when a supported GPU is present and the
	// user did not set any options explicitly.
	if cfg.Options == nil {
		if xsysinfo.HasGPU("nvidia") || xsysinfo.HasGPU("amd") {
			cfg.Options = []string{"gpu"}
		}
	}

	// vram estimation: best-effort, failures are logged and never fatal
	vram, err := xsysinfo.TotalAvailableVRAM()
	if err != nil {
		xlog.Error("guessDefaultsFromFile(TotalAvailableVRAM)", "error", err)
	} else if vram > 0 {
		estimate, err := xsysinfo.EstimateGGUFVRAMUsage(f, vram)
		if err != nil {
			xlog.Error("guessDefaultsFromFile(EstimateGGUFVRAMUsage)", "error", err)
		} else {
			if estimate.IsFullOffload {
				xlog.Warn("guessDefaultsFromFile: full offload is recommended")
			}
			if estimate.EstimatedVRAM > vram {
				xlog.Warn("guessDefaultsFromFile: estimated VRAM usage is greater than available VRAM")
			}
			if cfg.NGPULayers == nil && estimate.EstimatedLayers > 0 {
				xlog.Debug("guessDefaultsFromFile: layers estimated", "layers", estimate.EstimatedLayers)
				cfg.NGPULayers = &estimate.EstimatedLayers
			}
		}
	}

	if cfg.NGPULayers == nil {
		// we assume we want to offload all layers
		defaultHigh := defaultNGPULayers
		cfg.NGPULayers = &defaultHigh
	}

	// Dereference so the log shows the layer count rather than a pointer
	// address; NGPULayers is guaranteed non-nil at this point.
	xlog.Debug("guessDefaultsFromFile: NGPULayers set", "NGPULayers", *cfg.NGPULayers)

	// template estimations
	if cfg.HasTemplate() {
		// nothing to guess here
		xlog.Debug("guessDefaultsFromFile: template already set", "name", cfg.Name)
		return
	}

	xlog.Debug("Model file loaded", "file", cfg.ModelFileName(), "eosTokenID", f.Tokenizer().EOSTokenID, "bosTokenID", f.Tokenizer().BOSTokenID, "modelName", f.Metadata().Name, "architecture", f.Architecture().Architecture)

	// guess the name
	if cfg.Name == "" {
		cfg.Name = f.Metadata().Name
	}

	// Instruct to use template from llama.cpp
	cfg.TemplateConfig.UseTokenizerTemplate = true
	cfg.FunctionsConfig.GrammarConfig.NoGrammar = true
	cfg.Options = append(cfg.Options, "use_jinja:true")
	cfg.KnownUsecaseStrings = append(cfg.KnownUsecaseStrings, "FLAG_CHAT")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/guesser.go | core/config/guesser.go | package config
import (
"os"
"path/filepath"
gguf "github.com/gpustack/gguf-parser-go"
"github.com/mudler/xlog"
)
// guessDefaultsFromFile tries to fill missing fields of cfg by inspecting the
// model file on disk (currently only GGUF files are understood). It is
// best-effort: parse failures and parser panics are contained, and cfg always
// ends up with a non-nil ContextSize.
func guessDefaultsFromFile(cfg *ModelConfig, modelPath string, defaultCtx int) {
	if os.Getenv("LOCALAI_DISABLE_GUESSING") == "true" {
		xlog.Debug("guessDefaultsFromFile: guessing disabled with LOCALAI_DISABLE_GUESSING")
		return
	}

	if modelPath == "" {
		xlog.Debug("guessDefaultsFromFile: modelPath is empty")
		return
	}

	// We try to guess only if we don't have a template defined already
	guessPath := filepath.Join(modelPath, cfg.ModelFileName())

	// The gguf parser may panic on malformed files; never let that take the
	// process down.
	defer func() {
		if r := recover(); r != nil {
			xlog.Error("guessDefaultsFromFile: panic while parsing gguf file")
		}
	}()

	// Whatever happens below (including a recovered panic), guarantee a
	// usable context size. This defer runs before the recover defer above.
	defer func() {
		if cfg.ContextSize == nil {
			if defaultCtx == 0 {
				defaultCtx = defaultContextSize
			}
			cfg.ContextSize = &defaultCtx
		}
	}()

	// try to parse the gguf file
	f, err := gguf.ParseGGUFFile(guessPath)
	if err != nil {
		// Not a GGUF file (or unreadable): nothing to guess from; the
		// ContextSize fallback above still applies. Previously this error
		// was dropped silently, which made misnamed/corrupt model files
		// hard to diagnose.
		xlog.Debug("guessDefaultsFromFile: could not parse file as gguf", "file", guessPath, "error", err)
		return
	}
	guessGGUFFromFile(cfg, f, defaultCtx)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/model_config_test.go | core/config/model_config_test.go | package config
import (
"io"
"net/http"
"os"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("Test cases for config related functions", func() {
	Context("Test Read configuration functions", func() {
		It("Test Validate", func() {
			// A backend containing a path traversal ("../foo-bar") must be
			// rejected by Validate, while the mixed-case known_usecases list
			// must still be parsed into KnownUsecases.
			tmp, err := os.CreateTemp("", "config.yaml")
			Expect(err).To(BeNil())
			defer os.Remove(tmp.Name())
			_, err = tmp.WriteString(
				`backend: "../foo-bar"
name: "foo"
parameters:
  model: "foo-bar"
known_usecases:
- chat
- COMPLETION
`)
			Expect(err).ToNot(HaveOccurred())
			config, err := readModelConfigFromFile(tmp.Name())
			Expect(err).To(BeNil())
			Expect(config).ToNot(BeNil())
			valid, err := config.Validate()
			Expect(err).To(HaveOccurred())
			Expect(valid).To(BeFalse())
			Expect(config.KnownUsecases).ToNot(BeNil())
		})
		It("Test Validate", func() {
			// A plain, well-formed config must validate successfully.
			tmp, err := os.CreateTemp("", "config.yaml")
			Expect(err).To(BeNil())
			defer os.Remove(tmp.Name())
			_, err = tmp.WriteString(
				`name: bar-baz
backend: "foo-bar"
parameters:
  model: "foo-bar"`)
			Expect(err).ToNot(HaveOccurred())
			config, err := readModelConfigFromFile(tmp.Name())
			Expect(err).To(BeNil())
			Expect(config).ToNot(BeNil())
			// two configs in config.yaml
			Expect(config.Name).To(Equal("bar-baz"))
			valid, err := config.Validate()
			Expect(err).To(BeNil())
			Expect(valid).To(BeTrue())
			// NOTE(review): the remainder of this spec fetches a file over the
			// network and will fail when offline — consider gating it behind
			// an integration label.
			// download https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml
			httpClient := http.Client{}
			resp, err := httpClient.Get("https://raw.githubusercontent.com/mudler/LocalAI/v2.25.0/embedded/models/hermes-2-pro-mistral.yaml")
			Expect(err).To(BeNil())
			defer resp.Body.Close()
			tmp, err = os.CreateTemp("", "config.yaml")
			Expect(err).To(BeNil())
			defer os.Remove(tmp.Name())
			_, err = io.Copy(tmp, resp.Body)
			Expect(err).To(BeNil())
			config, err = readModelConfigFromFile(tmp.Name())
			Expect(err).To(BeNil())
			Expect(config).ToNot(BeNil())
			// two configs in config.yaml
			Expect(config.Name).To(Equal("hermes-2-pro-mistral"))
			valid, err = config.Validate()
			Expect(err).To(BeNil())
			Expect(valid).To(BeTrue())
		})
	})
	It("Properly handles backend usecase matching", func() {
		// No backend at all: only FLAG_ANY matches.
		a := ModelConfig{
			Name: "a",
		}
		Expect(a.HasUsecases(FLAG_ANY)).To(BeTrue()) // FLAG_ANY just means the config _exists_ essentially.

		// Usecases inferred from the backend name: stablediffusion => image.
		b := ModelConfig{
			Name:    "b",
			Backend: "stablediffusion",
		}
		Expect(b.HasUsecases(FLAG_ANY)).To(BeTrue())
		Expect(b.HasUsecases(FLAG_IMAGE)).To(BeTrue())
		Expect(b.HasUsecases(FLAG_CHAT)).To(BeFalse())

		// llama-cpp with only a chat template: chat but not completion.
		c := ModelConfig{
			Name:    "c",
			Backend: "llama-cpp",
			TemplateConfig: TemplateConfig{
				Chat: "chat",
			},
		}
		Expect(c.HasUsecases(FLAG_ANY)).To(BeTrue())
		Expect(c.HasUsecases(FLAG_IMAGE)).To(BeFalse())
		Expect(c.HasUsecases(FLAG_COMPLETION)).To(BeFalse())
		Expect(c.HasUsecases(FLAG_CHAT)).To(BeTrue())

		// Both templates present: chat and completion.
		d := ModelConfig{
			Name:    "d",
			Backend: "llama-cpp",
			TemplateConfig: TemplateConfig{
				Chat:       "chat",
				Completion: "completion",
			},
		}
		Expect(d.HasUsecases(FLAG_ANY)).To(BeTrue())
		Expect(d.HasUsecases(FLAG_IMAGE)).To(BeFalse())
		Expect(d.HasUsecases(FLAG_COMPLETION)).To(BeTrue())
		Expect(d.HasUsecases(FLAG_CHAT)).To(BeTrue())

		// Embeddings flag adds the embeddings usecase.
		trueValue := true
		e := ModelConfig{
			Name:    "e",
			Backend: "llama-cpp",
			TemplateConfig: TemplateConfig{
				Completion: "completion",
			},
			Embeddings: &trueValue,
		}
		Expect(e.HasUsecases(FLAG_ANY)).To(BeTrue())
		Expect(e.HasUsecases(FLAG_IMAGE)).To(BeFalse())
		Expect(e.HasUsecases(FLAG_COMPLETION)).To(BeTrue())
		Expect(e.HasUsecases(FLAG_CHAT)).To(BeFalse())
		Expect(e.HasUsecases(FLAG_EMBEDDINGS)).To(BeTrue())

		// piper => TTS only.
		f := ModelConfig{
			Name:    "f",
			Backend: "piper",
		}
		Expect(f.HasUsecases(FLAG_ANY)).To(BeTrue())
		Expect(f.HasUsecases(FLAG_TTS)).To(BeTrue())
		Expect(f.HasUsecases(FLAG_CHAT)).To(BeFalse())

		// whisper => transcription only.
		g := ModelConfig{
			Name:    "g",
			Backend: "whisper",
		}
		Expect(g.HasUsecases(FLAG_ANY)).To(BeTrue())
		Expect(g.HasUsecases(FLAG_TRANSCRIPT)).To(BeTrue())
		Expect(g.HasUsecases(FLAG_TTS)).To(BeFalse())

		// transformers-musicgen => TTS and sound generation.
		h := ModelConfig{
			Name:    "h",
			Backend: "transformers-musicgen",
		}
		Expect(h.HasUsecases(FLAG_ANY)).To(BeTrue())
		Expect(h.HasUsecases(FLAG_TRANSCRIPT)).To(BeFalse())
		Expect(h.HasUsecases(FLAG_TTS)).To(BeTrue())
		Expect(h.HasUsecases(FLAG_SOUND_GENERATION)).To(BeTrue())

		// Explicit KnownUsecases are additive on top of the backend-derived
		// ones: whisper still grants transcription.
		knownUsecases := FLAG_CHAT | FLAG_COMPLETION
		i := ModelConfig{
			Name:    "i",
			Backend: "whisper",
			// Earlier test checks parsing, this just needs to set final values
			KnownUsecases: &knownUsecases,
		}
		Expect(i.HasUsecases(FLAG_ANY)).To(BeTrue())
		Expect(i.HasUsecases(FLAG_TRANSCRIPT)).To(BeTrue())
		Expect(i.HasUsecases(FLAG_TTS)).To(BeFalse())
		Expect(i.HasUsecases(FLAG_COMPLETION)).To(BeTrue())
		Expect(i.HasUsecases(FLAG_CHAT)).To(BeTrue())
	})
	It("Test Validate with invalid MCP config", func() {
		// The stdio JSON below is deliberately malformed (missing comma
		// between the "ddg" and "weather" objects): Validate must reject it.
		tmp, err := os.CreateTemp("", "config.yaml")
		Expect(err).To(BeNil())
		defer os.Remove(tmp.Name())
		_, err = tmp.WriteString(
			`name: test-mcp
backend: "llama-cpp"
mcp:
  stdio: |
    {
      "mcpServers": {
        "ddg": {
          "command": "/docker/docker",
          "args": ["run", "-i"]
        }
        "weather": {
          "command": "/docker/docker",
          "args": ["run", "-i"]
        }
      }
    }`)
		Expect(err).ToNot(HaveOccurred())
		config, err := readModelConfigFromFile(tmp.Name())
		Expect(err).To(BeNil())
		Expect(config).ToNot(BeNil())
		valid, err := config.Validate()
		Expect(err).To(HaveOccurred())
		Expect(valid).To(BeFalse())
		Expect(err.Error()).To(ContainSubstring("invalid MCP configuration"))
	})
	It("Test Validate with valid MCP config", func() {
		// Same config as above but with well-formed JSON: must validate.
		tmp, err := os.CreateTemp("", "config.yaml")
		Expect(err).To(BeNil())
		defer os.Remove(tmp.Name())
		_, err = tmp.WriteString(
			`name: test-mcp-valid
backend: "llama-cpp"
mcp:
  stdio: |
    {
      "mcpServers": {
        "ddg": {
          "command": "/docker/docker",
          "args": ["run", "-i"]
        },
        "weather": {
          "command": "/docker/docker",
          "args": ["run", "-i"]
        }
      }
    }`)
		Expect(err).ToNot(HaveOccurred())
		config, err := readModelConfigFromFile(tmp.Name())
		Expect(err).To(BeNil())
		Expect(config).ToNot(BeNil())
		valid, err := config.Validate()
		Expect(err).To(BeNil())
		Expect(valid).To(BeTrue())
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/model_config_filter.go | core/config/model_config_filter.go | package config
import "regexp"
// ModelConfigFilterFn decides whether a (name, config) pair should be kept
// when listing model configurations. config may be nil when only the name is
// known.
type ModelConfigFilterFn func(string, *ModelConfig) bool
// NoFilterFn is a ModelConfigFilterFn that accepts every entry.
func NoFilterFn(_ string, _ *ModelConfig) bool {
	return true
}
// BuildNameFilterFn compiles filter into a regular-expression-based
// ModelConfigFilterFn. An empty filter matches everything; an invalid
// regular expression is returned as an error. When a config is available its
// Name field is matched, otherwise the raw name is used.
func BuildNameFilterFn(filter string) (ModelConfigFilterFn, error) {
	if filter == "" {
		return NoFilterFn, nil
	}
	pattern, err := regexp.Compile(filter)
	if err != nil {
		return nil, err
	}
	match := func(name string, cfg *ModelConfig) bool {
		if cfg == nil {
			return pattern.MatchString(name)
		}
		return pattern.MatchString(cfg.Name)
	}
	return match, nil
}
// BuildUsecaseFilterFn returns a ModelConfigFilterFn that keeps only configs
// supporting the given usecases. FLAG_ANY disables filtering entirely; a nil
// config is always excluded.
func BuildUsecaseFilterFn(usecases ModelConfigUsecase) ModelConfigFilterFn {
	if usecases == FLAG_ANY {
		return NoFilterFn
	}
	return func(_ string, cfg *ModelConfig) bool {
		// TODO: Potentially make this a param, for now, no known usecase to include
		return cfg != nil && cfg.HasUsecases(usecases)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/runtime_settings.go | core/config/runtime_settings.go | package config
// RuntimeSettings represents runtime configuration that can be changed dynamically.
// This struct is used for:
//   - API responses (GET /api/settings)
//   - API requests (POST /api/settings)
//   - Persisting to runtime_settings.json
//   - Loading from runtime_settings.json on startup
//
// All fields are pointers to distinguish between "not set" and "set to zero/false value".
// See ApplicationConfig.ApplyRuntimeSettings for how each field is applied and
// which ones require a watchdog restart.
type RuntimeSettings struct {
	// Watchdog settings. Applying any of these flags triggers a restart
	// requirement; invalid duration strings are ignored on apply.
	WatchdogEnabled     *bool   `json:"watchdog_enabled,omitempty"`
	WatchdogIdleEnabled *bool   `json:"watchdog_idle_enabled,omitempty"`
	WatchdogBusyEnabled *bool   `json:"watchdog_busy_enabled,omitempty"`
	WatchdogIdleTimeout *string `json:"watchdog_idle_timeout,omitempty"`
	WatchdogBusyTimeout *string `json:"watchdog_busy_timeout,omitempty"`
	WatchdogInterval    *string `json:"watchdog_interval,omitempty"` // Interval between watchdog checks (e.g., 2s, 30s)

	// Backend management
	SingleBackend           *bool `json:"single_backend,omitempty"`      // Deprecated: use MaxActiveBackends = 1 instead
	MaxActiveBackends       *int  `json:"max_active_backends,omitempty"` // Maximum number of active backends (0 = unlimited, 1 = single backend mode)
	ParallelBackendRequests *bool `json:"parallel_backend_requests,omitempty"`

	// Memory Reclaimer settings (works with GPU if available, otherwise RAM)
	MemoryReclaimerEnabled   *bool    `json:"memory_reclaimer_enabled,omitempty"`   // Enable memory threshold monitoring
	MemoryReclaimerThreshold *float64 `json:"memory_reclaimer_threshold,omitempty"` // Threshold 0.0-1.0 (e.g., 0.95 = 95%)

	// Eviction settings
	ForceEvictionWhenBusy    *bool   `json:"force_eviction_when_busy,omitempty"`    // Force eviction even when models have active API calls (default: false for safety)
	LRUEvictionMaxRetries    *int    `json:"lru_eviction_max_retries,omitempty"`    // Maximum number of retries when waiting for busy models to become idle (default: 30)
	LRUEvictionRetryInterval *string `json:"lru_eviction_retry_interval,omitempty"` // Interval between retries when waiting for busy models (e.g., 1s, 2s) (default: 1s)

	// Performance settings
	Threads         *int  `json:"threads,omitempty"`
	ContextSize     *int  `json:"context_size,omitempty"`
	F16             *bool `json:"f16,omitempty"`
	Debug           *bool `json:"debug,omitempty"`
	EnableTracing   *bool `json:"enable_tracing,omitempty"`
	TracingMaxItems *int  `json:"tracing_max_items,omitempty"`

	// Security/CORS settings
	CORS             *bool   `json:"cors,omitempty"`
	CSRF             *bool   `json:"csrf,omitempty"`
	CORSAllowOrigins *string `json:"cors_allow_origins,omitempty"`

	// P2P settings
	P2PToken     *string `json:"p2p_token,omitempty"`
	P2PNetworkID *string `json:"p2p_network_id,omitempty"`
	Federated    *bool   `json:"federated,omitempty"`

	// Gallery settings
	Galleries                *[]Gallery `json:"galleries,omitempty"`
	BackendGalleries         *[]Gallery `json:"backend_galleries,omitempty"`
	AutoloadGalleries        *bool      `json:"autoload_galleries,omitempty"`
	AutoloadBackendGalleries *bool      `json:"autoload_backend_galleries,omitempty"`

	// API keys - No omitempty as we need to save empty arrays to clear keys
	ApiKeys *[]string `json:"api_keys"`

	// Agent settings
	AgentJobRetentionDays *int `json:"agent_job_retention_days,omitempty"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/gallery.go | core/config/gallery.go | package config
// Gallery identifies a remote gallery (a named index URL) from which models
// or backends can be discovered; used by the Galleries/BackendGalleries
// runtime settings.
type Gallery struct {
	URL  string `json:"url" yaml:"url"`
	Name string `json:"name" yaml:"name"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/application_config_test.go | core/config/application_config_test.go | package config
import (
"time"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("ApplicationConfig RuntimeSettings Conversion", func() {
Describe("ToRuntimeSettings", func() {
It("should convert all fields correctly", func() {
appConfig := &ApplicationConfig{
WatchDog: true,
WatchDogIdle: true,
WatchDogBusy: true,
WatchDogIdleTimeout: 20 * time.Minute,
WatchDogBusyTimeout: 10 * time.Minute,
SingleBackend: false,
MaxActiveBackends: 5,
ParallelBackendRequests: true,
MemoryReclaimerEnabled: true,
MemoryReclaimerThreshold: 0.85,
Threads: 8,
ContextSize: 4096,
F16: true,
Debug: true,
CORS: true,
CSRF: true,
CORSAllowOrigins: "https://example.com",
P2PToken: "test-token",
P2PNetworkID: "test-network",
Federated: true,
Galleries: []Gallery{{Name: "test-gallery", URL: "https://example.com"}},
BackendGalleries: []Gallery{{Name: "backend-gallery", URL: "https://example.com/backend"}},
AutoloadGalleries: true,
AutoloadBackendGalleries: true,
ApiKeys: []string{"key1", "key2"},
AgentJobRetentionDays: 30,
}
rs := appConfig.ToRuntimeSettings()
Expect(rs.WatchdogEnabled).ToNot(BeNil())
Expect(*rs.WatchdogEnabled).To(BeTrue())
Expect(rs.WatchdogIdleEnabled).ToNot(BeNil())
Expect(*rs.WatchdogIdleEnabled).To(BeTrue())
Expect(rs.WatchdogBusyEnabled).ToNot(BeNil())
Expect(*rs.WatchdogBusyEnabled).To(BeTrue())
Expect(rs.WatchdogIdleTimeout).ToNot(BeNil())
Expect(*rs.WatchdogIdleTimeout).To(Equal("20m0s"))
Expect(rs.WatchdogBusyTimeout).ToNot(BeNil())
Expect(*rs.WatchdogBusyTimeout).To(Equal("10m0s"))
Expect(rs.SingleBackend).ToNot(BeNil())
Expect(*rs.SingleBackend).To(BeFalse())
Expect(rs.MaxActiveBackends).ToNot(BeNil())
Expect(*rs.MaxActiveBackends).To(Equal(5))
Expect(rs.ParallelBackendRequests).ToNot(BeNil())
Expect(*rs.ParallelBackendRequests).To(BeTrue())
Expect(rs.MemoryReclaimerEnabled).ToNot(BeNil())
Expect(*rs.MemoryReclaimerEnabled).To(BeTrue())
Expect(rs.MemoryReclaimerThreshold).ToNot(BeNil())
Expect(*rs.MemoryReclaimerThreshold).To(Equal(0.85))
Expect(rs.Threads).ToNot(BeNil())
Expect(*rs.Threads).To(Equal(8))
Expect(rs.ContextSize).ToNot(BeNil())
Expect(*rs.ContextSize).To(Equal(4096))
Expect(rs.F16).ToNot(BeNil())
Expect(*rs.F16).To(BeTrue())
Expect(rs.Debug).ToNot(BeNil())
Expect(*rs.Debug).To(BeTrue())
Expect(rs.CORS).ToNot(BeNil())
Expect(*rs.CORS).To(BeTrue())
Expect(rs.CSRF).ToNot(BeNil())
Expect(*rs.CSRF).To(BeTrue())
Expect(rs.CORSAllowOrigins).ToNot(BeNil())
Expect(*rs.CORSAllowOrigins).To(Equal("https://example.com"))
Expect(rs.P2PToken).ToNot(BeNil())
Expect(*rs.P2PToken).To(Equal("test-token"))
Expect(rs.P2PNetworkID).ToNot(BeNil())
Expect(*rs.P2PNetworkID).To(Equal("test-network"))
Expect(rs.Federated).ToNot(BeNil())
Expect(*rs.Federated).To(BeTrue())
Expect(rs.Galleries).ToNot(BeNil())
Expect(*rs.Galleries).To(HaveLen(1))
Expect((*rs.Galleries)[0].Name).To(Equal("test-gallery"))
Expect(rs.BackendGalleries).ToNot(BeNil())
Expect(*rs.BackendGalleries).To(HaveLen(1))
Expect((*rs.BackendGalleries)[0].Name).To(Equal("backend-gallery"))
Expect(rs.AutoloadGalleries).ToNot(BeNil())
Expect(*rs.AutoloadGalleries).To(BeTrue())
Expect(rs.AutoloadBackendGalleries).ToNot(BeNil())
Expect(*rs.AutoloadBackendGalleries).To(BeTrue())
Expect(rs.ApiKeys).ToNot(BeNil())
Expect(*rs.ApiKeys).To(HaveLen(2))
Expect(*rs.ApiKeys).To(ContainElements("key1", "key2"))
Expect(rs.AgentJobRetentionDays).ToNot(BeNil())
Expect(*rs.AgentJobRetentionDays).To(Equal(30))
})
It("should use default timeouts when not set", func() {
appConfig := &ApplicationConfig{}
rs := appConfig.ToRuntimeSettings()
Expect(rs.WatchdogIdleTimeout).ToNot(BeNil())
Expect(*rs.WatchdogIdleTimeout).To(Equal("15m"))
Expect(rs.WatchdogBusyTimeout).ToNot(BeNil())
Expect(*rs.WatchdogBusyTimeout).To(Equal("5m"))
})
})
Describe("ApplyRuntimeSettings", func() {
It("should return false when settings is nil", func() {
appConfig := &ApplicationConfig{}
changed := appConfig.ApplyRuntimeSettings(nil)
Expect(changed).To(BeFalse())
})
It("should only apply non-nil fields", func() {
appConfig := &ApplicationConfig{
WatchDog: false,
Threads: 4,
ContextSize: 2048,
}
watchdogEnabled := true
rs := &RuntimeSettings{
WatchdogEnabled: &watchdogEnabled,
// Leave other fields nil
}
changed := appConfig.ApplyRuntimeSettings(rs)
Expect(changed).To(BeTrue())
Expect(appConfig.WatchDog).To(BeTrue())
// Unchanged fields should remain
Expect(appConfig.Threads).To(Equal(4))
Expect(appConfig.ContextSize).To(Equal(2048))
})
It("should apply watchdog settings and return changed=true", func() {
appConfig := &ApplicationConfig{}
watchdogEnabled := true
watchdogIdle := true
watchdogBusy := true
idleTimeout := "30m"
busyTimeout := "15m"
rs := &RuntimeSettings{
WatchdogEnabled: &watchdogEnabled,
WatchdogIdleEnabled: &watchdogIdle,
WatchdogBusyEnabled: &watchdogBusy,
WatchdogIdleTimeout: &idleTimeout,
WatchdogBusyTimeout: &busyTimeout,
}
changed := appConfig.ApplyRuntimeSettings(rs)
Expect(changed).To(BeTrue())
Expect(appConfig.WatchDog).To(BeTrue())
Expect(appConfig.WatchDogIdle).To(BeTrue())
Expect(appConfig.WatchDogBusy).To(BeTrue())
Expect(appConfig.WatchDogIdleTimeout).To(Equal(30 * time.Minute))
Expect(appConfig.WatchDogBusyTimeout).To(Equal(15 * time.Minute))
})
It("should enable watchdog when idle is enabled", func() {
appConfig := &ApplicationConfig{WatchDog: false}
watchdogIdle := true
rs := &RuntimeSettings{
WatchdogIdleEnabled: &watchdogIdle,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.WatchDog).To(BeTrue())
Expect(appConfig.WatchDogIdle).To(BeTrue())
})
It("should enable watchdog when busy is enabled", func() {
appConfig := &ApplicationConfig{WatchDog: false}
watchdogBusy := true
rs := &RuntimeSettings{
WatchdogBusyEnabled: &watchdogBusy,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.WatchDog).To(BeTrue())
Expect(appConfig.WatchDogBusy).To(BeTrue())
})
It("should handle MaxActiveBackends and update SingleBackend accordingly", func() {
appConfig := &ApplicationConfig{}
maxBackends := 1
rs := &RuntimeSettings{
MaxActiveBackends: &maxBackends,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.MaxActiveBackends).To(Equal(1))
Expect(appConfig.SingleBackend).To(BeTrue())
// Test with multiple backends
maxBackends = 5
rs = &RuntimeSettings{
MaxActiveBackends: &maxBackends,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.MaxActiveBackends).To(Equal(5))
Expect(appConfig.SingleBackend).To(BeFalse())
})
It("should handle SingleBackend and update MaxActiveBackends accordingly", func() {
appConfig := &ApplicationConfig{}
singleBackend := true
rs := &RuntimeSettings{
SingleBackend: &singleBackend,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.SingleBackend).To(BeTrue())
Expect(appConfig.MaxActiveBackends).To(Equal(1))
// Test disabling single backend
singleBackend = false
rs = &RuntimeSettings{
SingleBackend: &singleBackend,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.SingleBackend).To(BeFalse())
Expect(appConfig.MaxActiveBackends).To(Equal(0))
})
It("should enable watchdog when memory reclaimer is enabled", func() {
appConfig := &ApplicationConfig{WatchDog: false}
memoryEnabled := true
threshold := 0.90
rs := &RuntimeSettings{
MemoryReclaimerEnabled: &memoryEnabled,
MemoryReclaimerThreshold: &threshold,
}
changed := appConfig.ApplyRuntimeSettings(rs)
Expect(changed).To(BeTrue())
Expect(appConfig.WatchDog).To(BeTrue())
Expect(appConfig.MemoryReclaimerEnabled).To(BeTrue())
Expect(appConfig.MemoryReclaimerThreshold).To(Equal(0.90))
})
It("should reject invalid memory threshold values", func() {
appConfig := &ApplicationConfig{MemoryReclaimerThreshold: 0.50}
// Test threshold > 1.0
invalidThreshold := 1.5
rs := &RuntimeSettings{
MemoryReclaimerThreshold: &invalidThreshold,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.MemoryReclaimerThreshold).To(Equal(0.50)) // Should remain unchanged
// Test threshold <= 0
invalidThreshold = 0.0
rs = &RuntimeSettings{
MemoryReclaimerThreshold: &invalidThreshold,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.MemoryReclaimerThreshold).To(Equal(0.50)) // Should remain unchanged
// Test negative threshold
invalidThreshold = -0.5
rs = &RuntimeSettings{
MemoryReclaimerThreshold: &invalidThreshold,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.MemoryReclaimerThreshold).To(Equal(0.50)) // Should remain unchanged
})
It("should accept valid memory threshold at boundary", func() {
appConfig := &ApplicationConfig{}
// Test threshold = 1.0 (maximum valid)
threshold := 1.0
rs := &RuntimeSettings{
MemoryReclaimerThreshold: &threshold,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.MemoryReclaimerThreshold).To(Equal(1.0))
// Test threshold just above 0
threshold = 0.01
rs = &RuntimeSettings{
MemoryReclaimerThreshold: &threshold,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.MemoryReclaimerThreshold).To(Equal(0.01))
})
It("should apply performance settings without triggering watchdog change", func() {
appConfig := &ApplicationConfig{}
threads := 16
contextSize := 8192
f16 := true
debug := true
rs := &RuntimeSettings{
Threads: &threads,
ContextSize: &contextSize,
F16: &f16,
Debug: &debug,
}
changed := appConfig.ApplyRuntimeSettings(rs)
// These settings don't require watchdog restart
Expect(changed).To(BeFalse())
Expect(appConfig.Threads).To(Equal(16))
Expect(appConfig.ContextSize).To(Equal(8192))
Expect(appConfig.F16).To(BeTrue())
Expect(appConfig.Debug).To(BeTrue())
})
It("should apply CORS and security settings", func() {
appConfig := &ApplicationConfig{}
cors := true
csrf := true
origins := "https://example.com,https://other.com"
rs := &RuntimeSettings{
CORS: &cors,
CSRF: &csrf,
CORSAllowOrigins: &origins,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.CORS).To(BeTrue())
Expect(appConfig.CSRF).To(BeTrue())
Expect(appConfig.CORSAllowOrigins).To(Equal("https://example.com,https://other.com"))
})
It("should apply P2P settings", func() {
appConfig := &ApplicationConfig{}
token := "p2p-test-token"
networkID := "p2p-test-network"
federated := true
rs := &RuntimeSettings{
P2PToken: &token,
P2PNetworkID: &networkID,
Federated: &federated,
}
appConfig.ApplyRuntimeSettings(rs)
Expect(appConfig.P2PToken).To(Equal("p2p-test-token"))
Expect(appConfig.P2PNetworkID).To(Equal("p2p-test-network"))
Expect(appConfig.Federated).To(BeTrue())
})
// Verifies that gallery-related runtime settings (model/backend galleries and
// their autoload flags) are copied onto the ApplicationConfig.
It("should apply gallery settings", func() {
	appConfig := &ApplicationConfig{}
	galleries := []Gallery{
		{Name: "gallery1", URL: "https://gallery1.com"},
		{Name: "gallery2", URL: "https://gallery2.com"},
	}
	backendGalleries := []Gallery{
		{Name: "backend-gallery", URL: "https://backend.com"},
	}
	autoload := true
	autoloadBackend := true
	rs := &RuntimeSettings{
		Galleries:                &galleries,
		BackendGalleries:         &backendGalleries,
		AutoloadGalleries:        &autoload,
		AutoloadBackendGalleries: &autoloadBackend,
	}
	appConfig.ApplyRuntimeSettings(rs)
	Expect(appConfig.Galleries).To(HaveLen(2))
	Expect(appConfig.Galleries[0].Name).To(Equal("gallery1"))
	Expect(appConfig.BackendGalleries).To(HaveLen(1))
	Expect(appConfig.AutoloadGalleries).To(BeTrue())
	Expect(appConfig.AutoloadBackendGalleries).To(BeTrue())
})
// Verifies that the agent job retention setting is applied.
It("should apply agent settings", func() {
	appConfig := &ApplicationConfig{}
	retentionDays := 14
	rs := &RuntimeSettings{
		AgentJobRetentionDays: &retentionDays,
	}
	appConfig.ApplyRuntimeSettings(rs)
	Expect(appConfig.AgentJobRetentionDays).To(Equal(14))
})
})
Describe("Round-trip conversion", func() {
	// Round-trips an ApplicationConfig through ToRuntimeSettings and back,
	// asserting every field survives the conversion.
	It("should maintain values through ToRuntimeSettings -> ApplyRuntimeSettings", func() {
		original := &ApplicationConfig{
			WatchDog:                 true,
			WatchDogIdle:             true,
			WatchDogBusy:             false,
			WatchDogIdleTimeout:      25 * time.Minute,
			WatchDogBusyTimeout:      12 * time.Minute,
			SingleBackend:            false,
			MaxActiveBackends:        3,
			ParallelBackendRequests:  true,
			MemoryReclaimerEnabled:   true,
			MemoryReclaimerThreshold: 0.92,
			Threads:                  12,
			ContextSize:              6144,
			F16:                      true,
			Debug:                    false,
			CORS:                     true,
			CSRF:                     false,
			CORSAllowOrigins:         "https://test.com",
			P2PToken:                 "round-trip-token",
			P2PNetworkID:             "round-trip-network",
			Federated:                true,
			AutoloadGalleries:        true,
			AutoloadBackendGalleries: false,
			AgentJobRetentionDays:    60,
		}
		// Convert to RuntimeSettings
		rs := original.ToRuntimeSettings()
		// Apply to a new ApplicationConfig
		target := &ApplicationConfig{}
		target.ApplyRuntimeSettings(&rs)
		// Verify all values match
		// NOTE(review): SingleBackend is set above but not asserted here.
		Expect(target.WatchDog).To(Equal(original.WatchDog))
		Expect(target.WatchDogIdle).To(Equal(original.WatchDogIdle))
		Expect(target.WatchDogBusy).To(Equal(original.WatchDogBusy))
		Expect(target.WatchDogIdleTimeout).To(Equal(original.WatchDogIdleTimeout))
		Expect(target.WatchDogBusyTimeout).To(Equal(original.WatchDogBusyTimeout))
		Expect(target.MaxActiveBackends).To(Equal(original.MaxActiveBackends))
		Expect(target.ParallelBackendRequests).To(Equal(original.ParallelBackendRequests))
		Expect(target.MemoryReclaimerEnabled).To(Equal(original.MemoryReclaimerEnabled))
		Expect(target.MemoryReclaimerThreshold).To(Equal(original.MemoryReclaimerThreshold))
		Expect(target.Threads).To(Equal(original.Threads))
		Expect(target.ContextSize).To(Equal(original.ContextSize))
		Expect(target.F16).To(Equal(original.F16))
		Expect(target.Debug).To(Equal(original.Debug))
		Expect(target.CORS).To(Equal(original.CORS))
		Expect(target.CSRF).To(Equal(original.CSRF))
		Expect(target.CORSAllowOrigins).To(Equal(original.CORSAllowOrigins))
		Expect(target.P2PToken).To(Equal(original.P2PToken))
		Expect(target.P2PNetworkID).To(Equal(original.P2PNetworkID))
		Expect(target.Federated).To(Equal(original.Federated))
		Expect(target.AutoloadGalleries).To(Equal(original.AutoloadGalleries))
		Expect(target.AutoloadBackendGalleries).To(Equal(original.AutoloadBackendGalleries))
		Expect(target.AgentJobRetentionDays).To(Equal(original.AgentJobRetentionDays))
	})
	It("should handle empty galleries correctly in round-trip", func() {
		original := &ApplicationConfig{
			Galleries:        []Gallery{},
			BackendGalleries: []Gallery{},
			ApiKeys:          []string{},
		}
		rs := original.ToRuntimeSettings()
		target := &ApplicationConfig{}
		target.ApplyRuntimeSettings(&rs)
		Expect(target.Galleries).To(BeEmpty())
		Expect(target.BackendGalleries).To(BeEmpty())
	})
})
Describe("Edge cases", func() {
	It("should handle invalid timeout string in ApplyRuntimeSettings", func() {
		appConfig := &ApplicationConfig{
			WatchDogIdleTimeout: 10 * time.Minute,
		}
		invalidTimeout := "not-a-duration"
		rs := &RuntimeSettings{
			WatchdogIdleTimeout: &invalidTimeout,
		}
		appConfig.ApplyRuntimeSettings(rs)
		// Should remain unchanged due to parse error
		Expect(appConfig.WatchDogIdleTimeout).To(Equal(10 * time.Minute))
	})
	It("should handle zero values in ApplicationConfig", func() {
		appConfig := &ApplicationConfig{
			// All zero values
		}
		rs := appConfig.ToRuntimeSettings()
		// Should still have non-nil pointers with zero/default values
		Expect(rs.WatchdogEnabled).ToNot(BeNil())
		Expect(*rs.WatchdogEnabled).To(BeFalse())
		Expect(rs.Threads).ToNot(BeNil())
		Expect(*rs.Threads).To(Equal(0))
		Expect(rs.MemoryReclaimerThreshold).ToNot(BeNil())
		Expect(*rs.MemoryReclaimerThreshold).To(Equal(0.0))
	})
	It("should prefer MaxActiveBackends over SingleBackend when both are set", func() {
		appConfig := &ApplicationConfig{}
		maxBackends := 3
		singleBackend := true
		rs := &RuntimeSettings{
			MaxActiveBackends: &maxBackends,
			SingleBackend:     &singleBackend,
		}
		appConfig.ApplyRuntimeSettings(rs)
		// MaxActiveBackends should take precedence
		Expect(appConfig.MaxActiveBackends).To(Equal(3))
		Expect(appConfig.SingleBackend).To(BeFalse()) // 3 != 1, so single backend is false
	})
})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/model_test.go | core/config/model_test.go | package config
import (
"os"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs covering config-file reading: a multi-config YAML file
// (readMultipleModelConfigsFromFile) and directory-based loading
// (ModelConfigLoader.LoadModelConfigsFromPath).
var _ = Describe("Test cases for config related functions", func() {
	var (
		configFile string
	)
	Context("Test Read configuration functions", func() {
		// NOTE(review): this assignment runs at spec-tree construction time,
		// not inside a BeforeEach, so CONFIG_FILE must already be set when the
		// suite is built.
		configFile = os.Getenv("CONFIG_FILE")
		It("Test readConfigFile", func() {
			config, err := readMultipleModelConfigsFromFile(configFile)
			Expect(err).To(BeNil())
			Expect(config).ToNot(BeNil())
			// two configs in config.yaml
			Expect(config[0].Name).To(Equal("list1"))
			Expect(config[1].Name).To(Equal("list2"))
		})
		It("Test LoadConfigs", func() {
			bcl := NewModelConfigLoader(os.Getenv("MODELS_PATH"))
			err := bcl.LoadModelConfigsFromPath(os.Getenv("MODELS_PATH"))
			Expect(err).To(BeNil())
			configs := bcl.GetAllModelsConfigs()
			loadedModelNames := []string{}
			for _, v := range configs {
				loadedModelNames = append(loadedModelNames, v.Name)
			}
			Expect(configs).ToNot(BeNil())
			Expect(loadedModelNames).To(ContainElements("code-search-ada-code-001"))
			// config should includes text-embedding-ada-002 models's api.config
			Expect(loadedModelNames).To(ContainElements("text-embedding-ada-002"))
			// config should includes rwkv_test models's api.config
			Expect(loadedModelNames).To(ContainElements("rwkv_test"))
			// config should includes whisper-1 models's api.config
			Expect(loadedModelNames).To(ContainElements("whisper-1"))
		})
		It("Test new loadconfig", func() {
			bcl := NewModelConfigLoader(os.Getenv("MODELS_PATH"))
			err := bcl.LoadModelConfigsFromPath(os.Getenv("MODELS_PATH"))
			Expect(err).To(BeNil())
			configs := bcl.GetAllModelsConfigs()
			loadedModelNames := []string{}
			for _, v := range configs {
				loadedModelNames = append(loadedModelNames, v.Name)
			}
			Expect(configs).ToNot(BeNil())
			totalModels := len(loadedModelNames)
			Expect(loadedModelNames).To(ContainElements("code-search-ada-code-001"))
			// config should includes text-embedding-ada-002 models's api.config
			Expect(loadedModelNames).To(ContainElements("text-embedding-ada-002"))
			// config should includes rwkv_test models's api.config
			Expect(loadedModelNames).To(ContainElements("rwkv_test"))
			// config should includes whisper-1 models's api.config
			Expect(loadedModelNames).To(ContainElements("whisper-1"))
			// create a temp directory and store a temporary model
			tmpdir, err := os.MkdirTemp("", "test")
			Expect(err).ToNot(HaveOccurred())
			defer os.RemoveAll(tmpdir)
			// create a temporary model definition and load it on top of the
			// already-populated loader; the new model is additive.
			model := `name: "test-model"
description: "test model"

options:
- foo
- bar
- baz
`
			modelFile := tmpdir + "/test-model.yaml"
			err = os.WriteFile(modelFile, []byte(model), 0644)
			Expect(err).ToNot(HaveOccurred())
			err = bcl.LoadModelConfigsFromPath(tmpdir)
			Expect(err).ToNot(HaveOccurred())
			configs = bcl.GetAllModelsConfigs()
			Expect(len(configs)).ToNot(Equal(totalModels))
			loadedModelNames = []string{}
			var testModel ModelConfig
			for _, v := range configs {
				loadedModelNames = append(loadedModelNames, v.Name)
				if v.Name == "test-model" {
					testModel = v
				}
			}
			Expect(loadedModelNames).To(ContainElements("test-model"))
			Expect(testModel.Description).To(Equal("test model"))
			Expect(testModel.Options).To(ContainElements("foo", "bar", "baz"))
		})
	})
})
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/config_suite_test.go | core/config/config_suite_test.go | package config_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestConfig is the Ginkgo entry point for this package: it wires Gomega
// failures into Ginkgo and runs every registered spec.
func TestConfig(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Config test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/model_config_loader.go | core/config/model_config_loader.go | package config
import (
"errors"
"fmt"
"io/fs"
"os"
"path/filepath"
"sort"
"strings"
"sync"
"github.com/charmbracelet/glamour"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/pkg/downloader"
"github.com/mudler/LocalAI/pkg/utils"
"github.com/mudler/xlog"
"gopkg.in/yaml.v3"
)
// ModelConfigLoader keeps an in-memory registry of model configurations,
// keyed by model name. The embedded mutex guards the configs map; every
// method locks it, so the loader is safe for concurrent use.
type ModelConfigLoader struct {
	configs   map[string]ModelConfig // model name -> parsed configuration
	modelPath string                 // base directory where model files live
	sync.Mutex
}
// NewModelConfigLoader returns an empty loader rooted at modelPath.
func NewModelConfigLoader(modelPath string) *ModelConfigLoader {
	loader := &ModelConfigLoader{
		modelPath: modelPath,
		configs:   map[string]ModelConfig{},
	}
	return loader
}
// LoadOptions collects the tunables that the config readers and SetDefaults
// consult when filling in per-model defaults.
type LoadOptions struct {
	modelPath        string
	debug            bool
	threads, ctxSize int
	f16              bool
}

// LoadOptionDebug sets the debug default for loaded configs.
func LoadOptionDebug(debug bool) ConfigLoaderOption {
	return func(opts *LoadOptions) { opts.debug = debug }
}

// LoadOptionThreads sets the default thread count for loaded configs.
func LoadOptionThreads(threads int) ConfigLoaderOption {
	return func(opts *LoadOptions) { opts.threads = threads }
}

// LoadOptionContextSize sets the default context size for loaded configs.
func LoadOptionContextSize(ctxSize int) ConfigLoaderOption {
	return func(opts *LoadOptions) { opts.ctxSize = ctxSize }
}

// ModelPath sets the base models directory used when applying defaults.
func ModelPath(modelPath string) ConfigLoaderOption {
	return func(opts *LoadOptions) { opts.modelPath = modelPath }
}

// LoadOptionF16 sets the default f16 flag for loaded configs.
func LoadOptionF16(f16 bool) ConfigLoaderOption {
	return func(opts *LoadOptions) { opts.f16 = f16 }
}

// ConfigLoaderOption mutates a LoadOptions in place (functional-options pattern).
type ConfigLoaderOption func(*LoadOptions)

// Apply runs every option against the receiver, in order.
func (lo *LoadOptions) Apply(options ...ConfigLoaderOption) {
	for _, opt := range options {
		opt(lo)
	}
}
// TODO: either in the next PR or the next commit, I want to merge these down into a single function that looks at the first few characters of the file to determine if we need to deserialize to []BackendConfig or BackendConfig

// readMultipleModelConfigsFromFile parses a YAML file containing a list of
// model configurations, stamping each entry with its source file and
// applying the loader defaults from opts.
func readMultipleModelConfigsFromFile(file string, opts ...ConfigLoaderOption) ([]*ModelConfig, error) {
	raw, err := os.ReadFile(file)
	if err != nil {
		return nil, fmt.Errorf("readMultipleModelConfigsFromFile cannot read config file %q: %w", file, err)
	}

	configs := []*ModelConfig{}
	if err := yaml.Unmarshal(raw, &configs); err != nil {
		return nil, fmt.Errorf("readMultipleModelConfigsFromFile cannot unmarshal config file %q: %w", file, err)
	}

	for _, cfg := range configs {
		cfg.modelConfigFile = file
		cfg.SetDefaults(opts...)
	}
	return configs, nil
}
// readModelConfigFromFile reads a single ModelConfig from a YAML file and
// applies the loader defaults from opts.
//
// Fix: removed dead code — the previous version built a local LoadOptions via
// lo.Apply(opts...) and never used it; SetDefaults below already consumes the
// options itself.
func readModelConfigFromFile(file string, opts ...ConfigLoaderOption) (*ModelConfig, error) {
	c := &ModelConfig{}
	f, err := os.ReadFile(file)
	if err != nil {
		return nil, fmt.Errorf("readModelConfigFromFile cannot read config file %q: %w", file, err)
	}
	if err := yaml.Unmarshal(f, c); err != nil {
		return nil, fmt.Errorf("readModelConfigFromFile cannot unmarshal config file %q: %w", file, err)
	}

	c.SetDefaults(opts...)
	c.modelConfigFile = file
	return c, nil
}
// Load a config file for a model
//
// LoadModelConfigFileByName resolves the configuration for modelName:
// a cached config wins; otherwise <modelPath>/<modelName>.yaml is read and
// registered; failing both, a minimal config whose Model field is just the
// model name is returned. Defaults are always (re)applied before returning.
func (bcl *ModelConfigLoader) LoadModelConfigFileByName(modelName, modelPath string, opts ...ConfigLoaderOption) (*ModelConfig, error) {

	// Load a config file if present after the model name
	cfg := &ModelConfig{
		PredictionOptions: schema.PredictionOptions{
			BasicModelRequest: schema.BasicModelRequest{
				Model: modelName,
			},
		},
	}

	cfgExisting, exists := bcl.GetModelConfig(modelName)
	if exists {
		cfg = &cfgExisting
	} else {
		// Try loading a model config file
		modelConfig := filepath.Join(modelPath, modelName+".yaml")
		if _, err := os.Stat(modelConfig); err == nil {
			if err := bcl.ReadModelConfig(
				modelConfig, opts...,
			); err != nil {
				return nil, fmt.Errorf("failed loading model config (%s) %s", modelConfig, err.Error())
			}
			// Re-read from the registry: ReadModelConfig stores entries by the
			// config's own Name, which may differ from modelName.
			cfgExisting, exists = bcl.GetModelConfig(modelName)
			if exists {
				cfg = &cfgExisting
			}
		}
	}

	cfg.SetDefaults(append(opts, ModelPath(modelPath))...)
	return cfg, nil
}
// LoadModelConfigFileByNameDefaultOptions resolves modelName with the loader
// defaults (debug, threads, context size, f16, models path) taken from the
// application configuration.
func (bcl *ModelConfigLoader) LoadModelConfigFileByNameDefaultOptions(modelName string, appConfig *ApplicationConfig) (*ModelConfig, error) {
	modelsPath := appConfig.SystemState.Model.ModelsPath
	defaults := []ConfigLoaderOption{
		LoadOptionDebug(appConfig.Debug),
		LoadOptionThreads(appConfig.Threads),
		LoadOptionContextSize(appConfig.ContextSize),
		LoadOptionF16(appConfig.F16),
		ModelPath(modelsPath),
	}
	return bcl.LoadModelConfigFileByName(modelName, modelsPath, defaults...)
}
// This format is currently only used when reading a single file at startup, passed in via ApplicationConfig.ConfigFile
//
// LoadMultipleModelConfigsSingleFile registers every valid config found in a
// multi-config YAML file; invalid entries are logged and skipped.
func (bcl *ModelConfigLoader) LoadMultipleModelConfigsSingleFile(file string, opts ...ConfigLoaderOption) error {
	bcl.Lock()
	defer bcl.Unlock()

	configs, err := readMultipleModelConfigsFromFile(file, opts...)
	if err != nil {
		return fmt.Errorf("cannot load config file: %w", err)
	}

	for _, cfg := range configs {
		valid, validationErr := cfg.Validate()
		if !valid {
			xlog.Warn("skipping invalid model config", "name", cfg.Name, "error", validationErr)
			continue
		}
		bcl.configs[cfg.Name] = *cfg
	}
	return nil
}
// ReadModelConfig reads a single model config file and registers it under the
// config's Name. It returns an error if the file cannot be parsed or the
// resulting config fails validation.
//
// Fix: the no-detail failure path used fmt.Errorf with no format verbs;
// errors.New is the idiomatic form (flagged by staticcheck).
func (bcl *ModelConfigLoader) ReadModelConfig(file string, opts ...ConfigLoaderOption) error {
	bcl.Lock()
	defer bcl.Unlock()

	c, err := readModelConfigFromFile(file, opts...)
	if err != nil {
		return fmt.Errorf("ReadModelConfig cannot read config file %q: %w", file, err)
	}

	valid, validationErr := c.Validate()
	if !valid {
		if validationErr != nil {
			return fmt.Errorf("config is not valid: %w", validationErr)
		}
		return errors.New("config is not valid")
	}
	bcl.configs[c.Name] = *c
	return nil
}
// GetModelConfig returns a copy of the config registered under m and whether
// one exists.
func (bcl *ModelConfigLoader) GetModelConfig(m string) (ModelConfig, bool) {
	bcl.Lock()
	defer bcl.Unlock()
	cfg, ok := bcl.configs[m]
	return cfg, ok
}
// GetAllModelsConfigs returns a snapshot of every registered configuration,
// sorted by name for deterministic ordering.
func (bcl *ModelConfigLoader) GetAllModelsConfigs() []ModelConfig {
	bcl.Lock()
	defer bcl.Unlock()

	var all []ModelConfig
	for _, cfg := range bcl.configs {
		all = append(all, cfg)
	}
	sort.SliceStable(all, func(a, b int) bool {
		return all[a].Name < all[b].Name
	})
	return all
}
// GetModelConfigsByFilter returns copies of every registered configuration
// accepted by filter; a nil filter matches everything.
func (bcl *ModelConfigLoader) GetModelConfigsByFilter(filter ModelConfigFilterFn) []ModelConfig {
	bcl.Lock()
	defer bcl.Unlock()

	if filter == nil {
		filter = NoFilterFn
	}

	var matched []ModelConfig
	for name, cfg := range bcl.configs {
		if filter(name, &cfg) {
			matched = append(matched, cfg)
		}
	}
	// TODO: I don't think this one needs to Sort on name... but we'll see what breaks.
	return matched
}
// RemoveModelConfig drops the configuration registered under m.
// Removing a name that was never registered is a no-op.
func (bcl *ModelConfigLoader) RemoveModelConfig(m string) {
	bcl.Lock()
	defer bcl.Unlock()
	delete(bcl.configs, m)
}
// Preload prepare models if they are not local but url or huggingface repositories
//
// For every registered config it (1) downloads and SHA-verifies the entries in
// DownloadFiles, (2) downloads the model / mmproj when those fields are URLs
// and rewrites the stored config to point at the local filename, and
// (3) prints a rendered banner (name/description/usage) for the model.
// Returns on the first download/verification error.
func (bcl *ModelConfigLoader) Preload(modelPath string) error {
	bcl.Lock()
	defer bcl.Unlock()

	// Progress callback shared by all downloads.
	status := func(fileName, current, total string, percent float64) {
		utils.DisplayDownloadFunction(fileName, current, total, percent)
	}

	xlog.Info("Preloading models", "path", modelPath)

	// Markdown render style for the banners; COLOR overrides the default.
	renderMode := "dark"
	if os.Getenv("COLOR") != "" {
		renderMode = os.Getenv("COLOR")
	}

	// Render t as markdown when possible; fall back to plain text
	// (also when NO_COLOR is set).
	glamText := func(t string) {
		out, err := glamour.Render(t, renderMode)
		if err == nil && os.Getenv("NO_COLOR") == "" {
			fmt.Println(out)
		} else {
			fmt.Println(t)
		}
	}

	// NOTE: the outer i is the map key (model name); the inner loop below
	// shadows it with the download-file index.
	for i, config := range bcl.configs {
		// Download files and verify their SHA
		for i, file := range config.DownloadFiles {
			xlog.Debug("Checking file exists and matches SHA", "filename", file.Filename)
			if err := utils.VerifyPath(file.Filename, modelPath); err != nil {
				return err
			}
			// Create file path
			filePath := filepath.Join(modelPath, file.Filename)

			if err := file.URI.DownloadFile(filePath, file.SHA256, i, len(config.DownloadFiles), status); err != nil {
				return err
			}
		}

		// If the model is an URL, expand it, and download the file
		if config.IsModelURL() {
			modelFileName := config.ModelFileName()
			uri := downloader.URI(config.Model)
			if uri.ResolveURL() != config.Model {
				// check if file exists
				if _, err := os.Stat(filepath.Join(modelPath, modelFileName)); errors.Is(err, os.ErrNotExist) {
					err := uri.DownloadFile(filepath.Join(modelPath, modelFileName), "", 0, 0, status)
					if err != nil {
						return err
					}
				}

				// Rewrite the stored config so Model points at the local file.
				// Updating an existing key while ranging over the map is safe.
				cc := bcl.configs[i]
				c := &cc
				c.PredictionOptions.Model = modelFileName
				bcl.configs[i] = *c
			}
		}

		if config.IsMMProjURL() {
			modelFileName := config.MMProjFileName()
			uri := downloader.URI(config.MMProj)

			// check if file exists
			if _, err := os.Stat(filepath.Join(modelPath, modelFileName)); errors.Is(err, os.ErrNotExist) {
				err := uri.DownloadFile(filepath.Join(modelPath, modelFileName), "", 0, 0, status)
				if err != nil {
					return err
				}
			}

			// Same rewrite for the multimodal projector path.
			cc := bcl.configs[i]
			c := &cc
			c.MMProj = modelFileName
			bcl.configs[i] = *c
		}

		// Print the (possibly just-updated) banner for this model.
		if bcl.configs[i].Name != "" {
			glamText(fmt.Sprintf("**Model name**: _%s_", bcl.configs[i].Name))
		}
		if bcl.configs[i].Description != "" {
			//glamText("**Description**")
			glamText(bcl.configs[i].Description)
		}
		if bcl.configs[i].Usage != "" {
			//glamText("**Usage**")
			glamText(bcl.configs[i].Usage)
		}
	}
	return nil
}
// LoadModelConfigsFromPath reads all the configurations of the models from a path
// (non-recursive)
//
// Only regular files with a .yaml/.yml extension are considered; hidden files
// (dot-prefixed) and directories are skipped. Unreadable or invalid configs
// are logged and skipped rather than aborting the whole scan.
//
// Fixes: the previous version used strings.Contains(".yaml"), which also
// matched names like "model.yaml.bak"; it did not skip directories; and it
// materialized a []fs.FileInfo only to read Name(), failing the whole scan if
// a single Info() call errored.
func (bcl *ModelConfigLoader) LoadModelConfigsFromPath(path string, opts ...ConfigLoaderOption) error {
	bcl.Lock()
	defer bcl.Unlock()

	entries, err := os.ReadDir(path)
	if err != nil {
		return fmt.Errorf("LoadModelConfigsFromPath cannot read directory '%s': %w", path, err)
	}

	for _, entry := range entries {
		name := entry.Name()
		// Skip directories, hidden files, and anything that is not a YAML file.
		if entry.IsDir() || strings.HasPrefix(name, ".") {
			continue
		}
		if ext := filepath.Ext(name); ext != ".yaml" && ext != ".yml" {
			continue
		}
		c, err := readModelConfigFromFile(filepath.Join(path, name), opts...)
		if err != nil {
			xlog.Error("LoadModelConfigsFromPath cannot read config file", "error", err, "File Name", name)
			continue
		}
		if valid, validationErr := c.Validate(); valid {
			bcl.configs[c.Name] = *c
		} else {
			xlog.Error("config is not valid", "error", validationErr, "Name", c.Name)
		}
	}

	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/config/model_config.go | core/config/model_config.go | package config
import (
"fmt"
"os"
"regexp"
"slices"
"strings"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/pkg/downloader"
"github.com/mudler/LocalAI/pkg/functions"
"github.com/mudler/cogito"
"gopkg.in/yaml.v3"
)
const (
	// RAND_SEED is the sentinel seed used as the default when a config does
	// not set one (see SetDefaults) — -1 requests a random seed.
	RAND_SEED = -1
)
// @Description TTS configuration
type TTSConfig struct {

	// Voice wav path or id
	Voice string `yaml:"voice,omitempty" json:"voice,omitempty"`

	// AudioPath is the path where generated audio is written/read.
	AudioPath string `yaml:"audio_path,omitempty" json:"audio_path,omitempty"`
}
// @Description ModelConfig represents a model configuration
type ModelConfig struct {
	// Path of the YAML file this config was loaded from (set by the loaders).
	modelConfigFile          string `yaml:"-" json:"-"`
	schema.PredictionOptions `yaml:"parameters,omitempty" json:"parameters,omitempty"`
	Name                     string `yaml:"name,omitempty" json:"name,omitempty"`

	F16      *bool             `yaml:"f16,omitempty" json:"f16,omitempty"`
	Threads  *int              `yaml:"threads,omitempty" json:"threads,omitempty"`
	Debug    *bool             `yaml:"debug,omitempty" json:"debug,omitempty"`
	Roles    map[string]string `yaml:"roles,omitempty" json:"roles,omitempty"`
	Embeddings *bool           `yaml:"embeddings,omitempty" json:"embeddings,omitempty"`
	Backend  string            `yaml:"backend,omitempty" json:"backend,omitempty"`

	TemplateConfig TemplateConfig `yaml:"template,omitempty" json:"template,omitempty"`
	// Raw usecase names from YAML; canonicalized by syncKnownUsecasesFromString.
	KnownUsecaseStrings []string            `yaml:"known_usecases,omitempty" json:"known_usecases,omitempty"`
	// Derived bitmask of recognized usecases (never serialized).
	KnownUsecases       *ModelConfigUsecase `yaml:"-" json:"-"`
	Pipeline            Pipeline            `yaml:"pipeline,omitempty" json:"pipeline,omitempty"`

	// Per-request state (never serialized).
	PromptStrings, InputStrings                []string               `yaml:"-" json:"-"`
	InputToken                                 [][]int                `yaml:"-" json:"-"`
	// Set via SetFunctionCallString/SetFunctionCallNameString.
	functionCallString, functionCallNameString string                 `yaml:"-" json:"-"`
	ResponseFormat                             string                 `yaml:"-" json:"-"`
	ResponseFormatMap                          map[string]interface{} `yaml:"-" json:"-"`

	FunctionsConfig functions.FunctionsConfig `yaml:"function,omitempty" json:"function,omitempty"`

	FeatureFlag FeatureFlag `yaml:"feature_flags,omitempty" json:"feature_flags,omitempty"` // Feature Flag registry. We move fast, and features may break on a per model/backend basis. Registry for (usually temporary) flags that indicate aborting something early.
	// LLM configs (GPT4ALL, Llama.cpp, ...)
	LLMConfig `yaml:",inline" json:",inline"`

	// Diffusers
	Diffusers Diffusers `yaml:"diffusers,omitempty" json:"diffusers,omitempty"`
	Step      int       `yaml:"step,omitempty" json:"step,omitempty"`

	// GRPC Options
	GRPC GRPC `yaml:"grpc,omitempty" json:"grpc,omitempty"`

	// TTS specifics
	TTSConfig `yaml:"tts,omitempty" json:"tts,omitempty"`

	// CUDA
	// Explicitly enable CUDA or not (some backends might need it)
	CUDA bool `yaml:"cuda,omitempty" json:"cuda,omitempty"`

	// Extra files fetched by ModelConfigLoader.Preload.
	DownloadFiles []File `yaml:"download_files,omitempty" json:"download_files,omitempty"`

	Description string `yaml:"description,omitempty" json:"description,omitempty"`
	Usage       string `yaml:"usage,omitempty" json:"usage,omitempty"`

	Options   []string `yaml:"options,omitempty" json:"options,omitempty"`
	Overrides []string `yaml:"overrides,omitempty" json:"overrides,omitempty"`

	MCP   MCPConfig   `yaml:"mcp,omitempty" json:"mcp,omitempty"`
	Agent AgentConfig `yaml:"agent,omitempty" json:"agent,omitempty"`
}
// @Description MCP configuration
type MCPConfig struct {
	// Raw YAML blob describing remote MCP servers (decoded by MCPConfigFromYAML).
	Servers string `yaml:"remote,omitempty" json:"remote,omitempty"`
	// Raw YAML blob describing stdio MCP servers (decoded by MCPConfigFromYAML).
	Stdio string `yaml:"stdio,omitempty" json:"stdio,omitempty"`
}

// @Description Agent configuration
type AgentConfig struct {
	MaxAttempts           int  `yaml:"max_attempts,omitempty" json:"max_attempts,omitempty"`
	MaxIterations         int  `yaml:"max_iterations,omitempty" json:"max_iterations,omitempty"`
	EnableReasoning       bool `yaml:"enable_reasoning,omitempty" json:"enable_reasoning,omitempty"`
	EnablePlanning        bool `yaml:"enable_planning,omitempty" json:"enable_planning,omitempty"`
	EnableMCPPrompts      bool `yaml:"enable_mcp_prompts,omitempty" json:"enable_mcp_prompts,omitempty"`
	EnablePlanReEvaluator bool `yaml:"enable_plan_re_evaluator,omitempty" json:"enable_plan_re_evaluator,omitempty"`
}
// MCPConfigFromYAML decodes the raw YAML stored in the Servers and Stdio
// fields into typed remote/stdio server maps.
//
// Fix: unmarshal failures are now wrapped (with %w) so the caller can tell
// whether the remote or the stdio section was malformed.
func (c *MCPConfig) MCPConfigFromYAML() (MCPGenericConfig[MCPRemoteServers], MCPGenericConfig[MCPSTDIOServers], error) {
	var remote MCPGenericConfig[MCPRemoteServers]
	var stdio MCPGenericConfig[MCPSTDIOServers]

	if err := yaml.Unmarshal([]byte(c.Servers), &remote); err != nil {
		return remote, stdio, fmt.Errorf("parsing MCP remote servers: %w", err)
	}
	if err := yaml.Unmarshal([]byte(c.Stdio), &stdio); err != nil {
		return remote, stdio, fmt.Errorf("parsing MCP stdio servers: %w", err)
	}
	return remote, stdio, nil
}
// @Description MCP generic configuration
type MCPGenericConfig[T any] struct {
	Servers T `yaml:"mcpServers,omitempty" json:"mcpServers,omitempty"`
}

// Server maps keyed by server name.
type MCPRemoteServers map[string]MCPRemoteServer
type MCPSTDIOServers map[string]MCPSTDIOServer

// @Description MCP remote server configuration
type MCPRemoteServer struct {
	URL   string `json:"url,omitempty"`
	Token string `json:"token,omitempty"`
}

// @Description MCP STDIO server configuration
type MCPSTDIOServer struct {
	Args    []string          `json:"args,omitempty"`
	Env     map[string]string `json:"env,omitempty"`
	Command string            `json:"command,omitempty"`
}

// @Description Pipeline defines other models to use for audio-to-audio
type Pipeline struct {
	TTS           string `yaml:"tts,omitempty" json:"tts,omitempty"`
	LLM           string `yaml:"llm,omitempty" json:"llm,omitempty"`
	Transcription string `yaml:"transcription,omitempty" json:"transcription,omitempty"`
	VAD           string `yaml:"vad,omitempty" json:"vad,omitempty"`
}

// @Description File configuration for model downloads
type File struct {
	Filename string         `yaml:"filename,omitempty" json:"filename,omitempty"`
	// SHA256 checksum used to verify the downloaded file (see Preload).
	SHA256   string         `yaml:"sha256,omitempty" json:"sha256,omitempty"`
	URI      downloader.URI `yaml:"uri,omitempty" json:"uri,omitempty"`
}
// FeatureFlag is a per-model registry of (usually temporary) feature toggles.
type FeatureFlag map[string]*bool

// Enabled reports whether flag s is present, non-nil, and set to true.
func (ff FeatureFlag) Enabled(s string) bool {
	v, ok := ff[s]
	return ok && v != nil && *v
}
// @Description GRPC configuration
type GRPC struct {
	// Attempts is how many times to retry connecting to the backend.
	Attempts int `yaml:"attempts,omitempty" json:"attempts,omitempty"`
	// AttemptsSleepTime is the pause between retries.
	AttemptsSleepTime int `yaml:"attempts_sleep_time,omitempty" json:"attempts_sleep_time,omitempty"`
}

// @Description Diffusers configuration
type Diffusers struct {
	CUDA             bool   `yaml:"cuda,omitempty" json:"cuda,omitempty"`
	PipelineType     string `yaml:"pipeline_type,omitempty" json:"pipeline_type,omitempty"`
	SchedulerType    string `yaml:"scheduler_type,omitempty" json:"scheduler_type,omitempty"`
	EnableParameters string `yaml:"enable_parameters,omitempty" json:"enable_parameters,omitempty"` // A list of comma separated parameters to specify
	IMG2IMG          bool   `yaml:"img2img,omitempty" json:"img2img,omitempty"`                     // Image to Image Diffuser
	ClipSkip         int    `yaml:"clip_skip,omitempty" json:"clip_skip,omitempty"`                 // Skip every N frames
	ClipModel        string `yaml:"clip_model,omitempty" json:"clip_model,omitempty"`               // Clip model to use
	ClipSubFolder    string `yaml:"clip_subfolder,omitempty" json:"clip_subfolder,omitempty"`       // Subfolder to use for clip model
	ControlNet       string `yaml:"control_net,omitempty" json:"control_net,omitempty"`
}
// @Description LLMConfig is a struct that holds the configuration that are generic for most of the LLM backends.
type LLMConfig struct {
	SystemPrompt    string   `yaml:"system_prompt,omitempty" json:"system_prompt,omitempty"`
	TensorSplit     string   `yaml:"tensor_split,omitempty" json:"tensor_split,omitempty"`
	MainGPU         string   `yaml:"main_gpu,omitempty" json:"main_gpu,omitempty"`
	RMSNormEps      float32  `yaml:"rms_norm_eps,omitempty" json:"rms_norm_eps,omitempty"`
	NGQA            int32    `yaml:"ngqa,omitempty" json:"ngqa,omitempty"`
	// Prompt-cache settings.
	PromptCachePath string   `yaml:"prompt_cache_path,omitempty" json:"prompt_cache_path,omitempty"`
	PromptCacheAll  bool     `yaml:"prompt_cache_all,omitempty" json:"prompt_cache_all,omitempty"`
	PromptCacheRO   bool     `yaml:"prompt_cache_ro,omitempty" json:"prompt_cache_ro,omitempty"`
	// Mirostat sampling (pointers so "unset" can be told apart from zero;
	// see SetDefaults for the defaults applied when nil).
	MirostatETA     *float64 `yaml:"mirostat_eta,omitempty" json:"mirostat_eta,omitempty"`
	MirostatTAU     *float64 `yaml:"mirostat_tau,omitempty" json:"mirostat_tau,omitempty"`
	Mirostat        *int     `yaml:"mirostat,omitempty" json:"mirostat,omitempty"`
	NGPULayers      *int     `yaml:"gpu_layers,omitempty" json:"gpu_layers,omitempty"`
	MMap            *bool    `yaml:"mmap,omitempty" json:"mmap,omitempty"`
	MMlock          *bool    `yaml:"mmlock,omitempty" json:"mmlock,omitempty"`
	LowVRAM         *bool    `yaml:"low_vram,omitempty" json:"low_vram,omitempty"`
	Reranking       *bool    `yaml:"reranking,omitempty" json:"reranking,omitempty"`
	Grammar         string   `yaml:"grammar,omitempty" json:"grammar,omitempty"`
	StopWords       []string `yaml:"stopwords,omitempty" json:"stopwords,omitempty"`
	// Output post-processing: substrings/regexes stripped from responses.
	Cutstrings   []string `yaml:"cutstrings,omitempty" json:"cutstrings,omitempty"`
	ExtractRegex []string `yaml:"extract_regex,omitempty" json:"extract_regex,omitempty"`
	TrimSpace    []string `yaml:"trimspace,omitempty" json:"trimspace,omitempty"`
	TrimSuffix   []string `yaml:"trimsuffix,omitempty" json:"trimsuffix,omitempty"`

	ContextSize *int `yaml:"context_size,omitempty" json:"context_size,omitempty"`
	NUMA        bool `yaml:"numa,omitempty" json:"numa,omitempty"`
	// LoRA adapter settings.
	LoraAdapter  string    `yaml:"lora_adapter,omitempty" json:"lora_adapter,omitempty"`
	LoraBase     string    `yaml:"lora_base,omitempty" json:"lora_base,omitempty"`
	LoraAdapters []string  `yaml:"lora_adapters,omitempty" json:"lora_adapters,omitempty"`
	LoraScales   []float32 `yaml:"lora_scales,omitempty" json:"lora_scales,omitempty"`
	LoraScale    float32   `yaml:"lora_scale,omitempty" json:"lora_scale,omitempty"`
	NoMulMatQ    bool      `yaml:"no_mulmatq,omitempty" json:"no_mulmatq,omitempty"`
	DraftModel   string    `yaml:"draft_model,omitempty" json:"draft_model,omitempty"`
	NDraft       int32     `yaml:"n_draft,omitempty" json:"n_draft,omitempty"`
	Quantization string    `yaml:"quantization,omitempty" json:"quantization,omitempty"`
	LoadFormat   string    `yaml:"load_format,omitempty" json:"load_format,omitempty"`
	GPUMemoryUtilization float32          `yaml:"gpu_memory_utilization,omitempty" json:"gpu_memory_utilization,omitempty"` // vLLM
	TrustRemoteCode      bool             `yaml:"trust_remote_code,omitempty" json:"trust_remote_code,omitempty"`           // vLLM
	EnforceEager         bool             `yaml:"enforce_eager,omitempty" json:"enforce_eager,omitempty"`                   // vLLM
	SwapSpace            int              `yaml:"swap_space,omitempty" json:"swap_space,omitempty"`                         // vLLM
	MaxModelLen          int              `yaml:"max_model_len,omitempty" json:"max_model_len,omitempty"`                   // vLLM
	TensorParallelSize   int              `yaml:"tensor_parallel_size,omitempty" json:"tensor_parallel_size,omitempty"`     // vLLM
	DisableLogStatus     bool             `yaml:"disable_log_stats,omitempty" json:"disable_log_stats,omitempty"`           // vLLM
	DType                string           `yaml:"dtype,omitempty" json:"dtype,omitempty"`                                   // vLLM
	LimitMMPerPrompt     LimitMMPerPrompt `yaml:"limit_mm_per_prompt,omitempty" json:"limit_mm_per_prompt,omitempty"`       // vLLM
	// Multimodal projector file or URL (see MMProjFileName/IsMMProjURL).
	MMProj string `yaml:"mmproj,omitempty" json:"mmproj,omitempty"`

	FlashAttention *string `yaml:"flash_attention,omitempty" json:"flash_attention,omitempty"`
	NoKVOffloading bool    `yaml:"no_kv_offloading,omitempty" json:"no_kv_offloading,omitempty"`
	CacheTypeK     string  `yaml:"cache_type_k,omitempty" json:"cache_type_k,omitempty"`
	CacheTypeV     string  `yaml:"cache_type_v,omitempty" json:"cache_type_v,omitempty"`

	RopeScaling string `yaml:"rope_scaling,omitempty" json:"rope_scaling,omitempty"`
	ModelType   string `yaml:"type,omitempty" json:"type,omitempty"`

	// YaRN rope-scaling parameters (presumably forwarded to the backend —
	// semantics defined there).
	YarnExtFactor  float32 `yaml:"yarn_ext_factor,omitempty" json:"yarn_ext_factor,omitempty"`
	YarnAttnFactor float32 `yaml:"yarn_attn_factor,omitempty" json:"yarn_attn_factor,omitempty"`
	YarnBetaFast   float32 `yaml:"yarn_beta_fast,omitempty" json:"yarn_beta_fast,omitempty"`
	YarnBetaSlow   float32 `yaml:"yarn_beta_slow,omitempty" json:"yarn_beta_slow,omitempty"`

	CFGScale float32 `yaml:"cfg_scale,omitempty" json:"cfg_scale,omitempty"` // Classifier-Free Guidance Scale
}

// @Description LimitMMPerPrompt is a struct that holds the configuration for the limit-mm-per-prompt config in vLLM
type LimitMMPerPrompt struct {
	LimitImagePerPrompt int `yaml:"image,omitempty" json:"image,omitempty"`
	LimitVideoPerPrompt int `yaml:"video,omitempty" json:"video,omitempty"`
	LimitAudioPerPrompt int `yaml:"audio,omitempty" json:"audio,omitempty"`
}
// @Description TemplateConfig is a struct that holds the configuration of the templating system
type TemplateConfig struct {
	// Chat is the template used in the chat completion endpoint
	Chat string `yaml:"chat,omitempty" json:"chat,omitempty"`

	// ChatMessage is the template used for chat messages
	ChatMessage string `yaml:"chat_message,omitempty" json:"chat_message,omitempty"`

	// Completion is the template used for completion requests
	Completion string `yaml:"completion,omitempty" json:"completion,omitempty"`

	// Edit is the template used for edit completion requests
	Edit string `yaml:"edit,omitempty" json:"edit,omitempty"`

	// Functions is the template used when tools are present in the client requests
	Functions string `yaml:"function,omitempty" json:"function,omitempty"`

	// UseTokenizerTemplate is a flag that indicates if the tokenizer template should be used.
	// Note: this is mostly consumed for backends such as vllm and transformers
	// that can use the tokenizers specified in the JSON config files of the models
	UseTokenizerTemplate bool `yaml:"use_tokenizer_template,omitempty" json:"use_tokenizer_template,omitempty"`

	// JoinChatMessagesByCharacter is a string that will be used to join chat messages together.
	// It defaults to \n
	JoinChatMessagesByCharacter *string `yaml:"join_chat_messages_by_character,omitempty" json:"join_chat_messages_by_character,omitempty"`

	// Multimodal is the template used for multimodal (image/audio) requests.
	Multimodal string `yaml:"multimodal,omitempty" json:"multimodal,omitempty"`

	// ReplyPrefix is text prepended to the model reply — TODO confirm with callers.
	ReplyPrefix string `yaml:"reply_prefix,omitempty" json:"reply_prefix,omitempty"`
}
// syncKnownUsecasesFromString recomputes the KnownUsecases bitmask from the
// raw KnownUsecaseStrings, then rewrites KnownUsecaseStrings with only the
// canonical names that were actually recognized (unknown entries are dropped).
func (c *ModelConfig) syncKnownUsecasesFromString() {
	c.KnownUsecases = GetUsecasesFromYAML(c.KnownUsecaseStrings)
	// Make sure the usecases are valid, we rewrite with what we identified
	c.KnownUsecaseStrings = []string{}
	for k, usecase := range GetAllModelConfigUsecases() {
		if c.HasUsecases(usecase) {
			c.KnownUsecaseStrings = append(c.KnownUsecaseStrings, k)
		}
	}
}
// UnmarshalYAML implements yaml.Unmarshaler for ModelConfig.
// Decoding goes through a type alias (which has the same fields but none of
// ModelConfig's methods) so the default decoding rules apply without
// recursively re-invoking this method; the derived usecase fields are then
// rebuilt from the raw strings.
func (c *ModelConfig) UnmarshalYAML(value *yaml.Node) error {
	type BCAlias ModelConfig
	var aux BCAlias
	if err := value.Decode(&aux); err != nil {
		return err
	}
	mc := ModelConfig(aux)
	*c = mc
	c.syncKnownUsecasesFromString()
	return nil
}
// SetFunctionCallString records the request-supplied function-call mode
// (e.g. "none" or "auto"); consumed by ShouldUseFunctions/FunctionToCall.
func (c *ModelConfig) SetFunctionCallString(s string) {
	c.functionCallString = s
}

// SetFunctionCallNameString records the specific function name the request
// asked to call, when one was given.
func (c *ModelConfig) SetFunctionCallNameString(s string) {
	c.functionCallNameString = s
}
// ShouldUseFunctions reports whether function/tool calling is in effect for
// this request: true unless the client disabled it with "none", or when a
// specific function was requested by name.
//
// Fix: the previous condition also tested functionCallString == "", which is
// redundant — the empty string already satisfies != "none" — so the
// simplified expression is behaviorally identical.
func (c *ModelConfig) ShouldUseFunctions() bool {
	return c.functionCallString != "none" || c.ShouldCallSpecificFunction()
}
// ShouldCallSpecificFunction reports whether the request named a particular
// function to call.
func (c *ModelConfig) ShouldCallSpecificFunction() bool {
	return c.functionCallNameString != ""
}
// MMProjFileName returns the local filename for the multimodal projector.
// When MMProj looks like a URL, the filename is derived from the URL;
// otherwise MMProj is returned as-is.
func (c *ModelConfig) MMProjFileName() string {
	uri := downloader.URI(c.MMProj)
	if !uri.LooksLikeURL() {
		return c.MMProj
	}
	name, _ := uri.FilenameFromUrl()
	return name
}
// IsMMProjURL reports whether the mmproj field points at a downloadable URL.
func (c *ModelConfig) IsMMProjURL() bool {
	u := downloader.URI(c.MMProj)
	return u.LooksLikeURL()
}

// IsModelURL reports whether the model field points at a downloadable URL.
func (c *ModelConfig) IsModelURL() bool {
	u := downloader.URI(c.Model)
	return u.LooksLikeURL()
}
// ModelFileName returns the local filename for the model.
// When Model looks like a URL, the filename is derived from the URL;
// otherwise Model is returned as-is.
func (c *ModelConfig) ModelFileName() string {
	uri := downloader.URI(c.Model)
	if !uri.LooksLikeURL() {
		return c.Model
	}
	name, _ := uri.FilenameFromUrl()
	return name
}
// FunctionToCall resolves which function should be invoked: an explicitly
// named function wins unless it is empty, "none" or "auto", in which case
// the generic function-call selector is returned instead.
func (c *ModelConfig) FunctionToCall() string {
	switch c.functionCallNameString {
	case "", "none", "auto":
		return c.functionCallString
	}
	return c.functionCallNameString
}
// SetDefaults fills in every unset (nil) field of the configuration with its
// default value. Sampling defaults follow upstream llama.cpp (see the linked
// header) and issue #2780 for the mirostat settings. The provided options
// can supply fallback values for context size, thread count, f16 and debug.
func (cfg *ModelConfig) SetDefaults(opts ...ConfigLoaderOption) {
	lo := &LoadOptions{}
	lo.Apply(opts...)

	ctx := lo.ctxSize
	threads := lo.threads
	f16 := lo.f16
	debug := lo.debug
	// https://github.com/ggerganov/llama.cpp/blob/75cd4c77292034ecec587ecb401366f57338f7c0/common/sampling.h#L22
	defaultTopP := 0.95
	defaultTopK := 40
	defaultTemp := 0.9
	// https://github.com/mudler/LocalAI/issues/2780
	defaultMirostat := 0
	defaultMirostatTAU := 5.0
	defaultMirostatETA := 0.1
	defaultTypicalP := 1.0
	defaultTFZ := 1.0
	defaultZero := 0

	// Shared values whose addresses are taken for the *bool fields below.
	trueV := true
	falseV := false

	if cfg.Seed == nil {
		// random number generator seed
		defaultSeed := RAND_SEED
		cfg.Seed = &defaultSeed
	}

	if cfg.TopK == nil {
		cfg.TopK = &defaultTopK
	}

	if cfg.TypicalP == nil {
		cfg.TypicalP = &defaultTypicalP
	}

	if cfg.TFZ == nil {
		cfg.TFZ = &defaultTFZ
	}

	if cfg.MMap == nil {
		// MMap is enabled by default
		// Only exception is for Intel GPUs
		if os.Getenv("XPU") != "" {
			cfg.MMap = &falseV
		} else {
			cfg.MMap = &trueV
		}
	}

	if cfg.MMlock == nil {
		// MMlock is disabled by default
		cfg.MMlock = &falseV
	}

	if cfg.TopP == nil {
		cfg.TopP = &defaultTopP
	}
	if cfg.Temperature == nil {
		cfg.Temperature = &defaultTemp
	}

	if cfg.Maxtokens == nil {
		cfg.Maxtokens = &defaultZero
	}

	if cfg.Mirostat == nil {
		cfg.Mirostat = &defaultMirostat
	}

	if cfg.MirostatETA == nil {
		cfg.MirostatETA = &defaultMirostatETA
	}

	if cfg.MirostatTAU == nil {
		cfg.MirostatTAU = &defaultMirostatTAU
	}

	if cfg.LowVRAM == nil {
		cfg.LowVRAM = &falseV
	}

	if cfg.Embeddings == nil {
		cfg.Embeddings = &falseV
	}

	if cfg.Reranking == nil {
		cfg.Reranking = &falseV
	}

	if threads == 0 {
		// Threads can't be 0
		threads = 4
	}

	if cfg.Threads == nil {
		cfg.Threads = &threads
	}

	if cfg.F16 == nil {
		cfg.F16 = &f16
	}

	if cfg.Debug == nil {
		cfg.Debug = &falseV
	}

	// An explicit debug request via options overrides the stored setting.
	if debug {
		cfg.Debug = &trueV
	}

	// Fill in anything else that can be inferred from the model file itself
	// (e.g. GGUF metadata), then normalize the usecase strings.
	guessDefaultsFromFile(cfg, lo.modelPath, ctx)

	cfg.syncKnownUsecasesFromString()
}
// Validate performs a lightweight sanity check of the configuration before
// a model is loaded: referenced file names must not be absolute paths or
// contain "..", a non-empty backend name must be a simple identifier, and
// any MCP configuration must parse. It returns false with a descriptive
// error on the first failure, true and nil otherwise.
func (c *ModelConfig) Validate() (bool, error) {
	downloadedFileNames := []string{}
	for _, f := range c.DownloadFiles {
		downloadedFileNames = append(downloadedFileNames, f.Filename)
	}
	validationTargets := []string{c.Backend, c.Model, c.MMProj}
	validationTargets = append(validationTargets, downloadedFileNames...)
	// Simple validation to make sure the model can be correctly loaded
	for _, n := range validationTargets {
		if n == "" {
			continue
		}
		// Reject absolute paths and path-traversal attempts.
		if strings.HasPrefix(n, string(os.PathSeparator)) ||
			strings.Contains(n, "..") {
			return false, fmt.Errorf("invalid file path: %s", n)
		}
	}

	if c.Backend != "" {
		// a regex that checks that is a string name with no special characters, except '-' and '_'
		re := regexp.MustCompile(`^[a-zA-Z0-9-_]+$`)
		if !re.MatchString(c.Backend) {
			return false, fmt.Errorf("invalid backend name: %s", c.Backend)
		}
	}

	// Validate MCP configuration if present
	if c.MCP.Servers != "" || c.MCP.Stdio != "" {
		if _, _, err := c.MCP.MCPConfigFromYAML(); err != nil {
			return false, fmt.Errorf("invalid MCP configuration: %w", err)
		}
	}

	return true, nil
}
// HasTemplate reports whether any prompt template is configured for this
// model, or whether the backend tokenizer template should be used instead.
func (c *ModelConfig) HasTemplate() bool {
	if c.TemplateConfig.UseTokenizerTemplate {
		return true
	}
	return c.TemplateConfig.Completion != "" ||
		c.TemplateConfig.Edit != "" ||
		c.TemplateConfig.Chat != "" ||
		c.TemplateConfig.ChatMessage != ""
}
// GetModelConfigFile returns the path recorded in modelConfigFile
// (presumably the YAML file this configuration was read from; empty when
// not set).
func (c *ModelConfig) GetModelConfigFile() string {
	return c.modelConfigFile
}
// ModelConfigUsecase is a bitmask describing which endpoint families a model
// configuration can serve.
type ModelConfigUsecase int

const (
	// FLAG_ANY is the empty mask: it matches any configuration.
	FLAG_ANY ModelConfigUsecase = 0

	FLAG_CHAT             ModelConfigUsecase = 1 << 0
	FLAG_COMPLETION       ModelConfigUsecase = 1 << 1
	FLAG_EDIT             ModelConfigUsecase = 1 << 2
	FLAG_EMBEDDINGS       ModelConfigUsecase = 1 << 3
	FLAG_RERANK           ModelConfigUsecase = 1 << 4
	FLAG_IMAGE            ModelConfigUsecase = 1 << 5
	FLAG_TRANSCRIPT       ModelConfigUsecase = 1 << 6
	FLAG_TTS              ModelConfigUsecase = 1 << 7
	FLAG_SOUND_GENERATION ModelConfigUsecase = 1 << 8
	FLAG_TOKENIZE         ModelConfigUsecase = 1 << 9
	FLAG_VAD              ModelConfigUsecase = 1 << 10
	FLAG_VIDEO            ModelConfigUsecase = 1 << 11
	FLAG_DETECTION        ModelConfigUsecase = 1 << 12

	// Common Subsets
	FLAG_LLM ModelConfigUsecase = FLAG_CHAT | FLAG_COMPLETION | FLAG_EDIT
)
// GetAllModelConfigUsecases returns the mapping between flag names (as they
// may appear in YAML) and their bitmask values.
//
// FLAG_ANY is intentionally absent: its value is 0, so it would match every
// configuration in HasUsecases checks.
func GetAllModelConfigUsecases() map[string]ModelConfigUsecase {
	m := make(map[string]ModelConfigUsecase, 14)
	m["FLAG_CHAT"] = FLAG_CHAT
	m["FLAG_COMPLETION"] = FLAG_COMPLETION
	m["FLAG_EDIT"] = FLAG_EDIT
	m["FLAG_EMBEDDINGS"] = FLAG_EMBEDDINGS
	m["FLAG_RERANK"] = FLAG_RERANK
	m["FLAG_IMAGE"] = FLAG_IMAGE
	m["FLAG_TRANSCRIPT"] = FLAG_TRANSCRIPT
	m["FLAG_TTS"] = FLAG_TTS
	m["FLAG_SOUND_GENERATION"] = FLAG_SOUND_GENERATION
	m["FLAG_TOKENIZE"] = FLAG_TOKENIZE
	m["FLAG_VAD"] = FLAG_VAD
	m["FLAG_LLM"] = FLAG_LLM
	m["FLAG_VIDEO"] = FLAG_VIDEO
	m["FLAG_DETECTION"] = FLAG_DETECTION
	return m
}
// stringToFlag maps a bare usecase name (e.g. "chat") to its canonical flag
// identifier (e.g. "FLAG_CHAT").
func stringToFlag(s string) string {
	const flagPrefix = "FLAG_"
	return flagPrefix + strings.ToUpper(s)
}
// GetUsecasesFromYAML parses a list of usecase strings (either bare names
// like "chat" or canonical ones like "FLAG_CHAT") into a combined bitmask.
// It returns nil when the input is empty, meaning "not specified";
// unrecognized entries are silently ignored.
func GetUsecasesFromYAML(input []string) *ModelConfigUsecase {
	if len(input) == 0 {
		return nil
	}

	known := GetAllModelConfigUsecases()
	mask := FLAG_ANY
	for _, entry := range input {
		// Accept both the bare form and the canonical FLAG_ form.
		if f, ok := known[stringToFlag(entry)]; ok {
			mask |= f
		}
		if f, ok := known[entry]; ok {
			mask |= f
		}
	}
	return &mask
}
// HasUsecases examines a ModelConfig and determines which endpoints have a chance of success.
// When the config declares its usecases explicitly (KnownUsecases != nil) and
// the declared mask covers all bits of u, that is authoritative; otherwise
// the heuristic GuessUsecases is consulted.
func (c *ModelConfig) HasUsecases(u ModelConfigUsecase) bool {
	if (c.KnownUsecases != nil) && ((u & *c.KnownUsecases) == u) {
		return true
	}
	return c.GuessUsecases(u)
}
// GuessUsecases is a **heuristic based** function, as the backend in question may not be loaded yet, and the config may not record what it's useful at.
// In its current state, this function should ideally check for properties of the config like templates, rather than the direct backend name checks for the lower half.
// This avoids the maintenance burden of updating this list for each new backend - but unfortunately, that's the best option for some services currently.
//
// It returns false as soon as any usecase requested in u cannot be satisfied
// by this configuration, true otherwise.
func (c *ModelConfig) GuessUsecases(u ModelConfigUsecase) bool {
	// Chat requires a chat template (or the backend tokenizer template).
	if (u & FLAG_CHAT) == FLAG_CHAT {
		if c.TemplateConfig.Chat == "" && c.TemplateConfig.ChatMessage == "" && !c.TemplateConfig.UseTokenizerTemplate {
			return false
		}
	}
	// Completion and edit each require their dedicated template.
	if (u & FLAG_COMPLETION) == FLAG_COMPLETION {
		if c.TemplateConfig.Completion == "" {
			return false
		}
	}
	if (u & FLAG_EDIT) == FLAG_EDIT {
		if c.TemplateConfig.Edit == "" {
			return false
		}
	}
	// Embeddings must be explicitly enabled in the config.
	if (u & FLAG_EMBEDDINGS) == FLAG_EMBEDDINGS {
		if c.Embeddings == nil || !*c.Embeddings {
			return false
		}
	}
	// The remaining checks match on known backend names (see doc comment).
	if (u & FLAG_IMAGE) == FLAG_IMAGE {
		imageBackends := []string{"diffusers", "stablediffusion", "stablediffusion-ggml"}
		if !slices.Contains(imageBackends, c.Backend) {
			return false
		}

		// diffusers additionally needs a pipeline type configured.
		if c.Backend == "diffusers" && c.Diffusers.PipelineType == "" {
			return false
		}
	}
	if (u & FLAG_VIDEO) == FLAG_VIDEO {
		videoBackends := []string{"diffusers", "stablediffusion"}
		if !slices.Contains(videoBackends, c.Backend) {
			return false
		}

		// diffusers additionally needs a pipeline type configured.
		if c.Backend == "diffusers" && c.Diffusers.PipelineType == "" {
			return false
		}
	}
	if (u & FLAG_RERANK) == FLAG_RERANK {
		if c.Backend != "rerankers" {
			return false
		}
	}
	if (u & FLAG_TRANSCRIPT) == FLAG_TRANSCRIPT {
		if c.Backend != "whisper" {
			return false
		}
	}
	if (u & FLAG_TTS) == FLAG_TTS {
		ttsBackends := []string{"bark-cpp", "piper", "transformers-musicgen", "kokoro"}
		if !slices.Contains(ttsBackends, c.Backend) {
			return false
		}
	}
	if (u & FLAG_DETECTION) == FLAG_DETECTION {
		if c.Backend != "rfdetr" {
			return false
		}
	}
	if (u & FLAG_SOUND_GENERATION) == FLAG_SOUND_GENERATION {
		if c.Backend != "transformers-musicgen" {
			return false
		}
	}
	if (u & FLAG_TOKENIZE) == FLAG_TOKENIZE {
		tokenizeCapableBackends := []string{"llama.cpp", "rwkv"}
		if !slices.Contains(tokenizeCapableBackends, c.Backend) {
			return false
		}
	}
	if (u & FLAG_VAD) == FLAG_VAD {
		if c.Backend != "silero-vad" {
			return false
		}
	}
	return true
}
// BuildCogitoOptions generates cogito options from the model configuration.
// Defaults are 3 iterations, 3 attempts and forced reasoning; the Agent
// section of the config can enable additional behaviours or override the
// iteration/attempt limits.
func (c *ModelConfig) BuildCogitoOptions() []cogito.Option {
	opts := []cogito.Option{
		cogito.WithIterations(3),  // default number of iterations
		cogito.WithMaxAttempts(3), // default number of attempts
		cogito.WithForceReasoning(),
	}

	// Apply agent configuration options.
	agent := c.Agent
	if agent.EnableReasoning {
		opts = append(opts, cogito.EnableToolReasoner)
	}
	if agent.EnablePlanning {
		opts = append(opts, cogito.EnableAutoPlan)
	}
	if agent.EnableMCPPrompts {
		opts = append(opts, cogito.EnableMCPPrompts)
	}
	if agent.EnablePlanReEvaluator {
		opts = append(opts, cogito.EnableAutoPlanReEvaluator)
	}

	// Non-zero limits override the defaults appended above.
	if agent.MaxIterations != 0 {
		opts = append(opts, cogito.WithIterations(agent.MaxIterations))
	}
	if agent.MaxAttempts != 0 {
		opts = append(opts, cogito.WithMaxAttempts(agent.MaxAttempts))
	}
	return opts
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/models.go | core/cli/models.go | package cli
import (
"context"
"encoding/json"
"errors"
"fmt"
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/services"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/core/startup"
"github.com/mudler/LocalAI/pkg/downloader"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/xlog"
"github.com/schollz/progressbar/v3"
)
// ModelsCMDFlags are the flags shared by every "models" subcommand: the
// gallery definitions (JSON) and the storage paths for models and backends.
type ModelsCMDFlags struct {
	Galleries        string `env:"LOCALAI_GALLERIES,GALLERIES" help:"JSON list of galleries" group:"models" default:"${galleries}"`
	BackendGalleries string `env:"LOCALAI_BACKEND_GALLERIES,BACKEND_GALLERIES" help:"JSON list of backend galleries" group:"backends" default:"${backends}"`
	ModelsPath       string `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
	BackendsPath     string `env:"LOCALAI_BACKENDS_PATH,BACKENDS_PATH" type:"path" default:"${basepath}/backends" help:"Path containing backends used for inferencing" group:"storage"`
}
// ModelsList is the "models list" subcommand.
type ModelsList struct {
	ModelsCMDFlags `embed:""`
}
// ModelsInstall is the "models install" subcommand.
type ModelsInstall struct {
	DisablePredownloadScan   bool     `env:"LOCALAI_DISABLE_PREDOWNLOAD_SCAN" help:"If true, disables the best-effort security scanner before downloading any files." group:"hardening" default:"false"`
	AutoloadBackendGalleries bool     `env:"LOCALAI_AUTOLOAD_BACKEND_GALLERIES" help:"If true, automatically loads backend galleries" group:"backends" default:"true"`
	ModelArgs                []string `arg:"" optional:"" name:"models" help:"Model configuration URLs to load"`

	ModelsCMDFlags `embed:""`
}
// ModelsCMD groups the model management subcommands; "list" runs by default.
type ModelsCMD struct {
	List    ModelsList    `cmd:"" help:"List the models available in your galleries" default:"withargs"`
	Install ModelsInstall `cmd:"" help:"Install a model from the gallery"`
}
// Run lists every model available in the configured galleries, marking the
// ones that are already installed on disk.
func (ml *ModelsList) Run(ctx *cliContext.Context) error {
	var galleries []config.Gallery
	if err := json.Unmarshal([]byte(ml.Galleries), &galleries); err != nil {
		// A broken gallery definition is logged, not fatal.
		xlog.Error("unable to load galleries", "error", err)
	}

	systemState, err := system.GetSystemState(
		system.WithModelPath(ml.ModelsPath),
		system.WithBackendPath(ml.BackendsPath),
	)
	if err != nil {
		return err
	}

	available, err := gallery.AvailableGalleryModels(galleries, systemState)
	if err != nil {
		return err
	}

	for _, m := range available {
		format := " - %s@%s\n"
		if m.Installed {
			format = " * %s@%s (installed)\n"
		}
		fmt.Printf(format, m.Gallery.Name, m.Name)
	}
	return nil
}
// Run installs each model named on the command line. Non-OCI model names are
// resolved against the configured galleries and run through a best-effort
// security scan before anything is downloaded.
func (mi *ModelsInstall) Run(ctx *cliContext.Context) error {
	systemState, err := system.GetSystemState(
		system.WithModelPath(mi.ModelsPath),
		system.WithBackendPath(mi.BackendsPath),
	)
	if err != nil {
		return err
	}

	galleryService := services.NewGalleryService(&config.ApplicationConfig{
		SystemState: systemState,
	}, model.NewModelLoader(systemState))
	err = galleryService.Start(context.Background(), config.NewModelConfigLoader(mi.ModelsPath), systemState)
	if err != nil {
		return err
	}

	// Broken gallery definitions are logged, not fatal.
	var galleries []config.Gallery
	if err := json.Unmarshal([]byte(mi.Galleries), &galleries); err != nil {
		xlog.Error("unable to load galleries", "error", err)
	}
	var backendGalleries []config.Gallery
	if err := json.Unmarshal([]byte(mi.BackendGalleries), &backendGalleries); err != nil {
		xlog.Error("unable to load backend galleries", "error", err)
	}

	for _, modelName := range mi.ModelArgs {
		progressBar := progressbar.NewOptions(
			1000,
			progressbar.OptionSetDescription(fmt.Sprintf("downloading model %s", modelName)),
			progressbar.OptionShowBytes(false),
			progressbar.OptionClearOnFinish(),
		)
		progressCallback := func(fileName string, current string, total string, percentage float64) {
			// The bar is scaled to 1000 steps.
			v := int(percentage * 10)
			if err := progressBar.Set(v); err != nil {
				xlog.Error("error while updating progress bar", "error", err, "filename", fileName, "value", v)
			}
		}

		models, err := gallery.AvailableGalleryModels(galleries, systemState)
		if err != nil {
			return err
		}

		modelURI := downloader.URI(modelName)
		if !modelURI.LooksLikeOCI() {
			model := gallery.FindGalleryElement(models, modelName)
			if model == nil {
				// BUGFIX: previously this returned the (nil) error from
				// AvailableGalleryModels, so the command exited successfully
				// even though nothing was installed.
				xlog.Error("model not found", "model", modelName)
				return fmt.Errorf("model %q not found in the configured galleries", modelName)
			}

			// Best-effort scan; non-HuggingFace files cannot be scanned.
			err = gallery.SafetyScanGalleryModel(model)
			if err != nil && !errors.Is(err, downloader.ErrNonHuggingFaceFile) {
				return err
			}
		}

		modelLoader := model.NewModelLoader(systemState)
		err = startup.InstallModels(context.Background(), galleryService, galleries, backendGalleries, systemState, modelLoader, !mi.DisablePredownloadScan, mi.AutoloadBackendGalleries, progressCallback, modelName)
		if err != nil {
			return err
		}
	}
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/transcript.go | core/cli/transcript.go | package cli
import (
"context"
"errors"
"fmt"
"github.com/mudler/LocalAI/core/backend"
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/xlog"
)
// TranscriptCMD holds the flags for the "transcript" command, which converts
// an audio file to text using a transcription backend (whisper by default).
type TranscriptCMD struct {
	Filename string `arg:""`

	Backend   string `short:"b" default:"whisper" help:"Backend to run the transcription model"`
	// BUGFIX: help text previously said "run the TTS" (copy-paste from the
	// tts command); this is the transcription model.
	Model     string `short:"m" required:"" help:"Model name to run the transcription"`
	Language  string `short:"l" help:"Language of the audio file"`
	Translate bool   `short:"c" help:"Translate the transcription to english"`
	Diarize   bool   `short:"d" help:"Mark speaker turns"`
	Threads   int    `short:"t" default:"1" help:"Number of threads used for parallel computation"`

	ModelsPath string `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
	Prompt     string `short:"p" help:"Previous transcribed text or words that hint at what the model should expect"`
}
// Run transcribes the given audio file using the configured model and prints
// each resulting segment as "<start> - <text>".
func (t *TranscriptCMD) Run(ctx *cliContext.Context) error {
	systemState, err := system.GetSystemState(
		system.WithModelPath(t.ModelsPath),
	)
	if err != nil {
		return err
	}
	opts := &config.ApplicationConfig{
		SystemState: systemState,
		Context:     context.Background(),
	}

	cl := config.NewModelConfigLoader(t.ModelsPath)
	ml := model.NewModelLoader(systemState)
	if err := cl.LoadModelConfigsFromPath(t.ModelsPath); err != nil {
		return err
	}

	// The model must already be configured under the models path.
	c, exists := cl.GetModelConfig(t.Model)
	if !exists {
		return errors.New("model not found")
	}

	// The CLI thread count overrides whatever the config declares.
	c.Threads = &t.Threads

	// Ensure spawned gRPC backend processes are stopped on exit.
	defer func() {
		err := ml.StopAllGRPC()
		if err != nil {
			xlog.Error("unable to stop all grpc processes", "error", err)
		}
	}()

	tr, err := backend.ModelTranscription(t.Filename, t.Language, t.Translate, t.Diarize, t.Prompt, ml, c, opts)
	if err != nil {
		return err
	}
	for _, segment := range tr.Segments {
		fmt.Println(segment.Start.String(), "-", segment.Text)
	}
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/soundgeneration.go | core/cli/soundgeneration.go | package cli
import (
"context"
"fmt"
"os"
"path/filepath"
"strconv"
"strings"
"github.com/mudler/LocalAI/core/backend"
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/xlog"
)
// SoundGenerationCMD holds the flags for the "sound-generation" command,
// which produces audio from text (optionally conditioned on an input file).
type SoundGenerationCMD struct {
	Text []string `arg:""`

	Backend                string `short:"b" required:"" help:"Backend to run the SoundGeneration model"`
	Model                  string `short:"m" required:"" help:"Model name to run the SoundGeneration"`
	Duration               string `short:"d" help:"If specified, the length of audio to generate in seconds"`
	Temperature            string `short:"t" help:"If specified, the temperature of the generation"`
	InputFile              string `short:"i" help:"If specified, the input file to condition generation upon"`
	InputFileSampleDivisor string `short:"f" help:"If InputFile and this divisor is specified, the first portion of the sample file will be used"`
	DoSample               bool   `short:"s" default:"true" help:"Enables sampling from the model. Better quality at the cost of speed. Defaults to enabled."`

	OutputFile           string   `short:"o" type:"path" help:"The path to write the output wav file"`
	ModelsPath           string   `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
	ExternalGRPCBackends []string `env:"LOCALAI_EXTERNAL_GRPC_BACKENDS,EXTERNAL_GRPC_BACKENDS" help:"A list of external grpc backends" group:"backends"`
}
// parseToFloat32Ptr parses input as a 32-bit float and returns a pointer to
// the value, or nil when the string is not a valid number (including "").
func parseToFloat32Ptr(input string) *float32 {
	parsed, err := strconv.ParseFloat(input, 32)
	if err != nil {
		return nil
	}
	out := float32(parsed)
	return &out
}
// parseToInt32Ptr parses input as a base-10 32-bit integer and returns a
// pointer to the value, or nil when parsing fails (including "").
func parseToInt32Ptr(input string) *int32 {
	parsed, err := strconv.ParseInt(input, 10, 32)
	if err != nil {
		return nil
	}
	out := int32(parsed)
	return &out
}
// Run generates audio from the given text (and optional conditioning input
// file) using the selected backend/model, then renames the result to the
// requested output path (or leaves it in a temporary location).
func (t *SoundGenerationCMD) Run(ctx *cliContext.Context) error {
	outputFile := t.OutputFile
	// Generated content goes next to the requested output file, or into the
	// system temp dir when no output file was given.
	outputDir := os.TempDir()
	if outputFile != "" {
		outputDir = filepath.Dir(outputFile)
	}

	text := strings.Join(t.Text, " ")

	systemState, err := system.GetSystemState(
		system.WithModelPath(t.ModelsPath),
	)
	if err != nil {
		return err
	}

	// Parse external backend definitions of the form "<name>:<uri>".
	externalBackends := make(map[string]string)
	for _, v := range t.ExternalGRPCBackends {
		// BUGFIX: the previous code sliced at strings.IndexByte(v, ':')
		// without checking for -1, panicking on entries without a colon.
		// Malformed entries are now reported and skipped. (This also drops a
		// leftover "TMP" debug Printf.)
		name, uri, found := strings.Cut(v, ":")
		if !found || name == "" {
			xlog.Error("invalid external gRPC backend, expected <name>:<uri>", "value", v)
			continue
		}
		externalBackends[name] = uri
	}

	opts := &config.ApplicationConfig{
		SystemState:          systemState,
		Context:              context.Background(),
		GeneratedContentDir:  outputDir,
		ExternalGRPCBackends: externalBackends,
	}

	ml := model.NewModelLoader(systemState)

	// Ensure spawned gRPC backend processes are stopped on exit.
	defer func() {
		if err := ml.StopAllGRPC(); err != nil {
			xlog.Error("unable to stop all grpc processes", "error", err)
		}
	}()

	options := config.ModelConfig{}
	options.SetDefaults()
	options.Backend = t.Backend
	options.Model = t.Model

	var inputFile *string
	if t.InputFile != "" {
		inputFile = &t.InputFile
	}

	filePath, _, err := backend.SoundGeneration(text,
		parseToFloat32Ptr(t.Duration), parseToFloat32Ptr(t.Temperature), &t.DoSample,
		inputFile, parseToInt32Ptr(t.InputFileSampleDivisor), ml, opts, options)
	if err != nil {
		return err
	}

	if outputFile != "" {
		if err := os.Rename(filePath, outputFile); err != nil {
			return err
		}
		fmt.Printf("Generate file %s\n", outputFile)
	} else {
		fmt.Printf("Generate file %s\n", filePath)
	}
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/tts.go | core/cli/tts.go | package cli
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
"github.com/mudler/LocalAI/core/backend"
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/xlog"
)
// TTSCMD holds the flags for the "tts" command, which converts text to
// speech (piper backend by default).
type TTSCMD struct {
	Text []string `arg:""`

	Backend    string `short:"b" default:"piper" help:"Backend to run the TTS model"`
	Model      string `short:"m" required:"" help:"Model name to run the TTS"`
	Voice      string `short:"v" help:"Voice name to run the TTS"`
	Language   string `short:"l" help:"Language to use with the TTS"`
	OutputFile string `short:"o" type:"path" help:"The path to write the output wav file"`
	ModelsPath string `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
}
// Run synthesizes speech for the given text using the selected TTS backend
// and model, writing the resulting wav either to --output-file or to a
// temporary location; the generated path is printed in both cases.
func (t *TTSCMD) Run(ctx *cliContext.Context) error {
	outputFile := t.OutputFile
	// Generated content goes next to the requested output file, or into the
	// system temp dir when no output file was given.
	outputDir := os.TempDir()
	if outputFile != "" {
		outputDir = filepath.Dir(outputFile)
	}

	text := strings.Join(t.Text, " ")

	systemState, err := system.GetSystemState(
		system.WithModelPath(t.ModelsPath),
	)
	if err != nil {
		return err
	}
	opts := &config.ApplicationConfig{
		SystemState:         systemState,
		Context:             context.Background(),
		GeneratedContentDir: outputDir,
	}
	ml := model.NewModelLoader(systemState)

	// Ensure spawned gRPC backend processes are stopped on exit.
	defer func() {
		err := ml.StopAllGRPC()
		if err != nil {
			xlog.Error("unable to stop all grpc processes", "error", err)
		}
	}()

	options := config.ModelConfig{}
	options.SetDefaults()
	options.Backend = t.Backend
	options.Model = t.Model

	filePath, _, err := backend.ModelTTS(text, t.Voice, t.Language, ml, opts, options)
	if err != nil {
		return err
	}

	// Move the generated file into place when an explicit output was requested.
	if outputFile != "" {
		if err := os.Rename(filePath, outputFile); err != nil {
			return err
		}
		fmt.Printf("Generate file %s\n", outputFile)
	} else {
		fmt.Printf("Generate file %s\n", filePath)
	}
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/util.go | core/cli/util.go | package cli
import (
"encoding/json"
"errors"
"fmt"
"os"
"path/filepath"
"strings"
"github.com/mholt/archiver/v3"
"github.com/mudler/xlog"
gguf "github.com/gpustack/gguf-parser-go"
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/pkg/downloader"
"github.com/mudler/LocalAI/pkg/oci"
"github.com/mudler/LocalAI/pkg/system"
)
// UtilCMD groups miscellaneous utility subcommands: GGUF inspection, OCI
// image creation, model security scanning, and usecase diagnostics.
type UtilCMD struct {
	GGUFInfo         GGUFInfoCMD         `cmd:"" name:"gguf-info" help:"Get information about a GGUF file"`
	CreateOCIImage   CreateOCIImageCMD   `cmd:"" name:"create-oci-image" help:"Create an OCI image from a file or a directory"`
	HFScan           HFScanCMD           `cmd:"" name:"hf-scan" help:"Checks installed models for known security issues. WARNING: this is a best-effort feature and may not catch everything!"`
	UsecaseHeuristic UsecaseHeuristicCMD `cmd:"" name:"usecase-heuristic" help:"Checks a specific model config and prints what usecase LocalAI will offer for it."`
}
// GGUFInfoCMD holds the flags for "util gguf-info".
type GGUFInfoCMD struct {
	// Args carries the GGUF file path as its first element.
	Args   []string `arg:"" optional:"" name:"args" help:"Arguments to pass to the utility command"`
	Header bool     `optional:"" default:"false" name:"header" help:"Show header information"`
}
// HFScanCMD holds the flags for "util hf-scan". With no ToScan arguments all
// installed models are checked against the galleries; otherwise each given
// URI is scanned individually.
type HFScanCMD struct {
	ModelsPath string   `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
	Galleries  string   `env:"LOCALAI_GALLERIES,GALLERIES" help:"JSON list of galleries" group:"models" default:"${galleries}"`
	ToScan     []string `arg:""`
}
// UsecaseHeuristicCMD holds the flags for "util usecase-heuristic".
type UsecaseHeuristicCMD struct {
	// NOTE(review): the name tag below reads like help text and was probably
	// meant to be a help tag — left untouched here because changing it would
	// alter the flag's CLI name; confirm intended kong tag.
	ConfigName string `name:"The config file to check"`
	ModelsPath string `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
}
// CreateOCIImageCMD holds the flags for "util create-oci-image".
type CreateOCIImageCMD struct {
	Input     []string `arg:"" help:"Input file or directory to create an OCI image from"`
	Output    string   `default:"image.tar" help:"Output OCI image name"`
	ImageName string   `default:"localai" help:"Image name"`
	// Platform is expected in "<os>/<arch>" form, e.g. "linux/amd64".
	Platform string `default:"linux/amd64" help:"Platform of the image"`
}
// Run archives the input paths into a tarball inside a temporary directory
// and wraps the archive into a single-platform OCI image at the configured
// output path.
func (u *CreateOCIImageCMD) Run(ctx *cliContext.Context) error {
	xlog.Info("Creating OCI image from input")

	tmpDir, err := os.MkdirTemp("", "localai")
	if err != nil {
		return err
	}
	defer os.RemoveAll(tmpDir)

	archivePath := filepath.Join(tmpDir, "archive.tar")
	if err := archiver.Archive(u.Input, archivePath); err != nil {
		return err
	}

	xlog.Info("Creating OCI image", "output", u.Output, "input", u.Input)

	// Platform is "<os>/<arch>" — exactly two components are required.
	parts := strings.Split(u.Platform, "/")
	if len(parts) != 2 {
		return fmt.Errorf("invalid platform: %s", u.Platform)
	}

	// CreateTar receives the components as (arch, os), matching the
	// original call order (second component, then first).
	return oci.CreateTar(archivePath, u.Output, u.ImageName, parts[1], parts[0])
}
// Run parses the GGUF file given as the first argument and logs its
// tokenizer, architecture and chat-template metadata. When --header is set,
// every metadata key/value pair from the header is logged as well.
func (u *GGUFInfoCMD) Run(ctx *cliContext.Context) error {
	if len(u.Args) == 0 {
		return fmt.Errorf("no GGUF file provided")
	}

	f, err := gguf.ParseGGUFFile(u.Args[0])
	if err != nil {
		// Parsing only succeeds for actual GGUF files.
		xlog.Error("guessDefaultsFromFile: not a GGUF file")
		return err
	}

	xlog.Info("GGUF file loaded", "file", u.Args[0], "eosTokenID", f.Tokenizer().EOSTokenID, "bosTokenID", f.Tokenizer().BOSTokenID, "modelName", f.Metadata().Name, "architecture", f.Architecture().Architecture)

	xlog.Info("Tokenizer", "tokenizer", fmt.Sprintf("%+v", f.Tokenizer()))
	xlog.Info("Architecture", "architecture", fmt.Sprintf("%+v", f.Architecture()))

	// The chat template is stored as a plain metadata key.
	v, exists := f.Header.MetadataKV.Get("tokenizer.chat_template")
	if exists {
		xlog.Info("chat_template", "template", v.ValueString())
	}

	if u.Header {
		for _, metadata := range f.Header.MetadataKV {
			xlog.Info("metadata", "key", metadata.Key, "value", metadata.Value)
		}
		// log.Info().Any("header", fmt.Sprintf("%+v", f.Header)).Msg("Header")
	}

	return nil
}
// Run scans models for known security issues (best effort, HuggingFace
// only). With no arguments every installed model is checked against the
// configured galleries; otherwise each given URI is scanned individually and
// any unsafe findings are joined into the returned error.
func (hfscmd *HFScanCMD) Run(ctx *cliContext.Context) error {
	systemState, err := system.GetSystemState(
		system.WithModelPath(hfscmd.ModelsPath),
	)
	if err != nil {
		return err
	}

	xlog.Info("LocalAI Security Scanner - This is BEST EFFORT functionality! Currently limited to huggingface models!")

	if len(hfscmd.ToScan) == 0 {
		xlog.Info("Checking all installed models against galleries")
		var galleries []config.Gallery
		if err := json.Unmarshal([]byte(hfscmd.Galleries), &galleries); err != nil {
			xlog.Error("unable to load galleries", "error", err)
		}

		err := gallery.SafetyScanGalleryModels(galleries, systemState)
		if err == nil {
			xlog.Info("No security warnings were detected for your installed models. Please note that this is a BEST EFFORT tool, and all issues may not be detected.")
		} else {
			xlog.Error("! WARNING ! A known-vulnerable model is installed!", "error", err)
		}
		return err
	}

	var errs error
	for _, uri := range hfscmd.ToScan {
		xlog.Info("scanning specific uri", "uri", uri)
		scanResults, err := downloader.HuggingFaceScan(downloader.URI(uri))
		// NOTE(review): only ErrUnsafeFilesFound is reported; other scan
		// errors are silently skipped, preserving the original best-effort
		// behaviour.
		if err != nil && errors.Is(err, downloader.ErrUnsafeFilesFound) {
			xlog.Error("! WARNING ! A known-vulnerable model is included in this repo!", "error", err, "clamAV", scanResults.ClamAVInfectedFiles, "pickles", scanResults.DangerousPickles)
			errs = errors.Join(errs, err)
		}
	}
	if errs != nil {
		return errs
	}
	xlog.Info("No security warnings were detected for your installed models. Please note that this is a BEST EFFORT tool, and all issues may not be detected.")
	return nil
}
// Run loads a single model config by name and logs every usecase flag that
// LocalAI would consider supported for it.
func (uhcmd *UsecaseHeuristicCMD) Run(ctx *cliContext.Context) error {
	if len(uhcmd.ConfigName) == 0 {
		xlog.Error("ConfigName is a required parameter")
		return fmt.Errorf("config name is a required parameter")
	}
	if len(uhcmd.ModelsPath) == 0 {
		xlog.Error("ModelsPath is a required parameter")
		return fmt.Errorf("model path is a required parameter")
	}

	bcl := config.NewModelConfigLoader(uhcmd.ModelsPath)
	err := bcl.ReadModelConfig(uhcmd.ConfigName)
	if err != nil {
		xlog.Error("error while loading backend", "error", err, "ConfigName", uhcmd.ConfigName)
		return err
	}

	bc, exists := bcl.GetModelConfig(uhcmd.ConfigName)
	if !exists {
		// BUGFIX: previously execution fell through after logging, so the
		// usecase loop ran against a zero-value config and printed
		// misleading results.
		xlog.Error("ConfigName not found", "ConfigName", uhcmd.ConfigName)
		return fmt.Errorf("config %q not found", uhcmd.ConfigName)
	}

	for name, uc := range config.GetAllModelConfigUsecases() {
		if bc.HasUsecases(uc) {
			xlog.Info("Usecase", "usecase", name)
		}
	}
	xlog.Info("---")
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/federated.go | core/cli/federated.go | package cli
import (
"context"
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/p2p"
"github.com/mudler/LocalAI/pkg/signals"
)
// FederatedCLI holds the flags for running LocalAI in federated mode, where
// incoming requests are forwarded to P2P workers.
type FederatedCLI struct {
	Address        string `env:"LOCALAI_ADDRESS,ADDRESS" default:":8080" help:"Bind address for the API server" group:"api"`
	Peer2PeerToken string `env:"LOCALAI_P2P_TOKEN,P2P_TOKEN,TOKEN" name:"p2ptoken" help:"Token for P2P mode (optional)" group:"p2p"`
	RandomWorker   bool   `env:"LOCALAI_RANDOM_WORKER,RANDOM_WORKER" default:"false" help:"Select a random worker from the pool" group:"p2p"`
	// BUGFIX: help text typo "arbitrarly" -> "arbitrarily".
	Peer2PeerNetworkID string `env:"LOCALAI_P2P_NETWORK_ID,P2P_NETWORK_ID" help:"Network ID for P2P mode, can be set arbitrarily by the user for grouping a set of instances." group:"p2p"`
	TargetWorker       string `env:"LOCALAI_TARGET_WORKER,TARGET_WORKER" help:"Target worker to run the federated server on" group:"p2p"`
}
// Run starts a federated LocalAI server that balances requests across P2P
// workers, shutting down when a termination signal is received.
func (f *FederatedCLI) Run(ctx *cliContext.Context) error {
	server := p2p.NewFederatedServer(
		f.Address,
		p2p.NetworkID(f.Peer2PeerNetworkID, p2p.FederatedID),
		f.Peer2PeerToken,
		!f.RandomWorker,
		f.TargetWorker,
	)

	runCtx, cancel := context.WithCancel(context.Background())
	// Cancel the server context on SIGINT/SIGTERM-style shutdown.
	signals.RegisterGracefulTerminationHandler(func() {
		cancel()
	})

	return server.Start(runCtx)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/cli.go | core/cli/cli.go | package cli
import (
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/cli/worker"
)
// CLI is the root command tree parsed by kong; each field is a subcommand
// and Run is the default when none is specified.
// BUGFIX: help text typo "this the default command" -> "this is the default command".
var CLI struct {
	cliContext.Context `embed:""`

	Run             RunCMD             `cmd:"" help:"Run LocalAI, this is the default command if no other command is specified. Run 'local-ai run --help' for more information" default:"withargs"`
	Federated       FederatedCLI       `cmd:"" help:"Run LocalAI in federated mode"`
	Models          ModelsCMD          `cmd:"" help:"Manage LocalAI models and definitions"`
	Backends        BackendsCMD        `cmd:"" help:"Manage LocalAI backends and definitions"`
	TTS             TTSCMD             `cmd:"" help:"Convert text to speech"`
	SoundGeneration SoundGenerationCMD `cmd:"" help:"Generates audio files from text or audio"`
	Transcript      TranscriptCMD      `cmd:"" help:"Convert audio to text"`
	Worker          worker.Worker      `cmd:"" help:"Run workers to distribute workload (llama.cpp-only)"`
	Util            UtilCMD            `cmd:"" help:"Utility commands"`
	Explorer        ExplorerCMD        `cmd:"" help:"Run p2p explorer"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/run.go | core/cli/run.go | package cli
import (
"context"
"fmt"
"os"
"strings"
"time"
"github.com/mudler/LocalAI/core/application"
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/http"
"github.com/mudler/LocalAI/core/p2p"
"github.com/mudler/LocalAI/internal"
"github.com/mudler/LocalAI/pkg/signals"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/xlog"
)
// RunCMD holds every flag/environment variable accepted by `local-ai run`.
// Field tags drive the CLI parser: `env` lists accepted environment
// variables, `default` the fallback value, and `group` the help section.
// The struct is consumed by (*RunCMD).Run, which translates these fields
// into config.AppOption values.
type RunCMD struct {
	ModelArgs []string `arg:"" optional:"" name:"models" help:"Model configuration URLs to load"`

	ExternalBackends []string `env:"LOCALAI_EXTERNAL_BACKENDS,EXTERNAL_BACKENDS" help:"A list of external backends to load from gallery on boot" group:"backends"`
	BackendsPath string `env:"LOCALAI_BACKENDS_PATH,BACKENDS_PATH" type:"path" default:"${basepath}/backends" help:"Path containing backends used for inferencing" group:"backends"`
	BackendsSystemPath string `env:"LOCALAI_BACKENDS_SYSTEM_PATH,BACKEND_SYSTEM_PATH" type:"path" default:"/var/lib/local-ai/backends" help:"Path containing system backends used for inferencing" group:"backends"`
	ModelsPath string `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
	GeneratedContentPath string `env:"LOCALAI_GENERATED_CONTENT_PATH,GENERATED_CONTENT_PATH" type:"path" default:"/tmp/generated/content" help:"Location for generated content (e.g. images, audio, videos)" group:"storage"`
	UploadPath string `env:"LOCALAI_UPLOAD_PATH,UPLOAD_PATH" type:"path" default:"/tmp/localai/upload" help:"Path to store uploads from files api" group:"storage"`
	LocalaiConfigDir string `env:"LOCALAI_CONFIG_DIR" type:"path" default:"${basepath}/configuration" help:"Directory for dynamic loading of certain configuration files (currently api_keys.json and external_backends.json)" group:"storage"`
	LocalaiConfigDirPollInterval time.Duration `env:"LOCALAI_CONFIG_DIR_POLL_INTERVAL" help:"Typically the config path picks up changes automatically, but if your system has broken fsnotify events, set this to an interval to poll the LocalAI Config Dir (example: 1m)" group:"storage"`
	// The alias on this option is there to preserve functionality with the old `--config-file` parameter
	ModelsConfigFile string `env:"LOCALAI_MODELS_CONFIG_FILE,CONFIG_FILE" aliases:"config-file" help:"YAML file containing a list of model backend configs" group:"storage"`
	BackendGalleries string `env:"LOCALAI_BACKEND_GALLERIES,BACKEND_GALLERIES" help:"JSON list of backend galleries" group:"backends" default:"${backends}"`
	Galleries string `env:"LOCALAI_GALLERIES,GALLERIES" help:"JSON list of galleries" group:"models" default:"${galleries}"`
	AutoloadGalleries bool `env:"LOCALAI_AUTOLOAD_GALLERIES,AUTOLOAD_GALLERIES" group:"models" default:"true"`
	AutoloadBackendGalleries bool `env:"LOCALAI_AUTOLOAD_BACKEND_GALLERIES,AUTOLOAD_BACKEND_GALLERIES" group:"backends" default:"true"`
	PreloadModels string `env:"LOCALAI_PRELOAD_MODELS,PRELOAD_MODELS" help:"A List of models to apply in JSON at start" group:"models"`
	Models []string `env:"LOCALAI_MODELS,MODELS" help:"A List of model configuration URLs to load" group:"models"`
	PreloadModelsConfig string `env:"LOCALAI_PRELOAD_MODELS_CONFIG,PRELOAD_MODELS_CONFIG" help:"A List of models to apply at startup. Path to a YAML config file" group:"models"`
	F16 bool `name:"f16" env:"LOCALAI_F16,F16" help:"Enable GPU acceleration" group:"performance"`
	Threads int `env:"LOCALAI_THREADS,THREADS" short:"t" help:"Number of threads used for parallel computation. Usage of the number of physical cores in the system is suggested" group:"performance"`
	ContextSize int `env:"LOCALAI_CONTEXT_SIZE,CONTEXT_SIZE" help:"Default context size for models" group:"performance"`

	Address string `env:"LOCALAI_ADDRESS,ADDRESS" default:":8080" help:"Bind address for the API server" group:"api"`
	CORS bool `env:"LOCALAI_CORS,CORS" help:"" group:"api"`
	CORSAllowOrigins string `env:"LOCALAI_CORS_ALLOW_ORIGINS,CORS_ALLOW_ORIGINS" group:"api"`
	CSRF bool `env:"LOCALAI_CSRF" help:"Enables fiber CSRF middleware" group:"api"`
	UploadLimit int `env:"LOCALAI_UPLOAD_LIMIT,UPLOAD_LIMIT" default:"15" help:"Default upload-limit in MB" group:"api"`
	APIKeys []string `env:"LOCALAI_API_KEY,API_KEY" help:"List of API Keys to enable API authentication. When this is set, all the requests must be authenticated with one of these API keys" group:"api"`
	DisableWebUI bool `env:"LOCALAI_DISABLE_WEBUI,DISABLE_WEBUI" default:"false" help:"Disables the web user interface. When set to true, the server will only expose API endpoints without serving the web interface" group:"api"`
	DisableRuntimeSettings bool `env:"LOCALAI_DISABLE_RUNTIME_SETTINGS,DISABLE_RUNTIME_SETTINGS" default:"false" help:"Disables the runtime settings. When set to true, the server will not load the runtime settings from the runtime_settings.json file" group:"api"`
	DisablePredownloadScan bool `env:"LOCALAI_DISABLE_PREDOWNLOAD_SCAN" help:"If true, disables the best-effort security scanner before downloading any files." group:"hardening" default:"false"`
	OpaqueErrors bool `env:"LOCALAI_OPAQUE_ERRORS" default:"false" help:"If true, all error responses are replaced with blank 500 errors. This is intended only for hardening against information leaks and is normally not recommended." group:"hardening"`
	UseSubtleKeyComparison bool `env:"LOCALAI_SUBTLE_KEY_COMPARISON" default:"false" help:"If true, API Key validation comparisons will be performed using constant-time comparisons rather than simple equality. This trades off performance on each request for resiliancy against timing attacks." group:"hardening"`
	DisableApiKeyRequirementForHttpGet bool `env:"LOCALAI_DISABLE_API_KEY_REQUIREMENT_FOR_HTTP_GET" default:"false" help:"If true, a valid API key is not required to issue GET requests to portions of the web ui. This should only be enabled in secure testing environments" group:"hardening"`
	DisableMetricsEndpoint bool `env:"LOCALAI_DISABLE_METRICS_ENDPOINT,DISABLE_METRICS_ENDPOINT" default:"false" help:"Disable the /metrics endpoint" group:"api"`
	HttpGetExemptedEndpoints []string `env:"LOCALAI_HTTP_GET_EXEMPTED_ENDPOINTS" default:"^/$,^/browse/?$,^/talk/?$,^/p2p/?$,^/chat/?$,^/text2image/?$,^/tts/?$,^/static/.*$,^/swagger.*$" help:"If LOCALAI_DISABLE_API_KEY_REQUIREMENT_FOR_HTTP_GET is overriden to true, this is the list of endpoints to exempt. Only adjust this in case of a security incident or as a result of a personal security posture review" group:"hardening"`
	Peer2Peer bool `env:"LOCALAI_P2P,P2P" name:"p2p" default:"false" help:"Enable P2P mode" group:"p2p"`
	Peer2PeerDHTInterval int `env:"LOCALAI_P2P_DHT_INTERVAL,P2P_DHT_INTERVAL" default:"360" name:"p2p-dht-interval" help:"Interval for DHT refresh (used during token generation)" group:"p2p"`
	Peer2PeerOTPInterval int `env:"LOCALAI_P2P_OTP_INTERVAL,P2P_OTP_INTERVAL" default:"9000" name:"p2p-otp-interval" help:"Interval for OTP refresh (used during token generation)" group:"p2p"`
	Peer2PeerToken string `env:"LOCALAI_P2P_TOKEN,P2P_TOKEN,TOKEN" name:"p2ptoken" help:"Token for P2P mode (optional)" group:"p2p"`
	Peer2PeerNetworkID string `env:"LOCALAI_P2P_NETWORK_ID,P2P_NETWORK_ID" help:"Network ID for P2P mode, can be set arbitrarly by the user for grouping a set of instances" group:"p2p"`
	ParallelRequests bool `env:"LOCALAI_PARALLEL_REQUESTS,PARALLEL_REQUESTS" help:"Enable backends to handle multiple requests in parallel if they support it (e.g.: llama.cpp or vllm)" group:"backends"`
	SingleActiveBackend bool `env:"LOCALAI_SINGLE_ACTIVE_BACKEND,SINGLE_ACTIVE_BACKEND" help:"Allow only one backend to be run at a time (deprecated: use --max-active-backends=1 instead)" group:"backends"`
	MaxActiveBackends int `env:"LOCALAI_MAX_ACTIVE_BACKENDS,MAX_ACTIVE_BACKENDS" default:"0" help:"Maximum number of backends to keep loaded at once (0 = unlimited, 1 = single backend mode). Least recently used backends are evicted when limit is reached" group:"backends"`
	PreloadBackendOnly bool `env:"LOCALAI_PRELOAD_BACKEND_ONLY,PRELOAD_BACKEND_ONLY" default:"false" help:"Do not launch the API services, only the preloaded models / backends are started (useful for multi-node setups)" group:"backends"`
	ExternalGRPCBackends []string `env:"LOCALAI_EXTERNAL_GRPC_BACKENDS,EXTERNAL_GRPC_BACKENDS" help:"A list of external grpc backends" group:"backends"`
	EnableWatchdogIdle bool `env:"LOCALAI_WATCHDOG_IDLE,WATCHDOG_IDLE" default:"false" help:"Enable watchdog for stopping backends that are idle longer than the watchdog-idle-timeout" group:"backends"`
	WatchdogIdleTimeout string `env:"LOCALAI_WATCHDOG_IDLE_TIMEOUT,WATCHDOG_IDLE_TIMEOUT" default:"15m" help:"Threshold beyond which an idle backend should be stopped" group:"backends"`
	EnableWatchdogBusy bool `env:"LOCALAI_WATCHDOG_BUSY,WATCHDOG_BUSY" default:"false" help:"Enable watchdog for stopping backends that are busy longer than the watchdog-busy-timeout" group:"backends"`
	WatchdogBusyTimeout string `env:"LOCALAI_WATCHDOG_BUSY_TIMEOUT,WATCHDOG_BUSY_TIMEOUT" default:"5m" help:"Threshold beyond which a busy backend should be stopped" group:"backends"`
	EnableMemoryReclaimer bool `env:"LOCALAI_MEMORY_RECLAIMER,MEMORY_RECLAIMER,LOCALAI_GPU_RECLAIMER,GPU_RECLAIMER" default:"false" help:"Enable memory threshold monitoring to auto-evict backends when memory usage exceeds threshold (uses GPU VRAM if available, otherwise RAM)" group:"backends"`
	MemoryReclaimerThreshold float64 `env:"LOCALAI_MEMORY_RECLAIMER_THRESHOLD,MEMORY_RECLAIMER_THRESHOLD,LOCALAI_GPU_RECLAIMER_THRESHOLD,GPU_RECLAIMER_THRESHOLD" default:"0.95" help:"Memory usage threshold (0.0-1.0) that triggers backend eviction (default 0.95 = 95%%)" group:"backends"`
	ForceEvictionWhenBusy bool `env:"LOCALAI_FORCE_EVICTION_WHEN_BUSY,FORCE_EVICTION_WHEN_BUSY" default:"false" help:"Force eviction even when models have active API calls (default: false for safety)" group:"backends"`
	LRUEvictionMaxRetries int `env:"LOCALAI_LRU_EVICTION_MAX_RETRIES,LRU_EVICTION_MAX_RETRIES" default:"30" help:"Maximum number of retries when waiting for busy models to become idle before eviction (default: 30)" group:"backends"`
	LRUEvictionRetryInterval string `env:"LOCALAI_LRU_EVICTION_RETRY_INTERVAL,LRU_EVICTION_RETRY_INTERVAL" default:"1s" help:"Interval between retries when waiting for busy models to become idle (e.g., 1s, 2s) (default: 1s)" group:"backends"`
	Federated bool `env:"LOCALAI_FEDERATED,FEDERATED" help:"Enable federated instance" group:"federated"`
	DisableGalleryEndpoint bool `env:"LOCALAI_DISABLE_GALLERY_ENDPOINT,DISABLE_GALLERY_ENDPOINT" help:"Disable the gallery endpoints" group:"api"`
	MachineTag string `env:"LOCALAI_MACHINE_TAG,MACHINE_TAG" help:"Add Machine-Tag header to each response which is useful to track the machine in the P2P network" group:"api"`
	LoadToMemory []string `env:"LOCALAI_LOAD_TO_MEMORY,LOAD_TO_MEMORY" help:"A list of models to load into memory at startup" group:"models"`

	EnableTracing bool `env:"LOCALAI_ENABLE_TRACING,ENABLE_TRACING" help:"Enable API tracing" group:"api"`
	TracingMaxItems int `env:"LOCALAI_TRACING_MAX_ITEMS" default:"1024" help:"Maximum number of traces to keep" group:"api"`

	AgentJobRetentionDays int `env:"LOCALAI_AGENT_JOB_RETENTION_DAYS,AGENT_JOB_RETENTION_DAYS" default:"30" help:"Number of days to keep agent job history (default: 30)" group:"api"`

	Version bool
}
// Run wires up a LocalAI application from the parsed CLI flags, then
// starts the HTTP API server and, when a P2P token is configured, the
// P2P services. It blocks serving HTTP until the server exits.
//
// Fixes over the previous version:
//   - the EnableTracing option was appended twice (duplicated if-block);
//   - malformed --external-grpc-backends entries without a ':' caused a
//     slice-bounds panic (strings.IndexByte returned -1);
//   - os.MkdirAll errors were silently discarded.
func (r *RunCMD) Run(ctx *cliContext.Context) error {
	// `--version` short-circuits everything else.
	if r.Version {
		fmt.Println(internal.Version)
		return nil
	}

	// Ensure the storage directories exist before anything tries to use them.
	if err := os.MkdirAll(r.BackendsPath, 0750); err != nil {
		return fmt.Errorf("creating backends path %q: %w", r.BackendsPath, err)
	}
	if err := os.MkdirAll(r.ModelsPath, 0750); err != nil {
		return fmt.Errorf("creating models path %q: %w", r.ModelsPath, err)
	}

	systemState, err := system.GetSystemState(
		system.WithBackendSystemPath(r.BackendsSystemPath),
		system.WithModelPath(r.ModelsPath),
		system.WithBackendPath(r.BackendsPath),
	)
	if err != nil {
		return err
	}

	// Base application options derived directly from the flags.
	opts := []config.AppOption{
		config.WithContext(context.Background()),
		config.WithConfigFile(r.ModelsConfigFile),
		config.WithJSONStringPreload(r.PreloadModels),
		config.WithYAMLConfigPreload(r.PreloadModelsConfig),
		config.WithSystemState(systemState),
		config.WithContextSize(r.ContextSize),
		config.WithDebug(ctx.Debug || (ctx.LogLevel != nil && *ctx.LogLevel == "debug")),
		config.WithGeneratedContentDir(r.GeneratedContentPath),
		config.WithUploadDir(r.UploadPath),
		config.WithDynamicConfigDir(r.LocalaiConfigDir),
		config.WithDynamicConfigDirPollInterval(r.LocalaiConfigDirPollInterval),
		config.WithF16(r.F16),
		config.WithStringGalleries(r.Galleries),
		config.WithBackendGalleries(r.BackendGalleries),
		config.WithCors(r.CORS),
		config.WithCorsAllowOrigins(r.CORSAllowOrigins),
		config.WithCsrf(r.CSRF),
		config.WithThreads(r.Threads),
		config.WithUploadLimitMB(r.UploadLimit),
		config.WithApiKeys(r.APIKeys),
		config.WithModelsURL(append(r.Models, r.ModelArgs...)...),
		config.WithExternalBackends(r.ExternalBackends...),
		config.WithOpaqueErrors(r.OpaqueErrors),
		config.WithEnforcedPredownloadScans(!r.DisablePredownloadScan),
		config.WithSubtleKeyComparison(r.UseSubtleKeyComparison),
		config.WithDisableApiKeyRequirementForHttpGet(r.DisableApiKeyRequirementForHttpGet),
		config.WithHttpGetExemptedEndpoints(r.HttpGetExemptedEndpoints),
		config.WithP2PNetworkID(r.Peer2PeerNetworkID),
		config.WithLoadToMemory(r.LoadToMemory),
		config.WithMachineTag(r.MachineTag),
		config.WithAPIAddress(r.Address),
		config.WithAgentJobRetentionDays(r.AgentJobRetentionDays),
		config.WithTunnelCallback(func(tunnels []string) {
			tunnelEnvVar := strings.Join(tunnels, ",")
			// TODO: this is very specific to llama.cpp, we should have a more generic way to set the environment variable
			os.Setenv("LLAMACPP_GRPC_SERVERS", tunnelEnvVar)
			xlog.Debug("setting LLAMACPP_GRPC_SERVERS", "value", tunnelEnvVar)
		}),
	}

	if r.DisableMetricsEndpoint {
		opts = append(opts, config.DisableMetricsEndpoint)
	}

	if r.DisableRuntimeSettings {
		opts = append(opts, config.DisableRuntimeSettings)
	}

	// NOTE: this option was previously appended twice by an accidentally
	// duplicated if-block; append it exactly once.
	if r.EnableTracing {
		opts = append(opts, config.EnableTracing)
	}
	opts = append(opts, config.WithTracingMaxItems(r.TracingMaxItems))

	token := ""
	if r.Peer2Peer || r.Peer2PeerToken != "" {
		xlog.Info("P2P mode enabled")
		token = r.Peer2PeerToken
		if token == "" {
			// If no token is provided, and p2p is enabled,
			// we generate one and wait for the user to pick up the token (this is for interactive)
			xlog.Info("No token provided, generating one")
			token = p2p.GenerateToken(r.Peer2PeerDHTInterval, r.Peer2PeerOTPInterval)
			xlog.Info("Generated Token:")
			fmt.Println(token)

			xlog.Info("To use the token, you can run the following command in another node or terminal:")
			fmt.Printf("export TOKEN=\"%s\"\nlocal-ai worker p2p-llama-cpp-rpc\n", token)
		}
		opts = append(opts, config.WithP2PToken(token))
	}

	if r.Federated {
		opts = append(opts, config.EnableFederated)
	}

	idleWatchDog := r.EnableWatchdogIdle
	busyWatchDog := r.EnableWatchdogBusy

	if r.DisableWebUI {
		opts = append(opts, config.DisableWebUI)
	}

	if r.DisableGalleryEndpoint {
		opts = append(opts, config.DisableGalleryEndpoint)
	}

	if idleWatchDog || busyWatchDog {
		opts = append(opts, config.EnableWatchDog)
		if idleWatchDog {
			opts = append(opts, config.EnableWatchDogIdleCheck)
			dur, err := time.ParseDuration(r.WatchdogIdleTimeout)
			if err != nil {
				return err
			}
			opts = append(opts, config.SetWatchDogIdleTimeout(dur))
		}
		if busyWatchDog {
			opts = append(opts, config.EnableWatchDogBusyCheck)
			dur, err := time.ParseDuration(r.WatchdogBusyTimeout)
			if err != nil {
				return err
			}
			opts = append(opts, config.SetWatchDogBusyTimeout(dur))
		}
	}

	// Handle memory reclaimer (uses GPU VRAM if available, otherwise RAM)
	if r.EnableMemoryReclaimer {
		opts = append(opts, config.WithMemoryReclaimer(true, r.MemoryReclaimerThreshold))
	}

	if r.ParallelRequests {
		opts = append(opts, config.EnableParallelBackendRequests)
	}

	// Handle max active backends (LRU eviction)
	// MaxActiveBackends takes precedence over SingleActiveBackend
	if r.MaxActiveBackends > 0 {
		opts = append(opts, config.SetMaxActiveBackends(r.MaxActiveBackends))
	} else if r.SingleActiveBackend {
		// Backward compatibility: --single-active-backend is equivalent to --max-active-backends=1
		opts = append(opts, config.EnableSingleBackend)
	}

	// Handle LRU eviction settings
	if r.ForceEvictionWhenBusy {
		opts = append(opts, config.WithForceEvictionWhenBusy(true))
	}
	if r.LRUEvictionMaxRetries > 0 {
		opts = append(opts, config.WithLRUEvictionMaxRetries(r.LRUEvictionMaxRetries))
	}
	if r.LRUEvictionRetryInterval != "" {
		dur, err := time.ParseDuration(r.LRUEvictionRetryInterval)
		if err != nil {
			return fmt.Errorf("invalid LRU eviction retry interval: %w", err)
		}
		opts = append(opts, config.WithLRUEvictionRetryInterval(dur))
	}

	// Each entry is "<backend-name>:<uri>"; split on the first colon.
	// Entries without a colon are skipped with a warning instead of
	// panicking on an out-of-range slice index.
	for _, v := range r.ExternalGRPCBackends {
		backend, uri, found := strings.Cut(v, ":")
		if !found {
			xlog.Warn("skipping malformed external gRPC backend entry, expected <name>:<uri>", "entry", v)
			continue
		}
		opts = append(opts, config.WithExternalBackend(backend, uri))
	}

	if r.AutoloadGalleries {
		opts = append(opts, config.EnableGalleriesAutoload)
	}

	if r.AutoloadBackendGalleries {
		opts = append(opts, config.EnableBackendGalleriesAutoload)
	}

	// In preload-only mode we run the startup tasks (model/backend
	// preloading) without serving the API; useful for multi-node setups.
	if r.PreloadBackendOnly {
		_, err := application.New(opts...)
		return err
	}

	app, err := application.New(opts...)
	if err != nil {
		return fmt.Errorf("failed basic startup tasks with error %s", err.Error())
	}

	appHTTP, err := http.API(app)
	if err != nil {
		xlog.Error("error during HTTP App construction", "error", err)
		return err
	}

	xlog.Info("LocalAI is started and running", "address", r.Address)

	if token != "" {
		if err := app.StartP2P(); err != nil {
			return err
		}
	}

	// Stop all gRPC backend processes on graceful termination.
	signals.RegisterGracefulTerminationHandler(func() {
		if err := app.ModelLoader().StopAllGRPC(); err != nil {
			xlog.Error("error while stopping all grpc backends", "error", err)
		}
	})

	return appHTTP.Start(r.Address)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/backends.go | core/cli/backends.go | package cli
import (
"context"
"encoding/json"
"fmt"
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/core/services"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/xlog"
"github.com/schollz/progressbar/v3"
)
// BackendsCMDFlags are the flags shared by every `local-ai backends`
// subcommand (gallery list, local backend paths).
type BackendsCMDFlags struct {
	BackendGalleries string `env:"LOCALAI_BACKEND_GALLERIES,BACKEND_GALLERIES" help:"JSON list of backend galleries" group:"backends" default:"${backends}"`
	BackendsPath string `env:"LOCALAI_BACKENDS_PATH,BACKENDS_PATH" type:"path" default:"${basepath}/backends" help:"Path containing backends used for inferencing" group:"storage"`
	BackendsSystemPath string `env:"LOCALAI_BACKENDS_SYSTEM_PATH,BACKEND_SYSTEM_PATH" type:"path" default:"/var/lib/local-ai/backends" help:"Path containing system backends used for inferencing" group:"backends"`
}

// BackendsList implements `local-ai backends list`.
type BackendsList struct {
	BackendsCMDFlags `embed:""`
}

// BackendsInstall implements `local-ai backends install`.
// BackendArgs is a gallery reference or URL; Name and Alias are optional.
type BackendsInstall struct {
	BackendArgs string `arg:"" optional:"" name:"backend" help:"Backend configuration URL to load"`
	Name string `arg:"" optional:"" name:"name" help:"Name of the backend"`
	Alias string `arg:"" optional:"" name:"alias" help:"Alias of the backend"`

	BackendsCMDFlags `embed:""`
}

// BackendsUninstall implements `local-ai backends uninstall` for one or
// more backend names.
type BackendsUninstall struct {
	BackendArgs []string `arg:"" name:"backends" help:"Backend names to uninstall"`
	BackendsCMDFlags `embed:""`
}

// BackendsCMD groups the backend management subcommands; `list` is the
// default when no subcommand is given.
type BackendsCMD struct {
	List BackendsList `cmd:"" help:"List the backends available in your galleries" default:"withargs"`
	Install BackendsInstall `cmd:"" help:"Install a backend from the gallery"`
	Uninstall BackendsUninstall `cmd:"" help:"Uninstall a backend"`
}
// Run prints every backend known to the configured galleries, marking the
// ones already installed on this system with a "*" and "(installed)".
// A gallery JSON that fails to parse is logged but does not abort listing.
func (bl *BackendsList) Run(ctx *cliContext.Context) error {
	var registries []config.Gallery
	if err := json.Unmarshal([]byte(bl.BackendGalleries), &registries); err != nil {
		xlog.Error("unable to load galleries", "error", err)
	}

	state, err := system.GetSystemState(
		system.WithBackendSystemPath(bl.BackendsSystemPath),
		system.WithBackendPath(bl.BackendsPath),
	)
	if err != nil {
		return err
	}

	available, err := gallery.AvailableBackends(registries, state)
	if err != nil {
		return err
	}

	for _, b := range available {
		if b.Installed {
			fmt.Printf(" * %s@%s (installed)\n", b.Gallery.Name, b.Name)
			continue
		}
		fmt.Printf(" - %s@%s\n", b.Gallery.Name, b.Name)
	}
	return nil
}
// Run installs a single backend from the configured galleries, showing a
// textual progress bar while the download is in flight. A gallery JSON
// that fails to parse is logged but does not abort the install attempt.
func (bi *BackendsInstall) Run(ctx *cliContext.Context) error {
	var registries []config.Gallery
	if err := json.Unmarshal([]byte(bi.BackendGalleries), &registries); err != nil {
		xlog.Error("unable to load galleries", "error", err)
	}

	state, err := system.GetSystemState(
		system.WithBackendSystemPath(bi.BackendsSystemPath),
		system.WithBackendPath(bi.BackendsPath),
	)
	if err != nil {
		return err
	}

	// 1000 steps so percentages map to 0.1% granularity on the bar.
	bar := progressbar.NewOptions(
		1000,
		progressbar.OptionSetDescription(fmt.Sprintf("downloading backend %s", bi.BackendArgs)),
		progressbar.OptionShowBytes(false),
		progressbar.OptionClearOnFinish(),
	)
	onProgress := func(fileName string, current string, total string, percentage float64) {
		step := int(percentage * 10)
		if err := bar.Set(step); err != nil {
			xlog.Error("error while updating progress bar", "error", err, "filename", fileName, "value", step)
		}
	}

	loader := model.NewModelLoader(state)
	return services.InstallExternalBackend(context.Background(), registries, state, loader, onProgress, bi.BackendArgs, bi.Name, bi.Alias)
}
// Run removes each named backend from the local system, printing a
// confirmation per backend and stopping at the first failure.
//
// The system state is now built once instead of being rebuilt inside the
// loop on every iteration (the paths it is derived from never change).
func (bu *BackendsUninstall) Run(ctx *cliContext.Context) error {
	systemState, err := system.GetSystemState(
		system.WithBackendSystemPath(bu.BackendsSystemPath),
		system.WithBackendPath(bu.BackendsPath),
	)
	if err != nil {
		return err
	}

	for _, backendName := range bu.BackendArgs {
		xlog.Info("uninstalling backend", "backend", backendName)

		if err := gallery.DeleteBackendFromSystem(systemState, backendName); err != nil {
			return err
		}

		fmt.Printf("Backend %s uninstalled successfully\n", backendName)
	}
	return nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/explorer.go | core/cli/explorer.go | package cli
import (
"context"
"time"
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/explorer"
"github.com/mudler/LocalAI/core/http"
"github.com/mudler/LocalAI/pkg/signals"
"github.com/mudler/xlog"
)
// ExplorerCMD holds the flags for `local-ai explorer`, which serves the
// p2p network explorer UI backed by a JSON pool database. WithSync runs
// network discovery alongside the HTTP server; OnlySync runs discovery
// alone without serving HTTP.
type ExplorerCMD struct {
	Address string `env:"LOCALAI_ADDRESS,ADDRESS" default:":8080" help:"Bind address for the API server" group:"api"`
	PoolDatabase string `env:"LOCALAI_POOL_DATABASE,POOL_DATABASE" default:"explorer.json" help:"Path to the pool database" group:"api"`
	ConnectionTimeout string `env:"LOCALAI_CONNECTION_TIMEOUT,CONNECTION_TIMEOUT" default:"2m" help:"Connection timeout for the explorer" group:"api"`
	ConnectionErrorThreshold int `env:"LOCALAI_CONNECTION_ERROR_THRESHOLD,CONNECTION_ERROR_THRESHOLD" default:"3" help:"Connection failure threshold for the explorer" group:"api"`

	WithSync bool `env:"LOCALAI_WITH_SYNC,WITH_SYNC" default:"false" help:"Enable sync with the network" group:"api"`
	OnlySync bool `env:"LOCALAI_ONLY_SYNC,ONLY_SYNC" default:"false" help:"Only sync with the network" group:"api"`
}
// Run starts the p2p explorer. Depending on flags it either runs only the
// discovery sync loop (OnlySync), or serves the explorer HTTP UI —
// optionally with a background discovery goroutine (WithSync). The HTTP
// server is shut down gracefully (10s budget) on termination signals.
func (e *ExplorerCMD) Run(ctx *cliContext.Context) error {
	db, err := explorer.NewDatabase(e.PoolDatabase)
	if err != nil {
		return err
	}

	timeout, err := time.ParseDuration(e.ConnectionTimeout)
	if err != nil {
		return err
	}

	if e.WithSync {
		discovery := explorer.NewDiscoveryServer(db, timeout, e.ConnectionErrorThreshold)
		go discovery.Start(context.Background(), true)
	}

	if e.OnlySync {
		discovery := explorer.NewDiscoveryServer(db, timeout, e.ConnectionErrorThreshold)
		return discovery.Start(context.Background(), false)
	}

	srv := http.Explorer(db)
	signals.RegisterGracefulTerminationHandler(func() {
		shutdownCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
		defer cancel()
		if err := srv.Shutdown(shutdownCtx); err != nil {
			xlog.Error("error during shutdown", "error", err)
		}
	})

	return srv.Start(e.Address)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/worker/worker_llamacpp.go | core/cli/worker/worker_llamacpp.go | package worker
import (
"context"
"encoding/json"
"errors"
"fmt"
"os"
"path/filepath"
"strings"
"syscall"
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/config"
"github.com/mudler/LocalAI/core/gallery"
"github.com/mudler/LocalAI/pkg/model"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/xlog"
)
// LLamaCPP implements `local-ai worker llama-cpp-rpc`: it exec()s the
// llama.cpp RPC server from an installed backend, passing through any
// extra arguments from WorkerFlags.
type LLamaCPP struct {
	WorkerFlags `embed:""`
}

const (
	// llamaCPPRPCBinaryName is the RPC server binary shipped inside the
	// llama-cpp backend directory.
	llamaCPPRPCBinaryName = "llama-cpp-rpc-server"
	// llamaCPPGalleryName is the backend's name in the gallery / on disk.
	llamaCPPGalleryName = "llama-cpp"
)
// findLLamaCPPBackend locates the llama-cpp-rpc-server binary inside the
// installed llama-cpp backend, installing the backend from the given
// galleries (JSON list) if it is not present yet. It returns the absolute
// path of the RPC server binary.
//
// Fixes over the previous version:
//   - after a successful on-demand install, `backend` was still the zero
//     value from the failed Get(), so RunFile was always empty — the
//     backends are now re-listed and the entry re-fetched;
//   - the empty-path check compared filepath.Dir(backend.RunFile) with "",
//     which can never match because filepath.Dir("") returns "."; the
//     check is now performed on RunFile itself before deriving the path.
func findLLamaCPPBackend(galleries string, systemState *system.SystemState) (string, error) {
	backends, err := gallery.ListSystemBackends(systemState)
	if err != nil {
		xlog.Warn("Failed listing system backends", "error", err)
		return "", err
	}
	xlog.Debug("System backends", "backends", backends)

	backend, ok := backends.Get(llamaCPPGalleryName)
	if !ok {
		ml := model.NewModelLoader(systemState)
		var gals []config.Gallery
		if err := json.Unmarshal([]byte(galleries), &gals); err != nil {
			xlog.Error("failed loading galleries", "error", err)
			return "", err
		}

		if err := gallery.InstallBackendFromGallery(context.Background(), gals, systemState, ml, llamaCPPGalleryName, nil, true); err != nil {
			xlog.Error("llama-cpp backend not found, failed to install it", "error", err)
			return "", err
		}

		// Re-list and re-fetch: the pre-install lookup left `backend`
		// zero-valued, so its RunFile would be empty.
		backends, err = gallery.ListSystemBackends(systemState)
		if err != nil {
			return "", err
		}
		backend, ok = backends.Get(llamaCPPGalleryName)
		if !ok {
			return "", errors.New("llama-cpp backend not found after install")
		}
	}

	if backend.RunFile == "" {
		return "", errors.New("llama-cpp backend not found, install it first")
	}

	return filepath.Join(filepath.Dir(backend.RunFile), llamaCPPRPCBinaryName), nil
}
// Run replaces the current process with the llama-cpp-rpc-server binary
// (via syscall.Exec), forwarding the space-separated extra arguments and
// the current environment. It requires the backend to be installed (it is
// installed on demand by findLLamaCPPBackend).
func (r *LLamaCPP) Run(ctx *cliContext.Context) error {
	// The command requires the `--` separator plus server arguments.
	if len(os.Args) < 4 {
		return fmt.Errorf("usage: local-ai worker llama-cpp-rpc -- <llama-rpc-server-args>")
	}

	systemState, err := system.GetSystemState(
		system.WithBackendPath(r.BackendsPath),
		system.WithBackendSystemPath(r.BackendsSystemPath),
	)
	if err != nil {
		return err
	}

	binary, err := findLLamaCPPBackend(r.BackendGalleries, systemState)
	if err != nil {
		return err
	}

	// argv[0] is the binary itself, followed by the user-supplied args.
	execArgs := append([]string{binary}, strings.Split(r.ExtraLLamaCPPArgs, " ")...)
	return syscall.Exec(binary, execArgs, os.Environ())
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/worker/worker.go | core/cli/worker/worker.go | package worker
// WorkerFlags are the flags shared by both worker subcommands: backend
// paths/galleries plus raw extra arguments forwarded to the
// llama-cpp-rpc-server binary.
type WorkerFlags struct {
	BackendsPath string `env:"LOCALAI_BACKENDS_PATH,BACKENDS_PATH" type:"path" default:"${basepath}/backends" help:"Path containing backends used for inferencing" group:"backends"`
	BackendGalleries string `env:"LOCALAI_BACKEND_GALLERIES,BACKEND_GALLERIES" help:"JSON list of backend galleries" group:"backends" default:"${backends}"`
	BackendsSystemPath string `env:"LOCALAI_BACKENDS_SYSTEM_PATH,BACKEND_SYSTEM_PATH" type:"path" default:"/var/lib/local-ai/backends" help:"Path containing system backends used for inferencing" group:"backends"`
	ExtraLLamaCPPArgs string `name:"llama-cpp-args" env:"LOCALAI_EXTRA_LLAMA_CPP_ARGS,EXTRA_LLAMA_CPP_ARGS" help:"Extra arguments to pass to llama-cpp-rpc-server"`
}

// Worker groups the `local-ai worker` subcommands: a P2P worker (token
// required) and a standalone llama.cpp RPC worker.
type Worker struct {
	P2P P2P `cmd:"" name:"p2p-llama-cpp-rpc" help:"Starts a LocalAI llama.cpp worker in P2P mode (requires a token)"`
	LLamaCPP LLamaCPP `cmd:"" name:"llama-cpp-rpc" help:"Starts a llama.cpp worker in standalone mode"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/worker/worker_p2p.go | core/cli/worker/worker_p2p.go | package worker
import (
"context"
"fmt"
"os"
"os/exec"
"strings"
"time"
cliContext "github.com/mudler/LocalAI/core/cli/context"
"github.com/mudler/LocalAI/core/p2p"
"github.com/mudler/LocalAI/pkg/signals"
"github.com/mudler/LocalAI/pkg/system"
"github.com/mudler/xlog"
"github.com/phayes/freeport"
)
// P2P holds the flags for `local-ai worker p2p-llama-cpp-rpc`. Token is
// mandatory. With NoRunner set, the command only exposes an externally
// managed llama-cpp-rpc-server (at RunnerAddress:RunnerPort) over the P2P
// network instead of starting one itself.
type P2P struct {
	WorkerFlags `embed:""`
	Token string `env:"LOCALAI_TOKEN,LOCALAI_P2P_TOKEN,TOKEN" help:"P2P token to use"`
	NoRunner bool `env:"LOCALAI_NO_RUNNER,NO_RUNNER" help:"Do not start the llama-cpp-rpc-server"`
	RunnerAddress string `env:"LOCALAI_RUNNER_ADDRESS,RUNNER_ADDRESS" help:"Address of the llama-cpp-rpc-server"`
	RunnerPort string `env:"LOCALAI_RUNNER_PORT,RUNNER_PORT" help:"Port of the llama-cpp-rpc-server"`
	Peer2PeerNetworkID string `env:"LOCALAI_P2P_NETWORK_ID,P2P_NETWORK_ID" help:"Network ID for P2P mode, can be set arbitrarly by the user for grouping a set of instances" group:"p2p"`
}
// Run starts a llama.cpp RPC worker exposed over the P2P network.
//
// Two modes:
//   - NoRunner: only the P2P tunnel is exposed; the user is expected to
//     run llama-cpp-rpc-server themselves at the announced address/port.
//   - default: a goroutine launches llama-cpp-rpc-server on a free local
//     port and restarts it in a loop whenever the process exits.
//
// The function never returns normally: it sleeps forever until a
// termination signal cancels the tunnel context via the registered
// handler (and the process exits).
func (r *P2P) Run(ctx *cliContext.Context) error {
	systemState, err := system.GetSystemState(
		system.WithBackendPath(r.BackendsPath),
		system.WithBackendSystemPath(r.BackendsSystemPath),
	)
	if err != nil {
		return err
	}

	// Check if the token is set
	// as we always need it.
	if r.Token == "" {
		return fmt.Errorf("Token is required")
	}

	// Pick a free local port for the runner (may be overridden below in
	// NoRunner mode).
	port, err := freeport.GetFreePort()
	if err != nil {
		return err
	}

	address := "127.0.0.1"

	c, cancel := context.WithCancel(context.Background())
	defer cancel()

	if r.NoRunner {
		// Let override which port and address to bind if the user
		// configure the llama-cpp service on its own
		p := fmt.Sprint(port)
		if r.RunnerAddress != "" {
			address = r.RunnerAddress
		}
		if r.RunnerPort != "" {
			p = r.RunnerPort
		}

		_, err = p2p.ExposeService(c, address, p, r.Token, p2p.NetworkID(r.Peer2PeerNetworkID, p2p.WorkerID))
		if err != nil {
			return err
		}
		xlog.Info("You need to start llama-cpp-rpc-server", "address", address, "port", p)
	} else {
		// Start llama.cpp directly from the version we have pre-packaged
		// Restart loop: whenever the server process exits, it is located
		// and spawned again. Only a failure to find the binary stops the loop.
		go func() {
			for {
				xlog.Info("Starting llama-cpp-rpc-server", "address", address, "port", port)
				grpcProcess, err := findLLamaCPPBackend(r.BackendGalleries, systemState)
				if err != nil {
					xlog.Error("Failed to find llama-cpp-rpc-server", "error", err)
					return
				}

				var extraArgs []string

				if r.ExtraLLamaCPPArgs != "" {
					extraArgs = strings.Split(r.ExtraLLamaCPPArgs, " ")
				}

				args := append([]string{"--host", address, "--port", fmt.Sprint(port)}, extraArgs...)
				xlog.Debug("Starting llama-cpp-rpc-server", "address", address, "port", port, "args", args, "argCount", len(args))

				cmd := exec.Command(
					grpcProcess, args...,
				)

				cmd.Env = os.Environ()

				// Both streams go to stdout so the worker's output stays
				// in one place.
				cmd.Stderr = os.Stdout
				cmd.Stdout = os.Stdout

				if err := cmd.Start(); err != nil {
					xlog.Error("Failed to start llama-cpp-rpc-server", "error", err, "grpcProcess", grpcProcess, "args", args)
				}

				cmd.Wait()
			}
		}()

		_, err = p2p.ExposeService(c, address, fmt.Sprint(port), r.Token, p2p.NetworkID(r.Peer2PeerNetworkID, p2p.WorkerID))
		if err != nil {
			return err
		}
	}

	signals.RegisterGracefulTerminationHandler(func() {
		cancel()
	})

	// Block forever; termination is handled by the signal handler above.
	for {
		time.Sleep(1 * time.Second)
	}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/cli/context/context.go | core/cli/context/context.go | package cliContext
// Context carries the global CLI flags embedded in every command:
// logging verbosity and output format. Debug is kept for backward
// compatibility and is superseded by --log-level=debug.
type Context struct {
	Debug bool `env:"LOCALAI_DEBUG,DEBUG" default:"false" hidden:"" help:"DEPRECATED, use --log-level=debug instead. Enable debug logging"`
	LogLevel *string `env:"LOCALAI_LOG_LEVEL" enum:"error,warn,info,debug,trace" help:"Set the level of logs to output [${enum}]"`
	LogFormat *string `env:"LOCALAI_LOG_FORMAT" default:"default" enum:"default,text,json" help:"Set the format of logs to output [${enum}]"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/p2p/p2p.go | core/p2p/p2p.go | package p2p
import (
"context"
"errors"
"fmt"
"io"
"net"
"os"
"strings"
"sync"
"time"
"github.com/ipfs/go-log"
"github.com/libp2p/go-libp2p/core/peer"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/LocalAI/pkg/utils"
"github.com/mudler/edgevpn/pkg/config"
"github.com/mudler/edgevpn/pkg/node"
"github.com/mudler/edgevpn/pkg/protocol"
"github.com/mudler/edgevpn/pkg/services"
"github.com/mudler/edgevpn/pkg/types"
eutils "github.com/mudler/edgevpn/pkg/utils"
zlog "github.com/mudler/xlog"
"github.com/multiformats/go-multiaddr"
"github.com/phayes/freeport"
"github.com/mudler/edgevpn/pkg/logger"
)
func generateNewConnectionData(DHTInterval, OTPInterval int) *node.YAMLConnectionConfig {
maxMessSize := 20 << 20 // 20MB
keyLength := 43
if DHTInterval == 0 {
DHTInterval = 30
}
if OTPInterval == 0 {
OTPInterval = 9000
}
return &node.YAMLConnectionConfig{
MaxMessageSize: maxMessSize,
RoomName: eutils.RandStringRunes(keyLength),
Rendezvous: eutils.RandStringRunes(keyLength),
MDNS: eutils.RandStringRunes(keyLength),
OTP: node.OTP{
DHT: node.OTPConfig{
Key: eutils.RandStringRunes(keyLength),
Interval: DHTInterval,
Length: keyLength,
},
Crypto: node.OTPConfig{
Key: eutils.RandStringRunes(keyLength),
Interval: OTPInterval,
Length: keyLength,
},
},
}
}
func GenerateToken(DHTInterval, OTPInterval int) string {
// Generates a new config and exit
return generateNewConnectionData(DHTInterval, OTPInterval).Base64()
}
func nodeID(s string) string {
hostname, _ := os.Hostname()
return fmt.Sprintf("%s-%s", hostname, s)
}
func nodeAnnounce(ctx context.Context, node *node.Node) {
ledger, _ := node.Ledger()
// Announce ourselves so nodes accepts our connection
ledger.Announce(
ctx,
10*time.Second,
func() {
updatedMap := map[string]interface{}{}
updatedMap[node.Host().ID().String()] = &types.User{
PeerID: node.Host().ID().String(),
Timestamp: time.Now().String(),
}
ledger.Add(protocol.UsersLedgerKey, updatedMap)
},
)
}
func proxyP2PConnection(ctx context.Context, node *node.Node, serviceID string, conn net.Conn) {
ledger, _ := node.Ledger()
// Retrieve current ID for ip in the blockchain
existingValue, found := ledger.GetKey(protocol.ServicesLedgerKey, serviceID)
service := &types.Service{}
existingValue.Unmarshal(service)
// If mismatch, update the blockchain
if !found {
zlog.Error("Service not found on blockchain")
conn.Close()
// ll.Debugf("service '%s' not found on blockchain", serviceID)
return
}
// Decode the Peer
d, err := peer.Decode(service.PeerID)
if err != nil {
zlog.Error("cannot decode peer")
conn.Close()
// ll.Debugf("could not decode peer '%s'", service.PeerID)
return
}
// Open a stream
stream, err := node.Host().NewStream(ctx, d, protocol.ServiceProtocol.ID())
if err != nil {
zlog.Error("cannot open stream peer", "error", err)
conn.Close()
// ll.Debugf("could not open stream '%s'", err.Error())
return
}
// ll.Debugf("(service %s) Redirecting", serviceID, l.Addr().String())
zlog.Info("Redirecting", "from", conn.LocalAddr().String(), "to", stream.Conn().RemoteMultiaddr().String())
closer := make(chan struct{}, 2)
go copyStream(closer, stream, conn)
go copyStream(closer, conn, stream)
<-closer
stream.Close()
conn.Close()
}
func allocateLocalService(ctx context.Context, node *node.Node, listenAddr, service string) error {
zlog.Info("Allocating service", "service", service, "address", listenAddr)
// Open local port for listening
l, err := net.Listen("tcp", listenAddr)
if err != nil {
zlog.Error("Error listening", "error", err)
return err
}
go func() {
<-ctx.Done()
l.Close()
}()
nodeAnnounce(ctx, node)
defer l.Close()
for {
select {
case <-ctx.Done():
return errors.New("context canceled")
default:
zlog.Debug("New for connection")
// Listen for an incoming connection.
conn, err := l.Accept()
if err != nil {
fmt.Println("Error accepting: ", err.Error())
continue
}
// Handle connections in a new goroutine, forwarding to the p2p service
go func() {
proxyP2PConnection(ctx, node, service, conn)
}()
}
}
}
// This is the main of the server (which keeps the env variable updated)
// This starts a goroutine that keeps LLAMACPP_GRPC_SERVERS updated with the discovered services
func ServiceDiscoverer(ctx context.Context, n *node.Node, token, servicesID string, discoveryFunc func(serviceID string, node schema.NodeData), allocate bool) error {
if servicesID == "" {
servicesID = defaultServicesID
}
tunnels, err := discoveryTunnels(ctx, n, token, servicesID, allocate)
if err != nil {
return err
}
// TODO: discoveryTunnels should return all the nodes that are available?
// In this way we updated availableNodes here instead of appending
// e.g. we have a LastSeen field in NodeData that is updated in discoveryTunnels
// each time the node is seen
// In this case the below function should be idempotent and just keep track of the nodes
go func() {
for {
select {
case <-ctx.Done():
zlog.Error("Discoverer stopped")
return
case tunnel := <-tunnels:
AddNode(servicesID, tunnel)
if discoveryFunc != nil {
discoveryFunc(servicesID, tunnel)
}
}
}
}()
return nil
}
func discoveryTunnels(ctx context.Context, n *node.Node, token, servicesID string, allocate bool) (chan schema.NodeData, error) {
tunnels := make(chan schema.NodeData)
ledger, err := n.Ledger()
if err != nil {
return nil, fmt.Errorf("getting the ledger: %w", err)
}
// get new services, allocate and return to the channel
// TODO:
// a function ensureServices that:
// - starts a service if not started, if the worker is Online
// - checks that workers are Online, if not cancel the context of allocateLocalService
// - discoveryTunnels should return all the nodes and addresses associated with it
// - the caller should take now care of the fact that we are always returning fresh information
go func() {
for {
select {
case <-ctx.Done():
zlog.Error("Discoverer stopped")
return
default:
time.Sleep(5 * time.Second)
data := ledger.LastBlock().Storage[servicesID]
if logLevel == logLevelDebug {
// We want to surface this debugging data only if p2p logging is set to debug
// (and not generally the whole application, as this can be really noisy)
zlog.Debug("Ledger data", "data", ledger.LastBlock().Storage)
}
for k, v := range data {
// New worker found in the ledger data as k (worker id)
nd := &schema.NodeData{}
if err := v.Unmarshal(nd); err != nil {
zlog.Error("cannot unmarshal node data")
continue
}
ensureService(ctx, n, nd, k, allocate)
muservice.Lock()
if _, ok := service[nd.Name]; ok {
tunnels <- service[nd.Name].NodeData
}
muservice.Unlock()
}
}
}
}()
return tunnels, err
}
type nodeServiceData struct {
NodeData schema.NodeData
CancelFunc context.CancelFunc
}
var service = map[string]nodeServiceData{}
var muservice sync.Mutex
func ensureService(ctx context.Context, n *node.Node, nd *schema.NodeData, sserv string, allocate bool) {
muservice.Lock()
defer muservice.Unlock()
nd.ServiceID = sserv
if ndService, found := service[nd.Name]; !found {
if !nd.IsOnline() {
// if node is offline and not present, do nothing
// Node nd.ID is offline
return
}
newCtxm, cancel := context.WithCancel(ctx)
if allocate {
// Start the service
port, err := freeport.GetFreePort()
if err != nil {
zlog.Error("Could not allocate a free port", "error", err, "node", nd.ID)
cancel()
return
}
tunnelAddress := fmt.Sprintf("127.0.0.1:%d", port)
nd.TunnelAddress = tunnelAddress
go allocateLocalService(newCtxm, n, tunnelAddress, sserv)
zlog.Debug("Starting service", "service", sserv, "address", tunnelAddress)
}
service[nd.Name] = nodeServiceData{
NodeData: *nd,
CancelFunc: cancel,
}
} else {
// Check if the service is still alive
// if not cancel the context
if !nd.IsOnline() && !ndService.NodeData.IsOnline() {
ndService.CancelFunc()
delete(service, nd.Name)
zlog.Info("Node is offline, deleting", "node", nd.ID)
} else if nd.IsOnline() {
// update last seen inside service
nd.TunnelAddress = ndService.NodeData.TunnelAddress
service[nd.Name] = nodeServiceData{
NodeData: *nd,
CancelFunc: ndService.CancelFunc,
}
}
}
}
// This is the P2P worker main
func ExposeService(ctx context.Context, host, port, token, servicesID string) (*node.Node, error) {
if servicesID == "" {
servicesID = defaultServicesID
}
llger := logger.New(log.LevelFatal)
nodeOpts, err := newNodeOpts(token)
if err != nil {
return nil, err
}
// generate a random string for the name
name := utils.RandString(10)
// Register the service
nodeOpts = append(nodeOpts,
services.RegisterService(llger, time.Duration(60)*time.Second, name, fmt.Sprintf("%s:%s", host, port))...)
n, err := node.New(nodeOpts...)
if err != nil {
return nil, fmt.Errorf("creating a new node: %w", err)
}
err = n.Start(ctx)
if err != nil {
return n, fmt.Errorf("creating a new node: %w", err)
}
ledger, err := n.Ledger()
if err != nil {
return n, fmt.Errorf("creating a new node: %w", err)
}
ledger.Announce(
ctx,
20*time.Second,
func() {
updatedMap := map[string]interface{}{}
updatedMap[name] = &schema.NodeData{
Name: name,
LastSeen: time.Now(),
ID: nodeID(name),
}
ledger.Add(servicesID, updatedMap)
},
)
return n, err
}
func NewNode(token string) (*node.Node, error) {
nodeOpts, err := newNodeOpts(token)
if err != nil {
return nil, err
}
n, err := node.New(nodeOpts...)
if err != nil {
return nil, fmt.Errorf("creating a new node: %w", err)
}
return n, nil
}
func newNodeOpts(token string) ([]node.Option, error) {
llger := logger.New(log.LevelFatal)
defaultInterval := 10 * time.Second
// TODO: move this up, expose more config options when creating a node
noDHT := os.Getenv("LOCALAI_P2P_DISABLE_DHT") == "true"
noLimits := os.Getenv("LOCALAI_P2P_ENABLE_LIMITS") != "true"
var listenMaddrs []string
var bootstrapPeers []string
laddrs := os.Getenv("LOCALAI_P2P_LISTEN_MADDRS")
if laddrs != "" {
listenMaddrs = strings.Split(laddrs, ",")
}
bootmaddr := os.Getenv("LOCALAI_P2P_BOOTSTRAP_PEERS_MADDRS")
if bootmaddr != "" {
bootstrapPeers = strings.Split(bootmaddr, ",")
}
dhtAnnounceMaddrs := stringsToMultiAddr(strings.Split(os.Getenv("LOCALAI_P2P_DHT_ANNOUNCE_MADDRS"), ","))
libp2ploglevel := os.Getenv("LOCALAI_P2P_LIB_LOGLEVEL")
if libp2ploglevel == "" {
libp2ploglevel = "fatal"
}
c := config.Config{
ListenMaddrs: listenMaddrs,
DHTAnnounceMaddrs: dhtAnnounceMaddrs,
Limit: config.ResourceLimit{
Enable: noLimits,
MaxConns: 100,
},
NetworkToken: token,
LowProfile: false,
LogLevel: logLevel,
Libp2pLogLevel: libp2ploglevel,
Ledger: config.Ledger{
SyncInterval: defaultInterval,
AnnounceInterval: defaultInterval,
},
NAT: config.NAT{
Service: true,
Map: true,
RateLimit: true,
RateLimitGlobal: 100,
RateLimitPeer: 100,
RateLimitInterval: defaultInterval,
},
Discovery: config.Discovery{
DHT: !noDHT,
MDNS: true,
Interval: 10 * time.Second,
BootstrapPeers: bootstrapPeers,
},
Connection: config.Connection{
HolePunch: true,
AutoRelay: true,
MaxConnections: 1000,
},
}
nodeOpts, _, err := c.ToOpts(llger)
if err != nil {
return nil, fmt.Errorf("parsing options: %w", err)
}
nodeOpts = append(nodeOpts, services.Alive(30*time.Second, 900*time.Second, 15*time.Minute)...)
return nodeOpts, nil
}
func stringsToMultiAddr(peers []string) []multiaddr.Multiaddr {
res := []multiaddr.Multiaddr{}
for _, p := range peers {
addr, err := multiaddr.NewMultiaddr(p)
if err != nil {
continue
}
res = append(res, addr)
}
return res
}
func copyStream(closer chan struct{}, dst io.Writer, src io.Reader) {
defer func() { closer <- struct{}{} }() // connection is closed, send signal to stop proxy
io.Copy(dst, src)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/p2p/node.go | core/p2p/node.go | package p2p
import (
"slices"
"strings"
"sync"
"github.com/mudler/LocalAI/core/schema"
)
const (
defaultServicesID = "services"
WorkerID = "worker"
)
var mu sync.Mutex
var nodes = map[string]map[string]schema.NodeData{}
func GetAvailableNodes(serviceID string) []schema.NodeData {
if serviceID == "" {
serviceID = defaultServicesID
}
mu.Lock()
defer mu.Unlock()
var availableNodes = []schema.NodeData{}
for _, v := range nodes[serviceID] {
availableNodes = append(availableNodes, v)
}
slices.SortFunc(availableNodes, func(a, b schema.NodeData) int {
return strings.Compare(a.ID, b.ID)
})
return availableNodes
}
func GetNode(serviceID, nodeID string) (schema.NodeData, bool) {
if serviceID == "" {
serviceID = defaultServicesID
}
mu.Lock()
defer mu.Unlock()
if _, ok := nodes[serviceID]; !ok {
return schema.NodeData{}, false
}
nd, exists := nodes[serviceID][nodeID]
return nd, exists
}
func AddNode(serviceID string, node schema.NodeData) {
if serviceID == "" {
serviceID = defaultServicesID
}
mu.Lock()
defer mu.Unlock()
if nodes[serviceID] == nil {
nodes[serviceID] = map[string]schema.NodeData{}
}
nodes[serviceID][node.ID] = node
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/p2p/federated_server.go | core/p2p/federated_server.go | package p2p
import (
"context"
"errors"
"fmt"
"io"
"net"
"github.com/mudler/LocalAI/core/schema"
"github.com/mudler/edgevpn/pkg/node"
"github.com/mudler/xlog"
)
func (f *FederatedServer) Start(ctx context.Context) error {
n, err := NewNode(f.p2ptoken)
if err != nil {
return fmt.Errorf("creating a new node: %w", err)
}
err = n.Start(ctx)
if err != nil {
return fmt.Errorf("creating a new node: %w", err)
}
if err := ServiceDiscoverer(ctx, n, f.p2ptoken, f.service, func(servicesID string, tunnel schema.NodeData) {
xlog.Debug("Discovered node", "node", tunnel.ID)
}, false); err != nil {
return err
}
return f.proxy(ctx, n)
}
func (fs *FederatedServer) proxy(ctx context.Context, node *node.Node) error {
xlog.Info("Allocating service", "service", fs.service, "address", fs.listenAddr)
// Open local port for listening
l, err := net.Listen("tcp", fs.listenAddr)
if err != nil {
xlog.Error("Error listening", "error", err)
return err
}
go func() {
<-ctx.Done()
l.Close()
}()
nodeAnnounce(ctx, node)
defer l.Close()
for {
select {
case <-ctx.Done():
return errors.New("context canceled")
default:
xlog.Debug("New connection", "address", l.Addr().String())
// Listen for an incoming connection.
conn, err := l.Accept()
if err != nil {
fmt.Println("Error accepting: ", err.Error())
continue
}
// Handle connections in a new goroutine, forwarding to the p2p service
go func() {
workerID := ""
if fs.workerTarget != "" {
workerID = fs.workerTarget
} else if fs.loadBalanced {
xlog.Debug("Load balancing request")
workerID = fs.SelectLeastUsedServer()
if workerID == "" {
xlog.Debug("Least used server not found, selecting random")
workerID = fs.RandomServer()
}
} else {
workerID = fs.RandomServer()
}
if workerID == "" {
xlog.Error("No available nodes yet")
fs.sendHTMLResponse(conn, 503, "Sorry, waiting for nodes to connect")
return
}
xlog.Debug("Selected node", "node", workerID)
nodeData, exists := GetNode(fs.service, workerID)
if !exists {
xlog.Error("Node not found", "node", workerID)
fs.sendHTMLResponse(conn, 404, "Node not found")
return
}
proxyP2PConnection(ctx, node, nodeData.ServiceID, conn)
if fs.loadBalanced {
fs.RecordRequest(workerID)
}
}()
}
}
}
// sendHTMLResponse sends a basic HTML response with a status code and a message.
// This is extracted to make the HTML content maintainable.
func (fs *FederatedServer) sendHTMLResponse(conn net.Conn, statusCode int, message string) {
defer conn.Close()
// Define the HTML content separately for easier maintenance.
htmlContent := fmt.Sprintf("<html><body><h1>%s</h1></body></html>\r\n", message)
// Create the HTTP response with dynamic status code and content.
response := fmt.Sprintf(
"HTTP/1.1 %d %s\r\n"+
"Content-Type: text/html\r\n"+
"Connection: close\r\n"+
"\r\n"+
"%s",
statusCode, getHTTPStatusText(statusCode), htmlContent,
)
// Write the response to the client connection.
_, writeErr := io.WriteString(conn, response)
if writeErr != nil {
xlog.Error("Error writing response to client", "error", writeErr)
}
}
// getHTTPStatusText returns a textual representation of HTTP status codes.
func getHTTPStatusText(statusCode int) string {
switch statusCode {
case 503:
return "Service Unavailable"
case 404:
return "Not Found"
case 200:
return "OK"
default:
return "Unknown Status"
}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/p2p/federated.go | core/p2p/federated.go | package p2p
import (
"fmt"
"math/rand/v2"
"sync"
"github.com/mudler/xlog"
)
const FederatedID = "federated"
func NetworkID(networkID, serviceID string) string {
if networkID != "" {
return fmt.Sprintf("%s_%s", networkID, serviceID)
}
return serviceID
}
type FederatedServer struct {
sync.Mutex
listenAddr, service, p2ptoken string
requestTable map[string]int
loadBalanced bool
workerTarget string
}
func NewFederatedServer(listenAddr, service, p2pToken string, loadBalanced bool, workerTarget string) *FederatedServer {
return &FederatedServer{
listenAddr: listenAddr,
service: service,
p2ptoken: p2pToken,
requestTable: map[string]int{},
loadBalanced: loadBalanced,
workerTarget: workerTarget,
}
}
func (fs *FederatedServer) RandomServer() string {
var tunnelAddresses []string
for _, v := range GetAvailableNodes(fs.service) {
if v.IsOnline() {
tunnelAddresses = append(tunnelAddresses, v.ID)
} else {
delete(fs.requestTable, v.ID) // make sure it's not tracked
xlog.Info("Node is offline", "node", v.ID)
}
}
if len(tunnelAddresses) == 0 {
return ""
}
return tunnelAddresses[rand.IntN(len(tunnelAddresses))]
}
func (fs *FederatedServer) syncTableStatus() {
fs.Lock()
defer fs.Unlock()
currentTunnels := make(map[string]struct{})
for _, v := range GetAvailableNodes(fs.service) {
if v.IsOnline() {
fs.ensureRecordExist(v.ID)
currentTunnels[v.ID] = struct{}{}
}
}
// delete tunnels that don't exist anymore
for t := range fs.requestTable {
if _, ok := currentTunnels[t]; !ok {
delete(fs.requestTable, t)
}
}
}
func (fs *FederatedServer) SelectLeastUsedServer() string {
fs.syncTableStatus()
fs.Lock()
defer fs.Unlock()
xlog.Debug("SelectLeastUsedServer()", "request_table", fs.requestTable)
// cycle over requestTable and find the entry with the lower number
// if there are multiple entries with the same number, select one randomly
// if there are no entries, return an empty string
var min int
var minKey string
for k, v := range fs.requestTable {
if min == 0 || v < min {
min = v
minKey = k
}
}
xlog.Debug("Selected tunnel", "tunnel", minKey, "requests_served", min, "request_table", fs.requestTable)
return minKey
}
func (fs *FederatedServer) RecordRequest(nodeID string) {
fs.Lock()
defer fs.Unlock()
// increment the counter for the nodeID in the requestTable
fs.requestTable[nodeID]++
xlog.Debug("Recording request", "request_table", fs.requestTable, "request", nodeID)
}
func (fs *FederatedServer) ensureRecordExist(nodeID string) {
// if the nodeID is not in the requestTable, add it with a counter of 0
_, ok := fs.requestTable[nodeID]
if !ok {
fs.requestTable[nodeID] = 0
}
xlog.Debug("Ensure record exists", "request_table", fs.requestTable, "request", nodeID)
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/p2p/p2p_common.go | core/p2p/p2p_common.go | package p2p
import (
"os"
"strings"
)
var logLevel = strings.ToLower(os.Getenv("LOCALAI_P2P_LOGLEVEL"))
const (
logLevelDebug = "debug"
logLevelInfo = "info"
)
func init() {
if logLevel == "" {
logLevel = logLevelInfo
}
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/clients/store.go | core/clients/store.go | package clients
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
)
// Define a struct to hold the store API client
type StoreClient struct {
BaseURL string
Client *http.Client
}
type SetRequest struct {
Keys [][]float32 `json:"keys"`
Values []string `json:"values"`
}
type GetRequest struct {
Keys [][]float32 `json:"keys"`
}
type GetResponse struct {
Keys [][]float32 `json:"keys"`
Values []string `json:"values"`
}
type DeleteRequest struct {
Keys [][]float32 `json:"keys"`
}
type FindRequest struct {
TopK int `json:"topk"`
Key []float32 `json:"key"`
}
type FindResponse struct {
Keys [][]float32 `json:"keys"`
Values []string `json:"values"`
Similarities []float32 `json:"similarities"`
}
// Constructor for StoreClient
func NewStoreClient(baseUrl string) *StoreClient {
return &StoreClient{
BaseURL: baseUrl,
Client: &http.Client{},
}
}
// Implement Set method
func (c *StoreClient) Set(req SetRequest) error {
return c.doRequest("stores/set", req)
}
// Implement Get method
func (c *StoreClient) Get(req GetRequest) (*GetResponse, error) {
body, err := c.doRequestWithResponse("stores/get", req)
if err != nil {
return nil, err
}
var resp GetResponse
err = json.Unmarshal(body, &resp)
if err != nil {
return nil, err
}
return &resp, nil
}
// Implement Delete method
func (c *StoreClient) Delete(req DeleteRequest) error {
return c.doRequest("stores/delete", req)
}
// Implement Find method
func (c *StoreClient) Find(req FindRequest) (*FindResponse, error) {
body, err := c.doRequestWithResponse("stores/find", req)
if err != nil {
return nil, err
}
var resp FindResponse
err = json.Unmarshal(body, &resp)
if err != nil {
return nil, err
}
return &resp, nil
}
// Helper function to perform a request without expecting a response body
func (c *StoreClient) doRequest(path string, data interface{}) error {
jsonData, err := json.Marshal(data)
if err != nil {
return err
}
req, err := http.NewRequest("POST", c.BaseURL+"/"+path, bytes.NewBuffer(jsonData))
if err != nil {
return err
}
req.Header.Set("Content-Type", "application/json")
resp, err := c.Client.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("API request to %s failed with status code %d", path, resp.StatusCode)
}
return nil
}
// Helper function to perform a request and parse the response body
func (c *StoreClient) doRequestWithResponse(path string, data interface{}) ([]byte, error) {
jsonData, err := json.Marshal(data)
if err != nil {
return nil, err
}
req, err := http.NewRequest("POST", c.BaseURL+"/"+path, bytes.NewBuffer(jsonData))
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", "application/json")
resp, err := c.Client.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("API request to %s failed with status code %d", path, resp.StatusCode)
}
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
return body, nil
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/schema/request.go | core/schema/request.go | package schema
// This file and type represent a generic request to LocalAI - as opposed to requests to LocalAI-specific endpoints, which live in localai.go
type LocalAIRequest interface {
ModelName(*string) string
}
// @Description BasicModelRequest contains the basic model request fields
type BasicModelRequest struct {
Model string `json:"model,omitempty" yaml:"model,omitempty"`
// TODO: Should this also include the following fields from the OpenAI side of the world?
// If so, changes should be made to core/http/middleware/request.go to match
// Context context.Context `json:"-"`
// Cancel context.CancelFunc `json:"-"`
}
func (bmr *BasicModelRequest) ModelName(s *string) string {
if s != nil {
bmr.Model = *s
}
return bmr.Model
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/schema/jina.go | core/schema/jina.go | package schema
// RerankRequest defines the structure of the request payload
type JINARerankRequest struct {
BasicModelRequest
Query string `json:"query"`
Documents []string `json:"documents"`
TopN *int `json:"top_n,omitempty"`
Backend string `json:"backend"`
}
// DocumentResult represents a single document result
type JINADocumentResult struct {
Index int `json:"index"`
Document JINAText `json:"document"`
RelevanceScore float64 `json:"relevance_score"`
}
// Text holds the text of the document
type JINAText struct {
Text string `json:"text"`
}
// RerankResponse defines the structure of the response payload
type JINARerankResponse struct {
Model string `json:"model"`
Usage JINAUsageInfo `json:"usage"`
Results []JINADocumentResult `json:"results"`
}
// UsageInfo holds information about usage of tokens
type JINAUsageInfo struct {
TotalTokens int `json:"total_tokens"`
PromptTokens int `json:"prompt_tokens"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/schema/prediction.go | core/schema/prediction.go | package schema
import (
"encoding/json"
"gopkg.in/yaml.v3"
)
// LogprobsValue represents the logprobs parameter which is a boolean.
// According to OpenAI API: true means return log probabilities, false/null means don't return them.
// The actual number of top logprobs per token is controlled by top_logprobs (0-5).
type LogprobsValue struct {
Enabled bool // true if logprobs should be returned
}
// UnmarshalJSON implements json.Unmarshaler to handle boolean
func (l *LogprobsValue) UnmarshalJSON(data []byte) error {
// Try to unmarshal as boolean
var b bool
if err := json.Unmarshal(data, &b); err == nil {
l.Enabled = b
return nil
}
// If it's null, set to false
var n *bool
if err := json.Unmarshal(data, &n); err == nil {
l.Enabled = false
return nil
}
// Try as integer for backward compatibility (treat > 0 as true)
var i int
if err := json.Unmarshal(data, &i); err == nil {
l.Enabled = i > 0
return nil
}
return json.Unmarshal(data, &l.Enabled)
}
// MarshalJSON implements json.Marshaler
func (l LogprobsValue) MarshalJSON() ([]byte, error) {
return json.Marshal(l.Enabled)
}
// UnmarshalYAML implements yaml.Unmarshaler to handle boolean
func (l *LogprobsValue) UnmarshalYAML(value *yaml.Node) error {
switch value.Kind {
case yaml.ScalarNode:
switch value.Tag {
case "!!bool":
var b bool
if err := value.Decode(&b); err != nil {
return err
}
l.Enabled = b
return nil
case "!!int":
// For backward compatibility, treat integer > 0 as true
var i int
if err := value.Decode(&i); err != nil {
return err
}
l.Enabled = i > 0
return nil
case "!!null":
l.Enabled = false
return nil
}
}
return value.Decode(&l.Enabled)
}
// IsEnabled returns true if logprobs should be returned
func (l *LogprobsValue) IsEnabled() bool {
return l.Enabled
}
// @Description PredictionOptions contains prediction parameters for model inference
type PredictionOptions struct {
// Also part of the OpenAI official spec
BasicModelRequest `yaml:",inline"`
// Also part of the OpenAI official spec
Language string `json:"language,omitempty" yaml:"language,omitempty"`
// Only for audio transcription
Translate bool `json:"translate,omitempty" yaml:"translate,omitempty"`
// Also part of the OpenAI official spec. use it for returning multiple results
N int `json:"n,omitempty" yaml:"n,omitempty"`
// Common options between all the API calls, part of the OpenAI spec
TopP *float64 `json:"top_p,omitempty" yaml:"top_p,omitempty"`
TopK *int `json:"top_k,omitempty" yaml:"top_k,omitempty"`
Temperature *float64 `json:"temperature,omitempty" yaml:"temperature,omitempty"`
Maxtokens *int `json:"max_tokens,omitempty" yaml:"max_tokens,omitempty"`
Echo bool `json:"echo,omitempty" yaml:"echo,omitempty"`
// Custom parameters - not present in the OpenAI API
Batch int `json:"batch,omitempty" yaml:"batch,omitempty"`
IgnoreEOS bool `json:"ignore_eos,omitempty" yaml:"ignore_eos,omitempty"`
RepeatPenalty float64 `json:"repeat_penalty,omitempty" yaml:"repeat_penalty,omitempty"`
RepeatLastN int `json:"repeat_last_n,omitempty" yaml:"repeat_last_n,omitempty"`
Keep int `json:"n_keep,omitempty" yaml:"n_keep,omitempty"`
FrequencyPenalty float64 `json:"frequency_penalty,omitempty" yaml:"frequency_penalty,omitempty"`
PresencePenalty float64 `json:"presence_penalty,omitempty" yaml:"presence_penalty,omitempty"`
TFZ *float64 `json:"tfz,omitempty" yaml:"tfz,omitempty"`
TypicalP *float64 `json:"typical_p,omitempty" yaml:"typical_p,omitempty"`
Seed *int `json:"seed,omitempty" yaml:"seed,omitempty"`
// OpenAI API logprobs parameters
// logprobs: boolean - if true, returns log probabilities of each output token
// top_logprobs: integer 0-20 - number of most likely tokens to return at each token position
Logprobs LogprobsValue `json:"logprobs,omitempty" yaml:"logprobs,omitempty"` // Whether to return log probabilities (true/false)
TopLogprobs *int `json:"top_logprobs,omitempty" yaml:"top_logprobs,omitempty"` // Number of top logprobs per token (0-20)
LogitBias map[string]float64 `json:"logit_bias,omitempty" yaml:"logit_bias,omitempty"` // Map of token IDs to bias values (-100 to 100)
NegativePrompt string `json:"negative_prompt,omitempty" yaml:"negative_prompt,omitempty"`
RopeFreqBase float32 `json:"rope_freq_base,omitempty" yaml:"rope_freq_base,omitempty"`
RopeFreqScale float32 `json:"rope_freq_scale,omitempty" yaml:"rope_freq_scale,omitempty"`
NegativePromptScale float32 `json:"negative_prompt_scale,omitempty" yaml:"negative_prompt_scale,omitempty"`
// Diffusers
ClipSkip int `json:"clip_skip,omitempty" yaml:"clip_skip,omitempty"`
// RWKV (?)
Tokenizer string `json:"tokenizer,omitempty" yaml:"tokenizer,omitempty"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/schema/transcription.go | core/schema/transcription.go | package schema
import "time"
type TranscriptionSegment struct {
Id int `json:"id"`
Start time.Duration `json:"start"`
End time.Duration `json:"end"`
Text string `json:"text"`
Tokens []int `json:"tokens"`
}
type TranscriptionResult struct {
Segments []TranscriptionSegment `json:"segments"`
Text string `json:"text"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/schema/tokenize.go | core/schema/tokenize.go | package schema
type TokenizeRequest struct {
BasicModelRequest
Content string `json:"content"`
}
type TokenizeResponse struct {
Tokens []int32 `json:"tokens"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/schema/elevenlabs.go | core/schema/elevenlabs.go | package schema
type ElevenLabsTTSRequest struct {
Text string `json:"text" yaml:"text"`
ModelID string `json:"model_id" yaml:"model_id"`
LanguageCode string `json:"language_code" yaml:"language_code"`
}
type ElevenLabsSoundGenerationRequest struct {
Text string `json:"text" yaml:"text"`
ModelID string `json:"model_id" yaml:"model_id"`
Duration *float32 `json:"duration_seconds,omitempty" yaml:"duration_seconds,omitempty"`
Temperature *float32 `json:"prompt_influence,omitempty" yaml:"prompt_influence,omitempty"`
DoSample *bool `json:"do_sample,omitempty" yaml:"do_sample,omitempty"`
}
func (elttsr *ElevenLabsTTSRequest) ModelName(s *string) string {
if s != nil {
elttsr.ModelID = *s
}
return elttsr.ModelID
}
func (elsgr *ElevenLabsSoundGenerationRequest) ModelName(s *string) string {
if s != nil {
elsgr.ModelID = *s
}
return elsgr.ModelID
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/schema/schema_suite_test.go | core/schema/schema_suite_test.go | package schema_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
func TestSchema(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "LocalAI Schema test suite")
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/schema/backend.go | core/schema/backend.go | package schema
// BackendResponse represents the response for backend operations
type BackendResponse struct {
ID string `json:"id"`
StatusURL string `json:"status_url"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/schema/openai.go | core/schema/openai.go | package schema
import (
"context"
functions "github.com/mudler/LocalAI/pkg/functions"
)
// APIError provides error information returned by the OpenAI API.
type APIError struct {
Code any `json:"code,omitempty"`
Message string `json:"message"`
Param *string `json:"param,omitempty"`
Type string `json:"type"`
}
type ErrorResponse struct {
Error *APIError `json:"error,omitempty"`
}
type InputTokensDetails struct {
TextTokens int `json:"text_tokens"`
ImageTokens int `json:"image_tokens"`
}
type OpenAIUsage struct {
PromptTokens int `json:"prompt_tokens"`
CompletionTokens int `json:"completion_tokens"`
TotalTokens int `json:"total_tokens"`
// Fields for image generation API compatibility
InputTokens int `json:"input_tokens,omitempty"`
OutputTokens int `json:"output_tokens,omitempty"`
InputTokensDetails *InputTokensDetails `json:"input_tokens_details,omitempty"`
// Extra timing data, disabled by default as is't not a part of OpenAI specification
TimingPromptProcessing float64 `json:"timing_prompt_processing,omitempty"`
TimingTokenGeneration float64 `json:"timing_token_generation,omitempty"`
}
type Item struct {
Embedding []float32 `json:"embedding"`
Index int `json:"index"`
Object string `json:"object,omitempty"`
// Images
URL string `json:"url,omitempty"`
B64JSON string `json:"b64_json,omitempty"`
}
type OpenAIResponse struct {
Created int `json:"created,omitempty"`
Object string `json:"object,omitempty"`
ID string `json:"id,omitempty"`
Model string `json:"model,omitempty"`
Choices []Choice `json:"choices,omitempty"`
Data []Item `json:"data,omitempty"`
Usage OpenAIUsage `json:"usage"`
}
type Choice struct {
Index int `json:"index"`
FinishReason *string `json:"finish_reason"`
Message *Message `json:"message,omitempty"`
Delta *Message `json:"delta,omitempty"`
Text string `json:"text,omitempty"`
Logprobs *Logprobs `json:"logprobs,omitempty"`
}
type Logprobs struct {
Content []LogprobContent `json:"content,omitempty"`
}
type LogprobContent struct {
ID int32 `json:"id"`
Token string `json:"token"`
Bytes []int `json:"bytes,omitempty"`
Logprob float64 `json:"logprob"`
TopLogprobs []LogprobContent `json:"top_logprobs,omitempty"`
}
type Content struct {
Type string `json:"type" yaml:"type"`
Text string `json:"text" yaml:"text"`
ImageURL ContentURL `json:"image_url" yaml:"image_url"`
AudioURL ContentURL `json:"audio_url" yaml:"audio_url"`
VideoURL ContentURL `json:"video_url" yaml:"video_url"`
InputAudio InputAudio `json:"input_audio" yaml:"input_audio"`
}
type ContentURL struct {
URL string `json:"url" yaml:"url"`
}
type InputAudio struct {
// Format identifies the audio format, e.g. 'wav'.
Format string `json:"format" yaml:"format"`
// Data holds the base64-encoded audio data.
Data string `json:"data" yaml:"data"`
}
type OpenAIModel struct {
ID string `json:"id"`
Object string `json:"object"`
}
type ImageGenerationResponseFormat string
type ChatCompletionResponseFormatType string
type ChatCompletionResponseFormat struct {
Type ChatCompletionResponseFormatType `json:"type,omitempty"`
}
type JsonSchemaRequest struct {
Type string `json:"type"`
JsonSchema JsonSchema `json:"json_schema"`
}
type JsonSchema struct {
Name string `json:"name"`
Strict bool `json:"strict"`
Schema functions.Item `json:"schema"`
}
type OpenAIRequest struct {
PredictionOptions
Context context.Context `json:"-"`
Cancel context.CancelFunc `json:"-"`
// whisper
File string `json:"file" validate:"required"`
// Multiple input images for img2img or inpainting
Files []string `json:"files,omitempty"`
// Reference images for models that support them (e.g., Flux Kontext)
RefImages []string `json:"ref_images,omitempty"`
//whisper/image
ResponseFormat interface{} `json:"response_format,omitempty"`
// image
Size string `json:"size"`
// Prompt is read only by completion/image API calls
Prompt interface{} `json:"prompt" yaml:"prompt"`
// Edit endpoint
Instruction string `json:"instruction" yaml:"instruction"`
Input interface{} `json:"input" yaml:"input"`
Stop interface{} `json:"stop" yaml:"stop"`
// Messages is read only by chat/completion API calls
Messages []Message `json:"messages" yaml:"messages"`
// A list of available functions to call
Functions functions.Functions `json:"functions" yaml:"functions"`
FunctionCall interface{} `json:"function_call" yaml:"function_call"` // might be a string or an object
Tools []functions.Tool `json:"tools,omitempty" yaml:"tools"`
ToolsChoice interface{} `json:"tool_choice,omitempty" yaml:"tool_choice"`
Stream bool `json:"stream"`
// Image (not supported by OpenAI)
Quality string `json:"quality"`
Step int `json:"step"`
// A grammar to constrain the LLM output
Grammar string `json:"grammar" yaml:"grammar"`
JSONFunctionGrammarObject *functions.JSONFunctionStructure `json:"grammar_json_functions" yaml:"grammar_json_functions"`
Backend string `json:"backend" yaml:"backend"`
ModelBaseName string `json:"model_base_name" yaml:"model_base_name"`
ReasoningEffort string `json:"reasoning_effort" yaml:"reasoning_effort"`
Metadata map[string]string `json:"metadata" yaml:"metadata"`
}
type ModelsDataResponse struct {
Object string `json:"object"`
Data []OpenAIModel `json:"data"`
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
mudler/LocalAI | https://github.com/mudler/LocalAI/blob/23df29fbd3eec1af3944521205fd62b20d4149e5/core/schema/message.go | core/schema/message.go | package schema
import (
"encoding/json"
"github.com/mudler/xlog"
"github.com/mudler/LocalAI/pkg/grpc/proto"
)
type Message struct {
// The message role
Role string `json:"role,omitempty" yaml:"role"`
// The message name (used for tools calls)
Name string `json:"name,omitempty" yaml:"name"`
// The message content
Content interface{} `json:"content" yaml:"content"`
StringContent string `json:"string_content,omitempty" yaml:"string_content,omitempty"`
StringImages []string `json:"string_images,omitempty" yaml:"string_images,omitempty"`
StringVideos []string `json:"string_videos,omitempty" yaml:"string_videos,omitempty"`
StringAudios []string `json:"string_audios,omitempty" yaml:"string_audios,omitempty"`
// A result of a function call
FunctionCall interface{} `json:"function_call,omitempty" yaml:"function_call,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty" yaml:"tool_call,omitempty"`
}
type ToolCall struct {
Index int `json:"index"`
ID string `json:"id"`
Type string `json:"type"`
FunctionCall FunctionCall `json:"function"`
}
type FunctionCall struct {
Name string `json:"name,omitempty"`
Arguments string `json:"arguments"`
}
type Messages []Message
// MessagesToProto converts schema.Message slice to proto.Message slice
// It handles content conversion, tool_calls serialization, and optional fields
func (messages Messages) ToProto() []*proto.Message {
protoMessages := make([]*proto.Message, len(messages))
for i, message := range messages {
protoMessages[i] = &proto.Message{
Role: message.Role,
Name: message.Name, // needed by function calls
}
switch ct := message.Content.(type) {
case string:
protoMessages[i].Content = ct
case []interface{}:
// If using the tokenizer template, in case of multimodal we want to keep the multimodal content as and return only strings here
data, _ := json.Marshal(ct)
resultData := []struct {
Text string `json:"text"`
}{}
json.Unmarshal(data, &resultData)
for _, r := range resultData {
protoMessages[i].Content += r.Text
}
}
// Serialize tool_calls to JSON string if present
if len(message.ToolCalls) > 0 {
toolCallsJSON, err := json.Marshal(message.ToolCalls)
if err != nil {
xlog.Warn("failed to marshal tool_calls to JSON", "error", err)
} else {
protoMessages[i].ToolCalls = string(toolCallsJSON)
}
}
// Note: tool_call_id and reasoning_content are not in schema.Message yet
// They may need to be added to schema.Message if needed in the future
}
return protoMessages
}
| go | MIT | 23df29fbd3eec1af3944521205fd62b20d4149e5 | 2026-01-07T08:35:47.749878Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.