| | package openai |
| |
|
import (
	"net/http"

	"github.com/labstack/echo/v4"
	"github.com/mudler/LocalAI/core/config"
	"github.com/mudler/LocalAI/core/schema"
	"github.com/mudler/LocalAI/core/services"
	model "github.com/mudler/LocalAI/pkg/model"
)
| |
|
| | |
| | |
| | |
| | |
| | func ListModelsEndpoint(bcl *config.ModelConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) echo.HandlerFunc { |
| | return func(c echo.Context) error { |
| | |
| | filter := c.QueryParam("filter") |
| |
|
| | |
| | var policy services.LooseFilePolicy |
| | excludeConfigured := c.QueryParam("excludeConfigured") |
| | if excludeConfigured == "" || excludeConfigured == "true" { |
| | policy = services.SKIP_IF_CONFIGURED |
| | } else { |
| | policy = services.ALWAYS_INCLUDE |
| | } |
| |
|
| | filterFn, err := config.BuildNameFilterFn(filter) |
| | if err != nil { |
| | return err |
| | } |
| |
|
| | modelNames, err := services.ListModels(bcl, ml, filterFn, policy) |
| | if err != nil { |
| | return err |
| | } |
| |
|
| | |
| | dataModels := []schema.OpenAIModel{} |
| | for _, m := range modelNames { |
| | dataModels = append(dataModels, schema.OpenAIModel{ID: m, Object: "model"}) |
| | } |
| |
|
| | return c.JSON(200, schema.ModelsDataResponse{ |
| | Object: "list", |
| | Data: dataModels, |
| | }) |
| | } |
| | } |
| |
|