|
|
package openai |
|
|
|
|
|
import (
	"encoding/json"
	"net/http"
	"time"

	"github.com/google/uuid"
	"github.com/labstack/echo/v4"
	"github.com/mudler/LocalAI/core/backend"
	"github.com/mudler/LocalAI/core/config"
	"github.com/mudler/LocalAI/core/http/middleware"
	"github.com/mudler/LocalAI/core/schema"
	"github.com/mudler/LocalAI/core/templates"
	"github.com/mudler/LocalAI/pkg/model"
	"github.com/mudler/xlog"
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
func EditEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, evaluator *templates.Evaluator, appConfig *config.ApplicationConfig) echo.HandlerFunc { |
|
|
|
|
|
return func(c echo.Context) error { |
|
|
|
|
|
input, ok := c.Get(middleware.CONTEXT_LOCALS_KEY_LOCALAI_REQUEST).(*schema.OpenAIRequest) |
|
|
if !ok || input.Model == "" { |
|
|
return echo.ErrBadRequest |
|
|
} |
|
|
|
|
|
extraUsage := c.Request().Header.Get("Extra-Usage") != "" |
|
|
|
|
|
config, ok := c.Get(middleware.CONTEXT_LOCALS_KEY_MODEL_CONFIG).(*config.ModelConfig) |
|
|
if !ok || config == nil { |
|
|
return echo.ErrBadRequest |
|
|
} |
|
|
|
|
|
xlog.Debug("Edit Endpoint Input", "input", input) |
|
|
xlog.Debug("Edit Endpoint Config", "config", *config) |
|
|
|
|
|
var result []schema.Choice |
|
|
totalTokenUsage := backend.TokenUsage{} |
|
|
|
|
|
for _, i := range config.InputStrings { |
|
|
templatedInput, err := evaluator.EvaluateTemplateForPrompt(templates.EditPromptTemplate, *config, templates.PromptTemplateData{ |
|
|
Input: i, |
|
|
Instruction: input.Instruction, |
|
|
SystemPrompt: config.SystemPrompt, |
|
|
ReasoningEffort: input.ReasoningEffort, |
|
|
Metadata: input.Metadata, |
|
|
}) |
|
|
if err == nil { |
|
|
i = templatedInput |
|
|
xlog.Debug("Template found, input modified", "input", i) |
|
|
} |
|
|
|
|
|
r, tokenUsage, err := ComputeChoices(input, i, config, cl, appConfig, ml, func(s string, c *[]schema.Choice) { |
|
|
*c = append(*c, schema.Choice{Text: s}) |
|
|
}, nil) |
|
|
if err != nil { |
|
|
return err |
|
|
} |
|
|
|
|
|
totalTokenUsage.Prompt += tokenUsage.Prompt |
|
|
totalTokenUsage.Completion += tokenUsage.Completion |
|
|
|
|
|
totalTokenUsage.TimingTokenGeneration += tokenUsage.TimingTokenGeneration |
|
|
totalTokenUsage.TimingPromptProcessing += tokenUsage.TimingPromptProcessing |
|
|
|
|
|
result = append(result, r...) |
|
|
} |
|
|
usage := schema.OpenAIUsage{ |
|
|
PromptTokens: totalTokenUsage.Prompt, |
|
|
CompletionTokens: totalTokenUsage.Completion, |
|
|
TotalTokens: totalTokenUsage.Prompt + totalTokenUsage.Completion, |
|
|
} |
|
|
if extraUsage { |
|
|
usage.TimingTokenGeneration = totalTokenUsage.TimingTokenGeneration |
|
|
usage.TimingPromptProcessing = totalTokenUsage.TimingPromptProcessing |
|
|
} |
|
|
|
|
|
id := uuid.New().String() |
|
|
created := int(time.Now().Unix()) |
|
|
resp := &schema.OpenAIResponse{ |
|
|
ID: id, |
|
|
Created: created, |
|
|
Model: input.Model, |
|
|
Choices: result, |
|
|
Object: "edit", |
|
|
Usage: usage, |
|
|
} |
|
|
|
|
|
jsonResult, _ := json.Marshal(resp) |
|
|
xlog.Debug("Response", "response", string(jsonResult)) |
|
|
|
|
|
|
|
|
return c.JSON(200, resp) |
|
|
} |
|
|
} |
|
|
|