Upload folder using huggingface_hub
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- .air.toml +8 -0
- .devcontainer-scripts/postcreate.sh +17 -0
- .devcontainer-scripts/poststart.sh +13 -0
- .devcontainer-scripts/utils.sh +55 -0
- .devcontainer/devcontainer.json +24 -0
- .devcontainer/docker-compose-devcontainer.yml +44 -0
- .devcontainer/grafana/datasource.yml +10 -0
- .devcontainer/prometheus/prometheus.yml +21 -0
- .dockerignore +23 -0
- .editorconfig +31 -0
- .env +93 -0
- .gitattributes +29 -35
- .github/FUNDING.yml +5 -0
- .github/ISSUE_TEMPLATE/bug_report.md +29 -0
- .github/ISSUE_TEMPLATE/config.yml +8 -0
- .github/ISSUE_TEMPLATE/feature_request.md +20 -0
- .github/PULL_REQUEST_TEMPLATE.md +31 -0
- .github/bump_deps.sh +27 -0
- .github/bump_docs.sh +7 -0
- .github/check_and_update.py +85 -0
- .github/checksum_checker.sh +63 -0
- .github/ci/modelslist.go +304 -0
- .github/dependabot.yml +119 -0
- .github/gallery-agent/agent.go +445 -0
- .github/gallery-agent/gallery.go +200 -0
- .github/gallery-agent/main.go +383 -0
- .github/gallery-agent/testing.go +224 -0
- .github/gallery-agent/tools.go +46 -0
- .github/labeler.yml +33 -0
- .github/release.yml +37 -0
- .github/stale.yml +18 -0
- .github/workflows/backend.yml +1498 -0
- .github/workflows/backend_build.yml +250 -0
- .github/workflows/backend_build_darwin.yml +144 -0
- .github/workflows/backend_pr.yml +79 -0
- .github/workflows/build-test.yaml +67 -0
- .github/workflows/bump_deps.yaml +63 -0
- .github/workflows/bump_docs.yaml +31 -0
- .github/workflows/checksum_checker.yaml +46 -0
- .github/workflows/dependabot_auto.yml +43 -0
- .github/workflows/deploy-explorer.yaml +64 -0
- .github/workflows/disabled/comment-pr.yaml +83 -0
- .github/workflows/disabled/test-gpu.yml +63 -0
- .github/workflows/gallery-agent.yaml +132 -0
- .github/workflows/generate_grpc_cache.yaml +95 -0
- .github/workflows/generate_intel_image.yaml +59 -0
- .github/workflows/image-pr.yml +95 -0
- .github/workflows/image.yml +187 -0
- .github/workflows/image_build.yml +327 -0
- .github/workflows/labeler.yml +12 -0
.air.toml
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# .air.toml
|
| 2 |
+
[build]
|
| 3 |
+
cmd = "make build"
|
| 4 |
+
bin = "./local-ai"
|
| 5 |
+
args_bin = [ "--debug" ]
|
| 6 |
+
include_ext = ["go", "html", "yaml", "toml", "json", "txt", "md"]
|
| 7 |
+
exclude_dir = ["pkg/grpc/proto"]
|
| 8 |
+
delay = 1000
|
.devcontainer-scripts/postcreate.sh
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash

# Devcontainer post-create hook: populate the /workspace volume with the
# LocalAI sources (no bind mount is used) and run any user customization.

# Abort early if the workspace volume is not mounted where we expect it;
# otherwise the clone below would run in whatever directory we are in.
cd /workspace || exit 1

# Get the files into the volume without a bind mount
if [ ! -d ".git" ]; then
    git clone https://github.com/mudler/LocalAI.git .
else
    git fetch
fi

echo "Standard Post-Create script completed."

# Allow users to hook in their own setup steps via a mounted script.
if [ -f "/devcontainer-customization/postcreate.sh" ]; then
    echo "Launching customization postcreate.sh"
    bash "/devcontainer-customization/postcreate.sh"
fi
|
.devcontainer-scripts/poststart.sh
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash

# Devcontainer post-start hook: regenerate build prerequisites and run any
# user-provided customization script.

# Abort early if the workspace volume is not mounted where we expect it;
# otherwise `make prepare` would run against the wrong directory.
cd /workspace || exit 1

# Ensures generated source files are present upon load
make prepare

echo "Standard Post-Start script completed."

# Allow users to hook in their own startup steps via a mounted script.
if [ -f "/devcontainer-customization/poststart.sh" ]; then
    echo "Launching customization poststart.sh"
    bash "/devcontainer-customization/poststart.sh"
fi
|
.devcontainer-scripts/utils.sh
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
|
| 2 |
+
|
| 3 |
+
# This file contains some really simple functions that are useful when building up customization scripts.
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
# Checks if the git config has a user registered - and sets it up if not.
|
| 7 |
+
#
|
| 8 |
+
# Param 1: name
|
| 9 |
+
# Param 2: email
|
| 10 |
+
#
|
| 11 |
+
config_user() {
  echo "Configuring git for $1 <$2>"
  local gcn
  # Separate the declaration from the command substitution so that a failing
  # git invocation is not silently masked by `local`'s own exit status.
  gcn=$(git config --global user.name)
  # Only write the identity when none is configured yet, so an existing
  # user-level git identity is never overwritten.
  if [ -z "${gcn}" ]; then
    echo "Setting up git user / remote"
    git config --global user.name "$1"
    git config --global user.email "$2"
  fi
}
|
| 21 |
+
|
| 22 |
+
# Checks if the git remote is configured - and sets it up if not. Fetches either way.
|
| 23 |
+
#
|
| 24 |
+
# Param 1: remote name
|
| 25 |
+
# Param 2: remote url
|
| 26 |
+
#
|
| 27 |
+
config_remote() {
  echo "Adding git remote and fetching $2 as $1"
  local gr
  # Quote the expansions and match literally (-F, --) so remote names with
  # regex metacharacters or a leading dash cannot break the lookup.
  gr=$(git remote -v | grep -F -- "$1")
  if [ -z "${gr}" ]; then
    git remote add "$1" "$2"
  fi
  # Fetch regardless of whether the remote already existed.
  git fetch "$1"
}
|
| 35 |
+
|
| 36 |
+
# Setup special .ssh files
|
| 37 |
+
# Prints out lines of text to make things pretty
|
| 38 |
+
# Param 1: bash array, filenames relative to the customization directory that should be copied to ~/.ssh
|
| 39 |
+
setup_ssh() {
  echo "starting ~/.ssh directory setup..."
  # FIX: the original ran `mkdir -p "${HOME}.ssh"` (missing slash), which
  # created a stray sibling directory instead of the real ~/.ssh — the
  # chmod/cp below then targeted a directory that might not exist.
  mkdir -p "${HOME}/.ssh"
  chmod 0700 "${HOME}/.ssh"
  echo "-----"
  local files=("$@")
  for file in "${files[@]}" ; do
    local cfile="/devcontainer-customization/${file}"
    local hfile="${HOME}/.ssh/${file}"
    # Never clobber a key the user already has in place.
    if [ ! -f "${hfile}" ]; then
      echo "copying \"${file}\""
      cp "${cfile}" "${hfile}"
      # Private-key material must not be group/world readable.
      chmod 600 "${hfile}"
    fi
  done
  echo "~/.ssh directory setup complete!"
}
|
.devcontainer/devcontainer.json
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"$schema": "https://raw.githubusercontent.com/devcontainers/spec/main/schemas/devContainer.schema.json",
|
| 3 |
+
"name": "LocalAI",
|
| 4 |
+
"workspaceFolder": "/workspace",
|
| 5 |
+
"dockerComposeFile": [ "./docker-compose-devcontainer.yml" ],
|
| 6 |
+
"service": "api",
|
| 7 |
+
"shutdownAction": "stopCompose",
|
| 8 |
+
"customizations": {
|
| 9 |
+
"vscode": {
|
| 10 |
+
"extensions": [
|
| 11 |
+
"golang.go",
|
| 12 |
+
"ms-vscode.makefile-tools",
|
| 13 |
+
"ms-azuretools.vscode-docker",
|
| 14 |
+
"ms-python.python",
|
| 15 |
+
"ms-python.debugpy",
|
| 16 |
+
"wayou.vscode-todo-highlight",
|
| 17 |
+
"waderyan.gitblame"
|
| 18 |
+
]
|
| 19 |
+
}
|
| 20 |
+
},
|
| 21 |
+
"forwardPorts": [8080, 3000],
|
| 22 |
+
"postCreateCommand": "bash /.devcontainer-scripts/postcreate.sh",
|
| 23 |
+
"postStartCommand": "bash /.devcontainer-scripts/poststart.sh"
|
| 24 |
+
}
|
.devcontainer/docker-compose-devcontainer.yml
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
services:
|
| 2 |
+
api:
|
| 3 |
+
build:
|
| 4 |
+
context: ..
|
| 5 |
+
dockerfile: Dockerfile
|
| 6 |
+
target: devcontainer
|
| 7 |
+
env_file:
|
| 8 |
+
- ../.env
|
| 9 |
+
ports:
|
| 10 |
+
- 8080:8080
|
| 11 |
+
volumes:
|
| 12 |
+
- localai_workspace:/workspace
|
| 13 |
+
- ../models:/host-models
|
| 14 |
+
- ./customization:/devcontainer-customization
|
| 15 |
+
command: /bin/sh -c "while sleep 1000; do :; done"
|
| 16 |
+
cap_add:
|
| 17 |
+
- SYS_PTRACE
|
| 18 |
+
security_opt:
|
| 19 |
+
- seccomp:unconfined
|
| 20 |
+
prometheus:
|
| 21 |
+
image: prom/prometheus
|
| 22 |
+
container_name: prometheus
|
| 23 |
+
command:
|
| 24 |
+
- '--config.file=/etc/prometheus/prometheus.yml'
|
| 25 |
+
ports:
|
| 26 |
+
- 9090:9090
|
| 27 |
+
restart: unless-stopped
|
| 28 |
+
volumes:
|
| 29 |
+
- ./prometheus:/etc/prometheus
|
| 30 |
+
- prom_data:/prometheus
|
| 31 |
+
grafana:
|
| 32 |
+
image: grafana/grafana
|
| 33 |
+
container_name: grafana
|
| 34 |
+
ports:
|
| 35 |
+
- 3000:3000
|
| 36 |
+
restart: unless-stopped
|
| 37 |
+
environment:
|
| 38 |
+
- GF_SECURITY_ADMIN_USER=admin
|
| 39 |
+
- GF_SECURITY_ADMIN_PASSWORD=grafana
|
| 40 |
+
volumes:
|
| 41 |
+
- ./grafana:/etc/grafana/provisioning/datasources
|
| 42 |
+
volumes:
|
| 43 |
+
prom_data:
|
| 44 |
+
localai_workspace:
|
.devcontainer/grafana/datasource.yml
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
apiVersion: 1
|
| 3 |
+
|
| 4 |
+
datasources:
|
| 5 |
+
- name: Prometheus
|
| 6 |
+
type: prometheus
|
| 7 |
+
url: http://prometheus:9090
|
| 8 |
+
isDefault: true
|
| 9 |
+
access: proxy
|
| 10 |
+
editable: true
|
.devcontainer/prometheus/prometheus.yml
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
global:
|
| 2 |
+
scrape_interval: 15s
|
| 3 |
+
scrape_timeout: 10s
|
| 4 |
+
evaluation_interval: 15s
|
| 5 |
+
alerting:
|
| 6 |
+
alertmanagers:
|
| 7 |
+
- static_configs:
|
| 8 |
+
- targets: []
|
| 9 |
+
scheme: http
|
| 10 |
+
timeout: 10s
|
| 11 |
+
api_version: v1
|
| 12 |
+
scrape_configs:
|
| 13 |
+
- job_name: prometheus
|
| 14 |
+
honor_timestamps: true
|
| 15 |
+
scrape_interval: 15s
|
| 16 |
+
scrape_timeout: 10s
|
| 17 |
+
metrics_path: /metrics
|
| 18 |
+
scheme: http
|
| 19 |
+
static_configs:
|
| 20 |
+
- targets:
|
| 21 |
+
- localhost:9090
|
.dockerignore
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.idea
|
| 2 |
+
.github
|
| 3 |
+
.vscode
|
| 4 |
+
.devcontainer
|
| 5 |
+
models
|
| 6 |
+
backends
|
| 7 |
+
examples/chatbot-ui/models
|
| 8 |
+
backend/go/image/stablediffusion-ggml/build/
|
| 9 |
+
backend/go/*/build
|
| 10 |
+
backend/go/*/.cache
|
| 11 |
+
backend/go/*/sources
|
| 12 |
+
backend/go/*/package
|
| 13 |
+
examples/rwkv/models
|
| 14 |
+
examples/**/models
|
| 15 |
+
Dockerfile*
|
| 16 |
+
__pycache__
|
| 17 |
+
|
| 18 |
+
# SonarQube
|
| 19 |
+
.scannerwork
|
| 20 |
+
|
| 21 |
+
# backend virtual environments
|
| 22 |
+
**/venv
|
| 23 |
+
backend/python/**/source
|
.editorconfig
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
root = true
|
| 3 |
+
|
| 4 |
+
[*]
|
| 5 |
+
indent_style = space
|
| 6 |
+
indent_size = 2
|
| 7 |
+
end_of_line = lf
|
| 8 |
+
charset = utf-8
|
| 9 |
+
trim_trailing_whitespace = true
|
| 10 |
+
insert_final_newline = true
|
| 11 |
+
|
| 12 |
+
[*.go]
|
| 13 |
+
indent_style = tab
|
| 14 |
+
|
| 15 |
+
[Makefile]
|
| 16 |
+
indent_style = tab
|
| 17 |
+
|
| 18 |
+
[*.proto]
|
| 19 |
+
indent_size = 2
|
| 20 |
+
|
| 21 |
+
[*.py]
|
| 22 |
+
indent_size = 4
|
| 23 |
+
|
| 24 |
+
[*.js]
|
| 25 |
+
indent_size = 2
|
| 26 |
+
|
| 27 |
+
[*.yaml]
|
| 28 |
+
indent_size = 2
|
| 29 |
+
|
| 30 |
+
[*.md]
|
| 31 |
+
trim_trailing_whitespace = false
|
.env
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
## Set number of threads.
|
| 2 |
+
## Note: prefer the number of physical cores. Overbooking the CPU degrades performance notably.
|
| 3 |
+
# LOCALAI_THREADS=14
|
| 4 |
+
|
| 5 |
+
## Specify a different bind address (defaults to ":8080")
|
| 6 |
+
# LOCALAI_ADDRESS=127.0.0.1:8080
|
| 7 |
+
|
| 8 |
+
## Default models context size
|
| 9 |
+
# LOCALAI_CONTEXT_SIZE=512
|
| 10 |
+
#
|
| 11 |
+
## Define galleries.
|
| 12 |
+
## Models available to install will be visible in `/models/available`
|
| 13 |
+
# LOCALAI_GALLERIES=[{"name":"localai", "url":"github:mudler/LocalAI/gallery/index.yaml@master"}]
|
| 14 |
+
|
| 15 |
+
## CORS settings
|
| 16 |
+
# LOCALAI_CORS=true
|
| 17 |
+
# LOCALAI_CORS_ALLOW_ORIGINS=*
|
| 18 |
+
|
| 19 |
+
## Default path for models
|
| 20 |
+
#
|
| 21 |
+
# LOCALAI_MODELS_PATH=/models
|
| 22 |
+
|
| 23 |
+
## Enable debug mode
|
| 24 |
+
# LOCALAI_LOG_LEVEL=debug
|
| 25 |
+
|
| 26 |
+
## Disables COMPEL (Diffusers)
|
| 27 |
+
# COMPEL=0
|
| 28 |
+
|
| 29 |
+
## Enable/Disable single backend (useful if only one GPU is available)
|
| 30 |
+
# LOCALAI_SINGLE_ACTIVE_BACKEND=true
|
| 31 |
+
|
| 32 |
+
# Forces shutdown of the backends if busy (only if LOCALAI_SINGLE_ACTIVE_BACKEND is set)
|
| 33 |
+
# LOCALAI_FORCE_BACKEND_SHUTDOWN=true
|
| 34 |
+
|
| 35 |
+
## Path where to store generated images
|
| 36 |
+
# LOCALAI_IMAGE_PATH=/tmp/generated/images
|
| 37 |
+
|
| 38 |
+
## Specify a default upload limit in MB (whisper)
|
| 39 |
+
# LOCALAI_UPLOAD_LIMIT=15
|
| 40 |
+
|
| 41 |
+
## List of external GRPC backends (note on the container image this variable is already set to use extra backends available in extra/)
|
| 42 |
+
# LOCALAI_EXTERNAL_GRPC_BACKENDS=my-backend:127.0.0.1:9000,my-backend2:/usr/bin/backend.py
|
| 43 |
+
|
| 44 |
+
### Advanced settings ###
|
| 45 |
+
### Those are not really used by LocalAI, but from components in the stack ###
|
| 46 |
+
##
|
| 47 |
+
### Preload libraries
|
| 48 |
+
# LD_PRELOAD=
|
| 49 |
+
|
| 50 |
+
### Huggingface cache for models
|
| 51 |
+
# HUGGINGFACE_HUB_CACHE=/usr/local/huggingface
|
| 52 |
+
|
| 53 |
+
### Python backends GRPC max workers
|
| 54 |
+
### Default number of workers for GRPC Python backends.
|
| 55 |
+
### This actually controls whether a backend can process multiple requests or not.
|
| 56 |
+
# PYTHON_GRPC_MAX_WORKERS=1
|
| 57 |
+
|
| 58 |
+
### Define the number of parallel LLAMA.cpp workers (Defaults to 1)
|
| 59 |
+
# LLAMACPP_PARALLEL=1
|
| 60 |
+
|
| 61 |
+
### Define a list of GRPC Servers for llama-cpp workers to distribute the load
|
| 62 |
+
# https://github.com/ggerganov/llama.cpp/pull/6829
|
| 63 |
+
# https://github.com/ggerganov/llama.cpp/blob/master/tools/rpc/README.md
|
| 64 |
+
# LLAMACPP_GRPC_SERVERS=""
|
| 65 |
+
|
| 66 |
+
### Enable to run parallel requests
|
| 67 |
+
# LOCALAI_PARALLEL_REQUESTS=true
|
| 68 |
+
|
| 69 |
+
# Enable to allow p2p mode
|
| 70 |
+
# LOCALAI_P2P=true
|
| 71 |
+
|
| 72 |
+
# Enable to use federated mode
|
| 73 |
+
# LOCALAI_FEDERATED=true
|
| 74 |
+
|
| 75 |
+
# Enable to start federation server
|
| 76 |
+
# FEDERATED_SERVER=true
|
| 77 |
+
|
| 78 |
+
# Define to use federation token
|
| 79 |
+
# TOKEN=""
|
| 80 |
+
|
| 81 |
+
### Watchdog settings
|
| 82 |
+
###
|
| 83 |
+
# Enables watchdog to kill backends that are inactive for too much time
|
| 84 |
+
# LOCALAI_WATCHDOG_IDLE=true
|
| 85 |
+
#
|
| 86 |
+
# Time in duration format (e.g. 1h30m) after which a backend is considered idle
|
| 87 |
+
# LOCALAI_WATCHDOG_IDLE_TIMEOUT=5m
|
| 88 |
+
#
|
| 89 |
+
# Enables watchdog to kill backends that are busy for too much time
|
| 90 |
+
# LOCALAI_WATCHDOG_BUSY=true
|
| 91 |
+
#
|
| 92 |
+
# Time in duration format (e.g. 1h30m) after which a backend is considered busy
|
| 93 |
+
# LOCALAI_WATCHDOG_BUSY_TIMEOUT=5m
|
.gitattributes
CHANGED
|
@@ -1,35 +1,29 @@
|
|
| 1 |
-
*.
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 31 |
-
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 32 |
-
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
-
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
-
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
-
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
| 1 |
+
*.sh text eol=lf
|
| 2 |
+
backend/cpp/llama/*.hpp linguist-vendored
core/http/static/assets/KFOlCnqEu92Fr1MmEU9vAw.ttf filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
core/http/static/assets/KFOmCnqEu92Fr1Me5Q.ttf filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
core/http/static/assets/UcCO3FwrK3iLTeHuS_fvQtMwCp50KnMw2boKoduKmMEVuFuYMZg.ttf filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
core/http/static/assets/UcCO3FwrK3iLTeHuS_fvQtMwCp50KnMw2boKoduKmMEVuGKYMZg.ttf filter=lfs diff=lfs merge=lfs -text
|
| 6 |
+
core/http/static/assets/UcCO3FwrK3iLTeHuS_fvQtMwCp50KnMw2boKoduKmMEVuLyfMZg.ttf filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
core/http/static/assets/fontawesome/webfonts/fa-brands-400.ttf filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
core/http/static/assets/fontawesome/webfonts/fa-brands-400.woff2 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
core/http/static/assets/fontawesome/webfonts/fa-solid-900.ttf filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
core/http/static/assets/fontawesome/webfonts/fa-solid-900.woff2 filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
core/http/static/assets/jetbrains-mono-medium.ttf filter=lfs diff=lfs merge=lfs -text
|
| 12 |
+
core/http/static/assets/jetbrains-mono-regular.ttf filter=lfs diff=lfs merge=lfs -text
|
| 13 |
+
core/http/static/assets/jetbrains-mono-semibold.ttf filter=lfs diff=lfs merge=lfs -text
|
| 14 |
+
core/http/static/assets/playfair-display-bold.ttf filter=lfs diff=lfs merge=lfs -text
|
| 15 |
+
core/http/static/assets/playfair-display-regular.ttf filter=lfs diff=lfs merge=lfs -text
|
| 16 |
+
core/http/static/assets/playfair-display-semibold.ttf filter=lfs diff=lfs merge=lfs -text
|
| 17 |
+
core/http/static/logo.png filter=lfs diff=lfs merge=lfs -text
|
| 18 |
+
core/http/static/logo_horizontal.png filter=lfs diff=lfs merge=lfs -text
|
| 19 |
+
docs/assets/images/imagen.png filter=lfs diff=lfs merge=lfs -text
|
| 20 |
+
docs/assets/images/localai_screenshot.png filter=lfs diff=lfs merge=lfs -text
|
| 21 |
+
docs/assets/images/logos/logo.png filter=lfs diff=lfs merge=lfs -text
|
| 22 |
+
docs/assets/images/screenshots/screenshot_chat.png filter=lfs diff=lfs merge=lfs -text
|
| 23 |
+
docs/assets/images/screenshots/screenshot_gallery.png filter=lfs diff=lfs merge=lfs -text
|
| 24 |
+
docs/assets/images/screenshots/screenshot_home.png filter=lfs diff=lfs merge=lfs -text
|
| 25 |
+
docs/assets/images/screenshots/screenshot_image.png filter=lfs diff=lfs merge=lfs -text
|
| 26 |
+
docs/assets/images/screenshots/screenshot_login.png filter=lfs diff=lfs merge=lfs -text
|
| 27 |
+
docs/assets/images/screenshots/screenshot_p2p.png filter=lfs diff=lfs merge=lfs -text
|
| 28 |
+
docs/assets/images/screenshots/screenshot_talk.png filter=lfs diff=lfs merge=lfs -text
|
| 29 |
+
docs/assets/images/screenshots/screenshot_tts.png filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
.github/FUNDING.yml
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# These are supported funding model platforms
|
| 2 |
+
|
| 3 |
+
github: [mudler]
|
| 4 |
+
custom:
|
| 5 |
+
- https://www.buymeacoffee.com/mudler
|
.github/ISSUE_TEMPLATE/bug_report.md
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: Bug report
|
| 3 |
+
about: Create a report to help us improve
|
| 4 |
+
title: ''
|
| 5 |
+
labels: bug, unconfirmed, up-for-grabs
|
| 6 |
+
---
|
| 7 |
+
|
| 8 |
+
<!-- Thanks for helping us to improve LocalAI! We welcome all bug reports. Please fill out each area of the template so we can better help you. Comments like this will be hidden when you post but you can delete them if you wish. -->
|
| 9 |
+
|
| 10 |
+
**LocalAI version:**
|
| 11 |
+
<!-- Container Image or LocalAI tag/commit -->
|
| 12 |
+
|
| 13 |
+
**Environment, CPU architecture, OS, and Version:**
|
| 14 |
+
<!-- Provide the output from "uname -a", HW specs, if it's a VM -->
|
| 15 |
+
|
| 16 |
+
**Describe the bug**
|
| 17 |
+
<!-- A clear and concise description of what the bug is. -->
|
| 18 |
+
|
| 19 |
+
**To Reproduce**
|
| 20 |
+
<!-- Steps to reproduce the behavior, including the LocalAI command used, if any -->
|
| 21 |
+
|
| 22 |
+
**Expected behavior**
|
| 23 |
+
<!-- A clear and concise description of what you expected to happen. -->
|
| 24 |
+
|
| 25 |
+
**Logs**
|
| 26 |
+
<!-- If applicable, add logs while running LocalAI in debug mode (`--debug` or `DEBUG=true`) to help explain your problem. -->
|
| 27 |
+
|
| 28 |
+
**Additional context**
|
| 29 |
+
<!-- Add any other context about the problem here. -->
|
.github/ISSUE_TEMPLATE/config.yml
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
blank_issues_enabled: false
|
| 2 |
+
contact_links:
|
| 3 |
+
- name: Community Support
|
| 4 |
+
url: https://github.com/go-skynet/LocalAI/discussions
|
| 5 |
+
about: Please ask and answer questions here.
|
| 6 |
+
- name: Discord
|
| 7 |
+
url: https://discord.gg/uJAeKSAGDy
|
| 8 |
+
about: Join our community on Discord!
|
.github/ISSUE_TEMPLATE/feature_request.md
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: Feature request
|
| 3 |
+
about: Suggest an idea for this project
|
| 4 |
+
title: ''
|
| 5 |
+
labels: enhancement, up-for-grabs
|
| 6 |
+
---
|
| 7 |
+
|
| 8 |
+
<!-- Thanks for helping us to improve LocalAI! We welcome all feature requests. Please fill out each area of the template so we can better help you. Comments like this will be hidden when you post but you can delete them if you wish. -->
|
| 9 |
+
|
| 10 |
+
**Is your feature request related to a problem? Please describe.**
|
| 11 |
+
<!-- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] -->
|
| 12 |
+
|
| 13 |
+
**Describe the solution you'd like**
|
| 14 |
+
<!-- A clear and concise description of what you want to happen. -->
|
| 15 |
+
|
| 16 |
+
**Describe alternatives you've considered**
|
| 17 |
+
<!-- A clear and concise description of any alternative solutions or features you've considered. -->
|
| 18 |
+
|
| 19 |
+
**Additional context**
|
| 20 |
+
<!-- Add any other context or screenshots about the feature request here. -->
|
.github/PULL_REQUEST_TEMPLATE.md
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
**Description**
|
| 2 |
+
|
| 3 |
+
This PR fixes #
|
| 4 |
+
|
| 5 |
+
**Notes for Reviewers**
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
**[Signed commits](../CONTRIBUTING.md#signing-off-on-commits-developer-certificate-of-origin)**
|
| 9 |
+
- [ ] Yes, I signed my commits.
|
| 10 |
+
|
| 11 |
+
<!--
|
| 12 |
+
Thank you for contributing to LocalAI!
|
| 13 |
+
|
| 14 |
+
Contributing Conventions
|
| 15 |
+
-------------------------
|
| 16 |
+
|
| 17 |
+
The draft above helps to give a quick overview of your PR.
|
| 18 |
+
|
| 19 |
+
Remember to remove this comment and to at least:
|
| 20 |
+
|
| 21 |
+
1. Include descriptive PR titles with [<component-name>] prepended. We use [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/).
|
| 22 |
+
2. Build and test your changes before submitting a PR (`make build`).
|
| 23 |
+
3. Sign your commits
|
| 24 |
+
4. **Tag maintainer:** for a quicker response, tag the relevant maintainer (see below).
|
| 25 |
+
5. **X/Twitter handle:** we announce bigger features on X/Twitter. If your PR gets announced, and you'd like a mention, we'll gladly shout you out!
|
| 26 |
+
|
| 27 |
+
By following the community's contribution conventions upfront, the review process will
|
| 28 |
+
be accelerated and your PR merged more quickly.
|
| 29 |
+
|
| 30 |
+
If no one reviews your PR within a few days, please @-mention @mudler.
|
| 31 |
+
-->
|
.github/bump_deps.sh
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
# Bump a pinned dependency commit ($VAR?=<sha>) in a Makefile-style file to
# the latest commit of $BRANCH in the GitHub repo $REPO, and record the
# change for downstream automation (PR body + commit file).
set -xe
REPO=$1
BRANCH=$2
VAR=$3
FILE=$4

# Default to the top-level Makefile when no target file is given.
if [ -z "$FILE" ]; then
    FILE="Makefile"
fi

# Resolve the latest commit sha of $BRANCH via the GitHub API.
LAST_COMMIT=$(curl -s -H "Accept: application/vnd.github.VERSION.sha" "https://api.github.com/repos/$REPO/commits/$BRANCH")

# Read $VAR from Makefile (only first match)
set +e
CURRENT_COMMIT="$(grep -m1 "^$VAR?=" "$FILE" | cut -d'=' -f2)"
set -e

# Pin the variable to the freshly resolved commit (file operand quoted and
# placed after the expression, the conventional sed argument order).
sed -i -e "s/$VAR?=.*/$VAR?=$LAST_COMMIT/" "$FILE"

if [ -z "$CURRENT_COMMIT" ]; then
    echo "Could not find $VAR in Makefile."
    exit 0
fi

# Leave breadcrumbs: a compare link for the PR body and the raw sha.
echo "Changes: https://github.com/$REPO/compare/${CURRENT_COMMIT}..${LAST_COMMIT}" >> "${VAR}_message.txt"
echo "${LAST_COMMIT}" >> "${VAR}_commit.txt"
|
.github/bump_docs.sh
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
|
| 2 |
+
set -xe
|
| 3 |
+
REPO=$1
|
| 4 |
+
|
| 5 |
+
LATEST_TAG=$(curl -s "https://api.github.com/repos/$REPO/releases/latest" | jq -r '.tag_name')
|
| 6 |
+
|
| 7 |
+
cat <<< $(jq ".version = \"$LATEST_TAG\"" docs/data/version.json) > docs/data/version.json
|
.github/check_and_update.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import hashlib
|
| 2 |
+
from huggingface_hub import hf_hub_download, get_paths_info
|
| 3 |
+
import requests
|
| 4 |
+
import sys
|
| 5 |
+
import os
|
| 6 |
+
|
| 7 |
+
uri = sys.argv[1]
|
| 8 |
+
file_name = uri.split('/')[-1]
|
| 9 |
+
|
| 10 |
+
# Function to parse the URI and determine download method
|
| 11 |
+
def parse_uri(uri):
    """Classify a model URI and extract its Hugging Face repo id.

    Returns a ('huggingface', repo_id) pair for ``huggingface://`` URIs and
    for huggingface.co ``/resolve/`` download links; any other URI is
    returned unchanged as ('direct', uri).
    """
    if uri.startswith('huggingface://'):
        # huggingface://<owner>/<repo>/<file> -> drop the trailing file name
        repo_and_file = uri.split('://')[1]
        return 'huggingface', repo_and_file.rsplit('/', 1)[0]
    if 'huggingface.co' in uri:
        head, sep, _rest = uri.partition('/resolve/')
        if sep:
            # https://huggingface.co/<owner>/<repo>/resolve/<rev>/<file>
            return 'huggingface', head.split('https://huggingface.co/')[-1]
    return 'direct', uri
|
| 21 |
+
|
| 22 |
+
def calculate_sha256(file_path):
    """Return the hex-encoded SHA-256 digest of the file at ``file_path``.

    The file is streamed in 4 KiB chunks so arbitrarily large files can be
    hashed without being loaded fully into memory.
    """
    digest = hashlib.sha256()
    with open(file_path, 'rb') as handle:
        while True:
            chunk = handle.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
|
| 28 |
+
|
| 29 |
+
def manual_safety_check_hf(repo_id):
    """Query the Hugging Face scan API for ``repo_id``.

    Returns the scan payload when HF reports an unsafe file, otherwise None
    (including when the response carries no 'hasUnsafeFile' field at all).
    """
    scan_response = requests.get('https://huggingface.co/api/models/' + repo_id + "/scan")
    scan = scan_response.json()
    # A missing or falsy 'hasUnsafeFile' both mean "no known hazard".
    if scan.get('hasUnsafeFile'):
        return scan
    return None
|
| 40 |
+
|
| 41 |
+
# Main flow: resolve the checksum for `uri`, preferring the sha256 recorded
# in the Hugging Face LFS metadata and falling back to downloading the file
# and hashing it locally. The checksum is printed on stdout; errors go to
# stderr with distinct exit codes (5 = unsafe repo, 2 = not found, 1 = other).
download_type, repo_id_or_url = parse_uri(uri)

new_checksum = None
file_path = None

# Decide download method based on URI type
if download_type == 'huggingface':
    # Check if the repo is flagged as dangerous by HF
    hazard = manual_safety_check_hf(repo_id_or_url)
    if hazard is not None:
        # FIX: the original passed `filename=file_name` to print(), which is
        # not a valid keyword and raised TypeError before sys.exit(5) could
        # run — so callers never saw the "contaminated" exit code.
        print(f'Error: HuggingFace has detected security problems for {repo_id_or_url}: {str(hazard)}', file=sys.stderr)
        sys.exit(5)
    # Use HF API to pull sha
    for file in get_paths_info(repo_id_or_url, [file_name], repo_type='model'):
        try:
            new_checksum = file.lfs.sha256
            break
        except Exception as e:
            print(f'Error from Hugging Face Hub: {str(e)}', file=sys.stderr)
            sys.exit(2)
    if new_checksum is None:
        # No LFS metadata available: download the file so we can hash it.
        try:
            file_path = hf_hub_download(repo_id=repo_id_or_url, filename=file_name)
        except Exception as e:
            print(f'Error from Hugging Face Hub: {str(e)}', file=sys.stderr)
            sys.exit(2)
else:
    # Plain HTTP(S) download for non-HF URIs.
    response = requests.get(repo_id_or_url)
    if response.status_code == 200:
        with open(file_name, 'wb') as f:
            f.write(response.content)
        file_path = file_name
    elif response.status_code == 404:
        print(f'File not found: {response.status_code}', file=sys.stderr)
        sys.exit(2)
    else:
        print(f'Error downloading file: {response.status_code}', file=sys.stderr)
        sys.exit(1)

if new_checksum is None:
    # Hash the downloaded copy, report it, and clean up the temp file.
    new_checksum = calculate_sha256(file_path)
    print(new_checksum)
    os.remove(file_path)
else:
    print(new_checksum)
|
.github/checksum_checker.sh
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash
|
| 2 |
+
# This script needs yq and huggingface_hub to be installed
|
| 3 |
+
# to install huggingface_hub run: pip install huggingface_hub
|
| 4 |
+
|
| 5 |
+
# Path to the input YAML file
|
| 6 |
+
input_yaml=$1
|
| 7 |
+
|
| 8 |
+
# Function to download file and check checksum using Python
|
| 9 |
+
function check_and_update_checksum() {
|
| 10 |
+
model_name="$1"
|
| 11 |
+
file_name="$2"
|
| 12 |
+
uri="$3"
|
| 13 |
+
old_checksum="$4"
|
| 14 |
+
idx="$5"
|
| 15 |
+
|
| 16 |
+
# Download the file and calculate new checksum using Python
|
| 17 |
+
new_checksum=$(python3 ./.github/check_and_update.py $uri)
|
| 18 |
+
result=$?
|
| 19 |
+
|
| 20 |
+
if [[ $result -eq 5 ]]; then
|
| 21 |
+
echo "Contaminated entry detected, deleting entry for $model_name..."
|
| 22 |
+
yq eval -i "del([$idx])" "$input_yaml"
|
| 23 |
+
return
|
| 24 |
+
fi
|
| 25 |
+
|
| 26 |
+
if [[ "$new_checksum" == "" ]]; then
|
| 27 |
+
echo "Error calculating checksum for $file_name. Skipping..."
|
| 28 |
+
return
|
| 29 |
+
fi
|
| 30 |
+
|
| 31 |
+
echo "Checksum for $file_name: $new_checksum"
|
| 32 |
+
|
| 33 |
+
# Compare and update the YAML file if checksums do not match
|
| 34 |
+
|
| 35 |
+
if [[ $result -eq 2 ]]; then
|
| 36 |
+
echo "File not found, deleting entry for $file_name..."
|
| 37 |
+
# yq eval -i "del(.[$idx].files[] | select(.filename == \"$file_name\"))" "$input_yaml"
|
| 38 |
+
elif [[ "$old_checksum" != "$new_checksum" ]]; then
|
| 39 |
+
echo "Checksum mismatch for $file_name. Updating..."
|
| 40 |
+
yq eval -i "del(.[$idx].files[] | select(.filename == \"$file_name\").sha256)" "$input_yaml"
|
| 41 |
+
yq eval -i "(.[$idx].files[] | select(.filename == \"$file_name\")).sha256 = \"$new_checksum\"" "$input_yaml"
|
| 42 |
+
elif [[ $result -ne 0 ]]; then
|
| 43 |
+
echo "Error downloading file $file_name. Skipping..."
|
| 44 |
+
else
|
| 45 |
+
echo "Checksum match for $file_name. No update needed."
|
| 46 |
+
fi
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
# Read the YAML and process each file
|
| 50 |
+
len=$(yq eval '. | length' "$input_yaml")
|
| 51 |
+
for ((i=0; i<$len; i++))
|
| 52 |
+
do
|
| 53 |
+
name=$(yq eval ".[$i].name" "$input_yaml")
|
| 54 |
+
files_len=$(yq eval ".[$i].files | length" "$input_yaml")
|
| 55 |
+
for ((j=0; j<$files_len; j++))
|
| 56 |
+
do
|
| 57 |
+
filename=$(yq eval ".[$i].files[$j].filename" "$input_yaml")
|
| 58 |
+
uri=$(yq eval ".[$i].files[$j].uri" "$input_yaml")
|
| 59 |
+
checksum=$(yq eval ".[$i].files[$j].sha256" "$input_yaml")
|
| 60 |
+
echo "Checking model $name, file $filename. URI = $uri, Checksum = $checksum"
|
| 61 |
+
check_and_update_checksum "$name" "$filename" "$uri" "$checksum" "$i"
|
| 62 |
+
done
|
| 63 |
+
done
|
.github/ci/modelslist.go
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"fmt"
|
| 5 |
+
"html/template"
|
| 6 |
+
"io/ioutil"
|
| 7 |
+
"os"
|
| 8 |
+
|
| 9 |
+
"github.com/microcosm-cc/bluemonday"
|
| 10 |
+
"gopkg.in/yaml.v3"
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
var modelPageTemplate string = `
|
| 14 |
+
<!DOCTYPE html>
|
| 15 |
+
<html>
|
| 16 |
+
<head>
|
| 17 |
+
<meta charset="UTF-8">
|
| 18 |
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
| 19 |
+
<title>LocalAI models</title>
|
| 20 |
+
<link href="https://cdnjs.cloudflare.com/ajax/libs/flowbite/2.3.0/flowbite.min.css" rel="stylesheet" />
|
| 21 |
+
<script src="https://cdn.jsdelivr.net/npm/vanilla-lazyload@19.1.3/dist/lazyload.min.js"></script>
|
| 22 |
+
|
| 23 |
+
<link
|
| 24 |
+
rel="stylesheet"
|
| 25 |
+
href="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.8.0/build/styles/default.min.css"
|
| 26 |
+
/>
|
| 27 |
+
<script
|
| 28 |
+
defer
|
| 29 |
+
src="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.8.0/build/highlight.min.js"
|
| 30 |
+
></script>
|
| 31 |
+
<script
|
| 32 |
+
defer
|
| 33 |
+
src="https://cdn.jsdelivr.net/npm/alpinejs@3.x.x/dist/cdn.min.js"
|
| 34 |
+
></script>
|
| 35 |
+
<script
|
| 36 |
+
defer
|
| 37 |
+
src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"
|
| 38 |
+
></script>
|
| 39 |
+
<script
|
| 40 |
+
defer
|
| 41 |
+
src="https://cdn.jsdelivr.net/npm/dompurify@3.0.6/dist/purify.min.js"
|
| 42 |
+
></script>
|
| 43 |
+
|
| 44 |
+
<link href="/static/general.css" rel="stylesheet" />
|
| 45 |
+
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700&family=Roboto:wght@400;500&display=swap" rel="stylesheet">
|
| 46 |
+
<link
|
| 47 |
+
href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700,900&display=swap"
|
| 48 |
+
rel="stylesheet" />
|
| 49 |
+
<link
|
| 50 |
+
rel="stylesheet"
|
| 51 |
+
href="https://cdn.jsdelivr.net/npm/tw-elements/css/tw-elements.min.css" />
|
| 52 |
+
<script src="https://cdn.tailwindcss.com/3.3.0"></script>
|
| 53 |
+
<script>
|
| 54 |
+
tailwind.config = {
|
| 55 |
+
darkMode: "class",
|
| 56 |
+
theme: {
|
| 57 |
+
fontFamily: {
|
| 58 |
+
sans: ["Roboto", "sans-serif"],
|
| 59 |
+
body: ["Roboto", "sans-serif"],
|
| 60 |
+
mono: ["ui-monospace", "monospace"],
|
| 61 |
+
},
|
| 62 |
+
},
|
| 63 |
+
corePlugins: {
|
| 64 |
+
preflight: false,
|
| 65 |
+
},
|
| 66 |
+
};
|
| 67 |
+
</script>
|
| 68 |
+
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.1.1/css/all.min.css">
|
| 69 |
+
<script src="https://unpkg.com/htmx.org@1.9.12" integrity="sha384-ujb1lZYygJmzgSwoxRggbCHcjc0rB2XoQrxeTUQyRjrOnlCoYta87iKBWq3EsdM2" crossorigin="anonymous"></script>
|
| 70 |
+
</head>
|
| 71 |
+
|
| 72 |
+
<body class="bg-gray-900 text-gray-200">
|
| 73 |
+
<div class="flex flex-col min-h-screen">
|
| 74 |
+
|
| 75 |
+
<nav class="bg-gray-800 shadow-lg">
|
| 76 |
+
<div class="container mx-auto px-4 py-4">
|
| 77 |
+
<div class="flex items-center justify-between">
|
| 78 |
+
<div class="flex items-center">
|
| 79 |
+
<a href="/" class="text-white text-xl font-bold"><img src="https://github.com/mudler/LocalAI/assets/2420543/0966aa2a-166e-4f99-a3e5-6c915fc997dd" alt="LocalAI Logo" class="h-10 mr-3 border-2 border-gray-300 shadow rounded"></a>
|
| 80 |
+
<a href="/" class="text-white text-xl font-bold">LocalAI</a>
|
| 81 |
+
</div>
|
| 82 |
+
<!-- Menu button for small screens -->
|
| 83 |
+
<div class="lg:hidden">
|
| 84 |
+
<button id="menu-toggle" class="text-gray-400 hover:text-white focus:outline-none">
|
| 85 |
+
<i class="fas fa-bars fa-lg"></i>
|
| 86 |
+
</button>
|
| 87 |
+
</div>
|
| 88 |
+
<!-- Navigation links -->
|
| 89 |
+
<div class="hidden lg:flex lg:items-center lg:justify-end lg:flex-1 lg:w-0">
|
| 90 |
+
<a href="https://localai.io" class="text-gray-400 hover:text-white px-3 py-2 rounded" target="_blank" ><i class="fas fa-book-reader pr-2"></i> Documentation</a>
|
| 91 |
+
</div>
|
| 92 |
+
</div>
|
| 93 |
+
<!-- Collapsible menu for small screens -->
|
| 94 |
+
<div class="hidden lg:hidden" id="mobile-menu">
|
| 95 |
+
<div class="pt-4 pb-3 border-t border-gray-700">
|
| 96 |
+
|
| 97 |
+
<a href="https://localai.io" class="block text-gray-400 hover:text-white px-3 py-2 rounded mt-1" target="_blank" ><i class="fas fa-book-reader pr-2"></i> Documentation</a>
|
| 98 |
+
|
| 99 |
+
</div>
|
| 100 |
+
</div>
|
| 101 |
+
</div>
|
| 102 |
+
</nav>
|
| 103 |
+
|
| 104 |
+
<style>
|
| 105 |
+
.is-hidden {
|
| 106 |
+
display: none;
|
| 107 |
+
}
|
| 108 |
+
</style>
|
| 109 |
+
|
| 110 |
+
<div class="container mx-auto px-4 flex-grow">
|
| 111 |
+
|
| 112 |
+
<div class="models mt-12">
|
| 113 |
+
<h2 class="text-center text-3xl font-semibold text-gray-100">
|
| 114 |
+
LocalAI model gallery list </h2><br>
|
| 115 |
+
|
| 116 |
+
<h2 class="text-center text-3xl font-semibold text-gray-100">
|
| 117 |
+
|
| 118 |
+
🖼️ Available {{.AvailableModels}} models</i> <a href="https://localai.io/models/" target="_blank" >
|
| 119 |
+
<i class="fas fa-circle-info pr-2"></i>
|
| 120 |
+
</a></h2>
|
| 121 |
+
|
| 122 |
+
<h3>
|
| 123 |
+
Refer to the Model gallery <a href="https://localai.io/models/" target="_blank" ><i class="fas fa-circle-info pr-2"></i></a> for more information on how to use the models with LocalAI.<br>
|
| 124 |
+
|
| 125 |
+
You can install models with the CLI command <code>local-ai models install <model-name></code>. or by using the WebUI.
|
| 126 |
+
</h3>
|
| 127 |
+
|
| 128 |
+
<input class="form-control appearance-none block w-full mt-5 px-3 py-2 text-base font-normal text-gray-300 pb-2 mb-5 bg-gray-800 bg-clip-padding border border-solid border-gray-600 rounded transition ease-in-out m-0 focus:text-gray-300 focus:bg-gray-900 focus:border-blue-500 focus:outline-none" type="search"
|
| 129 |
+
id="searchbox" placeholder="Live search keyword..">
|
| 130 |
+
<div class="dark grid grid-cols-1 grid-rows-1 md:grid-cols-3 block rounded-lg shadow-secondary-1 dark:bg-surface-dark">
|
| 131 |
+
{{ range $_, $model := .Models }}
|
| 132 |
+
<div class="box me-4 mb-2 block rounded-lg bg-white shadow-secondary-1 dark:bg-gray-800 dark:bg-surface-dark dark:text-white text-surface pb-2">
|
| 133 |
+
<div>
|
| 134 |
+
{{ $icon := "https://upload.wikimedia.org/wikipedia/commons/6/65/No-Image-Placeholder.svg" }}
|
| 135 |
+
{{ if $model.Icon }}
|
| 136 |
+
{{ $icon = $model.Icon }}
|
| 137 |
+
{{ end }}
|
| 138 |
+
<div class="flex justify-center items-center">
|
| 139 |
+
<img data-src="{{ $icon }}" alt="{{$model.Name}}" class="rounded-t-lg max-h-48 max-w-96 object-cover mt-3 lazy">
|
| 140 |
+
</div>
|
| 141 |
+
<div class="p-6 text-surface dark:text-white">
|
| 142 |
+
<h5 class="mb-2 text-xl font-medium leading-tight">{{$model.Name}}</h5>
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
<p class="mb-4 text-base truncate">{{ $model.Description }}</p>
|
| 146 |
+
|
| 147 |
+
</div>
|
| 148 |
+
<div class="px-6 pt-4 pb-2">
|
| 149 |
+
|
| 150 |
+
<!-- Modal toggle -->
|
| 151 |
+
<button data-modal-target="{{ $model.Name}}-modal" data-modal-toggle="{{ $model.Name }}-modal" class="block text-white bg-blue-700 hover:bg-blue-800 focus:ring-4 focus:outline-none focus:ring-blue-300 font-medium rounded-lg text-sm px-5 py-2.5 text-center dark:bg-blue-600 dark:hover:bg-blue-700 dark:focus:ring-blue-800" type="button">
|
| 152 |
+
More info
|
| 153 |
+
</button>
|
| 154 |
+
|
| 155 |
+
<!-- Main modal -->
|
| 156 |
+
<div id="{{ $model.Name}}-modal" tabindex="-1" aria-hidden="true" class="hidden overflow-y-auto overflow-x-hidden fixed top-0 right-0 left-0 z-50 justify-center items-center w-full md:inset-0 h-[calc(100%-1rem)] max-h-full">
|
| 157 |
+
<div class="relative p-4 w-full max-w-2xl max-h-full">
|
| 158 |
+
<!-- Modal content -->
|
| 159 |
+
<div class="relative bg-white rounded-lg shadow dark:bg-gray-700">
|
| 160 |
+
<!-- Modal header -->
|
| 161 |
+
<div class="flex items-center justify-between p-4 md:p-5 border-b rounded-t dark:border-gray-600">
|
| 162 |
+
<h3 class="text-xl font-semibold text-gray-900 dark:text-white">
|
| 163 |
+
{{ $model.Name}}
|
| 164 |
+
</h3>
|
| 165 |
+
<button type="button" class="text-gray-400 bg-transparent hover:bg-gray-200 hover:text-gray-900 rounded-lg text-sm w-8 h-8 ms-auto inline-flex justify-center items-center dark:hover:bg-gray-600 dark:hover:text-white" data-modal-hide="{{$model.Name}}-modal">
|
| 166 |
+
<svg class="w-3 h-3" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 14 14">
|
| 167 |
+
<path stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="m1 1 6 6m0 0 6 6M7 7l6-6M7 7l-6 6"/>
|
| 168 |
+
</svg>
|
| 169 |
+
<span class="sr-only">Close modal</span>
|
| 170 |
+
</button>
|
| 171 |
+
</div>
|
| 172 |
+
<!-- Modal body -->
|
| 173 |
+
<div class="p-4 md:p-5 space-y-4">
|
| 174 |
+
<div class="flex justify-center items-center">
|
| 175 |
+
<img data-src="{{ $icon }}" alt="{{$model.Name}}" class="lazy rounded-t-lg max-h-48 max-w-96 object-cover mt-3">
|
| 176 |
+
</div>
|
| 177 |
+
|
| 178 |
+
<p class="text-base leading-relaxed text-gray-500 dark:text-gray-400">
|
| 179 |
+
{{ $model.Description }}
|
| 180 |
+
|
| 181 |
+
</p>
|
| 182 |
+
|
| 183 |
+
<p class="text-base leading-relaxed text-gray-500 dark:text-gray-400">
|
| 184 |
+
To install the model with the CLI, run: <br>
|
| 185 |
+
<code> local-ai models install {{$model.Name}} </code> <br>
|
| 186 |
+
|
| 187 |
+
<hr>
|
| 188 |
+
See also <a href="https://localai.io/models/" target="_blank" >
|
| 189 |
+
Installation <i class="fas fa-circle-info pr-2"></i>
|
| 190 |
+
</a> to see how to install models with the REST API.
|
| 191 |
+
</p>
|
| 192 |
+
|
| 193 |
+
<p class="text-base leading-relaxed text-gray-500 dark:text-gray-400">
|
| 194 |
+
<ul>
|
| 195 |
+
{{ range $_, $u := $model.URLs }}
|
| 196 |
+
<li><a href="{{ $u }}" target=_blank><i class="fa-solid fa-link"></i> {{ $u }}</a></li>
|
| 197 |
+
{{ end }}
|
| 198 |
+
</ul>
|
| 199 |
+
</p>
|
| 200 |
+
</div>
|
| 201 |
+
<!-- Modal footer -->
|
| 202 |
+
<div class="flex items-center p-4 md:p-5 border-t border-gray-200 rounded-b dark:border-gray-600">
|
| 203 |
+
<button data-modal-hide="{{ $model.Name}}-modal" type="button" class="py-2.5 px-5 ms-3 text-sm font-medium text-gray-900 focus:outline-none bg-white rounded-lg border border-gray-200 hover:bg-gray-100 hover:text-blue-700 focus:z-10 focus:ring-4 focus:ring-gray-100 dark:focus:ring-gray-700 dark:bg-gray-800 dark:text-gray-400 dark:border-gray-600 dark:hover:text-white dark:hover:bg-gray-700">Close</button>
|
| 204 |
+
</div>
|
| 205 |
+
</div>
|
| 206 |
+
</div>
|
| 207 |
+
</div>
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
</div>
|
| 211 |
+
</div>
|
| 212 |
+
</div>
|
| 213 |
+
{{ end }}
|
| 214 |
+
|
| 215 |
+
</div>
|
| 216 |
+
</div>
|
| 217 |
+
</div>
|
| 218 |
+
|
| 219 |
+
<script>
|
| 220 |
+
var lazyLoadInstance = new LazyLoad({
|
| 221 |
+
// Your custom settings go here
|
| 222 |
+
});
|
| 223 |
+
|
| 224 |
+
let cards = document.querySelectorAll('.box')
|
| 225 |
+
|
| 226 |
+
function liveSearch() {
|
| 227 |
+
let search_query = document.getElementById("searchbox").value;
|
| 228 |
+
|
| 229 |
+
//Use innerText if all contents are visible
|
| 230 |
+
//Use textContent for including hidden elements
|
| 231 |
+
for (var i = 0; i < cards.length; i++) {
|
| 232 |
+
if(cards[i].textContent.toLowerCase()
|
| 233 |
+
.includes(search_query.toLowerCase())) {
|
| 234 |
+
cards[i].classList.remove("is-hidden");
|
| 235 |
+
} else {
|
| 236 |
+
cards[i].classList.add("is-hidden");
|
| 237 |
+
}
|
| 238 |
+
}
|
| 239 |
+
}
|
| 240 |
+
|
| 241 |
+
//A little delay
|
| 242 |
+
let typingTimer;
|
| 243 |
+
let typeInterval = 500;
|
| 244 |
+
let searchInput = document.getElementById('searchbox');
|
| 245 |
+
|
| 246 |
+
searchInput.addEventListener('keyup', () => {
|
| 247 |
+
clearTimeout(typingTimer);
|
| 248 |
+
typingTimer = setTimeout(liveSearch, typeInterval);
|
| 249 |
+
});
|
| 250 |
+
</script>
|
| 251 |
+
|
| 252 |
+
</div>
|
| 253 |
+
|
| 254 |
+
<script src="https://cdnjs.cloudflare.com/ajax/libs/flowbite/2.3.0/flowbite.min.js"></script>
|
| 255 |
+
</body>
|
| 256 |
+
</html>
|
| 257 |
+
`
|
| 258 |
+
|
| 259 |
+
// GalleryModel is one entry of the model gallery index: the subset of the
// YAML index fields that the HTML page renders.
type GalleryModel struct {
	// Name is the model identifier shown as the card title and used in the
	// `local-ai models install <name>` snippet in the modal.
	Name string `json:"name" yaml:"name"`
	// URLs lists reference links rendered in the modal body.
	URLs []string `json:"urls" yaml:"urls"`
	// Icon is an optional image URL; the template falls back to a
	// placeholder image when empty.
	Icon string `json:"icon" yaml:"icon"`
	// Description is free-form text shown on the card and in the modal.
	Description string `json:"description" yaml:"description"`
}
|
| 265 |
+
|
| 266 |
+
func main() {
|
| 267 |
+
// read the YAML file which contains the models
|
| 268 |
+
|
| 269 |
+
f, err := ioutil.ReadFile(os.Args[1])
|
| 270 |
+
if err != nil {
|
| 271 |
+
fmt.Println("Error reading file:", err)
|
| 272 |
+
return
|
| 273 |
+
}
|
| 274 |
+
|
| 275 |
+
models := []*GalleryModel{}
|
| 276 |
+
err = yaml.Unmarshal(f, &models)
|
| 277 |
+
if err != nil {
|
| 278 |
+
// write to stderr
|
| 279 |
+
os.Stderr.WriteString("Error unmarshaling YAML: " + err.Error() + "\n")
|
| 280 |
+
return
|
| 281 |
+
}
|
| 282 |
+
|
| 283 |
+
// Ensure that all arbitrary text content is sanitized before display
|
| 284 |
+
for i, m := range models {
|
| 285 |
+
models[i].Name = bluemonday.StrictPolicy().Sanitize(m.Name)
|
| 286 |
+
models[i].Description = bluemonday.StrictPolicy().Sanitize(m.Description)
|
| 287 |
+
}
|
| 288 |
+
|
| 289 |
+
// render the template
|
| 290 |
+
data := struct {
|
| 291 |
+
Models []*GalleryModel
|
| 292 |
+
AvailableModels int
|
| 293 |
+
}{
|
| 294 |
+
Models: models,
|
| 295 |
+
AvailableModels: len(models),
|
| 296 |
+
}
|
| 297 |
+
tmpl := template.Must(template.New("modelPage").Parse(modelPageTemplate))
|
| 298 |
+
|
| 299 |
+
err = tmpl.Execute(os.Stdout, data)
|
| 300 |
+
if err != nil {
|
| 301 |
+
fmt.Println("Error executing template:", err)
|
| 302 |
+
return
|
| 303 |
+
}
|
| 304 |
+
}
|
.github/dependabot.yml
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
|
| 2 |
+
version: 2
|
| 3 |
+
updates:
|
| 4 |
+
- package-ecosystem: "gitsubmodule"
|
| 5 |
+
directory: "/"
|
| 6 |
+
schedule:
|
| 7 |
+
interval: "weekly"
|
| 8 |
+
- package-ecosystem: "gomod"
|
| 9 |
+
directory: "/"
|
| 10 |
+
schedule:
|
| 11 |
+
interval: "weekly"
|
| 12 |
+
ignore:
|
| 13 |
+
- dependency-name: "github.com/mudler/LocalAI/pkg/grpc/proto"
|
| 14 |
+
- package-ecosystem: "github-actions"
|
| 15 |
+
# Workflow files stored in the default location of `.github/workflows`. (You don't need to specify `/.github/workflows` for `directory`. You can use `directory: "/"`.)
|
| 16 |
+
directory: "/"
|
| 17 |
+
schedule:
|
| 18 |
+
# Check for updates to GitHub Actions every weekday
|
| 19 |
+
interval: "weekly"
|
| 20 |
+
- package-ecosystem: "pip"
|
| 21 |
+
# Workflow files stored in the default location of `.github/workflows`. (You don't need to specify `/.github/workflows` for `directory`. You can use `directory: "/"`.)
|
| 22 |
+
directory: "/"
|
| 23 |
+
schedule:
|
| 24 |
+
# Check for updates to GitHub Actions every weekday
|
| 25 |
+
interval: "weekly"
|
| 26 |
+
- package-ecosystem: "docker"
|
| 27 |
+
# Workflow files stored in the default location of `.github/workflows`. (You don't need to specify `/.github/workflows` for `directory`. You can use `directory: "/"`.)
|
| 28 |
+
directory: "/"
|
| 29 |
+
schedule:
|
| 30 |
+
# Check for updates to GitHub Actions every weekday
|
| 31 |
+
interval: "weekly"
|
| 32 |
+
- package-ecosystem: "pip"
|
| 33 |
+
directory: "/backend/python/bark"
|
| 34 |
+
schedule:
|
| 35 |
+
interval: "weekly"
|
| 36 |
+
- package-ecosystem: "pip"
|
| 37 |
+
directory: "/backend/python/common/template"
|
| 38 |
+
schedule:
|
| 39 |
+
interval: "weekly"
|
| 40 |
+
- package-ecosystem: "pip"
|
| 41 |
+
directory: "/backend/python/coqui"
|
| 42 |
+
schedule:
|
| 43 |
+
interval: "weekly"
|
| 44 |
+
- package-ecosystem: "pip"
|
| 45 |
+
directory: "/backend/python/diffusers"
|
| 46 |
+
schedule:
|
| 47 |
+
interval: "weekly"
|
| 48 |
+
- package-ecosystem: "pip"
|
| 49 |
+
directory: "/backend/python/exllama"
|
| 50 |
+
schedule:
|
| 51 |
+
interval: "weekly"
|
| 52 |
+
- package-ecosystem: "pip"
|
| 53 |
+
directory: "/backend/python/exllama2"
|
| 54 |
+
schedule:
|
| 55 |
+
interval: "weekly"
|
| 56 |
+
- package-ecosystem: "pip"
|
| 57 |
+
directory: "/backend/python/mamba"
|
| 58 |
+
schedule:
|
| 59 |
+
interval: "weekly"
|
| 60 |
+
- package-ecosystem: "pip"
|
| 61 |
+
directory: "/backend/python/openvoice"
|
| 62 |
+
schedule:
|
| 63 |
+
interval: "weekly"
|
| 64 |
+
- package-ecosystem: "pip"
|
| 65 |
+
directory: "/backend/python/rerankers"
|
| 66 |
+
schedule:
|
| 67 |
+
interval: "weekly"
|
| 68 |
+
- package-ecosystem: "pip"
|
| 69 |
+
directory: "/backend/python/sentencetransformers"
|
| 70 |
+
schedule:
|
| 71 |
+
interval: "weekly"
|
| 72 |
+
- package-ecosystem: "pip"
|
| 73 |
+
directory: "/backend/python/transformers"
|
| 74 |
+
schedule:
|
| 75 |
+
interval: "weekly"
|
| 76 |
+
- package-ecosystem: "pip"
|
| 77 |
+
directory: "/backend/python/vllm"
|
| 78 |
+
schedule:
|
| 79 |
+
interval: "weekly"
|
| 80 |
+
- package-ecosystem: "pip"
|
| 81 |
+
directory: "/examples/chainlit"
|
| 82 |
+
schedule:
|
| 83 |
+
interval: "weekly"
|
| 84 |
+
- package-ecosystem: "pip"
|
| 85 |
+
directory: "/examples/functions"
|
| 86 |
+
schedule:
|
| 87 |
+
interval: "weekly"
|
| 88 |
+
- package-ecosystem: "pip"
|
| 89 |
+
directory: "/examples/langchain/langchainpy-localai-example"
|
| 90 |
+
schedule:
|
| 91 |
+
interval: "weekly"
|
| 92 |
+
- package-ecosystem: "pip"
|
| 93 |
+
directory: "/examples/langchain-chroma"
|
| 94 |
+
schedule:
|
| 95 |
+
interval: "weekly"
|
| 96 |
+
- package-ecosystem: "pip"
|
| 97 |
+
directory: "/examples/streamlit-bot"
|
| 98 |
+
schedule:
|
| 99 |
+
interval: "weekly"
|
| 100 |
+
- package-ecosystem: "docker"
|
| 101 |
+
directory: "/examples/k8sgpt"
|
| 102 |
+
schedule:
|
| 103 |
+
interval: "weekly"
|
| 104 |
+
- package-ecosystem: "docker"
|
| 105 |
+
directory: "/examples/kubernetes"
|
| 106 |
+
schedule:
|
| 107 |
+
interval: "weekly"
|
| 108 |
+
- package-ecosystem: "docker"
|
| 109 |
+
directory: "/examples/langchain"
|
| 110 |
+
schedule:
|
| 111 |
+
interval: "weekly"
|
| 112 |
+
- package-ecosystem: "gomod"
|
| 113 |
+
directory: "/examples/semantic-todo"
|
| 114 |
+
schedule:
|
| 115 |
+
interval: "weekly"
|
| 116 |
+
- package-ecosystem: "docker"
|
| 117 |
+
directory: "/examples/telegram-bot"
|
| 118 |
+
schedule:
|
| 119 |
+
interval: "weekly"
|
.github/gallery-agent/agent.go
ADDED
|
@@ -0,0 +1,445 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"context"
|
| 5 |
+
"encoding/json"
|
| 6 |
+
"fmt"
|
| 7 |
+
"io"
|
| 8 |
+
"net/http"
|
| 9 |
+
"os"
|
| 10 |
+
"regexp"
|
| 11 |
+
"slices"
|
| 12 |
+
"strings"
|
| 13 |
+
|
| 14 |
+
"github.com/ghodss/yaml"
|
| 15 |
+
hfapi "github.com/mudler/LocalAI/pkg/huggingface-api"
|
| 16 |
+
cogito "github.com/mudler/cogito"
|
| 17 |
+
|
| 18 |
+
"github.com/mudler/cogito/structures"
|
| 19 |
+
"github.com/sashabaranov/go-openai/jsonschema"
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
var (
	// LLM connection settings, all read from the environment at startup.
	openAIModel   = os.Getenv("OPENAI_MODEL")
	openAIKey     = os.Getenv("OPENAI_KEY")
	openAIBaseURL = os.Getenv("OPENAI_BASE_URL")
	// Optional override for the gallery index file location; see
	// getGalleryIndexPath for the default fallback.
	galleryIndexPath = os.Getenv("GALLERY_INDEX_PATH")
	// Default LLM client shared by the agent helpers in this package.
	llm = cogito.NewOpenAILLM(openAIModel, openAIKey, openAIBaseURL)
)
|
| 30 |
+
|
| 31 |
+
// cleanTextContent removes trailing spaces, tabs, and normalizes line endings
|
| 32 |
+
// to prevent YAML linting issues like trailing spaces and multiple empty lines
|
| 33 |
+
func cleanTextContent(text string) string {
|
| 34 |
+
lines := strings.Split(text, "\n")
|
| 35 |
+
var cleanedLines []string
|
| 36 |
+
var prevEmpty bool
|
| 37 |
+
for _, line := range lines {
|
| 38 |
+
// Remove all trailing whitespace (spaces, tabs, etc.)
|
| 39 |
+
trimmed := strings.TrimRight(line, " \t\r")
|
| 40 |
+
// Avoid multiple consecutive empty lines
|
| 41 |
+
if trimmed == "" {
|
| 42 |
+
if !prevEmpty {
|
| 43 |
+
cleanedLines = append(cleanedLines, "")
|
| 44 |
+
}
|
| 45 |
+
prevEmpty = true
|
| 46 |
+
} else {
|
| 47 |
+
cleanedLines = append(cleanedLines, trimmed)
|
| 48 |
+
prevEmpty = false
|
| 49 |
+
}
|
| 50 |
+
}
|
| 51 |
+
// Remove trailing empty lines from the result
|
| 52 |
+
result := strings.Join(cleanedLines, "\n")
|
| 53 |
+
return stripThinkingTags(strings.TrimRight(result, "\n"))
|
| 54 |
+
}
|
| 55 |
+
|
| 56 |
+
// galleryModel is the minimal view of a gallery index entry needed for
// duplicate detection: the entry name and the source URLs it references.
type galleryModel struct {
	Name string   `yaml:"name"`
	Urls []string `yaml:"urls"`
}
|
| 60 |
+
|
| 61 |
+
// isModelExisting checks if a specific model ID exists in the gallery.
// The ID is matched against each entry's `urls` list (not its name), so
// callers pass the repository URL/ID used when the entry was created.
// Returns an error when the index file cannot be read or parsed.
func isModelExisting(modelID string) (bool, error) {
	indexPath := getGalleryIndexPath()
	content, err := os.ReadFile(indexPath)
	if err != nil {
		return false, fmt.Errorf("failed to read %s: %w", indexPath, err)
	}

	var galleryModels []galleryModel

	err = yaml.Unmarshal(content, &galleryModels)
	if err != nil {
		return false, fmt.Errorf("failed to unmarshal %s: %w", indexPath, err)
	}

	// A model is considered present when any entry references the same URL.
	for _, galleryModel := range galleryModels {
		if slices.Contains(galleryModel.Urls, modelID) {
			return true, nil
		}
	}

	return false, nil
}
|
| 84 |
+
|
| 85 |
+
// filterExistingModels removes models that already exist in the gallery
|
| 86 |
+
func filterExistingModels(models []ProcessedModel) ([]ProcessedModel, error) {
|
| 87 |
+
var filteredModels []ProcessedModel
|
| 88 |
+
for _, model := range models {
|
| 89 |
+
exists, err := isModelExisting(model.ModelID)
|
| 90 |
+
if err != nil {
|
| 91 |
+
fmt.Printf("Error checking if model %s exists: %v, skipping\n", model.ModelID, err)
|
| 92 |
+
continue
|
| 93 |
+
}
|
| 94 |
+
|
| 95 |
+
if !exists {
|
| 96 |
+
filteredModels = append(filteredModels, model)
|
| 97 |
+
} else {
|
| 98 |
+
fmt.Printf("Skipping existing model: %s\n", model.ModelID)
|
| 99 |
+
}
|
| 100 |
+
}
|
| 101 |
+
|
| 102 |
+
fmt.Printf("Filtered out %d existing models, %d new models remaining\n",
|
| 103 |
+
len(models)-len(filteredModels), len(filteredModels))
|
| 104 |
+
|
| 105 |
+
return filteredModels, nil
|
| 106 |
+
}
|
| 107 |
+
|
| 108 |
+
// getGalleryIndexPath returns the gallery index file path, with a default fallback
|
| 109 |
+
func getGalleryIndexPath() string {
|
| 110 |
+
if galleryIndexPath != "" {
|
| 111 |
+
return galleryIndexPath
|
| 112 |
+
}
|
| 113 |
+
return "gallery/index.yaml"
|
| 114 |
+
}
|
| 115 |
+
|
| 116 |
+
// stripThinkingTags deletes <thinking>…</thinking> and <think>…</think>
// spans (which may cover multiple lines) from the content, then trims any
// surrounding whitespace left behind.
func stripThinkingTags(content string) string {
	for _, pattern := range []string{
		`(?s)<thinking>.*?</thinking>`,
		`(?s)<think>.*?</think>`,
	} {
		content = regexp.MustCompile(pattern).ReplaceAllString(content, "")
	}
	return strings.TrimSpace(content)
}
|
| 125 |
+
|
| 126 |
+
// getRealReadme asks the LLM (armed with the Hugging Face readme tool) to
// chase a possibly-quantized repository back to its base model and produce a
// concise, gallery-ready description. The result is passed through
// cleanTextContent before being returned.
func getRealReadme(ctx context.Context, repository string) (string, error) {
	// Create a conversation fragment
	fragment := cogito.NewEmptyFragment().
		AddMessage("user",
			`Your task is to get a clear description of a large language model from huggingface by using the provided tool. I will share with you a repository that might be quantized, and as such probably not by the original model author. We need to get the real description of the model, and not the one that might be quantized. You will have to call the tool to get the readme more than once by figuring out from the quantized readme which is the base model readme. This is the repository: `+repository)

	// Execute with tools: up to 3 iterations with up to 3 attempts each,
	// using the HF readme tool to fetch repository readmes.
	result, err := cogito.ExecuteTools(llm, fragment,
		cogito.WithIterations(3),
		cogito.WithMaxAttempts(3),
		cogito.WithTools(&HFReadmeTool{client: hfapi.NewClient()}))
	if err != nil {
		return "", err
	}

	result = result.AddMessage("user", "Describe the model in a clear and concise way that can be shared in a model gallery.")

	// Get a response
	newFragment, err := llm.Ask(ctx, result)
	if err != nil {
		return "", err
	}

	content := newFragment.LastMessage().Content
	return cleanTextContent(content), nil
}
|
| 152 |
+
|
| 153 |
+
func selectMostInterestingModels(ctx context.Context, searchResult *SearchResult) ([]ProcessedModel, error) {
|
| 154 |
+
|
| 155 |
+
if len(searchResult.Models) == 1 {
|
| 156 |
+
return searchResult.Models, nil
|
| 157 |
+
}
|
| 158 |
+
|
| 159 |
+
// Create a conversation fragment
|
| 160 |
+
fragment := cogito.NewEmptyFragment().
|
| 161 |
+
AddMessage("user",
|
| 162 |
+
`Your task is to analyze a list of AI models and select the most interesting ones for a model gallery. You will be given detailed information about multiple models including their metadata, file information, and README content.
|
| 163 |
+
|
| 164 |
+
Consider the following criteria when selecting models:
|
| 165 |
+
1. Model popularity (download count)
|
| 166 |
+
2. Model recency (last modified date)
|
| 167 |
+
3. Model completeness (has preferred model file, README, etc.)
|
| 168 |
+
4. Model uniqueness (not duplicates or very similar models)
|
| 169 |
+
5. Model quality (based on README content and description)
|
| 170 |
+
6. Model utility (practical applications)
|
| 171 |
+
|
| 172 |
+
You should select models that would be most valuable for users browsing a model gallery. Prioritize models that are:
|
| 173 |
+
- Well-documented with clear READMEs
|
| 174 |
+
- Recently updated
|
| 175 |
+
- Popular (high download count)
|
| 176 |
+
- Have the preferred quantization format available
|
| 177 |
+
- Offer unique capabilities or are from reputable authors
|
| 178 |
+
|
| 179 |
+
Return your analysis and selection reasoning.`)
|
| 180 |
+
|
| 181 |
+
// Add the search results as context
|
| 182 |
+
modelsInfo := fmt.Sprintf("Found %d models matching '%s' with quantization preference '%s':\n\n",
|
| 183 |
+
searchResult.TotalModelsFound, searchResult.SearchTerm, searchResult.Quantization)
|
| 184 |
+
|
| 185 |
+
for i, model := range searchResult.Models {
|
| 186 |
+
modelsInfo += fmt.Sprintf("Model %d:\n", i+1)
|
| 187 |
+
modelsInfo += fmt.Sprintf(" ID: %s\n", model.ModelID)
|
| 188 |
+
modelsInfo += fmt.Sprintf(" Author: %s\n", model.Author)
|
| 189 |
+
modelsInfo += fmt.Sprintf(" Downloads: %d\n", model.Downloads)
|
| 190 |
+
modelsInfo += fmt.Sprintf(" Last Modified: %s\n", model.LastModified)
|
| 191 |
+
modelsInfo += fmt.Sprintf(" Files: %d files\n", len(model.Files))
|
| 192 |
+
|
| 193 |
+
if model.PreferredModelFile != nil {
|
| 194 |
+
modelsInfo += fmt.Sprintf(" Preferred Model File: %s (%d bytes)\n",
|
| 195 |
+
model.PreferredModelFile.Path, model.PreferredModelFile.Size)
|
| 196 |
+
} else {
|
| 197 |
+
modelsInfo += " No preferred model file found\n"
|
| 198 |
+
}
|
| 199 |
+
|
| 200 |
+
if model.ReadmeContent != "" {
|
| 201 |
+
modelsInfo += fmt.Sprintf(" README: %s\n", model.ReadmeContent)
|
| 202 |
+
}
|
| 203 |
+
|
| 204 |
+
if model.ProcessingError != "" {
|
| 205 |
+
modelsInfo += fmt.Sprintf(" Processing Error: %s\n", model.ProcessingError)
|
| 206 |
+
}
|
| 207 |
+
|
| 208 |
+
modelsInfo += "\n"
|
| 209 |
+
}
|
| 210 |
+
|
| 211 |
+
fragment = fragment.AddMessage("user", modelsInfo)
|
| 212 |
+
|
| 213 |
+
fragment = fragment.AddMessage("user", "Based on your analysis, select the top 5 most interesting models and provide a brief explanation for each selection. Also, create a filtered SearchResult with only the selected models. Return just a list of repositories IDs, you will later be asked to output it as a JSON array with the json tool.")
|
| 214 |
+
|
| 215 |
+
// Get a response
|
| 216 |
+
newFragment, err := llm.Ask(ctx, fragment)
|
| 217 |
+
if err != nil {
|
| 218 |
+
return nil, err
|
| 219 |
+
}
|
| 220 |
+
|
| 221 |
+
fmt.Println(newFragment.LastMessage().Content)
|
| 222 |
+
repositories := struct {
|
| 223 |
+
Repositories []string `json:"repositories"`
|
| 224 |
+
}{}
|
| 225 |
+
|
| 226 |
+
s := structures.Structure{
|
| 227 |
+
Schema: jsonschema.Definition{
|
| 228 |
+
Type: jsonschema.Object,
|
| 229 |
+
AdditionalProperties: false,
|
| 230 |
+
Properties: map[string]jsonschema.Definition{
|
| 231 |
+
"repositories": {
|
| 232 |
+
Type: jsonschema.Array,
|
| 233 |
+
Items: &jsonschema.Definition{Type: jsonschema.String},
|
| 234 |
+
Description: "The trending repositories IDs",
|
| 235 |
+
},
|
| 236 |
+
},
|
| 237 |
+
Required: []string{"repositories"},
|
| 238 |
+
},
|
| 239 |
+
Object: &repositories,
|
| 240 |
+
}
|
| 241 |
+
|
| 242 |
+
err = newFragment.ExtractStructure(ctx, llm, s)
|
| 243 |
+
if err != nil {
|
| 244 |
+
return nil, err
|
| 245 |
+
}
|
| 246 |
+
|
| 247 |
+
filteredModels := []ProcessedModel{}
|
| 248 |
+
for _, m := range searchResult.Models {
|
| 249 |
+
if slices.Contains(repositories.Repositories, m.ModelID) {
|
| 250 |
+
filteredModels = append(filteredModels, m)
|
| 251 |
+
}
|
| 252 |
+
}
|
| 253 |
+
|
| 254 |
+
return filteredModels, nil
|
| 255 |
+
}
|
| 256 |
+
|
| 257 |
+
// ModelMetadata represents extracted metadata from a model
|
| 258 |
+
type ModelMetadata struct {
|
| 259 |
+
Tags []string `json:"tags"`
|
| 260 |
+
License string `json:"license"`
|
| 261 |
+
}
|
| 262 |
+
|
| 263 |
+
// extractModelMetadata extracts tags and license from model README and documentation
|
| 264 |
+
func extractModelMetadata(ctx context.Context, model ProcessedModel) ([]string, string, error) {
|
| 265 |
+
// Create a conversation fragment
|
| 266 |
+
fragment := cogito.NewEmptyFragment().
|
| 267 |
+
AddMessage("user",
|
| 268 |
+
`Your task is to extract metadata from an AI model's README and documentation. You will be provided with:
|
| 269 |
+
1. Model information (ID, author, description)
|
| 270 |
+
2. README content
|
| 271 |
+
|
| 272 |
+
You need to extract:
|
| 273 |
+
1. **Tags**: An array of relevant tags that describe the model. Use common tags from the gallery such as:
|
| 274 |
+
- llm, gguf, gpu, cpu, multimodal, image-to-text, text-to-text, text-to-speech, tts
|
| 275 |
+
- thinking, reasoning, chat, instruction-tuned, code, vision
|
| 276 |
+
- Model family names (e.g., llama, qwen, mistral, gemma) if applicable
|
| 277 |
+
- Any other relevant descriptive tags
|
| 278 |
+
Select 3-8 most relevant tags.
|
| 279 |
+
|
| 280 |
+
2. **License**: The license identifier (e.g., "apache-2.0", "mit", "llama2", "gpl-3.0", "bsd", "cc-by-4.0").
|
| 281 |
+
If no license is found, return an empty string.
|
| 282 |
+
|
| 283 |
+
Return the extracted metadata in a structured format.`)
|
| 284 |
+
|
| 285 |
+
// Add model information
|
| 286 |
+
modelInfo := "Model Information:\n"
|
| 287 |
+
modelInfo += fmt.Sprintf(" ID: %s\n", model.ModelID)
|
| 288 |
+
modelInfo += fmt.Sprintf(" Author: %s\n", model.Author)
|
| 289 |
+
modelInfo += fmt.Sprintf(" Downloads: %d\n", model.Downloads)
|
| 290 |
+
if model.ReadmeContent != "" {
|
| 291 |
+
modelInfo += fmt.Sprintf(" README Content:\n%s\n", model.ReadmeContent)
|
| 292 |
+
} else if model.ReadmeContentPreview != "" {
|
| 293 |
+
modelInfo += fmt.Sprintf(" README Preview: %s\n", model.ReadmeContentPreview)
|
| 294 |
+
}
|
| 295 |
+
|
| 296 |
+
fragment = fragment.AddMessage("user", modelInfo)
|
| 297 |
+
fragment = fragment.AddMessage("user", "Extract the tags and license from the model information. Return the metadata as a JSON object with 'tags' (array of strings) and 'license' (string).")
|
| 298 |
+
|
| 299 |
+
// Get a response
|
| 300 |
+
newFragment, err := llm.Ask(ctx, fragment)
|
| 301 |
+
if err != nil {
|
| 302 |
+
return nil, "", err
|
| 303 |
+
}
|
| 304 |
+
|
| 305 |
+
// Extract structured metadata
|
| 306 |
+
metadata := ModelMetadata{}
|
| 307 |
+
|
| 308 |
+
s := structures.Structure{
|
| 309 |
+
Schema: jsonschema.Definition{
|
| 310 |
+
Type: jsonschema.Object,
|
| 311 |
+
AdditionalProperties: false,
|
| 312 |
+
Properties: map[string]jsonschema.Definition{
|
| 313 |
+
"tags": {
|
| 314 |
+
Type: jsonschema.Array,
|
| 315 |
+
Items: &jsonschema.Definition{Type: jsonschema.String},
|
| 316 |
+
Description: "Array of relevant tags describing the model",
|
| 317 |
+
},
|
| 318 |
+
"license": {
|
| 319 |
+
Type: jsonschema.String,
|
| 320 |
+
Description: "License identifier (e.g., apache-2.0, mit, llama2). Empty string if not found.",
|
| 321 |
+
},
|
| 322 |
+
},
|
| 323 |
+
Required: []string{"tags", "license"},
|
| 324 |
+
},
|
| 325 |
+
Object: &metadata,
|
| 326 |
+
}
|
| 327 |
+
|
| 328 |
+
err = newFragment.ExtractStructure(ctx, llm, s)
|
| 329 |
+
if err != nil {
|
| 330 |
+
return nil, "", err
|
| 331 |
+
}
|
| 332 |
+
|
| 333 |
+
return metadata.Tags, metadata.License, nil
|
| 334 |
+
}
|
| 335 |
+
|
| 336 |
+
// extractIconFromReadme scans the README content for image URLs and returns the first suitable icon URL found
|
| 337 |
+
func extractIconFromReadme(readmeContent string) string {
|
| 338 |
+
if readmeContent == "" {
|
| 339 |
+
return ""
|
| 340 |
+
}
|
| 341 |
+
|
| 342 |
+
// Regular expressions to match image URLs in various formats (case-insensitive)
|
| 343 |
+
// Match markdown image syntax:  - case insensitive extensions
|
| 344 |
+
markdownImageRegex := regexp.MustCompile(`(?i)!\[[^\]]*\]\(([^)]+\.(png|jpg|jpeg|svg|webp|gif))\)`)
|
| 345 |
+
// Match HTML img tags: <img src="url">
|
| 346 |
+
htmlImageRegex := regexp.MustCompile(`(?i)<img[^>]+src=["']([^"']+\.(png|jpg|jpeg|svg|webp|gif))["']`)
|
| 347 |
+
// Match plain URLs ending with image extensions
|
| 348 |
+
plainImageRegex := regexp.MustCompile(`(?i)https?://[^\s<>"']+\.(png|jpg|jpeg|svg|webp|gif)`)
|
| 349 |
+
|
| 350 |
+
// Try markdown format first
|
| 351 |
+
matches := markdownImageRegex.FindStringSubmatch(readmeContent)
|
| 352 |
+
if len(matches) > 1 && matches[1] != "" {
|
| 353 |
+
url := strings.TrimSpace(matches[1])
|
| 354 |
+
// Prefer HuggingFace CDN URLs or absolute URLs
|
| 355 |
+
if strings.HasPrefix(strings.ToLower(url), "http") {
|
| 356 |
+
return url
|
| 357 |
+
}
|
| 358 |
+
}
|
| 359 |
+
|
| 360 |
+
// Try HTML img tags
|
| 361 |
+
matches = htmlImageRegex.FindStringSubmatch(readmeContent)
|
| 362 |
+
if len(matches) > 1 && matches[1] != "" {
|
| 363 |
+
url := strings.TrimSpace(matches[1])
|
| 364 |
+
if strings.HasPrefix(strings.ToLower(url), "http") {
|
| 365 |
+
return url
|
| 366 |
+
}
|
| 367 |
+
}
|
| 368 |
+
|
| 369 |
+
// Try plain URLs
|
| 370 |
+
matches = plainImageRegex.FindStringSubmatch(readmeContent)
|
| 371 |
+
if len(matches) > 0 {
|
| 372 |
+
url := strings.TrimSpace(matches[0])
|
| 373 |
+
if strings.HasPrefix(strings.ToLower(url), "http") {
|
| 374 |
+
return url
|
| 375 |
+
}
|
| 376 |
+
}
|
| 377 |
+
|
| 378 |
+
return ""
|
| 379 |
+
}
|
| 380 |
+
|
| 381 |
+
// getHuggingFaceAvatarURL attempts to get the HuggingFace avatar URL for a user
|
| 382 |
+
func getHuggingFaceAvatarURL(author string) string {
|
| 383 |
+
if author == "" {
|
| 384 |
+
return ""
|
| 385 |
+
}
|
| 386 |
+
|
| 387 |
+
// Try to fetch user info from HuggingFace API
|
| 388 |
+
// HuggingFace API endpoint: https://huggingface.co/api/users/{username}
|
| 389 |
+
baseURL := "https://huggingface.co"
|
| 390 |
+
userURL := fmt.Sprintf("%s/api/users/%s", baseURL, author)
|
| 391 |
+
|
| 392 |
+
req, err := http.NewRequest("GET", userURL, nil)
|
| 393 |
+
if err != nil {
|
| 394 |
+
return ""
|
| 395 |
+
}
|
| 396 |
+
|
| 397 |
+
client := &http.Client{}
|
| 398 |
+
resp, err := client.Do(req)
|
| 399 |
+
if err != nil {
|
| 400 |
+
return ""
|
| 401 |
+
}
|
| 402 |
+
defer resp.Body.Close()
|
| 403 |
+
|
| 404 |
+
if resp.StatusCode != http.StatusOK {
|
| 405 |
+
return ""
|
| 406 |
+
}
|
| 407 |
+
|
| 408 |
+
// Parse the response to get avatar URL
|
| 409 |
+
var userInfo map[string]interface{}
|
| 410 |
+
body, err := io.ReadAll(resp.Body)
|
| 411 |
+
if err != nil {
|
| 412 |
+
return ""
|
| 413 |
+
}
|
| 414 |
+
|
| 415 |
+
if err := json.Unmarshal(body, &userInfo); err != nil {
|
| 416 |
+
return ""
|
| 417 |
+
}
|
| 418 |
+
|
| 419 |
+
// Try to extract avatar URL from response
|
| 420 |
+
if avatar, ok := userInfo["avatarUrl"].(string); ok && avatar != "" {
|
| 421 |
+
return avatar
|
| 422 |
+
}
|
| 423 |
+
if avatar, ok := userInfo["avatar"].(string); ok && avatar != "" {
|
| 424 |
+
return avatar
|
| 425 |
+
}
|
| 426 |
+
|
| 427 |
+
return ""
|
| 428 |
+
}
|
| 429 |
+
|
| 430 |
+
// extractModelIcon extracts icon URL from README or falls back to HuggingFace avatar
|
| 431 |
+
func extractModelIcon(model ProcessedModel) string {
|
| 432 |
+
// First, try to extract icon from README
|
| 433 |
+
if icon := extractIconFromReadme(model.ReadmeContent); icon != "" {
|
| 434 |
+
return icon
|
| 435 |
+
}
|
| 436 |
+
|
| 437 |
+
// Fallback: Try to get HuggingFace user avatar
|
| 438 |
+
if model.Author != "" {
|
| 439 |
+
if avatar := getHuggingFaceAvatarURL(model.Author); avatar != "" {
|
| 440 |
+
return avatar
|
| 441 |
+
}
|
| 442 |
+
}
|
| 443 |
+
|
| 444 |
+
return ""
|
| 445 |
+
}
|
.github/gallery-agent/gallery.go
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"context"
|
| 5 |
+
"encoding/json"
|
| 6 |
+
"fmt"
|
| 7 |
+
"os"
|
| 8 |
+
"strings"
|
| 9 |
+
|
| 10 |
+
"github.com/ghodss/yaml"
|
| 11 |
+
"github.com/mudler/LocalAI/core/gallery/importers"
|
| 12 |
+
)
|
| 13 |
+
|
| 14 |
+
func formatTextContent(text string) string {
|
| 15 |
+
return formatTextContentWithIndent(text, 4, 6)
|
| 16 |
+
}
|
| 17 |
+
|
| 18 |
+
// formatTextContentWithIndent formats text content with specified base and list item indentation
|
| 19 |
+
func formatTextContentWithIndent(text string, baseIndent int, listItemIndent int) string {
|
| 20 |
+
var formattedLines []string
|
| 21 |
+
lines := strings.Split(text, "\n")
|
| 22 |
+
for _, line := range lines {
|
| 23 |
+
trimmed := strings.TrimRight(line, " \t\r")
|
| 24 |
+
if trimmed == "" {
|
| 25 |
+
// Keep empty lines as empty (no indentation)
|
| 26 |
+
formattedLines = append(formattedLines, "")
|
| 27 |
+
} else {
|
| 28 |
+
// Preserve relative indentation from yaml.Marshal output
|
| 29 |
+
// Count existing leading spaces to preserve relative structure
|
| 30 |
+
leadingSpaces := len(trimmed) - len(strings.TrimLeft(trimmed, " \t"))
|
| 31 |
+
trimmedStripped := strings.TrimLeft(trimmed, " \t")
|
| 32 |
+
|
| 33 |
+
var totalIndent int
|
| 34 |
+
if strings.HasPrefix(trimmedStripped, "-") {
|
| 35 |
+
// List items: use listItemIndent (ignore existing leading spaces)
|
| 36 |
+
totalIndent = listItemIndent
|
| 37 |
+
} else {
|
| 38 |
+
// Regular lines: use baseIndent + preserve relative indentation
|
| 39 |
+
// This handles both top-level keys (leadingSpaces=0) and nested properties (leadingSpaces>0)
|
| 40 |
+
totalIndent = baseIndent + leadingSpaces
|
| 41 |
+
}
|
| 42 |
+
|
| 43 |
+
indentStr := strings.Repeat(" ", totalIndent)
|
| 44 |
+
formattedLines = append(formattedLines, indentStr+trimmedStripped)
|
| 45 |
+
}
|
| 46 |
+
}
|
| 47 |
+
formattedText := strings.Join(formattedLines, "\n")
|
| 48 |
+
// Remove any trailing spaces from the formatted description
|
| 49 |
+
formattedText = strings.TrimRight(formattedText, " \t")
|
| 50 |
+
return formattedText
|
| 51 |
+
}
|
| 52 |
+
|
| 53 |
+
// generateYAMLEntry generates a YAML entry for a model using the specified anchor
|
| 54 |
+
func generateYAMLEntry(model ProcessedModel, quantization string) string {
|
| 55 |
+
modelConfig, err := importers.DiscoverModelConfig("https://huggingface.co/"+model.ModelID, json.RawMessage(`{ "quantization": "`+quantization+`"}`))
|
| 56 |
+
if err != nil {
|
| 57 |
+
panic(err)
|
| 58 |
+
}
|
| 59 |
+
|
| 60 |
+
// Extract model name from ModelID
|
| 61 |
+
parts := strings.Split(model.ModelID, "/")
|
| 62 |
+
modelName := model.ModelID
|
| 63 |
+
if len(parts) > 0 {
|
| 64 |
+
modelName = strings.ToLower(parts[len(parts)-1])
|
| 65 |
+
}
|
| 66 |
+
// Remove common suffixes
|
| 67 |
+
modelName = strings.ReplaceAll(modelName, "-gguf", "")
|
| 68 |
+
modelName = strings.ReplaceAll(modelName, "-q4_k_m", "")
|
| 69 |
+
modelName = strings.ReplaceAll(modelName, "-q4_k_s", "")
|
| 70 |
+
modelName = strings.ReplaceAll(modelName, "-q3_k_m", "")
|
| 71 |
+
modelName = strings.ReplaceAll(modelName, "-q2_k", "")
|
| 72 |
+
|
| 73 |
+
description := model.ReadmeContent
|
| 74 |
+
if description == "" {
|
| 75 |
+
description = fmt.Sprintf("AI model: %s", modelName)
|
| 76 |
+
}
|
| 77 |
+
|
| 78 |
+
// Clean up description to prevent YAML linting issues
|
| 79 |
+
description = cleanTextContent(description)
|
| 80 |
+
formattedDescription := formatTextContent(description)
|
| 81 |
+
|
| 82 |
+
configFile := formatTextContent(modelConfig.ConfigFile)
|
| 83 |
+
|
| 84 |
+
filesYAML, _ := yaml.Marshal(modelConfig.Files)
|
| 85 |
+
|
| 86 |
+
// Files section: list items need 4 spaces (not 6), since files: is at 2 spaces
|
| 87 |
+
files := formatTextContentWithIndent(string(filesYAML), 4, 4)
|
| 88 |
+
|
| 89 |
+
// Build metadata sections
|
| 90 |
+
var metadataSections []string
|
| 91 |
+
|
| 92 |
+
// Add license if present
|
| 93 |
+
if model.License != "" {
|
| 94 |
+
metadataSections = append(metadataSections, fmt.Sprintf(` license: "%s"`, model.License))
|
| 95 |
+
}
|
| 96 |
+
|
| 97 |
+
// Add tags if present
|
| 98 |
+
if len(model.Tags) > 0 {
|
| 99 |
+
tagsYAML, _ := yaml.Marshal(model.Tags)
|
| 100 |
+
tagsFormatted := formatTextContentWithIndent(string(tagsYAML), 4, 4)
|
| 101 |
+
tagsFormatted = strings.TrimRight(tagsFormatted, "\n")
|
| 102 |
+
metadataSections = append(metadataSections, fmt.Sprintf(" tags:\n%s", tagsFormatted))
|
| 103 |
+
}
|
| 104 |
+
|
| 105 |
+
// Add icon if present
|
| 106 |
+
if model.Icon != "" {
|
| 107 |
+
metadataSections = append(metadataSections, fmt.Sprintf(` icon: %s`, model.Icon))
|
| 108 |
+
}
|
| 109 |
+
|
| 110 |
+
// Build the metadata block
|
| 111 |
+
metadataBlock := ""
|
| 112 |
+
if len(metadataSections) > 0 {
|
| 113 |
+
metadataBlock = strings.Join(metadataSections, "\n") + "\n"
|
| 114 |
+
}
|
| 115 |
+
|
| 116 |
+
yamlTemplate := ""
|
| 117 |
+
yamlTemplate = `- name: "%s"
|
| 118 |
+
url: "github:mudler/LocalAI/gallery/virtual.yaml@master"
|
| 119 |
+
urls:
|
| 120 |
+
- https://huggingface.co/%s
|
| 121 |
+
description: |
|
| 122 |
+
%s%s
|
| 123 |
+
overrides:
|
| 124 |
+
%s
|
| 125 |
+
files:
|
| 126 |
+
%s`
|
| 127 |
+
// Trim trailing newlines from formatted sections to prevent extra blank lines
|
| 128 |
+
formattedDescription = strings.TrimRight(formattedDescription, "\n")
|
| 129 |
+
configFile = strings.TrimRight(configFile, "\n")
|
| 130 |
+
files = strings.TrimRight(files, "\n")
|
| 131 |
+
// Add newline before metadata block if present
|
| 132 |
+
if metadataBlock != "" {
|
| 133 |
+
metadataBlock = "\n" + strings.TrimRight(metadataBlock, "\n")
|
| 134 |
+
}
|
| 135 |
+
return fmt.Sprintf(yamlTemplate,
|
| 136 |
+
modelName,
|
| 137 |
+
model.ModelID,
|
| 138 |
+
formattedDescription,
|
| 139 |
+
metadataBlock,
|
| 140 |
+
configFile,
|
| 141 |
+
files,
|
| 142 |
+
)
|
| 143 |
+
}
|
| 144 |
+
|
| 145 |
+
// generateYAMLForModels generates YAML entries for selected models and appends to index.yaml
|
| 146 |
+
func generateYAMLForModels(ctx context.Context, models []ProcessedModel, quantization string) error {
|
| 147 |
+
|
| 148 |
+
// Generate YAML entries for each model
|
| 149 |
+
var yamlEntries []string
|
| 150 |
+
for _, model := range models {
|
| 151 |
+
fmt.Printf("Generating YAML entry for model: %s\n", model.ModelID)
|
| 152 |
+
|
| 153 |
+
// Generate YAML entry
|
| 154 |
+
yamlEntry := generateYAMLEntry(model, quantization)
|
| 155 |
+
yamlEntries = append(yamlEntries, yamlEntry)
|
| 156 |
+
}
|
| 157 |
+
|
| 158 |
+
// Prepend to index.yaml (write at the top)
|
| 159 |
+
if len(yamlEntries) > 0 {
|
| 160 |
+
indexPath := getGalleryIndexPath()
|
| 161 |
+
fmt.Printf("Prepending YAML entries to %s...\n", indexPath)
|
| 162 |
+
|
| 163 |
+
// Read current content
|
| 164 |
+
content, err := os.ReadFile(indexPath)
|
| 165 |
+
if err != nil {
|
| 166 |
+
return fmt.Errorf("failed to read %s: %w", indexPath, err)
|
| 167 |
+
}
|
| 168 |
+
|
| 169 |
+
existingContent := string(content)
|
| 170 |
+
yamlBlock := strings.Join(yamlEntries, "\n")
|
| 171 |
+
|
| 172 |
+
// Check if file starts with "---"
|
| 173 |
+
var newContent string
|
| 174 |
+
if strings.HasPrefix(existingContent, "---\n") {
|
| 175 |
+
// File starts with "---", prepend new entries after it
|
| 176 |
+
restOfContent := strings.TrimPrefix(existingContent, "---\n")
|
| 177 |
+
// Ensure proper spacing: "---\n" + new entries + "\n" + rest of content
|
| 178 |
+
newContent = "---\n" + yamlBlock + "\n" + restOfContent
|
| 179 |
+
} else if strings.HasPrefix(existingContent, "---") {
|
| 180 |
+
// File starts with "---" but no newline after
|
| 181 |
+
restOfContent := strings.TrimPrefix(existingContent, "---")
|
| 182 |
+
newContent = "---\n" + yamlBlock + "\n" + strings.TrimPrefix(restOfContent, "\n")
|
| 183 |
+
} else {
|
| 184 |
+
// No "---" at start, prepend new entries at the very beginning
|
| 185 |
+
// Trim leading whitespace from existing content
|
| 186 |
+
existingContent = strings.TrimLeft(existingContent, " \t\n\r")
|
| 187 |
+
newContent = yamlBlock + "\n" + existingContent
|
| 188 |
+
}
|
| 189 |
+
|
| 190 |
+
// Write back to file
|
| 191 |
+
err = os.WriteFile(indexPath, []byte(newContent), 0644)
|
| 192 |
+
if err != nil {
|
| 193 |
+
return fmt.Errorf("failed to write %s: %w", indexPath, err)
|
| 194 |
+
}
|
| 195 |
+
|
| 196 |
+
fmt.Printf("Successfully prepended %d models to %s\n", len(yamlEntries), indexPath)
|
| 197 |
+
}
|
| 198 |
+
|
| 199 |
+
return nil
|
| 200 |
+
}
|
.github/gallery-agent/main.go
ADDED
|
@@ -0,0 +1,383 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"context"
|
| 5 |
+
"encoding/json"
|
| 6 |
+
"fmt"
|
| 7 |
+
"os"
|
| 8 |
+
"strconv"
|
| 9 |
+
"strings"
|
| 10 |
+
"time"
|
| 11 |
+
|
| 12 |
+
hfapi "github.com/mudler/LocalAI/pkg/huggingface-api"
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
// ProcessedModelFile represents a processed model file with additional metadata
|
| 16 |
+
type ProcessedModelFile struct {
|
| 17 |
+
Path string `json:"path"`
|
| 18 |
+
Size int64 `json:"size"`
|
| 19 |
+
SHA256 string `json:"sha256"`
|
| 20 |
+
IsReadme bool `json:"is_readme"`
|
| 21 |
+
FileType string `json:"file_type"` // "model", "readme", "other"
|
| 22 |
+
}
|
| 23 |
+
|
| 24 |
+
// ProcessedModel represents a processed model with all gathered metadata
|
| 25 |
+
type ProcessedModel struct {
|
| 26 |
+
ModelID string `json:"model_id"`
|
| 27 |
+
Author string `json:"author"`
|
| 28 |
+
Downloads int `json:"downloads"`
|
| 29 |
+
LastModified string `json:"last_modified"`
|
| 30 |
+
Files []ProcessedModelFile `json:"files"`
|
| 31 |
+
PreferredModelFile *ProcessedModelFile `json:"preferred_model_file,omitempty"`
|
| 32 |
+
ReadmeFile *ProcessedModelFile `json:"readme_file,omitempty"`
|
| 33 |
+
ReadmeContent string `json:"readme_content,omitempty"`
|
| 34 |
+
ReadmeContentPreview string `json:"readme_content_preview,omitempty"`
|
| 35 |
+
QuantizationPreferences []string `json:"quantization_preferences"`
|
| 36 |
+
ProcessingError string `json:"processing_error,omitempty"`
|
| 37 |
+
Tags []string `json:"tags,omitempty"`
|
| 38 |
+
License string `json:"license,omitempty"`
|
| 39 |
+
Icon string `json:"icon,omitempty"`
|
| 40 |
+
}
|
| 41 |
+
|
| 42 |
+
// SearchResult represents the complete result of searching and processing models
|
| 43 |
+
type SearchResult struct {
|
| 44 |
+
SearchTerm string `json:"search_term"`
|
| 45 |
+
Limit int `json:"limit"`
|
| 46 |
+
Quantization string `json:"quantization"`
|
| 47 |
+
TotalModelsFound int `json:"total_models_found"`
|
| 48 |
+
Models []ProcessedModel `json:"models"`
|
| 49 |
+
FormattedOutput string `json:"formatted_output"`
|
| 50 |
+
}
|
| 51 |
+
|
| 52 |
+
// AddedModelSummary represents a summary of models added to the gallery
|
| 53 |
+
type AddedModelSummary struct {
|
| 54 |
+
SearchTerm string `json:"search_term"`
|
| 55 |
+
TotalFound int `json:"total_found"`
|
| 56 |
+
ModelsAdded int `json:"models_added"`
|
| 57 |
+
AddedModelIDs []string `json:"added_model_ids"`
|
| 58 |
+
AddedModelURLs []string `json:"added_model_urls"`
|
| 59 |
+
Quantization string `json:"quantization"`
|
| 60 |
+
ProcessingTime string `json:"processing_time"`
|
| 61 |
+
}
|
| 62 |
+
|
| 63 |
+
func main() {
|
| 64 |
+
startTime := time.Now()
|
| 65 |
+
|
| 66 |
+
// Check for synthetic mode
|
| 67 |
+
syntheticMode := os.Getenv("SYNTHETIC_MODE")
|
| 68 |
+
if syntheticMode == "true" || syntheticMode == "1" {
|
| 69 |
+
fmt.Println("Running in SYNTHETIC MODE - generating random test data")
|
| 70 |
+
err := runSyntheticMode()
|
| 71 |
+
if err != nil {
|
| 72 |
+
fmt.Fprintf(os.Stderr, "Error in synthetic mode: %v\n", err)
|
| 73 |
+
os.Exit(1)
|
| 74 |
+
}
|
| 75 |
+
return
|
| 76 |
+
}
|
| 77 |
+
|
| 78 |
+
// Get configuration from environment variables
|
| 79 |
+
searchTerm := os.Getenv("SEARCH_TERM")
|
| 80 |
+
if searchTerm == "" {
|
| 81 |
+
searchTerm = "GGUF"
|
| 82 |
+
}
|
| 83 |
+
|
| 84 |
+
limitStr := os.Getenv("LIMIT")
|
| 85 |
+
if limitStr == "" {
|
| 86 |
+
limitStr = "5"
|
| 87 |
+
}
|
| 88 |
+
limit, err := strconv.Atoi(limitStr)
|
| 89 |
+
if err != nil {
|
| 90 |
+
fmt.Fprintf(os.Stderr, "Error parsing LIMIT: %v\n", err)
|
| 91 |
+
os.Exit(1)
|
| 92 |
+
}
|
| 93 |
+
|
| 94 |
+
quantization := os.Getenv("QUANTIZATION")
|
| 95 |
+
|
| 96 |
+
maxModels := os.Getenv("MAX_MODELS")
|
| 97 |
+
if maxModels == "" {
|
| 98 |
+
maxModels = "1"
|
| 99 |
+
}
|
| 100 |
+
maxModelsInt, err := strconv.Atoi(maxModels)
|
| 101 |
+
if err != nil {
|
| 102 |
+
fmt.Fprintf(os.Stderr, "Error parsing MAX_MODELS: %v\n", err)
|
| 103 |
+
os.Exit(1)
|
| 104 |
+
}
|
| 105 |
+
|
| 106 |
+
// Print configuration
|
| 107 |
+
fmt.Printf("Gallery Agent Configuration:\n")
|
| 108 |
+
fmt.Printf(" Search Term: %s\n", searchTerm)
|
| 109 |
+
fmt.Printf(" Limit: %d\n", limit)
|
| 110 |
+
fmt.Printf(" Quantization: %s\n", quantization)
|
| 111 |
+
fmt.Printf(" Max Models to Add: %d\n", maxModelsInt)
|
| 112 |
+
fmt.Printf(" Gallery Index Path: %s\n", os.Getenv("GALLERY_INDEX_PATH"))
|
| 113 |
+
fmt.Println()
|
| 114 |
+
|
| 115 |
+
result, err := searchAndProcessModels(searchTerm, limit, quantization)
|
| 116 |
+
if err != nil {
|
| 117 |
+
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
|
| 118 |
+
os.Exit(1)
|
| 119 |
+
}
|
| 120 |
+
|
| 121 |
+
fmt.Println(result.FormattedOutput)
|
| 122 |
+
var models []ProcessedModel
|
| 123 |
+
|
| 124 |
+
if len(result.Models) > 1 {
|
| 125 |
+
fmt.Println("More than one model found (", len(result.Models), "), using AI agent to select the most interesting models")
|
| 126 |
+
for _, model := range result.Models {
|
| 127 |
+
fmt.Println("Model: ", model.ModelID)
|
| 128 |
+
}
|
| 129 |
+
// Use AI agent to select the most interesting models
|
| 130 |
+
fmt.Println("Using AI agent to select the most interesting models...")
|
| 131 |
+
models, err = selectMostInterestingModels(context.Background(), result)
|
| 132 |
+
if err != nil {
|
| 133 |
+
fmt.Fprintf(os.Stderr, "Error in model selection: %v\n", err)
|
| 134 |
+
// Continue with original result if selection fails
|
| 135 |
+
models = result.Models
|
| 136 |
+
}
|
| 137 |
+
} else if len(result.Models) == 1 {
|
| 138 |
+
models = result.Models
|
| 139 |
+
fmt.Println("Only one model found, using it directly")
|
| 140 |
+
}
|
| 141 |
+
|
| 142 |
+
fmt.Print(models)
|
| 143 |
+
|
| 144 |
+
// Filter out models that already exist in the gallery
|
| 145 |
+
fmt.Println("Filtering out existing models...")
|
| 146 |
+
models, err = filterExistingModels(models)
|
| 147 |
+
if err != nil {
|
| 148 |
+
fmt.Fprintf(os.Stderr, "Error filtering existing models: %v\n", err)
|
| 149 |
+
os.Exit(1)
|
| 150 |
+
}
|
| 151 |
+
|
| 152 |
+
// Limit to maxModelsInt after filtering
|
| 153 |
+
if len(models) > maxModelsInt {
|
| 154 |
+
models = models[:maxModelsInt]
|
| 155 |
+
}
|
| 156 |
+
|
| 157 |
+
// Track added models for summary
|
| 158 |
+
var addedModelIDs []string
|
| 159 |
+
var addedModelURLs []string
|
| 160 |
+
|
| 161 |
+
// Generate YAML entries and append to gallery/index.yaml
|
| 162 |
+
if len(models) > 0 {
|
| 163 |
+
for _, model := range models {
|
| 164 |
+
addedModelIDs = append(addedModelIDs, model.ModelID)
|
| 165 |
+
// Generate Hugging Face URL for the model
|
| 166 |
+
modelURL := fmt.Sprintf("https://huggingface.co/%s", model.ModelID)
|
| 167 |
+
addedModelURLs = append(addedModelURLs, modelURL)
|
| 168 |
+
}
|
| 169 |
+
fmt.Println("Generating YAML entries for selected models...")
|
| 170 |
+
err = generateYAMLForModels(context.Background(), models, quantization)
|
| 171 |
+
if err != nil {
|
| 172 |
+
fmt.Fprintf(os.Stderr, "Error generating YAML entries: %v\n", err)
|
| 173 |
+
os.Exit(1)
|
| 174 |
+
}
|
| 175 |
+
} else {
|
| 176 |
+
fmt.Println("No new models to add to the gallery.")
|
| 177 |
+
}
|
| 178 |
+
|
| 179 |
+
// Create and write summary
|
| 180 |
+
processingTime := time.Since(startTime).String()
|
| 181 |
+
summary := AddedModelSummary{
|
| 182 |
+
SearchTerm: searchTerm,
|
| 183 |
+
TotalFound: result.TotalModelsFound,
|
| 184 |
+
ModelsAdded: len(addedModelIDs),
|
| 185 |
+
AddedModelIDs: addedModelIDs,
|
| 186 |
+
AddedModelURLs: addedModelURLs,
|
| 187 |
+
Quantization: quantization,
|
| 188 |
+
ProcessingTime: processingTime,
|
| 189 |
+
}
|
| 190 |
+
|
| 191 |
+
// Write summary to file
|
| 192 |
+
summaryData, err := json.MarshalIndent(summary, "", " ")
|
| 193 |
+
if err != nil {
|
| 194 |
+
fmt.Fprintf(os.Stderr, "Error marshaling summary: %v\n", err)
|
| 195 |
+
} else {
|
| 196 |
+
err = os.WriteFile("gallery-agent-summary.json", summaryData, 0644)
|
| 197 |
+
if err != nil {
|
| 198 |
+
fmt.Fprintf(os.Stderr, "Error writing summary file: %v\n", err)
|
| 199 |
+
} else {
|
| 200 |
+
fmt.Printf("Summary written to gallery-agent-summary.json\n")
|
| 201 |
+
}
|
| 202 |
+
}
|
| 203 |
+
}
|
| 204 |
+
|
| 205 |
+
func searchAndProcessModels(searchTerm string, limit int, quantization string) (*SearchResult, error) {
|
| 206 |
+
client := hfapi.NewClient()
|
| 207 |
+
var outputBuilder strings.Builder
|
| 208 |
+
|
| 209 |
+
fmt.Println("Searching for models...")
|
| 210 |
+
// Initialize the result struct
|
| 211 |
+
result := &SearchResult{
|
| 212 |
+
SearchTerm: searchTerm,
|
| 213 |
+
Limit: limit,
|
| 214 |
+
Quantization: quantization,
|
| 215 |
+
Models: []ProcessedModel{},
|
| 216 |
+
}
|
| 217 |
+
|
| 218 |
+
models, err := client.GetLatest(searchTerm, limit)
|
| 219 |
+
if err != nil {
|
| 220 |
+
return nil, fmt.Errorf("failed to fetch models: %w", err)
|
| 221 |
+
}
|
| 222 |
+
|
| 223 |
+
fmt.Println("Models found:", len(models))
|
| 224 |
+
result.TotalModelsFound = len(models)
|
| 225 |
+
|
| 226 |
+
if len(models) == 0 {
|
| 227 |
+
outputBuilder.WriteString("No models found.\n")
|
| 228 |
+
result.FormattedOutput = outputBuilder.String()
|
| 229 |
+
return result, nil
|
| 230 |
+
}
|
| 231 |
+
|
| 232 |
+
outputBuilder.WriteString(fmt.Sprintf("Found %d models matching '%s':\n\n", len(models), searchTerm))
|
| 233 |
+
|
| 234 |
+
// Process each model
|
| 235 |
+
for i, model := range models {
|
| 236 |
+
outputBuilder.WriteString(fmt.Sprintf("%d. Processing Model: %s\n", i+1, model.ModelID))
|
| 237 |
+
outputBuilder.WriteString(fmt.Sprintf(" Author: %s\n", model.Author))
|
| 238 |
+
outputBuilder.WriteString(fmt.Sprintf(" Downloads: %d\n", model.Downloads))
|
| 239 |
+
outputBuilder.WriteString(fmt.Sprintf(" Last Modified: %s\n", model.LastModified))
|
| 240 |
+
|
| 241 |
+
// Initialize processed model struct
|
| 242 |
+
processedModel := ProcessedModel{
|
| 243 |
+
ModelID: model.ModelID,
|
| 244 |
+
Author: model.Author,
|
| 245 |
+
Downloads: model.Downloads,
|
| 246 |
+
LastModified: model.LastModified,
|
| 247 |
+
QuantizationPreferences: []string{quantization, "Q4_K_M", "Q4_K_S", "Q3_K_M", "Q2_K"},
|
| 248 |
+
}
|
| 249 |
+
|
| 250 |
+
// Get detailed model information
|
| 251 |
+
details, err := client.GetModelDetails(model.ModelID)
|
| 252 |
+
if err != nil {
|
| 253 |
+
errorMsg := fmt.Sprintf(" Error getting model details: %v\n", err)
|
| 254 |
+
outputBuilder.WriteString(errorMsg)
|
| 255 |
+
processedModel.ProcessingError = err.Error()
|
| 256 |
+
result.Models = append(result.Models, processedModel)
|
| 257 |
+
continue
|
| 258 |
+
}
|
| 259 |
+
|
| 260 |
+
// Define quantization preferences (in order of preference)
|
| 261 |
+
quantizationPreferences := []string{quantization, "Q4_K_M", "Q4_K_S", "Q3_K_M", "Q2_K"}
|
| 262 |
+
|
| 263 |
+
// Find preferred model file
|
| 264 |
+
preferredModelFile := hfapi.FindPreferredModelFile(details.Files, quantizationPreferences)
|
| 265 |
+
|
| 266 |
+
// Process files
|
| 267 |
+
processedFiles := make([]ProcessedModelFile, len(details.Files))
|
| 268 |
+
for j, file := range details.Files {
|
| 269 |
+
fileType := "other"
|
| 270 |
+
if file.IsReadme {
|
| 271 |
+
fileType = "readme"
|
| 272 |
+
} else if preferredModelFile != nil && file.Path == preferredModelFile.Path {
|
| 273 |
+
fileType = "model"
|
| 274 |
+
}
|
| 275 |
+
|
| 276 |
+
processedFiles[j] = ProcessedModelFile{
|
| 277 |
+
Path: file.Path,
|
| 278 |
+
Size: file.Size,
|
| 279 |
+
SHA256: file.SHA256,
|
| 280 |
+
IsReadme: file.IsReadme,
|
| 281 |
+
FileType: fileType,
|
| 282 |
+
}
|
| 283 |
+
}
|
| 284 |
+
|
| 285 |
+
processedModel.Files = processedFiles
|
| 286 |
+
|
| 287 |
+
// Set preferred model file
|
| 288 |
+
if preferredModelFile != nil {
|
| 289 |
+
for _, file := range processedFiles {
|
| 290 |
+
if file.Path == preferredModelFile.Path {
|
| 291 |
+
processedModel.PreferredModelFile = &file
|
| 292 |
+
break
|
| 293 |
+
}
|
| 294 |
+
}
|
| 295 |
+
}
|
| 296 |
+
|
| 297 |
+
// Print file information
|
| 298 |
+
outputBuilder.WriteString(fmt.Sprintf(" Files found: %d\n", len(details.Files)))
|
| 299 |
+
|
| 300 |
+
if preferredModelFile != nil {
|
| 301 |
+
outputBuilder.WriteString(fmt.Sprintf(" Preferred Model File: %s (SHA256: %s)\n",
|
| 302 |
+
preferredModelFile.Path,
|
| 303 |
+
preferredModelFile.SHA256))
|
| 304 |
+
} else {
|
| 305 |
+
outputBuilder.WriteString(fmt.Sprintf(" No model file found with quantization preferences: %v\n", quantizationPreferences))
|
| 306 |
+
}
|
| 307 |
+
|
| 308 |
+
if details.ReadmeFile != nil {
|
| 309 |
+
outputBuilder.WriteString(fmt.Sprintf(" README File: %s\n", details.ReadmeFile.Path))
|
| 310 |
+
|
| 311 |
+
// Find and set readme file
|
| 312 |
+
for _, file := range processedFiles {
|
| 313 |
+
if file.IsReadme {
|
| 314 |
+
processedModel.ReadmeFile = &file
|
| 315 |
+
break
|
| 316 |
+
}
|
| 317 |
+
}
|
| 318 |
+
|
| 319 |
+
fmt.Println("Getting real readme for", model.ModelID, "waiting...")
|
| 320 |
+
// Use agent to get the real readme and prepare the model description
|
| 321 |
+
readmeContent, err := getRealReadme(context.Background(), model.ModelID)
|
| 322 |
+
if err == nil {
|
| 323 |
+
processedModel.ReadmeContent = readmeContent
|
| 324 |
+
processedModel.ReadmeContentPreview = truncateString(readmeContent, 200)
|
| 325 |
+
outputBuilder.WriteString(fmt.Sprintf(" README Content Preview: %s\n",
|
| 326 |
+
processedModel.ReadmeContentPreview))
|
| 327 |
+
} else {
|
| 328 |
+
fmt.Printf(" Warning: Failed to get real readme: %v\n", err)
|
| 329 |
+
}
|
| 330 |
+
fmt.Println("Real readme got", readmeContent)
|
| 331 |
+
|
| 332 |
+
// Extract metadata (tags, license) from README using LLM
|
| 333 |
+
fmt.Println("Extracting metadata for", model.ModelID, "waiting...")
|
| 334 |
+
tags, license, err := extractModelMetadata(context.Background(), processedModel)
|
| 335 |
+
if err == nil {
|
| 336 |
+
processedModel.Tags = tags
|
| 337 |
+
processedModel.License = license
|
| 338 |
+
outputBuilder.WriteString(fmt.Sprintf(" Tags: %v\n", tags))
|
| 339 |
+
outputBuilder.WriteString(fmt.Sprintf(" License: %s\n", license))
|
| 340 |
+
} else {
|
| 341 |
+
fmt.Printf(" Warning: Failed to extract metadata: %v\n", err)
|
| 342 |
+
}
|
| 343 |
+
|
| 344 |
+
// Extract icon from README or use HuggingFace avatar
|
| 345 |
+
icon := extractModelIcon(processedModel)
|
| 346 |
+
if icon != "" {
|
| 347 |
+
processedModel.Icon = icon
|
| 348 |
+
outputBuilder.WriteString(fmt.Sprintf(" Icon: %s\n", icon))
|
| 349 |
+
}
|
| 350 |
+
// Get README content
|
| 351 |
+
// readmeContent, err := client.GetReadmeContent(model.ModelID, details.ReadmeFile.Path)
|
| 352 |
+
// if err == nil {
|
| 353 |
+
// processedModel.ReadmeContent = readmeContent
|
| 354 |
+
// processedModel.ReadmeContentPreview = truncateString(readmeContent, 200)
|
| 355 |
+
// outputBuilder.WriteString(fmt.Sprintf(" README Content Preview: %s\n",
|
| 356 |
+
// processedModel.ReadmeContentPreview))
|
| 357 |
+
// }
|
| 358 |
+
}
|
| 359 |
+
|
| 360 |
+
// Print all files with their checksums
|
| 361 |
+
outputBuilder.WriteString(" All Files:\n")
|
| 362 |
+
for _, file := range processedFiles {
|
| 363 |
+
outputBuilder.WriteString(fmt.Sprintf(" - %s (%s, %d bytes", file.Path, file.FileType, file.Size))
|
| 364 |
+
if file.SHA256 != "" {
|
| 365 |
+
outputBuilder.WriteString(fmt.Sprintf(", SHA256: %s", file.SHA256))
|
| 366 |
+
}
|
| 367 |
+
outputBuilder.WriteString(")\n")
|
| 368 |
+
}
|
| 369 |
+
|
| 370 |
+
outputBuilder.WriteString("\n")
|
| 371 |
+
result.Models = append(result.Models, processedModel)
|
| 372 |
+
}
|
| 373 |
+
|
| 374 |
+
result.FormattedOutput = outputBuilder.String()
|
| 375 |
+
return result, nil
|
| 376 |
+
}
|
| 377 |
+
|
| 378 |
+
func truncateString(s string, maxLen int) string {
|
| 379 |
+
if len(s) <= maxLen {
|
| 380 |
+
return s
|
| 381 |
+
}
|
| 382 |
+
return s[:maxLen] + "..."
|
| 383 |
+
}
|
.github/gallery-agent/testing.go
ADDED
|
@@ -0,0 +1,224 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"context"
|
| 5 |
+
"fmt"
|
| 6 |
+
"math/rand"
|
| 7 |
+
"strings"
|
| 8 |
+
"time"
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
// runSyntheticMode generates synthetic test data and appends it to the gallery
|
| 12 |
+
func runSyntheticMode() error {
|
| 13 |
+
generator := NewSyntheticDataGenerator()
|
| 14 |
+
|
| 15 |
+
// Generate a random number of synthetic models (1-3)
|
| 16 |
+
numModels := generator.rand.Intn(3) + 1
|
| 17 |
+
fmt.Printf("Generating %d synthetic models for testing...\n", numModels)
|
| 18 |
+
|
| 19 |
+
var models []ProcessedModel
|
| 20 |
+
for i := 0; i < numModels; i++ {
|
| 21 |
+
model := generator.GenerateProcessedModel()
|
| 22 |
+
models = append(models, model)
|
| 23 |
+
fmt.Printf("Generated synthetic model: %s\n", model.ModelID)
|
| 24 |
+
}
|
| 25 |
+
|
| 26 |
+
// Generate YAML entries and append to gallery/index.yaml
|
| 27 |
+
fmt.Println("Generating YAML entries for synthetic models...")
|
| 28 |
+
err := generateYAMLForModels(context.Background(), models, "Q4_K_M")
|
| 29 |
+
if err != nil {
|
| 30 |
+
return fmt.Errorf("error generating YAML entries: %w", err)
|
| 31 |
+
}
|
| 32 |
+
|
| 33 |
+
fmt.Printf("Successfully added %d synthetic models to the gallery for testing!\n", len(models))
|
| 34 |
+
return nil
|
| 35 |
+
}
|
| 36 |
+
|
| 37 |
+
// SyntheticDataGenerator provides methods to generate synthetic test data
|
| 38 |
+
type SyntheticDataGenerator struct {
|
| 39 |
+
rand *rand.Rand
|
| 40 |
+
}
|
| 41 |
+
|
| 42 |
+
// NewSyntheticDataGenerator creates a new synthetic data generator
|
| 43 |
+
func NewSyntheticDataGenerator() *SyntheticDataGenerator {
|
| 44 |
+
return &SyntheticDataGenerator{
|
| 45 |
+
rand: rand.New(rand.NewSource(time.Now().UnixNano())),
|
| 46 |
+
}
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
// GenerateProcessedModelFile creates a synthetic ProcessedModelFile
|
| 50 |
+
func (g *SyntheticDataGenerator) GenerateProcessedModelFile() ProcessedModelFile {
|
| 51 |
+
fileTypes := []string{"model", "readme", "other"}
|
| 52 |
+
fileType := fileTypes[g.rand.Intn(len(fileTypes))]
|
| 53 |
+
|
| 54 |
+
var path string
|
| 55 |
+
var isReadme bool
|
| 56 |
+
|
| 57 |
+
switch fileType {
|
| 58 |
+
case "model":
|
| 59 |
+
path = fmt.Sprintf("model-%s.gguf", g.randomString(8))
|
| 60 |
+
isReadme = false
|
| 61 |
+
case "readme":
|
| 62 |
+
path = "README.md"
|
| 63 |
+
isReadme = true
|
| 64 |
+
default:
|
| 65 |
+
path = fmt.Sprintf("file-%s.txt", g.randomString(6))
|
| 66 |
+
isReadme = false
|
| 67 |
+
}
|
| 68 |
+
|
| 69 |
+
return ProcessedModelFile{
|
| 70 |
+
Path: path,
|
| 71 |
+
Size: int64(g.rand.Intn(1000000000) + 1000000), // 1MB to 1GB
|
| 72 |
+
SHA256: g.randomSHA256(),
|
| 73 |
+
IsReadme: isReadme,
|
| 74 |
+
FileType: fileType,
|
| 75 |
+
}
|
| 76 |
+
}
|
| 77 |
+
|
| 78 |
+
// GenerateProcessedModel creates a synthetic ProcessedModel
|
| 79 |
+
func (g *SyntheticDataGenerator) GenerateProcessedModel() ProcessedModel {
|
| 80 |
+
authors := []string{"microsoft", "meta", "google", "openai", "anthropic", "mistralai", "huggingface"}
|
| 81 |
+
modelNames := []string{"llama", "gpt", "claude", "mistral", "gemma", "phi", "qwen", "codellama"}
|
| 82 |
+
|
| 83 |
+
author := authors[g.rand.Intn(len(authors))]
|
| 84 |
+
modelName := modelNames[g.rand.Intn(len(modelNames))]
|
| 85 |
+
modelID := fmt.Sprintf("%s/%s-%s", author, modelName, g.randomString(6))
|
| 86 |
+
|
| 87 |
+
// Generate files
|
| 88 |
+
numFiles := g.rand.Intn(5) + 2 // 2-6 files
|
| 89 |
+
files := make([]ProcessedModelFile, numFiles)
|
| 90 |
+
|
| 91 |
+
// Ensure at least one model file and one readme
|
| 92 |
+
hasModelFile := false
|
| 93 |
+
hasReadme := false
|
| 94 |
+
|
| 95 |
+
for i := 0; i < numFiles; i++ {
|
| 96 |
+
files[i] = g.GenerateProcessedModelFile()
|
| 97 |
+
if files[i].FileType == "model" {
|
| 98 |
+
hasModelFile = true
|
| 99 |
+
}
|
| 100 |
+
if files[i].FileType == "readme" {
|
| 101 |
+
hasReadme = true
|
| 102 |
+
}
|
| 103 |
+
}
|
| 104 |
+
|
| 105 |
+
// Add required files if missing
|
| 106 |
+
if !hasModelFile {
|
| 107 |
+
modelFile := g.GenerateProcessedModelFile()
|
| 108 |
+
modelFile.FileType = "model"
|
| 109 |
+
modelFile.Path = fmt.Sprintf("%s-Q4_K_M.gguf", modelName)
|
| 110 |
+
files = append(files, modelFile)
|
| 111 |
+
}
|
| 112 |
+
|
| 113 |
+
if !hasReadme {
|
| 114 |
+
readmeFile := g.GenerateProcessedModelFile()
|
| 115 |
+
readmeFile.FileType = "readme"
|
| 116 |
+
readmeFile.Path = "README.md"
|
| 117 |
+
readmeFile.IsReadme = true
|
| 118 |
+
files = append(files, readmeFile)
|
| 119 |
+
}
|
| 120 |
+
|
| 121 |
+
// Find preferred model file
|
| 122 |
+
var preferredModelFile *ProcessedModelFile
|
| 123 |
+
for i := range files {
|
| 124 |
+
if files[i].FileType == "model" {
|
| 125 |
+
preferredModelFile = &files[i]
|
| 126 |
+
break
|
| 127 |
+
}
|
| 128 |
+
}
|
| 129 |
+
|
| 130 |
+
// Find readme file
|
| 131 |
+
var readmeFile *ProcessedModelFile
|
| 132 |
+
for i := range files {
|
| 133 |
+
if files[i].FileType == "readme" {
|
| 134 |
+
readmeFile = &files[i]
|
| 135 |
+
break
|
| 136 |
+
}
|
| 137 |
+
}
|
| 138 |
+
|
| 139 |
+
readmeContent := g.generateReadmeContent(modelName, author)
|
| 140 |
+
|
| 141 |
+
// Generate sample metadata
|
| 142 |
+
licenses := []string{"apache-2.0", "mit", "llama2", "gpl-3.0", "bsd", ""}
|
| 143 |
+
license := licenses[g.rand.Intn(len(licenses))]
|
| 144 |
+
|
| 145 |
+
sampleTags := []string{"llm", "gguf", "gpu", "cpu", "text-to-text", "chat", "instruction-tuned"}
|
| 146 |
+
numTags := g.rand.Intn(4) + 3 // 3-6 tags
|
| 147 |
+
tags := make([]string, numTags)
|
| 148 |
+
for i := 0; i < numTags; i++ {
|
| 149 |
+
tags[i] = sampleTags[g.rand.Intn(len(sampleTags))]
|
| 150 |
+
}
|
| 151 |
+
// Remove duplicates
|
| 152 |
+
tags = g.removeDuplicates(tags)
|
| 153 |
+
|
| 154 |
+
// Optionally include icon (50% chance)
|
| 155 |
+
icon := ""
|
| 156 |
+
if g.rand.Intn(2) == 0 {
|
| 157 |
+
icon = fmt.Sprintf("https://cdn-avatars.huggingface.co/v1/production/uploads/%s.png", g.randomString(24))
|
| 158 |
+
}
|
| 159 |
+
|
| 160 |
+
return ProcessedModel{
|
| 161 |
+
ModelID: modelID,
|
| 162 |
+
Author: author,
|
| 163 |
+
Downloads: g.rand.Intn(1000000) + 1000,
|
| 164 |
+
LastModified: g.randomDate(),
|
| 165 |
+
Files: files,
|
| 166 |
+
PreferredModelFile: preferredModelFile,
|
| 167 |
+
ReadmeFile: readmeFile,
|
| 168 |
+
ReadmeContent: readmeContent,
|
| 169 |
+
ReadmeContentPreview: truncateString(readmeContent, 200),
|
| 170 |
+
QuantizationPreferences: []string{"Q4_K_M", "Q4_K_S", "Q3_K_M", "Q2_K"},
|
| 171 |
+
ProcessingError: "",
|
| 172 |
+
Tags: tags,
|
| 173 |
+
License: license,
|
| 174 |
+
Icon: icon,
|
| 175 |
+
}
|
| 176 |
+
}
|
| 177 |
+
|
| 178 |
+
// Helper methods for synthetic data generation
|
| 179 |
+
func (g *SyntheticDataGenerator) randomString(length int) string {
|
| 180 |
+
const charset = "abcdefghijklmnopqrstuvwxyz0123456789"
|
| 181 |
+
b := make([]byte, length)
|
| 182 |
+
for i := range b {
|
| 183 |
+
b[i] = charset[g.rand.Intn(len(charset))]
|
| 184 |
+
}
|
| 185 |
+
return string(b)
|
| 186 |
+
}
|
| 187 |
+
|
| 188 |
+
func (g *SyntheticDataGenerator) randomSHA256() string {
|
| 189 |
+
const charset = "0123456789abcdef"
|
| 190 |
+
b := make([]byte, 64)
|
| 191 |
+
for i := range b {
|
| 192 |
+
b[i] = charset[g.rand.Intn(len(charset))]
|
| 193 |
+
}
|
| 194 |
+
return string(b)
|
| 195 |
+
}
|
| 196 |
+
|
| 197 |
+
func (g *SyntheticDataGenerator) randomDate() string {
|
| 198 |
+
now := time.Now()
|
| 199 |
+
daysAgo := g.rand.Intn(365) // Random date within last year
|
| 200 |
+
pastDate := now.AddDate(0, 0, -daysAgo)
|
| 201 |
+
return pastDate.Format("2006-01-02T15:04:05.000Z")
|
| 202 |
+
}
|
| 203 |
+
|
| 204 |
+
func (g *SyntheticDataGenerator) removeDuplicates(slice []string) []string {
|
| 205 |
+
keys := make(map[string]bool)
|
| 206 |
+
result := []string{}
|
| 207 |
+
for _, item := range slice {
|
| 208 |
+
if !keys[item] {
|
| 209 |
+
keys[item] = true
|
| 210 |
+
result = append(result, item)
|
| 211 |
+
}
|
| 212 |
+
}
|
| 213 |
+
return result
|
| 214 |
+
}
|
| 215 |
+
|
| 216 |
+
func (g *SyntheticDataGenerator) generateReadmeContent(modelName, author string) string {
|
| 217 |
+
templates := []string{
|
| 218 |
+
fmt.Sprintf("# %s Model\n\nThis is a %s model developed by %s. It's designed for various natural language processing tasks including text generation, question answering, and conversation.\n\n## Features\n\n- High-quality text generation\n- Efficient inference\n- Multiple quantization options\n- Easy to use with LocalAI\n\n## Usage\n\nUse this model with LocalAI for various AI tasks.", strings.Title(modelName), modelName, author),
|
| 219 |
+
fmt.Sprintf("# %s\n\nA powerful language model from %s. This model excels at understanding and generating human-like text across multiple domains.\n\n## Capabilities\n\n- Text completion\n- Code generation\n- Creative writing\n- Technical documentation\n\n## Model Details\n\n- Architecture: Transformer-based\n- Training: Large-scale supervised learning\n- Quantization: Available in multiple formats", strings.Title(modelName), author),
|
| 220 |
+
fmt.Sprintf("# %s Language Model\n\nDeveloped by %s, this model represents state-of-the-art performance in natural language understanding and generation.\n\n## Key Features\n\n- Multilingual support\n- Context-aware responses\n- Efficient memory usage\n- Fast inference speed\n\n## Applications\n\n- Chatbots and virtual assistants\n- Content generation\n- Code completion\n- Educational tools", strings.Title(modelName), author),
|
| 221 |
+
}
|
| 222 |
+
|
| 223 |
+
return templates[g.rand.Intn(len(templates))]
|
| 224 |
+
}
|
.github/gallery-agent/tools.go
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"fmt"
|
| 5 |
+
|
| 6 |
+
hfapi "github.com/mudler/LocalAI/pkg/huggingface-api"
|
| 7 |
+
openai "github.com/sashabaranov/go-openai"
|
| 8 |
+
jsonschema "github.com/sashabaranov/go-openai/jsonschema"
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
// Get repository README from HF
|
| 12 |
+
type HFReadmeTool struct {
|
| 13 |
+
client *hfapi.Client
|
| 14 |
+
}
|
| 15 |
+
|
| 16 |
+
func (s *HFReadmeTool) Execute(args map[string]any) (string, error) {
|
| 17 |
+
q, ok := args["repository"].(string)
|
| 18 |
+
if !ok {
|
| 19 |
+
return "", fmt.Errorf("no query")
|
| 20 |
+
}
|
| 21 |
+
readme, err := s.client.GetReadmeContent(q, "README.md")
|
| 22 |
+
if err != nil {
|
| 23 |
+
return "", err
|
| 24 |
+
}
|
| 25 |
+
return readme, nil
|
| 26 |
+
}
|
| 27 |
+
|
| 28 |
+
func (s *HFReadmeTool) Tool() openai.Tool {
|
| 29 |
+
return openai.Tool{
|
| 30 |
+
Type: openai.ToolTypeFunction,
|
| 31 |
+
Function: &openai.FunctionDefinition{
|
| 32 |
+
Name: "hf_readme",
|
| 33 |
+
Description: "A tool to get the README content of a huggingface repository",
|
| 34 |
+
Parameters: jsonschema.Definition{
|
| 35 |
+
Type: jsonschema.Object,
|
| 36 |
+
Properties: map[string]jsonschema.Definition{
|
| 37 |
+
"repository": {
|
| 38 |
+
Type: jsonschema.String,
|
| 39 |
+
Description: "The huggingface repository to get the README content of",
|
| 40 |
+
},
|
| 41 |
+
},
|
| 42 |
+
Required: []string{"repository"},
|
| 43 |
+
},
|
| 44 |
+
},
|
| 45 |
+
}
|
| 46 |
+
}
|
.github/labeler.yml
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
enhancement:
|
| 2 |
+
- head-branch: ['^feature', 'feature']
|
| 3 |
+
|
| 4 |
+
dependencies:
|
| 5 |
+
- any:
|
| 6 |
+
- changed-files:
|
| 7 |
+
- any-glob-to-any-file: 'Makefile'
|
| 8 |
+
- changed-files:
|
| 9 |
+
- any-glob-to-any-file: '*.mod'
|
| 10 |
+
- changed-files:
|
| 11 |
+
- any-glob-to-any-file: '*.sum'
|
| 12 |
+
|
| 13 |
+
kind/documentation:
|
| 14 |
+
- any:
|
| 15 |
+
- changed-files:
|
| 16 |
+
- any-glob-to-any-file: 'docs/*'
|
| 17 |
+
- changed-files:
|
| 18 |
+
- any-glob-to-any-file: '*.md'
|
| 19 |
+
|
| 20 |
+
area/ai-model:
|
| 21 |
+
- any:
|
| 22 |
+
- changed-files:
|
| 23 |
+
- any-glob-to-any-file: 'gallery/*'
|
| 24 |
+
|
| 25 |
+
examples:
|
| 26 |
+
- any:
|
| 27 |
+
- changed-files:
|
| 28 |
+
- any-glob-to-any-file: 'examples/*'
|
| 29 |
+
|
| 30 |
+
ci:
|
| 31 |
+
- any:
|
| 32 |
+
- changed-files:
|
| 33 |
+
- any-glob-to-any-file: '.github/*'
|
.github/release.yml
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# .github/release.yml
|
| 2 |
+
|
| 3 |
+
changelog:
|
| 4 |
+
exclude:
|
| 5 |
+
labels:
|
| 6 |
+
- ignore-for-release
|
| 7 |
+
categories:
|
| 8 |
+
- title: Breaking Changes 🛠
|
| 9 |
+
labels:
|
| 10 |
+
- Semver-Major
|
| 11 |
+
- breaking-change
|
| 12 |
+
- title: "Bug fixes :bug:"
|
| 13 |
+
labels:
|
| 14 |
+
- bug
|
| 15 |
+
- regression
|
| 16 |
+
- title: "🖧 P2P area"
|
| 17 |
+
labels:
|
| 18 |
+
- area/p2p
|
| 19 |
+
- title: Exciting New Features 🎉
|
| 20 |
+
labels:
|
| 21 |
+
- Semver-Minor
|
| 22 |
+
- enhancement
|
| 23 |
+
- ux
|
| 24 |
+
- roadmap
|
| 25 |
+
- title: 🧠 Models
|
| 26 |
+
labels:
|
| 27 |
+
- area/ai-model
|
| 28 |
+
- title: 📖 Documentation and examples
|
| 29 |
+
labels:
|
| 30 |
+
- kind/documentation
|
| 31 |
+
- examples
|
| 32 |
+
- title: 👒 Dependencies
|
| 33 |
+
labels:
|
| 34 |
+
- dependencies
|
| 35 |
+
- title: Other Changes
|
| 36 |
+
labels:
|
| 37 |
+
- "*"
|
.github/stale.yml
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Number of days of inactivity before an issue becomes stale
|
| 2 |
+
daysUntilStale: 45
|
| 3 |
+
# Number of days of inactivity before a stale issue is closed
|
| 4 |
+
daysUntilClose: 10
|
| 5 |
+
# Issues with these labels will never be considered stale
|
| 6 |
+
exemptLabels:
|
| 7 |
+
- issue/willfix
|
| 8 |
+
# Label to use when marking an issue as stale
|
| 9 |
+
staleLabel: issue/stale
|
| 10 |
+
# Comment to post when marking an issue as stale. Set to `false` to disable
|
| 11 |
+
markComment: >
|
| 12 |
+
This issue has been automatically marked as stale because it has not had
|
| 13 |
+
recent activity. It will be closed if no further activity occurs. Thank you
|
| 14 |
+
for your contributions.
|
| 15 |
+
# Comment to post when closing a stale issue. Set to `false` to disable
|
| 16 |
+
closeComment: >
|
| 17 |
+
This issue is being automatically closed due to inactivity.
|
| 18 |
+
However, you may choose to reopen this issue.
|
.github/workflows/backend.yml
ADDED
|
@@ -0,0 +1,1498 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: 'build backend container images'
|
| 3 |
+
|
| 4 |
+
on:
|
| 5 |
+
push:
|
| 6 |
+
branches:
|
| 7 |
+
- master
|
| 8 |
+
tags:
|
| 9 |
+
- '*'
|
| 10 |
+
|
| 11 |
+
concurrency:
|
| 12 |
+
group: ci-backends-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
| 13 |
+
cancel-in-progress: true
|
| 14 |
+
|
| 15 |
+
jobs:
|
| 16 |
+
backend-jobs:
|
| 17 |
+
uses: ./.github/workflows/backend_build.yml
|
| 18 |
+
with:
|
| 19 |
+
tag-latest: ${{ matrix.tag-latest }}
|
| 20 |
+
tag-suffix: ${{ matrix.tag-suffix }}
|
| 21 |
+
build-type: ${{ matrix.build-type }}
|
| 22 |
+
cuda-major-version: ${{ matrix.cuda-major-version }}
|
| 23 |
+
cuda-minor-version: ${{ matrix.cuda-minor-version }}
|
| 24 |
+
platforms: ${{ matrix.platforms }}
|
| 25 |
+
runs-on: ${{ matrix.runs-on }}
|
| 26 |
+
base-image: ${{ matrix.base-image }}
|
| 27 |
+
backend: ${{ matrix.backend }}
|
| 28 |
+
dockerfile: ${{ matrix.dockerfile }}
|
| 29 |
+
skip-drivers: ${{ matrix.skip-drivers }}
|
| 30 |
+
context: ${{ matrix.context }}
|
| 31 |
+
ubuntu-version: ${{ matrix.ubuntu-version }}
|
| 32 |
+
secrets:
|
| 33 |
+
dockerUsername: ${{ secrets.DOCKERHUB_USERNAME }}
|
| 34 |
+
dockerPassword: ${{ secrets.DOCKERHUB_PASSWORD }}
|
| 35 |
+
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
| 36 |
+
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
| 37 |
+
strategy:
|
| 38 |
+
fail-fast: false
|
| 39 |
+
#max-parallel: ${{ github.event_name != 'pull_request' && 6 || 4 }}
|
| 40 |
+
matrix:
|
| 41 |
+
include:
|
| 42 |
+
- build-type: 'l4t'
|
| 43 |
+
cuda-major-version: "12"
|
| 44 |
+
cuda-minor-version: "0"
|
| 45 |
+
platforms: 'linux/arm64'
|
| 46 |
+
tag-latest: 'auto'
|
| 47 |
+
tag-suffix: '-nvidia-l4t-diffusers'
|
| 48 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 49 |
+
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
| 50 |
+
skip-drivers: 'true'
|
| 51 |
+
backend: "diffusers"
|
| 52 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 53 |
+
context: "./"
|
| 54 |
+
ubuntu-version: '2204'
|
| 55 |
+
- build-type: ''
|
| 56 |
+
cuda-major-version: ""
|
| 57 |
+
cuda-minor-version: ""
|
| 58 |
+
platforms: 'linux/amd64'
|
| 59 |
+
tag-latest: 'auto'
|
| 60 |
+
tag-suffix: '-cpu-diffusers'
|
| 61 |
+
runs-on: 'ubuntu-latest'
|
| 62 |
+
base-image: "ubuntu:24.04"
|
| 63 |
+
skip-drivers: 'true'
|
| 64 |
+
backend: "diffusers"
|
| 65 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 66 |
+
context: "./"
|
| 67 |
+
ubuntu-version: '2404'
|
| 68 |
+
- build-type: ''
|
| 69 |
+
cuda-major-version: ""
|
| 70 |
+
cuda-minor-version: ""
|
| 71 |
+
platforms: 'linux/amd64'
|
| 72 |
+
tag-latest: 'auto'
|
| 73 |
+
tag-suffix: '-cpu-chatterbox'
|
| 74 |
+
runs-on: 'ubuntu-latest'
|
| 75 |
+
base-image: "ubuntu:24.04"
|
| 76 |
+
skip-drivers: 'true'
|
| 77 |
+
backend: "chatterbox"
|
| 78 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 79 |
+
context: "./"
|
| 80 |
+
ubuntu-version: '2404'
|
| 81 |
+
- build-type: ''
|
| 82 |
+
cuda-major-version: ""
|
| 83 |
+
cuda-minor-version: ""
|
| 84 |
+
platforms: 'linux/amd64'
|
| 85 |
+
tag-latest: 'auto'
|
| 86 |
+
tag-suffix: '-cpu-moonshine'
|
| 87 |
+
runs-on: 'ubuntu-latest'
|
| 88 |
+
base-image: "ubuntu:24.04"
|
| 89 |
+
skip-drivers: 'true'
|
| 90 |
+
backend: "moonshine"
|
| 91 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 92 |
+
context: "./"
|
| 93 |
+
ubuntu-version: '2404'
|
| 94 |
+
# CUDA 12 builds
|
| 95 |
+
- build-type: 'cublas'
|
| 96 |
+
cuda-major-version: "12"
|
| 97 |
+
cuda-minor-version: "9"
|
| 98 |
+
platforms: 'linux/amd64'
|
| 99 |
+
tag-latest: 'auto'
|
| 100 |
+
tag-suffix: '-gpu-nvidia-cuda-12-vibevoice'
|
| 101 |
+
runs-on: 'ubuntu-latest'
|
| 102 |
+
base-image: "ubuntu:24.04"
|
| 103 |
+
skip-drivers: 'false'
|
| 104 |
+
backend: "vibevoice"
|
| 105 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 106 |
+
context: "./"
|
| 107 |
+
ubuntu-version: '2404'
|
| 108 |
+
- build-type: 'cublas'
|
| 109 |
+
cuda-major-version: "12"
|
| 110 |
+
cuda-minor-version: "9"
|
| 111 |
+
platforms: 'linux/amd64'
|
| 112 |
+
tag-latest: 'auto'
|
| 113 |
+
tag-suffix: '-gpu-nvidia-cuda-12-pocket-tts'
|
| 114 |
+
runs-on: 'ubuntu-latest'
|
| 115 |
+
base-image: "ubuntu:24.04"
|
| 116 |
+
skip-drivers: 'false'
|
| 117 |
+
backend: "pocket-tts"
|
| 118 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 119 |
+
context: "./"
|
| 120 |
+
ubuntu-version: '2404'
|
| 121 |
+
- build-type: 'cublas'
|
| 122 |
+
cuda-major-version: "12"
|
| 123 |
+
cuda-minor-version: "0"
|
| 124 |
+
platforms: 'linux/amd64'
|
| 125 |
+
tag-latest: 'auto'
|
| 126 |
+
tag-suffix: '-gpu-nvidia-cuda-12-rerankers'
|
| 127 |
+
runs-on: 'ubuntu-latest'
|
| 128 |
+
base-image: "ubuntu:24.04"
|
| 129 |
+
skip-drivers: 'false'
|
| 130 |
+
backend: "rerankers"
|
| 131 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 132 |
+
context: "./"
|
| 133 |
+
ubuntu-version: '2404'
|
| 134 |
+
- build-type: 'cublas'
|
| 135 |
+
cuda-major-version: "12"
|
| 136 |
+
cuda-minor-version: "9"
|
| 137 |
+
platforms: 'linux/amd64'
|
| 138 |
+
tag-latest: 'auto'
|
| 139 |
+
tag-suffix: '-gpu-nvidia-cuda-12-llama-cpp'
|
| 140 |
+
runs-on: 'ubuntu-latest'
|
| 141 |
+
base-image: "ubuntu:24.04"
|
| 142 |
+
skip-drivers: 'false'
|
| 143 |
+
backend: "llama-cpp"
|
| 144 |
+
dockerfile: "./backend/Dockerfile.llama-cpp"
|
| 145 |
+
context: "./"
|
| 146 |
+
ubuntu-version: '2404'
|
| 147 |
+
- build-type: 'cublas'
|
| 148 |
+
cuda-major-version: "12"
|
| 149 |
+
cuda-minor-version: "9"
|
| 150 |
+
platforms: 'linux/amd64'
|
| 151 |
+
tag-latest: 'auto'
|
| 152 |
+
tag-suffix: '-gpu-nvidia-cuda-12-vllm'
|
| 153 |
+
runs-on: 'arc-runner-set'
|
| 154 |
+
base-image: "ubuntu:24.04"
|
| 155 |
+
skip-drivers: 'false'
|
| 156 |
+
backend: "vllm"
|
| 157 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 158 |
+
context: "./"
|
| 159 |
+
ubuntu-version: '2404'
|
| 160 |
+
- build-type: 'cublas'
|
| 161 |
+
cuda-major-version: "12"
|
| 162 |
+
cuda-minor-version: "9"
|
| 163 |
+
platforms: 'linux/amd64'
|
| 164 |
+
tag-latest: 'auto'
|
| 165 |
+
tag-suffix: '-gpu-nvidia-cuda-12-transformers'
|
| 166 |
+
runs-on: 'ubuntu-latest'
|
| 167 |
+
base-image: "ubuntu:24.04"
|
| 168 |
+
skip-drivers: 'false'
|
| 169 |
+
backend: "transformers"
|
| 170 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 171 |
+
context: "./"
|
| 172 |
+
ubuntu-version: '2404'
|
| 173 |
+
- build-type: 'cublas'
|
| 174 |
+
cuda-major-version: "12"
|
| 175 |
+
cuda-minor-version: "9"
|
| 176 |
+
platforms: 'linux/amd64'
|
| 177 |
+
tag-latest: 'auto'
|
| 178 |
+
tag-suffix: '-gpu-nvidia-cuda-12-diffusers'
|
| 179 |
+
runs-on: 'ubuntu-latest'
|
| 180 |
+
base-image: "ubuntu:24.04"
|
| 181 |
+
skip-drivers: 'false'
|
| 182 |
+
backend: "diffusers"
|
| 183 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 184 |
+
context: "./"
|
| 185 |
+
ubuntu-version: '2404'
|
| 186 |
+
- build-type: 'cublas'
|
| 187 |
+
cuda-major-version: "12"
|
| 188 |
+
cuda-minor-version: "9"
|
| 189 |
+
platforms: 'linux/amd64'
|
| 190 |
+
tag-latest: 'auto'
|
| 191 |
+
tag-suffix: '-gpu-nvidia-cuda-12-kokoro'
|
| 192 |
+
runs-on: 'ubuntu-latest'
|
| 193 |
+
base-image: "ubuntu:24.04"
|
| 194 |
+
skip-drivers: 'false'
|
| 195 |
+
backend: "kokoro"
|
| 196 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 197 |
+
context: "./"
|
| 198 |
+
ubuntu-version: '2404'
|
| 199 |
+
- build-type: 'cublas'
|
| 200 |
+
cuda-major-version: "12"
|
| 201 |
+
cuda-minor-version: "9"
|
| 202 |
+
platforms: 'linux/amd64'
|
| 203 |
+
tag-latest: 'auto'
|
| 204 |
+
tag-suffix: '-gpu-nvidia-cuda-12-faster-whisper'
|
| 205 |
+
runs-on: 'ubuntu-latest'
|
| 206 |
+
base-image: "ubuntu:24.04"
|
| 207 |
+
skip-drivers: 'false'
|
| 208 |
+
backend: "faster-whisper"
|
| 209 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 210 |
+
context: "./"
|
| 211 |
+
ubuntu-version: '2404'
|
| 212 |
+
- build-type: 'cublas'
|
| 213 |
+
cuda-major-version: "12"
|
| 214 |
+
cuda-minor-version: "9"
|
| 215 |
+
platforms: 'linux/amd64'
|
| 216 |
+
tag-latest: 'auto'
|
| 217 |
+
tag-suffix: '-gpu-nvidia-cuda-12-coqui'
|
| 218 |
+
runs-on: 'ubuntu-latest'
|
| 219 |
+
base-image: "ubuntu:24.04"
|
| 220 |
+
skip-drivers: 'false'
|
| 221 |
+
backend: "coqui"
|
| 222 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 223 |
+
context: "./"
|
| 224 |
+
ubuntu-version: '2404'
|
| 225 |
+
- build-type: 'cublas'
|
| 226 |
+
cuda-major-version: "12"
|
| 227 |
+
cuda-minor-version: "9"
|
| 228 |
+
platforms: 'linux/amd64'
|
| 229 |
+
tag-latest: 'auto'
|
| 230 |
+
tag-suffix: '-gpu-nvidia-cuda-12-bark'
|
| 231 |
+
runs-on: 'ubuntu-latest'
|
| 232 |
+
base-image: "ubuntu:24.04"
|
| 233 |
+
skip-drivers: 'false'
|
| 234 |
+
backend: "bark"
|
| 235 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 236 |
+
context: "./"
|
| 237 |
+
ubuntu-version: '2404'
|
| 238 |
+
- build-type: 'cublas'
|
| 239 |
+
cuda-major-version: "12"
|
| 240 |
+
cuda-minor-version: "9"
|
| 241 |
+
platforms: 'linux/amd64'
|
| 242 |
+
tag-latest: 'auto'
|
| 243 |
+
tag-suffix: '-gpu-nvidia-cuda-12-chatterbox'
|
| 244 |
+
runs-on: 'ubuntu-latest'
|
| 245 |
+
base-image: "ubuntu:24.04"
|
| 246 |
+
skip-drivers: 'false'
|
| 247 |
+
backend: "chatterbox"
|
| 248 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 249 |
+
context: "./"
|
| 250 |
+
ubuntu-version: '2404'
|
| 251 |
+
- build-type: 'cublas'
|
| 252 |
+
cuda-major-version: "12"
|
| 253 |
+
cuda-minor-version: "9"
|
| 254 |
+
platforms: 'linux/amd64'
|
| 255 |
+
tag-latest: 'auto'
|
| 256 |
+
tag-suffix: '-gpu-nvidia-cuda-12-moonshine'
|
| 257 |
+
runs-on: 'ubuntu-latest'
|
| 258 |
+
base-image: "ubuntu:24.04"
|
| 259 |
+
skip-drivers: 'false'
|
| 260 |
+
backend: "moonshine"
|
| 261 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 262 |
+
context: "./"
|
| 263 |
+
ubuntu-version: '2404'
|
| 264 |
+
- build-type: 'cublas'
|
| 265 |
+
cuda-major-version: "12"
|
| 266 |
+
cuda-minor-version: "9"
|
| 267 |
+
platforms: 'linux/amd64'
|
| 268 |
+
tag-latest: 'auto'
|
| 269 |
+
tag-suffix: '-gpu-nvidia-cuda-12-stablediffusion-ggml'
|
| 270 |
+
runs-on: 'ubuntu-latest'
|
| 271 |
+
base-image: "ubuntu:24.04"
|
| 272 |
+
skip-drivers: 'false'
|
| 273 |
+
backend: "stablediffusion-ggml"
|
| 274 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 275 |
+
context: "./"
|
| 276 |
+
ubuntu-version: '2404'
|
| 277 |
+
- build-type: 'cublas'
|
| 278 |
+
cuda-major-version: "12"
|
| 279 |
+
cuda-minor-version: "9"
|
| 280 |
+
platforms: 'linux/amd64'
|
| 281 |
+
tag-latest: 'auto'
|
| 282 |
+
tag-suffix: '-gpu-nvidia-cuda-12-whisper'
|
| 283 |
+
runs-on: 'ubuntu-latest'
|
| 284 |
+
base-image: "ubuntu:24.04"
|
| 285 |
+
skip-drivers: 'false'
|
| 286 |
+
backend: "whisper"
|
| 287 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 288 |
+
context: "./"
|
| 289 |
+
ubuntu-version: '2404'
|
| 290 |
+
- build-type: 'cublas'
|
| 291 |
+
cuda-major-version: "12"
|
| 292 |
+
cuda-minor-version: "9"
|
| 293 |
+
platforms: 'linux/amd64'
|
| 294 |
+
tag-latest: 'auto'
|
| 295 |
+
tag-suffix: '-gpu-nvidia-cuda-12-rfdetr'
|
| 296 |
+
runs-on: 'ubuntu-latest'
|
| 297 |
+
base-image: "ubuntu:24.04"
|
| 298 |
+
skip-drivers: 'false'
|
| 299 |
+
backend: "rfdetr"
|
| 300 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 301 |
+
context: "./"
|
| 302 |
+
ubuntu-version: '2404'
|
| 303 |
+
- build-type: 'cublas'
|
| 304 |
+
cuda-major-version: "12"
|
| 305 |
+
cuda-minor-version: "9"
|
| 306 |
+
platforms: 'linux/amd64'
|
| 307 |
+
tag-latest: 'auto'
|
| 308 |
+
tag-suffix: '-gpu-nvidia-cuda-12-exllama2'
|
| 309 |
+
runs-on: 'ubuntu-latest'
|
| 310 |
+
base-image: "ubuntu:24.04"
|
| 311 |
+
skip-drivers: 'false'
|
| 312 |
+
backend: "exllama2"
|
| 313 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 314 |
+
context: "./"
|
| 315 |
+
ubuntu-version: '2404'
|
| 316 |
+
- build-type: 'cublas'
|
| 317 |
+
cuda-major-version: "12"
|
| 318 |
+
cuda-minor-version: "9"
|
| 319 |
+
platforms: 'linux/amd64'
|
| 320 |
+
tag-latest: 'auto'
|
| 321 |
+
tag-suffix: '-gpu-nvidia-cuda-12-neutts'
|
| 322 |
+
runs-on: 'ubuntu-latest'
|
| 323 |
+
base-image: "ubuntu:24.04"
|
| 324 |
+
skip-drivers: 'false'
|
| 325 |
+
backend: "neutts"
|
| 326 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 327 |
+
context: "./"
|
| 328 |
+
ubuntu-version: '2404'
|
| 329 |
+
# cuda 13
|
| 330 |
+
- build-type: 'cublas'
|
| 331 |
+
cuda-major-version: "13"
|
| 332 |
+
cuda-minor-version: "0"
|
| 333 |
+
platforms: 'linux/amd64'
|
| 334 |
+
tag-latest: 'auto'
|
| 335 |
+
tag-suffix: '-gpu-nvidia-cuda-13-rerankers'
|
| 336 |
+
runs-on: 'ubuntu-latest'
|
| 337 |
+
base-image: "ubuntu:24.04"
|
| 338 |
+
skip-drivers: 'false'
|
| 339 |
+
backend: "rerankers"
|
| 340 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 341 |
+
context: "./"
|
| 342 |
+
ubuntu-version: '2404'
|
| 343 |
+
- build-type: 'cublas'
|
| 344 |
+
cuda-major-version: "13"
|
| 345 |
+
cuda-minor-version: "0"
|
| 346 |
+
platforms: 'linux/amd64'
|
| 347 |
+
tag-latest: 'auto'
|
| 348 |
+
tag-suffix: '-gpu-nvidia-cuda-13-vibevoice'
|
| 349 |
+
runs-on: 'ubuntu-latest'
|
| 350 |
+
base-image: "ubuntu:24.04"
|
| 351 |
+
skip-drivers: 'false'
|
| 352 |
+
backend: "vibevoice"
|
| 353 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 354 |
+
context: "./"
|
| 355 |
+
ubuntu-version: '2404'
|
| 356 |
+
- build-type: 'cublas'
|
| 357 |
+
cuda-major-version: "13"
|
| 358 |
+
cuda-minor-version: "0"
|
| 359 |
+
platforms: 'linux/amd64'
|
| 360 |
+
tag-latest: 'auto'
|
| 361 |
+
tag-suffix: '-gpu-nvidia-cuda-13-pocket-tts'
|
| 362 |
+
runs-on: 'ubuntu-latest'
|
| 363 |
+
base-image: "ubuntu:24.04"
|
| 364 |
+
skip-drivers: 'false'
|
| 365 |
+
backend: "pocket-tts"
|
| 366 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 367 |
+
context: "./"
|
| 368 |
+
ubuntu-version: '2404'
|
| 369 |
+
- build-type: 'cublas'
|
| 370 |
+
cuda-major-version: "13"
|
| 371 |
+
cuda-minor-version: "0"
|
| 372 |
+
platforms: 'linux/amd64'
|
| 373 |
+
tag-latest: 'auto'
|
| 374 |
+
tag-suffix: '-gpu-nvidia-cuda-13-llama-cpp'
|
| 375 |
+
runs-on: 'ubuntu-latest'
|
| 376 |
+
base-image: "ubuntu:24.04"
|
| 377 |
+
skip-drivers: 'false'
|
| 378 |
+
backend: "llama-cpp"
|
| 379 |
+
dockerfile: "./backend/Dockerfile.llama-cpp"
|
| 380 |
+
context: "./"
|
| 381 |
+
ubuntu-version: '2404'
|
| 382 |
+
- build-type: 'cublas'
|
| 383 |
+
cuda-major-version: "13"
|
| 384 |
+
cuda-minor-version: "0"
|
| 385 |
+
platforms: 'linux/arm64'
|
| 386 |
+
skip-drivers: 'false'
|
| 387 |
+
tag-latest: 'auto'
|
| 388 |
+
tag-suffix: '-nvidia-l4t-cuda-13-arm64-llama-cpp'
|
| 389 |
+
base-image: "ubuntu:24.04"
|
| 390 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 391 |
+
ubuntu-version: '2404'
|
| 392 |
+
backend: "llama-cpp"
|
| 393 |
+
dockerfile: "./backend/Dockerfile.llama-cpp"
|
| 394 |
+
context: "./"
|
| 395 |
+
- build-type: 'cublas'
|
| 396 |
+
cuda-major-version: "13"
|
| 397 |
+
cuda-minor-version: "0"
|
| 398 |
+
platforms: 'linux/amd64'
|
| 399 |
+
tag-latest: 'auto'
|
| 400 |
+
tag-suffix: '-gpu-nvidia-cuda-13-transformers'
|
| 401 |
+
runs-on: 'ubuntu-latest'
|
| 402 |
+
base-image: "ubuntu:24.04"
|
| 403 |
+
skip-drivers: 'false'
|
| 404 |
+
backend: "transformers"
|
| 405 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 406 |
+
context: "./"
|
| 407 |
+
ubuntu-version: '2404'
|
| 408 |
+
- build-type: 'cublas'
|
| 409 |
+
cuda-major-version: "13"
|
| 410 |
+
cuda-minor-version: "0"
|
| 411 |
+
platforms: 'linux/amd64'
|
| 412 |
+
tag-latest: 'auto'
|
| 413 |
+
tag-suffix: '-gpu-nvidia-cuda-13-diffusers'
|
| 414 |
+
runs-on: 'ubuntu-latest'
|
| 415 |
+
base-image: "ubuntu:24.04"
|
| 416 |
+
skip-drivers: 'false'
|
| 417 |
+
backend: "diffusers"
|
| 418 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 419 |
+
context: "./"
|
| 420 |
+
ubuntu-version: '2404'
|
| 421 |
+
- build-type: 'l4t'
|
| 422 |
+
cuda-major-version: "13"
|
| 423 |
+
cuda-minor-version: "0"
|
| 424 |
+
platforms: 'linux/arm64'
|
| 425 |
+
tag-latest: 'auto'
|
| 426 |
+
tag-suffix: '-nvidia-l4t-cuda-13-arm64-vibevoice'
|
| 427 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 428 |
+
base-image: "ubuntu:24.04"
|
| 429 |
+
skip-drivers: 'false'
|
| 430 |
+
ubuntu-version: '2404'
|
| 431 |
+
backend: "vibevoice"
|
| 432 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 433 |
+
context: "./"
|
| 434 |
+
- build-type: 'l4t'
|
| 435 |
+
cuda-major-version: "13"
|
| 436 |
+
cuda-minor-version: "0"
|
| 437 |
+
platforms: 'linux/arm64'
|
| 438 |
+
tag-latest: 'auto'
|
| 439 |
+
tag-suffix: '-nvidia-l4t-cuda-13-arm64-pocket-tts'
|
| 440 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 441 |
+
base-image: "ubuntu:24.04"
|
| 442 |
+
skip-drivers: 'false'
|
| 443 |
+
ubuntu-version: '2404'
|
| 444 |
+
backend: "pocket-tts"
|
| 445 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 446 |
+
context: "./"
|
| 447 |
+
- build-type: 'l4t'
|
| 448 |
+
cuda-major-version: "13"
|
| 449 |
+
cuda-minor-version: "0"
|
| 450 |
+
platforms: 'linux/arm64'
|
| 451 |
+
tag-latest: 'auto'
|
| 452 |
+
tag-suffix: '-nvidia-l4t-cuda-13-arm64-diffusers'
|
| 453 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 454 |
+
base-image: "ubuntu:24.04"
|
| 455 |
+
skip-drivers: 'false'
|
| 456 |
+
ubuntu-version: '2404'
|
| 457 |
+
backend: "diffusers"
|
| 458 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 459 |
+
context: "./"
|
| 460 |
+
- build-type: 'cublas'
|
| 461 |
+
cuda-major-version: "13"
|
| 462 |
+
cuda-minor-version: "0"
|
| 463 |
+
platforms: 'linux/amd64'
|
| 464 |
+
tag-latest: 'auto'
|
| 465 |
+
tag-suffix: '-gpu-nvidia-cuda-13-kokoro'
|
| 466 |
+
runs-on: 'ubuntu-latest'
|
| 467 |
+
base-image: "ubuntu:24.04"
|
| 468 |
+
skip-drivers: 'false'
|
| 469 |
+
backend: "kokoro"
|
| 470 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 471 |
+
context: "./"
|
| 472 |
+
ubuntu-version: '2404'
|
| 473 |
+
- build-type: 'cublas'
|
| 474 |
+
cuda-major-version: "13"
|
| 475 |
+
cuda-minor-version: "0"
|
| 476 |
+
platforms: 'linux/amd64'
|
| 477 |
+
tag-latest: 'auto'
|
| 478 |
+
tag-suffix: '-gpu-nvidia-cuda-13-faster-whisper'
|
| 479 |
+
runs-on: 'ubuntu-latest'
|
| 480 |
+
base-image: "ubuntu:24.04"
|
| 481 |
+
skip-drivers: 'false'
|
| 482 |
+
backend: "faster-whisper"
|
| 483 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 484 |
+
context: "./"
|
| 485 |
+
ubuntu-version: '2404'
|
| 486 |
+
- build-type: 'cublas'
|
| 487 |
+
cuda-major-version: "13"
|
| 488 |
+
cuda-minor-version: "0"
|
| 489 |
+
platforms: 'linux/amd64'
|
| 490 |
+
tag-latest: 'auto'
|
| 491 |
+
tag-suffix: '-gpu-nvidia-cuda-13-bark'
|
| 492 |
+
runs-on: 'ubuntu-latest'
|
| 493 |
+
base-image: "ubuntu:24.04"
|
| 494 |
+
skip-drivers: 'false'
|
| 495 |
+
backend: "bark"
|
| 496 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 497 |
+
context: "./"
|
| 498 |
+
ubuntu-version: '2404'
|
| 499 |
+
- build-type: 'cublas'
|
| 500 |
+
cuda-major-version: "13"
|
| 501 |
+
cuda-minor-version: "0"
|
| 502 |
+
platforms: 'linux/amd64'
|
| 503 |
+
tag-latest: 'auto'
|
| 504 |
+
tag-suffix: '-gpu-nvidia-cuda-13-chatterbox'
|
| 505 |
+
runs-on: 'ubuntu-latest'
|
| 506 |
+
base-image: "ubuntu:24.04"
|
| 507 |
+
skip-drivers: 'false'
|
| 508 |
+
backend: "chatterbox"
|
| 509 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 510 |
+
context: "./"
|
| 511 |
+
ubuntu-version: '2404'
|
| 512 |
+
- build-type: 'cublas'
|
| 513 |
+
cuda-major-version: "13"
|
| 514 |
+
cuda-minor-version: "0"
|
| 515 |
+
platforms: 'linux/amd64'
|
| 516 |
+
tag-latest: 'auto'
|
| 517 |
+
tag-suffix: '-gpu-nvidia-cuda-13-moonshine'
|
| 518 |
+
runs-on: 'ubuntu-latest'
|
| 519 |
+
base-image: "ubuntu:24.04"
|
| 520 |
+
skip-drivers: 'false'
|
| 521 |
+
backend: "moonshine"
|
| 522 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 523 |
+
context: "./"
|
| 524 |
+
ubuntu-version: '2404'
|
| 525 |
+
- build-type: 'cublas'
|
| 526 |
+
cuda-major-version: "13"
|
| 527 |
+
cuda-minor-version: "0"
|
| 528 |
+
platforms: 'linux/amd64'
|
| 529 |
+
tag-latest: 'auto'
|
| 530 |
+
tag-suffix: '-gpu-nvidia-cuda-13-stablediffusion-ggml'
|
| 531 |
+
runs-on: 'ubuntu-latest'
|
| 532 |
+
base-image: "ubuntu:24.04"
|
| 533 |
+
skip-drivers: 'false'
|
| 534 |
+
backend: "stablediffusion-ggml"
|
| 535 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 536 |
+
context: "./"
|
| 537 |
+
ubuntu-version: '2404'
|
| 538 |
+
- build-type: 'cublas'
|
| 539 |
+
cuda-major-version: "13"
|
| 540 |
+
cuda-minor-version: "0"
|
| 541 |
+
platforms: 'linux/arm64'
|
| 542 |
+
skip-drivers: 'false'
|
| 543 |
+
tag-latest: 'auto'
|
| 544 |
+
tag-suffix: '-nvidia-l4t-cuda-13-arm64-stablediffusion-ggml'
|
| 545 |
+
base-image: "ubuntu:24.04"
|
| 546 |
+
ubuntu-version: '2404'
|
| 547 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 548 |
+
backend: "stablediffusion-ggml"
|
| 549 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 550 |
+
context: "./"
|
| 551 |
+
- build-type: 'cublas'
|
| 552 |
+
cuda-major-version: "13"
|
| 553 |
+
cuda-minor-version: "0"
|
| 554 |
+
platforms: 'linux/amd64'
|
| 555 |
+
tag-latest: 'auto'
|
| 556 |
+
tag-suffix: '-gpu-nvidia-cuda-13-whisper'
|
| 557 |
+
runs-on: 'ubuntu-latest'
|
| 558 |
+
base-image: "ubuntu:24.04"
|
| 559 |
+
skip-drivers: 'false'
|
| 560 |
+
backend: "whisper"
|
| 561 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 562 |
+
context: "./"
|
| 563 |
+
ubuntu-version: '2404'
|
| 564 |
+
- build-type: 'cublas'
|
| 565 |
+
cuda-major-version: "13"
|
| 566 |
+
cuda-minor-version: "0"
|
| 567 |
+
platforms: 'linux/arm64'
|
| 568 |
+
skip-drivers: 'false'
|
| 569 |
+
tag-latest: 'auto'
|
| 570 |
+
tag-suffix: '-nvidia-l4t-cuda-13-arm64-whisper'
|
| 571 |
+
base-image: "ubuntu:24.04"
|
| 572 |
+
ubuntu-version: '2404'
|
| 573 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 574 |
+
backend: "whisper"
|
| 575 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 576 |
+
context: "./"
|
| 577 |
+
- build-type: 'cublas'
|
| 578 |
+
cuda-major-version: "13"
|
| 579 |
+
cuda-minor-version: "0"
|
| 580 |
+
platforms: 'linux/amd64'
|
| 581 |
+
tag-latest: 'auto'
|
| 582 |
+
tag-suffix: '-gpu-nvidia-cuda-13-rfdetr'
|
| 583 |
+
runs-on: 'ubuntu-latest'
|
| 584 |
+
base-image: "ubuntu:24.04"
|
| 585 |
+
skip-drivers: 'false'
|
| 586 |
+
backend: "rfdetr"
|
| 587 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 588 |
+
context: "./"
|
| 589 |
+
ubuntu-version: '2404'
|
| 590 |
+
# hipblas builds
|
| 591 |
+
- build-type: 'hipblas'
|
| 592 |
+
cuda-major-version: ""
|
| 593 |
+
cuda-minor-version: ""
|
| 594 |
+
platforms: 'linux/amd64'
|
| 595 |
+
tag-latest: 'auto'
|
| 596 |
+
tag-suffix: '-gpu-rocm-hipblas-rerankers'
|
| 597 |
+
runs-on: 'ubuntu-latest'
|
| 598 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 599 |
+
skip-drivers: 'false'
|
| 600 |
+
backend: "rerankers"
|
| 601 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 602 |
+
context: "./"
|
| 603 |
+
ubuntu-version: '2404'
|
| 604 |
+
- build-type: 'hipblas'
|
| 605 |
+
cuda-major-version: ""
|
| 606 |
+
cuda-minor-version: ""
|
| 607 |
+
platforms: 'linux/amd64'
|
| 608 |
+
tag-latest: 'auto'
|
| 609 |
+
tag-suffix: '-gpu-rocm-hipblas-llama-cpp'
|
| 610 |
+
runs-on: 'ubuntu-latest'
|
| 611 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 612 |
+
skip-drivers: 'false'
|
| 613 |
+
backend: "llama-cpp"
|
| 614 |
+
dockerfile: "./backend/Dockerfile.llama-cpp"
|
| 615 |
+
context: "./"
|
| 616 |
+
ubuntu-version: '2404'
|
| 617 |
+
- build-type: 'hipblas'
|
| 618 |
+
cuda-major-version: ""
|
| 619 |
+
cuda-minor-version: ""
|
| 620 |
+
platforms: 'linux/amd64'
|
| 621 |
+
tag-latest: 'auto'
|
| 622 |
+
tag-suffix: '-gpu-rocm-hipblas-vllm'
|
| 623 |
+
runs-on: 'arc-runner-set'
|
| 624 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 625 |
+
skip-drivers: 'false'
|
| 626 |
+
backend: "vllm"
|
| 627 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 628 |
+
context: "./"
|
| 629 |
+
ubuntu-version: '2404'
|
| 630 |
+
- build-type: 'hipblas'
|
| 631 |
+
cuda-major-version: ""
|
| 632 |
+
cuda-minor-version: ""
|
| 633 |
+
platforms: 'linux/amd64'
|
| 634 |
+
tag-latest: 'auto'
|
| 635 |
+
tag-suffix: '-gpu-rocm-hipblas-transformers'
|
| 636 |
+
runs-on: 'arc-runner-set'
|
| 637 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 638 |
+
skip-drivers: 'false'
|
| 639 |
+
backend: "transformers"
|
| 640 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 641 |
+
context: "./"
|
| 642 |
+
ubuntu-version: '2404'
|
| 643 |
+
- build-type: 'hipblas'
|
| 644 |
+
cuda-major-version: ""
|
| 645 |
+
cuda-minor-version: ""
|
| 646 |
+
platforms: 'linux/amd64'
|
| 647 |
+
tag-latest: 'auto'
|
| 648 |
+
tag-suffix: '-gpu-rocm-hipblas-diffusers'
|
| 649 |
+
runs-on: 'arc-runner-set'
|
| 650 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 651 |
+
skip-drivers: 'false'
|
| 652 |
+
backend: "diffusers"
|
| 653 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 654 |
+
context: "./"
|
| 655 |
+
ubuntu-version: '2404'
|
| 656 |
+
# ROCm additional backends
|
| 657 |
+
- build-type: 'hipblas'
|
| 658 |
+
cuda-major-version: ""
|
| 659 |
+
cuda-minor-version: ""
|
| 660 |
+
platforms: 'linux/amd64'
|
| 661 |
+
tag-latest: 'auto'
|
| 662 |
+
tag-suffix: '-gpu-rocm-hipblas-kokoro'
|
| 663 |
+
runs-on: 'arc-runner-set'
|
| 664 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 665 |
+
skip-drivers: 'false'
|
| 666 |
+
backend: "kokoro"
|
| 667 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 668 |
+
context: "./"
|
| 669 |
+
ubuntu-version: '2404'
|
| 670 |
+
- build-type: 'hipblas'
|
| 671 |
+
cuda-major-version: ""
|
| 672 |
+
cuda-minor-version: ""
|
| 673 |
+
platforms: 'linux/amd64'
|
| 674 |
+
tag-latest: 'auto'
|
| 675 |
+
tag-suffix: '-gpu-rocm-hipblas-vibevoice'
|
| 676 |
+
runs-on: 'arc-runner-set'
|
| 677 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 678 |
+
skip-drivers: 'false'
|
| 679 |
+
backend: "vibevoice"
|
| 680 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 681 |
+
context: "./"
|
| 682 |
+
ubuntu-version: '2404'
|
| 683 |
+
- build-type: 'hipblas'
|
| 684 |
+
cuda-major-version: ""
|
| 685 |
+
cuda-minor-version: ""
|
| 686 |
+
platforms: 'linux/amd64'
|
| 687 |
+
tag-latest: 'auto'
|
| 688 |
+
tag-suffix: '-gpu-rocm-hipblas-pocket-tts'
|
| 689 |
+
runs-on: 'arc-runner-set'
|
| 690 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 691 |
+
skip-drivers: 'false'
|
| 692 |
+
backend: "pocket-tts"
|
| 693 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 694 |
+
context: "./"
|
| 695 |
+
ubuntu-version: '2404'
|
| 696 |
+
- build-type: 'hipblas'
|
| 697 |
+
cuda-major-version: ""
|
| 698 |
+
cuda-minor-version: ""
|
| 699 |
+
platforms: 'linux/amd64'
|
| 700 |
+
tag-latest: 'auto'
|
| 701 |
+
tag-suffix: '-gpu-rocm-hipblas-faster-whisper'
|
| 702 |
+
runs-on: 'ubuntu-latest'
|
| 703 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 704 |
+
skip-drivers: 'false'
|
| 705 |
+
backend: "faster-whisper"
|
| 706 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 707 |
+
context: "./"
|
| 708 |
+
ubuntu-version: '2404'
|
| 709 |
+
- build-type: 'hipblas'
|
| 710 |
+
cuda-major-version: ""
|
| 711 |
+
cuda-minor-version: ""
|
| 712 |
+
platforms: 'linux/amd64'
|
| 713 |
+
tag-latest: 'auto'
|
| 714 |
+
tag-suffix: '-gpu-rocm-hipblas-coqui'
|
| 715 |
+
runs-on: 'ubuntu-latest'
|
| 716 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 717 |
+
skip-drivers: 'false'
|
| 718 |
+
backend: "coqui"
|
| 719 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 720 |
+
context: "./"
|
| 721 |
+
ubuntu-version: '2404'
|
| 722 |
+
- build-type: 'hipblas'
|
| 723 |
+
cuda-major-version: ""
|
| 724 |
+
cuda-minor-version: ""
|
| 725 |
+
platforms: 'linux/amd64'
|
| 726 |
+
tag-latest: 'auto'
|
| 727 |
+
tag-suffix: '-gpu-rocm-hipblas-bark'
|
| 728 |
+
runs-on: 'arc-runner-set'
|
| 729 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 730 |
+
skip-drivers: 'false'
|
| 731 |
+
backend: "bark"
|
| 732 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 733 |
+
context: "./"
|
| 734 |
+
ubuntu-version: '2404'
|
| 735 |
+
# sycl builds
|
| 736 |
+
- build-type: 'intel'
|
| 737 |
+
cuda-major-version: ""
|
| 738 |
+
cuda-minor-version: ""
|
| 739 |
+
platforms: 'linux/amd64'
|
| 740 |
+
tag-latest: 'auto'
|
| 741 |
+
tag-suffix: '-gpu-intel-rerankers'
|
| 742 |
+
runs-on: 'ubuntu-latest'
|
| 743 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 744 |
+
skip-drivers: 'false'
|
| 745 |
+
backend: "rerankers"
|
| 746 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 747 |
+
context: "./"
|
| 748 |
+
ubuntu-version: '2404'
|
| 749 |
+
- build-type: 'sycl_f32'
|
| 750 |
+
cuda-major-version: ""
|
| 751 |
+
cuda-minor-version: ""
|
| 752 |
+
platforms: 'linux/amd64'
|
| 753 |
+
tag-latest: 'auto'
|
| 754 |
+
tag-suffix: '-gpu-intel-sycl-f32-llama-cpp'
|
| 755 |
+
runs-on: 'ubuntu-latest'
|
| 756 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 757 |
+
skip-drivers: 'false'
|
| 758 |
+
backend: "llama-cpp"
|
| 759 |
+
dockerfile: "./backend/Dockerfile.llama-cpp"
|
| 760 |
+
context: "./"
|
| 761 |
+
ubuntu-version: '2404'
|
| 762 |
+
- build-type: 'sycl_f16'
|
| 763 |
+
cuda-major-version: ""
|
| 764 |
+
cuda-minor-version: ""
|
| 765 |
+
platforms: 'linux/amd64'
|
| 766 |
+
tag-latest: 'auto'
|
| 767 |
+
tag-suffix: '-gpu-intel-sycl-f16-llama-cpp'
|
| 768 |
+
runs-on: 'ubuntu-latest'
|
| 769 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 770 |
+
skip-drivers: 'false'
|
| 771 |
+
backend: "llama-cpp"
|
| 772 |
+
dockerfile: "./backend/Dockerfile.llama-cpp"
|
| 773 |
+
context: "./"
|
| 774 |
+
ubuntu-version: '2404'
|
| 775 |
+
- build-type: 'intel'
|
| 776 |
+
cuda-major-version: ""
|
| 777 |
+
cuda-minor-version: ""
|
| 778 |
+
platforms: 'linux/amd64'
|
| 779 |
+
tag-latest: 'auto'
|
| 780 |
+
tag-suffix: '-gpu-intel-vllm'
|
| 781 |
+
runs-on: 'arc-runner-set'
|
| 782 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 783 |
+
skip-drivers: 'false'
|
| 784 |
+
backend: "vllm"
|
| 785 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 786 |
+
context: "./"
|
| 787 |
+
ubuntu-version: '2404'
|
| 788 |
+
- build-type: 'intel'
|
| 789 |
+
cuda-major-version: ""
|
| 790 |
+
cuda-minor-version: ""
|
| 791 |
+
platforms: 'linux/amd64'
|
| 792 |
+
tag-latest: 'auto'
|
| 793 |
+
tag-suffix: '-gpu-intel-transformers'
|
| 794 |
+
runs-on: 'ubuntu-latest'
|
| 795 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 796 |
+
skip-drivers: 'false'
|
| 797 |
+
backend: "transformers"
|
| 798 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 799 |
+
context: "./"
|
| 800 |
+
ubuntu-version: '2404'
|
| 801 |
+
- build-type: 'intel'
|
| 802 |
+
cuda-major-version: ""
|
| 803 |
+
cuda-minor-version: ""
|
| 804 |
+
platforms: 'linux/amd64'
|
| 805 |
+
tag-latest: 'auto'
|
| 806 |
+
tag-suffix: '-gpu-intel-diffusers'
|
| 807 |
+
runs-on: 'ubuntu-latest'
|
| 808 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 809 |
+
skip-drivers: 'false'
|
| 810 |
+
backend: "diffusers"
|
| 811 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 812 |
+
context: "./"
|
| 813 |
+
ubuntu-version: '2404'
|
| 814 |
+
- build-type: 'l4t'
|
| 815 |
+
cuda-major-version: "12"
|
| 816 |
+
cuda-minor-version: "0"
|
| 817 |
+
platforms: 'linux/arm64'
|
| 818 |
+
tag-latest: 'auto'
|
| 819 |
+
tag-suffix: '-nvidia-l4t-vibevoice'
|
| 820 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 821 |
+
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
| 822 |
+
skip-drivers: 'true'
|
| 823 |
+
backend: "vibevoice"
|
| 824 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 825 |
+
context: "./"
|
| 826 |
+
ubuntu-version: '2204'
|
| 827 |
+
- build-type: 'l4t'
|
| 828 |
+
cuda-major-version: "12"
|
| 829 |
+
cuda-minor-version: "0"
|
| 830 |
+
platforms: 'linux/arm64'
|
| 831 |
+
tag-latest: 'auto'
|
| 832 |
+
tag-suffix: '-nvidia-l4t-pocket-tts'
|
| 833 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 834 |
+
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
| 835 |
+
skip-drivers: 'true'
|
| 836 |
+
backend: "pocket-tts"
|
| 837 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 838 |
+
context: "./"
|
| 839 |
+
ubuntu-version: '2204'
|
| 840 |
+
- build-type: 'l4t'
|
| 841 |
+
cuda-major-version: "12"
|
| 842 |
+
cuda-minor-version: "0"
|
| 843 |
+
platforms: 'linux/arm64'
|
| 844 |
+
tag-latest: 'auto'
|
| 845 |
+
tag-suffix: '-nvidia-l4t-kokoro'
|
| 846 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 847 |
+
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
| 848 |
+
skip-drivers: 'true'
|
| 849 |
+
backend: "kokoro"
|
| 850 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 851 |
+
context: "./"
|
| 852 |
+
ubuntu-version: '2204'
|
| 853 |
+
# SYCL additional backends
|
| 854 |
+
- build-type: 'intel'
|
| 855 |
+
cuda-major-version: ""
|
| 856 |
+
cuda-minor-version: ""
|
| 857 |
+
platforms: 'linux/amd64'
|
| 858 |
+
tag-latest: 'auto'
|
| 859 |
+
tag-suffix: '-gpu-intel-kokoro'
|
| 860 |
+
runs-on: 'ubuntu-latest'
|
| 861 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 862 |
+
skip-drivers: 'false'
|
| 863 |
+
backend: "kokoro"
|
| 864 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 865 |
+
context: "./"
|
| 866 |
+
ubuntu-version: '2404'
|
| 867 |
+
- build-type: 'intel'
|
| 868 |
+
cuda-major-version: ""
|
| 869 |
+
cuda-minor-version: ""
|
| 870 |
+
platforms: 'linux/amd64'
|
| 871 |
+
tag-latest: 'auto'
|
| 872 |
+
tag-suffix: '-gpu-intel-faster-whisper'
|
| 873 |
+
runs-on: 'ubuntu-latest'
|
| 874 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 875 |
+
skip-drivers: 'false'
|
| 876 |
+
backend: "faster-whisper"
|
| 877 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 878 |
+
context: "./"
|
| 879 |
+
ubuntu-version: '2404'
|
| 880 |
+
- build-type: 'intel'
|
| 881 |
+
cuda-major-version: ""
|
| 882 |
+
cuda-minor-version: ""
|
| 883 |
+
platforms: 'linux/amd64'
|
| 884 |
+
tag-latest: 'auto'
|
| 885 |
+
tag-suffix: '-gpu-intel-vibevoice'
|
| 886 |
+
runs-on: 'arc-runner-set'
|
| 887 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 888 |
+
skip-drivers: 'false'
|
| 889 |
+
backend: "vibevoice"
|
| 890 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 891 |
+
context: "./"
|
| 892 |
+
ubuntu-version: '2404'
|
| 893 |
+
- build-type: 'intel'
|
| 894 |
+
cuda-major-version: ""
|
| 895 |
+
cuda-minor-version: ""
|
| 896 |
+
platforms: 'linux/amd64'
|
| 897 |
+
tag-latest: 'auto'
|
| 898 |
+
tag-suffix: '-gpu-intel-pocket-tts'
|
| 899 |
+
runs-on: 'arc-runner-set'
|
| 900 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 901 |
+
skip-drivers: 'false'
|
| 902 |
+
backend: "pocket-tts"
|
| 903 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 904 |
+
context: "./"
|
| 905 |
+
ubuntu-version: '2404'
|
| 906 |
+
- build-type: 'intel'
|
| 907 |
+
cuda-major-version: ""
|
| 908 |
+
cuda-minor-version: ""
|
| 909 |
+
platforms: 'linux/amd64'
|
| 910 |
+
tag-latest: 'auto'
|
| 911 |
+
tag-suffix: '-gpu-intel-coqui'
|
| 912 |
+
runs-on: 'ubuntu-latest'
|
| 913 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 914 |
+
skip-drivers: 'false'
|
| 915 |
+
backend: "coqui"
|
| 916 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 917 |
+
context: "./"
|
| 918 |
+
ubuntu-version: '2404'
|
| 919 |
+
- build-type: 'intel'
|
| 920 |
+
cuda-major-version: ""
|
| 921 |
+
cuda-minor-version: ""
|
| 922 |
+
platforms: 'linux/amd64'
|
| 923 |
+
tag-latest: 'auto'
|
| 924 |
+
tag-suffix: '-gpu-intel-bark'
|
| 925 |
+
runs-on: 'ubuntu-latest'
|
| 926 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 927 |
+
skip-drivers: 'false'
|
| 928 |
+
backend: "bark"
|
| 929 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 930 |
+
context: "./"
|
| 931 |
+
ubuntu-version: '2404'
|
| 932 |
+
# piper
|
| 933 |
+
- build-type: ''
|
| 934 |
+
cuda-major-version: ""
|
| 935 |
+
cuda-minor-version: ""
|
| 936 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 937 |
+
tag-latest: 'auto'
|
| 938 |
+
tag-suffix: '-piper'
|
| 939 |
+
runs-on: 'ubuntu-latest'
|
| 940 |
+
base-image: "ubuntu:24.04"
|
| 941 |
+
skip-drivers: 'false'
|
| 942 |
+
backend: "piper"
|
| 943 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 944 |
+
context: "./"
|
| 945 |
+
ubuntu-version: '2404'
|
| 946 |
+
# bark-cpp
|
| 947 |
+
- build-type: ''
|
| 948 |
+
cuda-major-version: ""
|
| 949 |
+
cuda-minor-version: ""
|
| 950 |
+
platforms: 'linux/amd64'
|
| 951 |
+
tag-latest: 'auto'
|
| 952 |
+
tag-suffix: '-bark-cpp'
|
| 953 |
+
runs-on: 'ubuntu-latest'
|
| 954 |
+
base-image: "ubuntu:24.04"
|
| 955 |
+
skip-drivers: 'false'
|
| 956 |
+
backend: "bark-cpp"
|
| 957 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 958 |
+
context: "./"
|
| 959 |
+
ubuntu-version: '2404'
|
| 960 |
+
- build-type: ''
|
| 961 |
+
cuda-major-version: ""
|
| 962 |
+
cuda-minor-version: ""
|
| 963 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 964 |
+
tag-latest: 'auto'
|
| 965 |
+
tag-suffix: '-cpu-llama-cpp'
|
| 966 |
+
runs-on: 'ubuntu-latest'
|
| 967 |
+
base-image: "ubuntu:24.04"
|
| 968 |
+
skip-drivers: 'false'
|
| 969 |
+
backend: "llama-cpp"
|
| 970 |
+
dockerfile: "./backend/Dockerfile.llama-cpp"
|
| 971 |
+
context: "./"
|
| 972 |
+
ubuntu-version: '2404'
|
| 973 |
+
- build-type: 'cublas'
|
| 974 |
+
cuda-major-version: "12"
|
| 975 |
+
cuda-minor-version: "0"
|
| 976 |
+
platforms: 'linux/arm64'
|
| 977 |
+
skip-drivers: 'false'
|
| 978 |
+
tag-latest: 'auto'
|
| 979 |
+
tag-suffix: '-nvidia-l4t-arm64-llama-cpp'
|
| 980 |
+
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
| 981 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 982 |
+
backend: "llama-cpp"
|
| 983 |
+
dockerfile: "./backend/Dockerfile.llama-cpp"
|
| 984 |
+
context: "./"
|
| 985 |
+
ubuntu-version: '2204'
|
| 986 |
+
- build-type: 'vulkan'
|
| 987 |
+
cuda-major-version: ""
|
| 988 |
+
cuda-minor-version: ""
|
| 989 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 990 |
+
tag-latest: 'auto'
|
| 991 |
+
tag-suffix: '-gpu-vulkan-llama-cpp'
|
| 992 |
+
runs-on: 'ubuntu-latest'
|
| 993 |
+
base-image: "ubuntu:24.04"
|
| 994 |
+
skip-drivers: 'false'
|
| 995 |
+
backend: "llama-cpp"
|
| 996 |
+
dockerfile: "./backend/Dockerfile.llama-cpp"
|
| 997 |
+
context: "./"
|
| 998 |
+
ubuntu-version: '2404'
|
| 999 |
+
# Stablediffusion-ggml
|
| 1000 |
+
- build-type: ''
|
| 1001 |
+
cuda-major-version: ""
|
| 1002 |
+
cuda-minor-version: ""
|
| 1003 |
+
platforms: 'linux/amd64'
|
| 1004 |
+
tag-latest: 'auto'
|
| 1005 |
+
tag-suffix: '-cpu-stablediffusion-ggml'
|
| 1006 |
+
runs-on: 'ubuntu-latest'
|
| 1007 |
+
base-image: "ubuntu:24.04"
|
| 1008 |
+
skip-drivers: 'false'
|
| 1009 |
+
backend: "stablediffusion-ggml"
|
| 1010 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1011 |
+
context: "./"
|
| 1012 |
+
ubuntu-version: '2404'
|
| 1013 |
+
- build-type: 'sycl_f32'
|
| 1014 |
+
cuda-major-version: ""
|
| 1015 |
+
cuda-minor-version: ""
|
| 1016 |
+
platforms: 'linux/amd64'
|
| 1017 |
+
tag-latest: 'auto'
|
| 1018 |
+
tag-suffix: '-gpu-intel-sycl-f32-stablediffusion-ggml'
|
| 1019 |
+
runs-on: 'ubuntu-latest'
|
| 1020 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 1021 |
+
skip-drivers: 'false'
|
| 1022 |
+
backend: "stablediffusion-ggml"
|
| 1023 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1024 |
+
context: "./"
|
| 1025 |
+
ubuntu-version: '2404'
|
| 1026 |
+
- build-type: 'sycl_f16'
|
| 1027 |
+
cuda-major-version: ""
|
| 1028 |
+
cuda-minor-version: ""
|
| 1029 |
+
platforms: 'linux/amd64'
|
| 1030 |
+
tag-latest: 'auto'
|
| 1031 |
+
tag-suffix: '-gpu-intel-sycl-f16-stablediffusion-ggml'
|
| 1032 |
+
runs-on: 'ubuntu-latest'
|
| 1033 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 1034 |
+
skip-drivers: 'false'
|
| 1035 |
+
backend: "stablediffusion-ggml"
|
| 1036 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1037 |
+
context: "./"
|
| 1038 |
+
ubuntu-version: '2404'
|
| 1039 |
+
- build-type: 'vulkan'
|
| 1040 |
+
cuda-major-version: ""
|
| 1041 |
+
cuda-minor-version: ""
|
| 1042 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 1043 |
+
tag-latest: 'auto'
|
| 1044 |
+
tag-suffix: '-gpu-vulkan-stablediffusion-ggml'
|
| 1045 |
+
runs-on: 'ubuntu-latest'
|
| 1046 |
+
base-image: "ubuntu:24.04"
|
| 1047 |
+
skip-drivers: 'false'
|
| 1048 |
+
backend: "stablediffusion-ggml"
|
| 1049 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1050 |
+
context: "./"
|
| 1051 |
+
ubuntu-version: '2404'
|
| 1052 |
+
- build-type: 'cublas'
|
| 1053 |
+
cuda-major-version: "12"
|
| 1054 |
+
cuda-minor-version: "0"
|
| 1055 |
+
platforms: 'linux/arm64'
|
| 1056 |
+
skip-drivers: 'false'
|
| 1057 |
+
tag-latest: 'auto'
|
| 1058 |
+
tag-suffix: '-nvidia-l4t-arm64-stablediffusion-ggml'
|
| 1059 |
+
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
| 1060 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 1061 |
+
backend: "stablediffusion-ggml"
|
| 1062 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1063 |
+
context: "./"
|
| 1064 |
+
ubuntu-version: '2204'
|
| 1065 |
+
# whisper
|
| 1066 |
+
- build-type: ''
|
| 1067 |
+
cuda-major-version: ""
|
| 1068 |
+
cuda-minor-version: ""
|
| 1069 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 1070 |
+
tag-latest: 'auto'
|
| 1071 |
+
tag-suffix: '-cpu-whisper'
|
| 1072 |
+
runs-on: 'ubuntu-latest'
|
| 1073 |
+
base-image: "ubuntu:24.04"
|
| 1074 |
+
skip-drivers: 'false'
|
| 1075 |
+
backend: "whisper"
|
| 1076 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1077 |
+
context: "./"
|
| 1078 |
+
ubuntu-version: '2404'
|
| 1079 |
+
- build-type: 'sycl_f32'
|
| 1080 |
+
cuda-major-version: ""
|
| 1081 |
+
cuda-minor-version: ""
|
| 1082 |
+
platforms: 'linux/amd64'
|
| 1083 |
+
tag-latest: 'auto'
|
| 1084 |
+
tag-suffix: '-gpu-intel-sycl-f32-whisper'
|
| 1085 |
+
runs-on: 'ubuntu-latest'
|
| 1086 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 1087 |
+
skip-drivers: 'false'
|
| 1088 |
+
backend: "whisper"
|
| 1089 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1090 |
+
context: "./"
|
| 1091 |
+
ubuntu-version: '2404'
|
| 1092 |
+
- build-type: 'sycl_f16'
|
| 1093 |
+
cuda-major-version: ""
|
| 1094 |
+
cuda-minor-version: ""
|
| 1095 |
+
platforms: 'linux/amd64'
|
| 1096 |
+
tag-latest: 'auto'
|
| 1097 |
+
tag-suffix: '-gpu-intel-sycl-f16-whisper'
|
| 1098 |
+
runs-on: 'ubuntu-latest'
|
| 1099 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 1100 |
+
skip-drivers: 'false'
|
| 1101 |
+
backend: "whisper"
|
| 1102 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1103 |
+
context: "./"
|
| 1104 |
+
ubuntu-version: '2404'
|
| 1105 |
+
- build-type: 'vulkan'
|
| 1106 |
+
cuda-major-version: ""
|
| 1107 |
+
cuda-minor-version: ""
|
| 1108 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 1109 |
+
tag-latest: 'auto'
|
| 1110 |
+
tag-suffix: '-gpu-vulkan-whisper'
|
| 1111 |
+
runs-on: 'ubuntu-latest'
|
| 1112 |
+
base-image: "ubuntu:24.04"
|
| 1113 |
+
skip-drivers: 'false'
|
| 1114 |
+
backend: "whisper"
|
| 1115 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1116 |
+
context: "./"
|
| 1117 |
+
ubuntu-version: '2404'
|
| 1118 |
+
- build-type: 'cublas'
|
| 1119 |
+
cuda-major-version: "12"
|
| 1120 |
+
cuda-minor-version: "0"
|
| 1121 |
+
platforms: 'linux/arm64'
|
| 1122 |
+
skip-drivers: 'false'
|
| 1123 |
+
tag-latest: 'auto'
|
| 1124 |
+
tag-suffix: '-nvidia-l4t-arm64-whisper'
|
| 1125 |
+
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
| 1126 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 1127 |
+
backend: "whisper"
|
| 1128 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1129 |
+
context: "./"
|
| 1130 |
+
ubuntu-version: '2204'
|
| 1131 |
+
- build-type: 'hipblas'
|
| 1132 |
+
cuda-major-version: ""
|
| 1133 |
+
cuda-minor-version: ""
|
| 1134 |
+
platforms: 'linux/amd64'
|
| 1135 |
+
tag-latest: 'auto'
|
| 1136 |
+
tag-suffix: '-gpu-rocm-hipblas-whisper'
|
| 1137 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 1138 |
+
runs-on: 'ubuntu-latest'
|
| 1139 |
+
skip-drivers: 'false'
|
| 1140 |
+
backend: "whisper"
|
| 1141 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1142 |
+
context: "./"
|
| 1143 |
+
ubuntu-version: '2404'
|
| 1144 |
+
#silero-vad
|
| 1145 |
+
- build-type: ''
|
| 1146 |
+
cuda-major-version: ""
|
| 1147 |
+
cuda-minor-version: ""
|
| 1148 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 1149 |
+
tag-latest: 'auto'
|
| 1150 |
+
tag-suffix: '-cpu-silero-vad'
|
| 1151 |
+
runs-on: 'ubuntu-latest'
|
| 1152 |
+
base-image: "ubuntu:24.04"
|
| 1153 |
+
skip-drivers: 'false'
|
| 1154 |
+
backend: "silero-vad"
|
| 1155 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1156 |
+
context: "./"
|
| 1157 |
+
ubuntu-version: '2404'
|
| 1158 |
+
# local-store
|
| 1159 |
+
- build-type: ''
|
| 1160 |
+
cuda-major-version: ""
|
| 1161 |
+
cuda-minor-version: ""
|
| 1162 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 1163 |
+
tag-latest: 'auto'
|
| 1164 |
+
tag-suffix: '-cpu-local-store'
|
| 1165 |
+
runs-on: 'ubuntu-latest'
|
| 1166 |
+
base-image: "ubuntu:24.04"
|
| 1167 |
+
skip-drivers: 'false'
|
| 1168 |
+
backend: "local-store"
|
| 1169 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1170 |
+
context: "./"
|
| 1171 |
+
ubuntu-version: '2404'
|
| 1172 |
+
# huggingface
|
| 1173 |
+
- build-type: ''
|
| 1174 |
+
cuda-major-version: ""
|
| 1175 |
+
cuda-minor-version: ""
|
| 1176 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 1177 |
+
tag-latest: 'auto'
|
| 1178 |
+
tag-suffix: '-huggingface'
|
| 1179 |
+
runs-on: 'ubuntu-latest'
|
| 1180 |
+
base-image: "ubuntu:24.04"
|
| 1181 |
+
skip-drivers: 'false'
|
| 1182 |
+
backend: "huggingface"
|
| 1183 |
+
dockerfile: "./backend/Dockerfile.golang"
|
| 1184 |
+
context: "./"
|
| 1185 |
+
ubuntu-version: '2404'
|
| 1186 |
+
# rfdetr
|
| 1187 |
+
- build-type: ''
|
| 1188 |
+
cuda-major-version: ""
|
| 1189 |
+
cuda-minor-version: ""
|
| 1190 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 1191 |
+
tag-latest: 'auto'
|
| 1192 |
+
tag-suffix: '-cpu-rfdetr'
|
| 1193 |
+
runs-on: 'ubuntu-latest'
|
| 1194 |
+
base-image: "ubuntu:24.04"
|
| 1195 |
+
skip-drivers: 'false'
|
| 1196 |
+
backend: "rfdetr"
|
| 1197 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1198 |
+
context: "./"
|
| 1199 |
+
ubuntu-version: '2404'
|
| 1200 |
+
- build-type: 'intel'
|
| 1201 |
+
cuda-major-version: ""
|
| 1202 |
+
cuda-minor-version: ""
|
| 1203 |
+
platforms: 'linux/amd64'
|
| 1204 |
+
tag-latest: 'auto'
|
| 1205 |
+
tag-suffix: '-gpu-intel-rfdetr'
|
| 1206 |
+
runs-on: 'ubuntu-latest'
|
| 1207 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 1208 |
+
skip-drivers: 'false'
|
| 1209 |
+
backend: "rfdetr"
|
| 1210 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1211 |
+
context: "./"
|
| 1212 |
+
ubuntu-version: '2404'
|
| 1213 |
+
- build-type: 'l4t'
|
| 1214 |
+
cuda-major-version: "12"
|
| 1215 |
+
cuda-minor-version: "0"
|
| 1216 |
+
platforms: 'linux/arm64'
|
| 1217 |
+
skip-drivers: 'true'
|
| 1218 |
+
tag-latest: 'auto'
|
| 1219 |
+
tag-suffix: '-nvidia-l4t-arm64-rfdetr'
|
| 1220 |
+
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
| 1221 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 1222 |
+
backend: "rfdetr"
|
| 1223 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1224 |
+
context: "./"
|
| 1225 |
+
ubuntu-version: '2204'
|
| 1226 |
+
# exllama2
|
| 1227 |
+
- build-type: ''
|
| 1228 |
+
cuda-major-version: ""
|
| 1229 |
+
cuda-minor-version: ""
|
| 1230 |
+
platforms: 'linux/amd64'
|
| 1231 |
+
tag-latest: 'auto'
|
| 1232 |
+
tag-suffix: '-cpu-exllama2'
|
| 1233 |
+
runs-on: 'ubuntu-latest'
|
| 1234 |
+
base-image: "ubuntu:24.04"
|
| 1235 |
+
skip-drivers: 'false'
|
| 1236 |
+
backend: "exllama2"
|
| 1237 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1238 |
+
context: "./"
|
| 1239 |
+
ubuntu-version: '2404'
|
| 1240 |
+
- build-type: 'intel'
|
| 1241 |
+
cuda-major-version: ""
|
| 1242 |
+
cuda-minor-version: ""
|
| 1243 |
+
platforms: 'linux/amd64'
|
| 1244 |
+
tag-latest: 'auto'
|
| 1245 |
+
tag-suffix: '-gpu-intel-exllama2'
|
| 1246 |
+
runs-on: 'ubuntu-latest'
|
| 1247 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 1248 |
+
skip-drivers: 'false'
|
| 1249 |
+
backend: "exllama2"
|
| 1250 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1251 |
+
context: "./"
|
| 1252 |
+
ubuntu-version: '2404'
|
| 1253 |
+
- build-type: 'hipblas'
|
| 1254 |
+
cuda-major-version: ""
|
| 1255 |
+
cuda-minor-version: ""
|
| 1256 |
+
platforms: 'linux/amd64'
|
| 1257 |
+
skip-drivers: 'true'
|
| 1258 |
+
tag-latest: 'auto'
|
| 1259 |
+
tag-suffix: '-gpu-hipblas-exllama2'
|
| 1260 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 1261 |
+
runs-on: 'ubuntu-latest'
|
| 1262 |
+
backend: "exllama2"
|
| 1263 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1264 |
+
context: "./"
|
| 1265 |
+
ubuntu-version: '2404'
|
| 1266 |
+
- build-type: 'l4t'
|
| 1267 |
+
cuda-major-version: "12"
|
| 1268 |
+
cuda-minor-version: "0"
|
| 1269 |
+
platforms: 'linux/arm64'
|
| 1270 |
+
skip-drivers: 'true'
|
| 1271 |
+
tag-latest: 'auto'
|
| 1272 |
+
tag-suffix: '-nvidia-l4t-arm64-chatterbox'
|
| 1273 |
+
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
| 1274 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 1275 |
+
backend: "chatterbox"
|
| 1276 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1277 |
+
context: "./"
|
| 1278 |
+
ubuntu-version: '2204'
|
| 1279 |
+
# runs out of space on the runner
|
| 1280 |
+
# - build-type: 'hipblas'
|
| 1281 |
+
# cuda-major-version: ""
|
| 1282 |
+
# cuda-minor-version: ""
|
| 1283 |
+
# platforms: 'linux/amd64'
|
| 1284 |
+
# tag-latest: 'auto'
|
| 1285 |
+
# tag-suffix: '-gpu-hipblas-rfdetr'
|
| 1286 |
+
# base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 1287 |
+
# runs-on: 'ubuntu-latest'
|
| 1288 |
+
# skip-drivers: 'false'
|
| 1289 |
+
# backend: "rfdetr"
|
| 1290 |
+
# dockerfile: "./backend/Dockerfile.python"
|
| 1291 |
+
# context: "./"
|
| 1292 |
+
# kitten-tts
|
| 1293 |
+
- build-type: ''
|
| 1294 |
+
cuda-major-version: ""
|
| 1295 |
+
cuda-minor-version: ""
|
| 1296 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 1297 |
+
tag-latest: 'auto'
|
| 1298 |
+
tag-suffix: '-kitten-tts'
|
| 1299 |
+
runs-on: 'ubuntu-latest'
|
| 1300 |
+
base-image: "ubuntu:24.04"
|
| 1301 |
+
skip-drivers: 'false'
|
| 1302 |
+
backend: "kitten-tts"
|
| 1303 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1304 |
+
context: "./"
|
| 1305 |
+
ubuntu-version: '2404'
|
| 1306 |
+
# neutts
|
| 1307 |
+
- build-type: ''
|
| 1308 |
+
cuda-major-version: ""
|
| 1309 |
+
cuda-minor-version: ""
|
| 1310 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 1311 |
+
tag-latest: 'auto'
|
| 1312 |
+
tag-suffix: '-cpu-neutts'
|
| 1313 |
+
runs-on: 'ubuntu-latest'
|
| 1314 |
+
base-image: "ubuntu:24.04"
|
| 1315 |
+
skip-drivers: 'false'
|
| 1316 |
+
backend: "neutts"
|
| 1317 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1318 |
+
context: "./"
|
| 1319 |
+
ubuntu-version: '2404'
|
| 1320 |
+
- build-type: 'hipblas'
|
| 1321 |
+
cuda-major-version: ""
|
| 1322 |
+
cuda-minor-version: ""
|
| 1323 |
+
platforms: 'linux/amd64'
|
| 1324 |
+
tag-latest: 'auto'
|
| 1325 |
+
tag-suffix: '-gpu-rocm-hipblas-neutts'
|
| 1326 |
+
runs-on: 'arc-runner-set'
|
| 1327 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 1328 |
+
skip-drivers: 'false'
|
| 1329 |
+
backend: "neutts"
|
| 1330 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1331 |
+
context: "./"
|
| 1332 |
+
ubuntu-version: '2404'
|
| 1333 |
+
- build-type: 'l4t'
|
| 1334 |
+
cuda-major-version: "12"
|
| 1335 |
+
cuda-minor-version: "0"
|
| 1336 |
+
platforms: 'linux/arm64'
|
| 1337 |
+
skip-drivers: 'true'
|
| 1338 |
+
tag-latest: 'auto'
|
| 1339 |
+
tag-suffix: '-nvidia-l4t-arm64-neutts'
|
| 1340 |
+
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
| 1341 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 1342 |
+
backend: "neutts"
|
| 1343 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1344 |
+
context: "./"
|
| 1345 |
+
ubuntu-version: '2204'
|
| 1346 |
+
- build-type: ''
|
| 1347 |
+
cuda-major-version: ""
|
| 1348 |
+
cuda-minor-version: ""
|
| 1349 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 1350 |
+
tag-latest: 'auto'
|
| 1351 |
+
tag-suffix: '-cpu-vibevoice'
|
| 1352 |
+
runs-on: 'ubuntu-latest'
|
| 1353 |
+
base-image: "ubuntu:24.04"
|
| 1354 |
+
skip-drivers: 'false'
|
| 1355 |
+
backend: "vibevoice"
|
| 1356 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1357 |
+
context: "./"
|
| 1358 |
+
ubuntu-version: '2404'
|
| 1359 |
+
- build-type: ''
|
| 1360 |
+
cuda-major-version: ""
|
| 1361 |
+
cuda-minor-version: ""
|
| 1362 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 1363 |
+
tag-latest: 'auto'
|
| 1364 |
+
tag-suffix: '-cpu-pocket-tts'
|
| 1365 |
+
runs-on: 'ubuntu-latest'
|
| 1366 |
+
base-image: "ubuntu:24.04"
|
| 1367 |
+
skip-drivers: 'false'
|
| 1368 |
+
backend: "pocket-tts"
|
| 1369 |
+
dockerfile: "./backend/Dockerfile.python"
|
| 1370 |
+
context: "./"
|
| 1371 |
+
ubuntu-version: '2404'
|
| 1372 |
+
backend-jobs-darwin:
|
| 1373 |
+
uses: ./.github/workflows/backend_build_darwin.yml
|
| 1374 |
+
strategy:
|
| 1375 |
+
matrix:
|
| 1376 |
+
include:
|
| 1377 |
+
- backend: "diffusers"
|
| 1378 |
+
tag-suffix: "-metal-darwin-arm64-diffusers"
|
| 1379 |
+
build-type: "mps"
|
| 1380 |
+
- backend: "mlx"
|
| 1381 |
+
tag-suffix: "-metal-darwin-arm64-mlx"
|
| 1382 |
+
build-type: "mps"
|
| 1383 |
+
- backend: "chatterbox"
|
| 1384 |
+
tag-suffix: "-metal-darwin-arm64-chatterbox"
|
| 1385 |
+
build-type: "mps"
|
| 1386 |
+
- backend: "mlx-vlm"
|
| 1387 |
+
tag-suffix: "-metal-darwin-arm64-mlx-vlm"
|
| 1388 |
+
build-type: "mps"
|
| 1389 |
+
- backend: "mlx-audio"
|
| 1390 |
+
tag-suffix: "-metal-darwin-arm64-mlx-audio"
|
| 1391 |
+
build-type: "mps"
|
| 1392 |
+
- backend: "stablediffusion-ggml"
|
| 1393 |
+
tag-suffix: "-metal-darwin-arm64-stablediffusion-ggml"
|
| 1394 |
+
build-type: "metal"
|
| 1395 |
+
lang: "go"
|
| 1396 |
+
- backend: "whisper"
|
| 1397 |
+
tag-suffix: "-metal-darwin-arm64-whisper"
|
| 1398 |
+
build-type: "metal"
|
| 1399 |
+
lang: "go"
|
| 1400 |
+
with:
|
| 1401 |
+
backend: ${{ matrix.backend }}
|
| 1402 |
+
build-type: ${{ matrix.build-type }}
|
| 1403 |
+
go-version: "1.24.x"
|
| 1404 |
+
tag-suffix: ${{ matrix.tag-suffix }}
|
| 1405 |
+
lang: ${{ matrix.lang || 'python' }}
|
| 1406 |
+
use-pip: ${{ matrix.backend == 'diffusers' }}
|
| 1407 |
+
runs-on: "macos-latest"
|
| 1408 |
+
secrets:
|
| 1409 |
+
dockerUsername: ${{ secrets.DOCKERHUB_USERNAME }}
|
| 1410 |
+
dockerPassword: ${{ secrets.DOCKERHUB_PASSWORD }}
|
| 1411 |
+
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
| 1412 |
+
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
| 1413 |
+
llama-cpp-darwin:
|
| 1414 |
+
runs-on: macos-latest
|
| 1415 |
+
strategy:
|
| 1416 |
+
matrix:
|
| 1417 |
+
go-version: ['1.25.x']
|
| 1418 |
+
steps:
|
| 1419 |
+
- name: Clone
|
| 1420 |
+
uses: actions/checkout@v6
|
| 1421 |
+
with:
|
| 1422 |
+
submodules: true
|
| 1423 |
+
- name: Setup Go ${{ matrix.go-version }}
|
| 1424 |
+
uses: actions/setup-go@v5
|
| 1425 |
+
with:
|
| 1426 |
+
go-version: ${{ matrix.go-version }}
|
| 1427 |
+
cache: false
|
| 1428 |
+
# You can test your matrix by printing the current Go version
|
| 1429 |
+
- name: Display Go version
|
| 1430 |
+
run: go version
|
| 1431 |
+
- name: Dependencies
|
| 1432 |
+
run: |
|
| 1433 |
+
brew install protobuf grpc make protoc-gen-go protoc-gen-go-grpc libomp llvm
|
| 1434 |
+
- name: Build llama-cpp-darwin
|
| 1435 |
+
run: |
|
| 1436 |
+
make protogen-go
|
| 1437 |
+
make backends/llama-cpp-darwin
|
| 1438 |
+
- name: Upload llama-cpp.tar
|
| 1439 |
+
uses: actions/upload-artifact@v6
|
| 1440 |
+
with:
|
| 1441 |
+
name: llama-cpp-tar
|
| 1442 |
+
path: backend-images/llama-cpp.tar
|
| 1443 |
+
llama-cpp-darwin-publish:
|
| 1444 |
+
needs: llama-cpp-darwin
|
| 1445 |
+
if: github.event_name != 'pull_request'
|
| 1446 |
+
runs-on: ubuntu-latest
|
| 1447 |
+
steps:
|
| 1448 |
+
- name: Download llama-cpp.tar
|
| 1449 |
+
uses: actions/download-artifact@v7
|
| 1450 |
+
with:
|
| 1451 |
+
name: llama-cpp-tar
|
| 1452 |
+
path: .
|
| 1453 |
+
- name: Install crane
|
| 1454 |
+
run: |
|
| 1455 |
+
curl -L https://github.com/google/go-containerregistry/releases/latest/download/go-containerregistry_Linux_x86_64.tar.gz | tar -xz
|
| 1456 |
+
sudo mv crane /usr/local/bin/
|
| 1457 |
+
- name: Log in to DockerHub
|
| 1458 |
+
run: |
|
| 1459 |
+
echo "${{ secrets.DOCKERHUB_PASSWORD }}" | crane auth login docker.io -u "${{ secrets.DOCKERHUB_USERNAME }}" --password-stdin
|
| 1460 |
+
- name: Log in to quay.io
|
| 1461 |
+
run: |
|
| 1462 |
+
echo "${{ secrets.LOCALAI_REGISTRY_PASSWORD }}" | crane auth login quay.io -u "${{ secrets.LOCALAI_REGISTRY_USERNAME }}" --password-stdin
|
| 1463 |
+
- name: Docker meta
|
| 1464 |
+
id: meta
|
| 1465 |
+
uses: docker/metadata-action@v5
|
| 1466 |
+
with:
|
| 1467 |
+
images: |
|
| 1468 |
+
localai/localai-backends
|
| 1469 |
+
tags: |
|
| 1470 |
+
type=ref,event=branch
|
| 1471 |
+
type=semver,pattern={{raw}}
|
| 1472 |
+
type=sha
|
| 1473 |
+
flavor: |
|
| 1474 |
+
latest=auto
|
| 1475 |
+
suffix=-metal-darwin-arm64-llama-cpp,onlatest=true
|
| 1476 |
+
- name: Docker meta
|
| 1477 |
+
id: quaymeta
|
| 1478 |
+
uses: docker/metadata-action@v5
|
| 1479 |
+
with:
|
| 1480 |
+
images: |
|
| 1481 |
+
quay.io/go-skynet/local-ai-backends
|
| 1482 |
+
tags: |
|
| 1483 |
+
type=ref,event=branch
|
| 1484 |
+
type=semver,pattern={{raw}}
|
| 1485 |
+
type=sha
|
| 1486 |
+
flavor: |
|
| 1487 |
+
latest=auto
|
| 1488 |
+
suffix=-metal-darwin-arm64-llama-cpp,onlatest=true
|
| 1489 |
+
- name: Push Docker image (DockerHub)
|
| 1490 |
+
run: |
|
| 1491 |
+
for tag in $(echo "${{ steps.meta.outputs.tags }}" | tr ',' '\n'); do
|
| 1492 |
+
crane push llama-cpp.tar $tag
|
| 1493 |
+
done
|
| 1494 |
+
- name: Push Docker image (Quay)
|
| 1495 |
+
run: |
|
| 1496 |
+
for tag in $(echo "${{ steps.quaymeta.outputs.tags }}" | tr ',' '\n'); do
|
| 1497 |
+
crane push llama-cpp.tar $tag
|
| 1498 |
+
done
|
.github/workflows/backend_build.yml
ADDED
|
@@ -0,0 +1,250 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: 'build backend container images (reusable)'
|
| 3 |
+
|
| 4 |
+
on:
|
| 5 |
+
workflow_call:
|
| 6 |
+
inputs:
|
| 7 |
+
base-image:
|
| 8 |
+
description: 'Base image'
|
| 9 |
+
required: true
|
| 10 |
+
type: string
|
| 11 |
+
build-type:
|
| 12 |
+
description: 'Build type'
|
| 13 |
+
default: ''
|
| 14 |
+
type: string
|
| 15 |
+
cuda-major-version:
|
| 16 |
+
description: 'CUDA major version'
|
| 17 |
+
default: "12"
|
| 18 |
+
type: string
|
| 19 |
+
cuda-minor-version:
|
| 20 |
+
description: 'CUDA minor version'
|
| 21 |
+
default: "1"
|
| 22 |
+
type: string
|
| 23 |
+
platforms:
|
| 24 |
+
description: 'Platforms'
|
| 25 |
+
default: ''
|
| 26 |
+
type: string
|
| 27 |
+
tag-latest:
|
| 28 |
+
description: 'Tag latest'
|
| 29 |
+
default: ''
|
| 30 |
+
type: string
|
| 31 |
+
tag-suffix:
|
| 32 |
+
description: 'Tag suffix'
|
| 33 |
+
default: ''
|
| 34 |
+
type: string
|
| 35 |
+
runs-on:
|
| 36 |
+
description: 'Runs on'
|
| 37 |
+
required: true
|
| 38 |
+
default: ''
|
| 39 |
+
type: string
|
| 40 |
+
backend:
|
| 41 |
+
description: 'Backend to build'
|
| 42 |
+
required: true
|
| 43 |
+
type: string
|
| 44 |
+
context:
|
| 45 |
+
description: 'Build context'
|
| 46 |
+
required: true
|
| 47 |
+
type: string
|
| 48 |
+
dockerfile:
|
| 49 |
+
description: 'Build Dockerfile'
|
| 50 |
+
required: true
|
| 51 |
+
type: string
|
| 52 |
+
skip-drivers:
|
| 53 |
+
description: 'Skip drivers'
|
| 54 |
+
default: 'false'
|
| 55 |
+
type: string
|
| 56 |
+
ubuntu-version:
|
| 57 |
+
description: 'Ubuntu version'
|
| 58 |
+
required: false
|
| 59 |
+
default: '2204'
|
| 60 |
+
type: string
|
| 61 |
+
secrets:
|
| 62 |
+
dockerUsername:
|
| 63 |
+
required: false
|
| 64 |
+
dockerPassword:
|
| 65 |
+
required: false
|
| 66 |
+
quayUsername:
|
| 67 |
+
required: true
|
| 68 |
+
quayPassword:
|
| 69 |
+
required: true
|
| 70 |
+
|
| 71 |
+
jobs:
|
| 72 |
+
backend-build:
|
| 73 |
+
runs-on: ${{ inputs.runs-on }}
|
| 74 |
+
env:
|
| 75 |
+
quay_username: ${{ secrets.quayUsername }}
|
| 76 |
+
steps:
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
- name: Free Disk Space (Ubuntu)
|
| 80 |
+
if: inputs.runs-on == 'ubuntu-latest'
|
| 81 |
+
uses: jlumbroso/free-disk-space@main
|
| 82 |
+
with:
|
| 83 |
+
# this might remove tools that are actually needed,
|
| 84 |
+
# if set to "true" but frees about 6 GB
|
| 85 |
+
tool-cache: true
|
| 86 |
+
# all of these default to true, but feel free to set to
|
| 87 |
+
# "false" if necessary for your workflow
|
| 88 |
+
android: true
|
| 89 |
+
dotnet: true
|
| 90 |
+
haskell: true
|
| 91 |
+
large-packages: true
|
| 92 |
+
docker-images: true
|
| 93 |
+
swap-storage: true
|
| 94 |
+
|
| 95 |
+
- name: Force Install GIT latest
|
| 96 |
+
run: |
|
| 97 |
+
sudo apt-get update \
|
| 98 |
+
&& sudo apt-get install -y software-properties-common \
|
| 99 |
+
&& sudo apt-get update \
|
| 100 |
+
&& sudo add-apt-repository -y ppa:git-core/ppa \
|
| 101 |
+
&& sudo apt-get update \
|
| 102 |
+
&& sudo apt-get install -y git
|
| 103 |
+
|
| 104 |
+
- name: Checkout
|
| 105 |
+
uses: actions/checkout@v6
|
| 106 |
+
|
| 107 |
+
- name: Release space from worker
|
| 108 |
+
if: inputs.runs-on == 'ubuntu-latest'
|
| 109 |
+
run: |
|
| 110 |
+
echo "Listing top largest packages"
|
| 111 |
+
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
| 112 |
+
head -n 30 <<< "${pkgs}"
|
| 113 |
+
echo
|
| 114 |
+
df -h
|
| 115 |
+
echo
|
| 116 |
+
sudo apt-get remove -y '^llvm-.*|^libllvm.*' || true
|
| 117 |
+
sudo apt-get remove --auto-remove android-sdk-platform-tools snapd || true
|
| 118 |
+
sudo apt-get purge --auto-remove android-sdk-platform-tools snapd || true
|
| 119 |
+
sudo rm -rf /usr/local/lib/android
|
| 120 |
+
sudo apt-get remove -y '^dotnet-.*|^aspnetcore-.*' || true
|
| 121 |
+
sudo rm -rf /usr/share/dotnet
|
| 122 |
+
sudo apt-get remove -y '^mono-.*' || true
|
| 123 |
+
sudo apt-get remove -y '^ghc-.*' || true
|
| 124 |
+
sudo apt-get remove -y '.*jdk.*|.*jre.*' || true
|
| 125 |
+
sudo apt-get remove -y 'php.*' || true
|
| 126 |
+
sudo apt-get remove -y hhvm powershell firefox monodoc-manual msbuild || true
|
| 127 |
+
sudo apt-get remove -y '^google-.*' || true
|
| 128 |
+
sudo apt-get remove -y azure-cli || true
|
| 129 |
+
sudo apt-get remove -y '^mongo.*-.*|^postgresql-.*|^mysql-.*|^mssql-.*' || true
|
| 130 |
+
sudo apt-get remove -y '^gfortran-.*' || true
|
| 131 |
+
sudo apt-get remove -y microsoft-edge-stable || true
|
| 132 |
+
sudo apt-get remove -y firefox || true
|
| 133 |
+
sudo apt-get remove -y powershell || true
|
| 134 |
+
sudo apt-get remove -y r-base-core || true
|
| 135 |
+
sudo apt-get autoremove -y
|
| 136 |
+
sudo apt-get clean
|
| 137 |
+
echo
|
| 138 |
+
echo "Listing top largest packages"
|
| 139 |
+
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
| 140 |
+
head -n 30 <<< "${pkgs}"
|
| 141 |
+
echo
|
| 142 |
+
sudo rm -rfv build || true
|
| 143 |
+
sudo rm -rf /usr/share/dotnet || true
|
| 144 |
+
sudo rm -rf /opt/ghc || true
|
| 145 |
+
sudo rm -rf "/usr/local/share/boost" || true
|
| 146 |
+
sudo rm -rf "$AGENT_TOOLSDIRECTORY" || true
|
| 147 |
+
df -h
|
| 148 |
+
|
| 149 |
+
- name: Docker meta
|
| 150 |
+
id: meta
|
| 151 |
+
if: github.event_name != 'pull_request'
|
| 152 |
+
uses: docker/metadata-action@v5
|
| 153 |
+
with:
|
| 154 |
+
images: |
|
| 155 |
+
quay.io/go-skynet/local-ai-backends
|
| 156 |
+
localai/localai-backends
|
| 157 |
+
tags: |
|
| 158 |
+
type=ref,event=branch
|
| 159 |
+
type=semver,pattern={{raw}}
|
| 160 |
+
type=sha
|
| 161 |
+
flavor: |
|
| 162 |
+
latest=${{ inputs.tag-latest }}
|
| 163 |
+
suffix=${{ inputs.tag-suffix }},onlatest=true
|
| 164 |
+
|
| 165 |
+
- name: Docker meta for PR
|
| 166 |
+
id: meta_pull_request
|
| 167 |
+
if: github.event_name == 'pull_request'
|
| 168 |
+
uses: docker/metadata-action@v5
|
| 169 |
+
with:
|
| 170 |
+
images: |
|
| 171 |
+
quay.io/go-skynet/ci-tests
|
| 172 |
+
tags: |
|
| 173 |
+
type=ref,event=branch,suffix=${{ github.event.number }}-${{ inputs.backend }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
| 174 |
+
type=semver,pattern={{raw}},suffix=${{ github.event.number }}-${{ inputs.backend }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
| 175 |
+
type=sha,suffix=${{ github.event.number }}-${{ inputs.backend }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
| 176 |
+
flavor: |
|
| 177 |
+
latest=${{ inputs.tag-latest }}
|
| 178 |
+
suffix=${{ inputs.tag-suffix }},onlatest=true
|
| 179 |
+
## End testing image
|
| 180 |
+
- name: Set up QEMU
|
| 181 |
+
uses: docker/setup-qemu-action@master
|
| 182 |
+
with:
|
| 183 |
+
platforms: all
|
| 184 |
+
|
| 185 |
+
- name: Set up Docker Buildx
|
| 186 |
+
id: buildx
|
| 187 |
+
uses: docker/setup-buildx-action@master
|
| 188 |
+
|
| 189 |
+
- name: Login to DockerHub
|
| 190 |
+
if: github.event_name != 'pull_request'
|
| 191 |
+
uses: docker/login-action@v3
|
| 192 |
+
with:
|
| 193 |
+
username: ${{ secrets.dockerUsername }}
|
| 194 |
+
password: ${{ secrets.dockerPassword }}
|
| 195 |
+
|
| 196 |
+
- name: Login to Quay.io
|
| 197 |
+
if: ${{ env.quay_username != '' }}
|
| 198 |
+
uses: docker/login-action@v3
|
| 199 |
+
with:
|
| 200 |
+
registry: quay.io
|
| 201 |
+
username: ${{ secrets.quayUsername }}
|
| 202 |
+
password: ${{ secrets.quayPassword }}
|
| 203 |
+
|
| 204 |
+
- name: Build and push
|
| 205 |
+
uses: docker/build-push-action@v6
|
| 206 |
+
if: github.event_name != 'pull_request'
|
| 207 |
+
with:
|
| 208 |
+
builder: ${{ steps.buildx.outputs.name }}
|
| 209 |
+
build-args: |
|
| 210 |
+
BUILD_TYPE=${{ inputs.build-type }}
|
| 211 |
+
SKIP_DRIVERS=${{ inputs.skip-drivers }}
|
| 212 |
+
CUDA_MAJOR_VERSION=${{ inputs.cuda-major-version }}
|
| 213 |
+
CUDA_MINOR_VERSION=${{ inputs.cuda-minor-version }}
|
| 214 |
+
BASE_IMAGE=${{ inputs.base-image }}
|
| 215 |
+
BACKEND=${{ inputs.backend }}
|
| 216 |
+
UBUNTU_VERSION=${{ inputs.ubuntu-version }}
|
| 217 |
+
context: ${{ inputs.context }}
|
| 218 |
+
file: ${{ inputs.dockerfile }}
|
| 219 |
+
cache-from: type=gha
|
| 220 |
+
platforms: ${{ inputs.platforms }}
|
| 221 |
+
push: ${{ github.event_name != 'pull_request' }}
|
| 222 |
+
tags: ${{ steps.meta.outputs.tags }}
|
| 223 |
+
labels: ${{ steps.meta.outputs.labels }}
|
| 224 |
+
|
| 225 |
+
- name: Build and push (PR)
|
| 226 |
+
uses: docker/build-push-action@v6
|
| 227 |
+
if: github.event_name == 'pull_request'
|
| 228 |
+
with:
|
| 229 |
+
builder: ${{ steps.buildx.outputs.name }}
|
| 230 |
+
build-args: |
|
| 231 |
+
BUILD_TYPE=${{ inputs.build-type }}
|
| 232 |
+
SKIP_DRIVERS=${{ inputs.skip-drivers }}
|
| 233 |
+
CUDA_MAJOR_VERSION=${{ inputs.cuda-major-version }}
|
| 234 |
+
CUDA_MINOR_VERSION=${{ inputs.cuda-minor-version }}
|
| 235 |
+
BASE_IMAGE=${{ inputs.base-image }}
|
| 236 |
+
BACKEND=${{ inputs.backend }}
|
| 237 |
+
UBUNTU_VERSION=${{ inputs.ubuntu-version }}
|
| 238 |
+
context: ${{ inputs.context }}
|
| 239 |
+
file: ${{ inputs.dockerfile }}
|
| 240 |
+
cache-from: type=gha
|
| 241 |
+
platforms: ${{ inputs.platforms }}
|
| 242 |
+
push: ${{ env.quay_username != '' }}
|
| 243 |
+
tags: ${{ steps.meta_pull_request.outputs.tags }}
|
| 244 |
+
labels: ${{ steps.meta_pull_request.outputs.labels }}
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
- name: job summary
|
| 249 |
+
run: |
|
| 250 |
+
echo "Built image: ${{ steps.meta.outputs.labels }}" >> $GITHUB_STEP_SUMMARY
|
.github/workflows/backend_build_darwin.yml
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: 'build darwin python backend container images (reusable)'
|
| 3 |
+
|
| 4 |
+
on:
|
| 5 |
+
workflow_call:
|
| 6 |
+
inputs:
|
| 7 |
+
backend:
|
| 8 |
+
description: 'Backend to build'
|
| 9 |
+
required: true
|
| 10 |
+
type: string
|
| 11 |
+
build-type:
|
| 12 |
+
description: 'Build type (e.g., mps)'
|
| 13 |
+
default: ''
|
| 14 |
+
type: string
|
| 15 |
+
use-pip:
|
| 16 |
+
description: 'Use pip to install dependencies'
|
| 17 |
+
default: false
|
| 18 |
+
type: boolean
|
| 19 |
+
lang:
|
| 20 |
+
description: 'Programming language (e.g. go)'
|
| 21 |
+
default: 'python'
|
| 22 |
+
type: string
|
| 23 |
+
go-version:
|
| 24 |
+
description: 'Go version to use'
|
| 25 |
+
default: '1.24.x'
|
| 26 |
+
type: string
|
| 27 |
+
tag-suffix:
|
| 28 |
+
description: 'Tag suffix for the built image'
|
| 29 |
+
required: true
|
| 30 |
+
type: string
|
| 31 |
+
runs-on:
|
| 32 |
+
description: 'Runner to use'
|
| 33 |
+
default: 'macOS-14'
|
| 34 |
+
type: string
|
| 35 |
+
secrets:
|
| 36 |
+
dockerUsername:
|
| 37 |
+
required: false
|
| 38 |
+
dockerPassword:
|
| 39 |
+
required: false
|
| 40 |
+
quayUsername:
|
| 41 |
+
required: true
|
| 42 |
+
quayPassword:
|
| 43 |
+
required: true
|
| 44 |
+
|
| 45 |
+
jobs:
|
| 46 |
+
darwin-backend-build:
|
| 47 |
+
runs-on: ${{ inputs.runs-on }}
|
| 48 |
+
strategy:
|
| 49 |
+
matrix:
|
| 50 |
+
go-version: ['${{ inputs.go-version }}']
|
| 51 |
+
steps:
|
| 52 |
+
- name: Clone
|
| 53 |
+
uses: actions/checkout@v6
|
| 54 |
+
with:
|
| 55 |
+
submodules: true
|
| 56 |
+
|
| 57 |
+
- name: Setup Go ${{ matrix.go-version }}
|
| 58 |
+
uses: actions/setup-go@v5
|
| 59 |
+
with:
|
| 60 |
+
go-version: ${{ matrix.go-version }}
|
| 61 |
+
cache: false
|
| 62 |
+
|
| 63 |
+
# You can test your matrix by printing the current Go version
|
| 64 |
+
- name: Display Go version
|
| 65 |
+
run: go version
|
| 66 |
+
|
| 67 |
+
- name: Dependencies
|
| 68 |
+
run: |
|
| 69 |
+
brew install protobuf grpc make protoc-gen-go protoc-gen-go-grpc libomp llvm
|
| 70 |
+
|
| 71 |
+
- name: Build ${{ inputs.backend }}-darwin
|
| 72 |
+
run: |
|
| 73 |
+
make protogen-go
|
| 74 |
+
BACKEND=${{ inputs.backend }} BUILD_TYPE=${{ inputs.build-type }} USE_PIP=${{ inputs.use-pip }} make build-darwin-${{ inputs.lang }}-backend
|
| 75 |
+
|
| 76 |
+
- name: Upload ${{ inputs.backend }}.tar
|
| 77 |
+
uses: actions/upload-artifact@v6
|
| 78 |
+
with:
|
| 79 |
+
name: ${{ inputs.backend }}-tar
|
| 80 |
+
path: backend-images/${{ inputs.backend }}.tar
|
| 81 |
+
|
| 82 |
+
darwin-backend-publish:
|
| 83 |
+
needs: darwin-backend-build
|
| 84 |
+
if: github.event_name != 'pull_request'
|
| 85 |
+
runs-on: ubuntu-latest
|
| 86 |
+
steps:
|
| 87 |
+
- name: Download ${{ inputs.backend }}.tar
|
| 88 |
+
uses: actions/download-artifact@v7
|
| 89 |
+
with:
|
| 90 |
+
name: ${{ inputs.backend }}-tar
|
| 91 |
+
path: .
|
| 92 |
+
|
| 93 |
+
- name: Install crane
|
| 94 |
+
run: |
|
| 95 |
+
curl -L https://github.com/google/go-containerregistry/releases/latest/download/go-containerregistry_Linux_x86_64.tar.gz | tar -xz
|
| 96 |
+
sudo mv crane /usr/local/bin/
|
| 97 |
+
|
| 98 |
+
- name: Log in to DockerHub
|
| 99 |
+
run: |
|
| 100 |
+
echo "${{ secrets.dockerPassword }}" | crane auth login docker.io -u "${{ secrets.dockerUsername }}" --password-stdin
|
| 101 |
+
|
| 102 |
+
- name: Log in to quay.io
|
| 103 |
+
run: |
|
| 104 |
+
echo "${{ secrets.quayPassword }}" | crane auth login quay.io -u "${{ secrets.quayUsername }}" --password-stdin
|
| 105 |
+
|
| 106 |
+
- name: Docker meta
|
| 107 |
+
id: meta
|
| 108 |
+
uses: docker/metadata-action@v5
|
| 109 |
+
with:
|
| 110 |
+
images: |
|
| 111 |
+
localai/localai-backends
|
| 112 |
+
tags: |
|
| 113 |
+
type=ref,event=branch
|
| 114 |
+
type=semver,pattern={{raw}}
|
| 115 |
+
type=sha
|
| 116 |
+
flavor: |
|
| 117 |
+
latest=auto
|
| 118 |
+
suffix=${{ inputs.tag-suffix }},onlatest=true
|
| 119 |
+
|
| 120 |
+
- name: Docker meta
|
| 121 |
+
id: quaymeta
|
| 122 |
+
uses: docker/metadata-action@v5
|
| 123 |
+
with:
|
| 124 |
+
images: |
|
| 125 |
+
quay.io/go-skynet/local-ai-backends
|
| 126 |
+
tags: |
|
| 127 |
+
type=ref,event=branch
|
| 128 |
+
type=semver,pattern={{raw}}
|
| 129 |
+
type=sha
|
| 130 |
+
flavor: |
|
| 131 |
+
latest=auto
|
| 132 |
+
suffix=${{ inputs.tag-suffix }},onlatest=true
|
| 133 |
+
|
| 134 |
+
- name: Push Docker image (DockerHub)
|
| 135 |
+
run: |
|
| 136 |
+
for tag in $(echo "${{ steps.meta.outputs.tags }}" | tr ',' '\n'); do
|
| 137 |
+
crane push ${{ inputs.backend }}.tar $tag
|
| 138 |
+
done
|
| 139 |
+
|
| 140 |
+
- name: Push Docker image (Quay)
|
| 141 |
+
run: |
|
| 142 |
+
for tag in $(echo "${{ steps.quaymeta.outputs.tags }}" | tr ',' '\n'); do
|
| 143 |
+
crane push ${{ inputs.backend }}.tar $tag
|
| 144 |
+
done
|
.github/workflows/backend_pr.yml
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: 'build backend container images (PR-filtered)'
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
pull_request:
|
| 5 |
+
|
| 6 |
+
concurrency:
|
| 7 |
+
group: ci-backends-pr-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
| 8 |
+
cancel-in-progress: true
|
| 9 |
+
|
| 10 |
+
jobs:
|
| 11 |
+
generate-matrix:
|
| 12 |
+
runs-on: ubuntu-latest
|
| 13 |
+
outputs:
|
| 14 |
+
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
| 15 |
+
matrix-darwin: ${{ steps.set-matrix.outputs.matrix-darwin }}
|
| 16 |
+
has-backends: ${{ steps.set-matrix.outputs.has-backends }}
|
| 17 |
+
has-backends-darwin: ${{ steps.set-matrix.outputs.has-backends-darwin }}
|
| 18 |
+
steps:
|
| 19 |
+
- name: Checkout repository
|
| 20 |
+
uses: actions/checkout@v6
|
| 21 |
+
|
| 22 |
+
- name: Setup Bun
|
| 23 |
+
uses: oven-sh/setup-bun@v2
|
| 24 |
+
|
| 25 |
+
- name: Install dependencies
|
| 26 |
+
run: |
|
| 27 |
+
bun add js-yaml
|
| 28 |
+
bun add @octokit/core
|
| 29 |
+
|
| 30 |
+
# filters the matrix in backend.yml
|
| 31 |
+
- name: Filter matrix for changed backends
|
| 32 |
+
id: set-matrix
|
| 33 |
+
env:
|
| 34 |
+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
| 35 |
+
GITHUB_EVENT_PATH: ${{ github.event_path }}
|
| 36 |
+
run: bun run scripts/changed-backends.js
|
| 37 |
+
|
| 38 |
+
backend-jobs:
|
| 39 |
+
needs: generate-matrix
|
| 40 |
+
uses: ./.github/workflows/backend_build.yml
|
| 41 |
+
if: needs.generate-matrix.outputs.has-backends == 'true'
|
| 42 |
+
with:
|
| 43 |
+
tag-latest: ${{ matrix.tag-latest }}
|
| 44 |
+
tag-suffix: ${{ matrix.tag-suffix }}
|
| 45 |
+
build-type: ${{ matrix.build-type }}
|
| 46 |
+
cuda-major-version: ${{ matrix.cuda-major-version }}
|
| 47 |
+
cuda-minor-version: ${{ matrix.cuda-minor-version }}
|
| 48 |
+
platforms: ${{ matrix.platforms }}
|
| 49 |
+
runs-on: ${{ matrix.runs-on }}
|
| 50 |
+
base-image: ${{ matrix.base-image }}
|
| 51 |
+
backend: ${{ matrix.backend }}
|
| 52 |
+
dockerfile: ${{ matrix.dockerfile }}
|
| 53 |
+
skip-drivers: ${{ matrix.skip-drivers }}
|
| 54 |
+
context: ${{ matrix.context }}
|
| 55 |
+
ubuntu-version: ${{ matrix.ubuntu-version }}
|
| 56 |
+
secrets:
|
| 57 |
+
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
| 58 |
+
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
| 59 |
+
strategy:
|
| 60 |
+
fail-fast: true
|
| 61 |
+
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix) }}
|
| 62 |
+
backend-jobs-darwin:
|
| 63 |
+
needs: generate-matrix
|
| 64 |
+
uses: ./.github/workflows/backend_build_darwin.yml
|
| 65 |
+
if: needs.generate-matrix.outputs.has-backends-darwin == 'true'
|
| 66 |
+
with:
|
| 67 |
+
backend: ${{ matrix.backend }}
|
| 68 |
+
build-type: ${{ matrix.build-type }}
|
| 69 |
+
go-version: "1.24.x"
|
| 70 |
+
tag-suffix: ${{ matrix.tag-suffix }}
|
| 71 |
+
lang: ${{ matrix.lang || 'python' }}
|
| 72 |
+
use-pip: ${{ matrix.backend == 'diffusers' }}
|
| 73 |
+
runs-on: "macos-latest"
|
| 74 |
+
secrets:
|
| 75 |
+
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
| 76 |
+
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
| 77 |
+
strategy:
|
| 78 |
+
fail-fast: true
|
| 79 |
+
matrix: ${{ fromJson(needs.generate-matrix.outputs.matrix-darwin) }}
|
.github/workflows/build-test.yaml
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Build test
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
push:
|
| 5 |
+
branches:
|
| 6 |
+
- master
|
| 7 |
+
pull_request:
|
| 8 |
+
|
| 9 |
+
jobs:
|
| 10 |
+
build-test:
|
| 11 |
+
runs-on: ubuntu-latest
|
| 12 |
+
steps:
|
| 13 |
+
- name: Checkout
|
| 14 |
+
uses: actions/checkout@v6
|
| 15 |
+
with:
|
| 16 |
+
fetch-depth: 0
|
| 17 |
+
- name: Set up Go
|
| 18 |
+
uses: actions/setup-go@v5
|
| 19 |
+
with:
|
| 20 |
+
go-version: 1.25
|
| 21 |
+
- name: Run GoReleaser
|
| 22 |
+
run: |
|
| 23 |
+
make dev-dist
|
| 24 |
+
launcher-build-darwin:
|
| 25 |
+
runs-on: macos-latest
|
| 26 |
+
steps:
|
| 27 |
+
- name: Checkout
|
| 28 |
+
uses: actions/checkout@v6
|
| 29 |
+
with:
|
| 30 |
+
fetch-depth: 0
|
| 31 |
+
- name: Set up Go
|
| 32 |
+
uses: actions/setup-go@v5
|
| 33 |
+
with:
|
| 34 |
+
go-version: 1.25
|
| 35 |
+
- name: Build launcher for macOS ARM64
|
| 36 |
+
run: |
|
| 37 |
+
make build-launcher-darwin
|
| 38 |
+
ls -liah dist
|
| 39 |
+
- name: Upload macOS launcher artifacts
|
| 40 |
+
uses: actions/upload-artifact@v6
|
| 41 |
+
with:
|
| 42 |
+
name: launcher-macos
|
| 43 |
+
path: dist/
|
| 44 |
+
retention-days: 30
|
| 45 |
+
|
| 46 |
+
launcher-build-linux:
|
| 47 |
+
runs-on: ubuntu-latest
|
| 48 |
+
steps:
|
| 49 |
+
- name: Checkout
|
| 50 |
+
uses: actions/checkout@v6
|
| 51 |
+
with:
|
| 52 |
+
fetch-depth: 0
|
| 53 |
+
- name: Set up Go
|
| 54 |
+
uses: actions/setup-go@v5
|
| 55 |
+
with:
|
| 56 |
+
go-version: 1.25
|
| 57 |
+
- name: Build launcher for Linux
|
| 58 |
+
run: |
|
| 59 |
+
sudo apt-get update
|
| 60 |
+
sudo apt-get install golang gcc libgl1-mesa-dev xorg-dev libxkbcommon-dev
|
| 61 |
+
make build-launcher-linux
|
| 62 |
+
- name: Upload Linux launcher artifacts
|
| 63 |
+
uses: actions/upload-artifact@v6
|
| 64 |
+
with:
|
| 65 |
+
name: launcher-linux
|
| 66 |
+
path: local-ai-launcher-linux.tar.xz
|
| 67 |
+
retention-days: 30
|
.github/workflows/bump_deps.yaml
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Bump Backend dependencies
|
| 2 |
+
on:
|
| 3 |
+
schedule:
|
| 4 |
+
- cron: 0 20 * * *
|
| 5 |
+
workflow_dispatch:
|
| 6 |
+
jobs:
|
| 7 |
+
bump-backends:
|
| 8 |
+
strategy:
|
| 9 |
+
fail-fast: false
|
| 10 |
+
matrix:
|
| 11 |
+
include:
|
| 12 |
+
- repository: "ggml-org/llama.cpp"
|
| 13 |
+
variable: "LLAMA_VERSION"
|
| 14 |
+
branch: "master"
|
| 15 |
+
file: "backend/cpp/llama-cpp/Makefile"
|
| 16 |
+
- repository: "ggml-org/whisper.cpp"
|
| 17 |
+
variable: "WHISPER_CPP_VERSION"
|
| 18 |
+
branch: "master"
|
| 19 |
+
file: "backend/go/whisper/Makefile"
|
| 20 |
+
- repository: "PABannier/bark.cpp"
|
| 21 |
+
variable: "BARKCPP_VERSION"
|
| 22 |
+
branch: "main"
|
| 23 |
+
file: "Makefile"
|
| 24 |
+
- repository: "leejet/stable-diffusion.cpp"
|
| 25 |
+
variable: "STABLEDIFFUSION_GGML_VERSION"
|
| 26 |
+
branch: "master"
|
| 27 |
+
file: "backend/go/stablediffusion-ggml/Makefile"
|
| 28 |
+
- repository: "mudler/go-piper"
|
| 29 |
+
variable: "PIPER_VERSION"
|
| 30 |
+
branch: "master"
|
| 31 |
+
file: "backend/go/piper/Makefile"
|
| 32 |
+
runs-on: ubuntu-latest
|
| 33 |
+
steps:
|
| 34 |
+
- uses: actions/checkout@v6
|
| 35 |
+
- name: Bump dependencies 🔧
|
| 36 |
+
id: bump
|
| 37 |
+
run: |
|
| 38 |
+
bash .github/bump_deps.sh ${{ matrix.repository }} ${{ matrix.branch }} ${{ matrix.variable }} ${{ matrix.file }}
|
| 39 |
+
{
|
| 40 |
+
echo 'message<<EOF'
|
| 41 |
+
cat "${{ matrix.variable }}_message.txt"
|
| 42 |
+
echo EOF
|
| 43 |
+
} >> "$GITHUB_OUTPUT"
|
| 44 |
+
{
|
| 45 |
+
echo 'commit<<EOF'
|
| 46 |
+
cat "${{ matrix.variable }}_commit.txt"
|
| 47 |
+
echo EOF
|
| 48 |
+
} >> "$GITHUB_OUTPUT"
|
| 49 |
+
rm -rfv ${{ matrix.variable }}_message.txt
|
| 50 |
+
rm -rfv ${{ matrix.variable }}_commit.txt
|
| 51 |
+
- name: Create Pull Request
|
| 52 |
+
uses: peter-evans/create-pull-request@v8
|
| 53 |
+
with:
|
| 54 |
+
token: ${{ secrets.UPDATE_BOT_TOKEN }}
|
| 55 |
+
push-to-fork: ci-forks/LocalAI
|
| 56 |
+
commit-message: ':arrow_up: Update ${{ matrix.repository }}'
|
| 57 |
+
title: 'chore: :arrow_up: Update ${{ matrix.repository }} to `${{ steps.bump.outputs.commit }}`'
|
| 58 |
+
branch: "update/${{ matrix.variable }}"
|
| 59 |
+
body: ${{ steps.bump.outputs.message }}
|
| 60 |
+
signoff: true
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
|
.github/workflows/bump_docs.yaml
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Bump Documentation
|
| 2 |
+
on:
|
| 3 |
+
schedule:
|
| 4 |
+
- cron: 0 20 * * *
|
| 5 |
+
workflow_dispatch:
|
| 6 |
+
jobs:
|
| 7 |
+
bump-docs:
|
| 8 |
+
strategy:
|
| 9 |
+
fail-fast: false
|
| 10 |
+
matrix:
|
| 11 |
+
include:
|
| 12 |
+
- repository: "mudler/LocalAI"
|
| 13 |
+
runs-on: ubuntu-latest
|
| 14 |
+
steps:
|
| 15 |
+
- uses: actions/checkout@v6
|
| 16 |
+
- name: Bump dependencies 🔧
|
| 17 |
+
run: |
|
| 18 |
+
bash .github/bump_docs.sh ${{ matrix.repository }}
|
| 19 |
+
- name: Create Pull Request
|
| 20 |
+
uses: peter-evans/create-pull-request@v8
|
| 21 |
+
with:
|
| 22 |
+
token: ${{ secrets.UPDATE_BOT_TOKEN }}
|
| 23 |
+
push-to-fork: ci-forks/LocalAI
|
| 24 |
+
commit-message: ':arrow_up: Update docs version ${{ matrix.repository }}'
|
| 25 |
+
title: 'docs: :arrow_up: update docs version ${{ matrix.repository }}'
|
| 26 |
+
branch: "update/docs"
|
| 27 |
+
body: Bump of ${{ matrix.repository }} version inside docs
|
| 28 |
+
signoff: true
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
|
.github/workflows/checksum_checker.yaml
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Check if checksums are up-to-date
|
| 2 |
+
on:
|
| 3 |
+
schedule:
|
| 4 |
+
- cron: 0 20 * * *
|
| 5 |
+
workflow_dispatch:
|
| 6 |
+
jobs:
|
| 7 |
+
checksum_check:
|
| 8 |
+
runs-on: ubuntu-latest
|
| 9 |
+
steps:
|
| 10 |
+
- name: Force Install GIT latest
|
| 11 |
+
run: |
|
| 12 |
+
sudo apt-get update \
|
| 13 |
+
&& sudo apt-get install -y software-properties-common \
|
| 14 |
+
&& sudo apt-get update \
|
| 15 |
+
&& sudo add-apt-repository -y ppa:git-core/ppa \
|
| 16 |
+
&& sudo apt-get update \
|
| 17 |
+
&& sudo apt-get install -y git
|
| 18 |
+
- uses: actions/checkout@v6
|
| 19 |
+
- name: Install dependencies
|
| 20 |
+
run: |
|
| 21 |
+
sudo apt-get update
|
| 22 |
+
sudo apt-get install -y pip wget
|
| 23 |
+
pip install huggingface_hub
|
| 24 |
+
- name: 'Setup yq'
|
| 25 |
+
uses: dcarbone/install-yq-action@v1.3.1
|
| 26 |
+
with:
|
| 27 |
+
version: 'v4.44.2'
|
| 28 |
+
download-compressed: true
|
| 29 |
+
force: true
|
| 30 |
+
|
| 31 |
+
- name: Checksum checker 🔧
|
| 32 |
+
run: |
|
| 33 |
+
export HF_HOME=/hf_cache
|
| 34 |
+
sudo mkdir /hf_cache
|
| 35 |
+
sudo chmod 777 /hf_cache
|
| 36 |
+
bash .github/checksum_checker.sh gallery/index.yaml
|
| 37 |
+
- name: Create Pull Request
|
| 38 |
+
uses: peter-evans/create-pull-request@v8
|
| 39 |
+
with:
|
| 40 |
+
token: ${{ secrets.UPDATE_BOT_TOKEN }}
|
| 41 |
+
push-to-fork: ci-forks/LocalAI
|
| 42 |
+
commit-message: ':arrow_up: Checksum updates in gallery/index.yaml'
|
| 43 |
+
title: 'chore(model-gallery): :arrow_up: update checksum'
|
| 44 |
+
branch: "update/checksum"
|
| 45 |
+
body: Updating checksums in gallery/index.yaml
|
| 46 |
+
signoff: true
|
.github/workflows/dependabot_auto.yml
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Dependabot auto-merge
|
| 2 |
+
on:
|
| 3 |
+
- pull_request_target
|
| 4 |
+
|
| 5 |
+
permissions:
|
| 6 |
+
contents: write
|
| 7 |
+
pull-requests: write
|
| 8 |
+
packages: read
|
| 9 |
+
|
| 10 |
+
jobs:
|
| 11 |
+
dependabot:
|
| 12 |
+
runs-on: ubuntu-latest
|
| 13 |
+
if: ${{ github.actor == 'dependabot[bot]' }}
|
| 14 |
+
steps:
|
| 15 |
+
- name: Dependabot metadata
|
| 16 |
+
id: metadata
|
| 17 |
+
uses: dependabot/fetch-metadata@v2.5.0
|
| 18 |
+
with:
|
| 19 |
+
github-token: "${{ secrets.GITHUB_TOKEN }}"
|
| 20 |
+
skip-commit-verification: true
|
| 21 |
+
|
| 22 |
+
- name: Checkout repository
|
| 23 |
+
uses: actions/checkout@v6
|
| 24 |
+
|
| 25 |
+
- name: Approve a PR if not already approved
|
| 26 |
+
run: |
|
| 27 |
+
gh pr checkout "$PR_URL"
|
| 28 |
+
if [ "$(gh pr status --json reviewDecision -q .currentBranch.reviewDecision)" != "APPROVED" ];
|
| 29 |
+
then
|
| 30 |
+
gh pr review --approve "$PR_URL"
|
| 31 |
+
else
|
| 32 |
+
echo "PR already approved.";
|
| 33 |
+
fi
|
| 34 |
+
env:
|
| 35 |
+
PR_URL: ${{github.event.pull_request.html_url}}
|
| 36 |
+
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
| 37 |
+
|
| 38 |
+
- name: Enable auto-merge for Dependabot PRs
|
| 39 |
+
if: ${{ contains(github.event.pull_request.title, 'bump')}}
|
| 40 |
+
run: gh pr merge --auto --squash "$PR_URL"
|
| 41 |
+
env:
|
| 42 |
+
PR_URL: ${{github.event.pull_request.html_url}}
|
| 43 |
+
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
.github/workflows/deploy-explorer.yaml
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Explorer deployment
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
push:
|
| 5 |
+
branches:
|
| 6 |
+
- master
|
| 7 |
+
tags:
|
| 8 |
+
- 'v*'
|
| 9 |
+
|
| 10 |
+
concurrency:
|
| 11 |
+
group: ci-deploy-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
| 12 |
+
|
| 13 |
+
jobs:
|
| 14 |
+
build-linux:
|
| 15 |
+
runs-on: ubuntu-latest
|
| 16 |
+
steps:
|
| 17 |
+
- name: Clone
|
| 18 |
+
uses: actions/checkout@v6
|
| 19 |
+
with:
|
| 20 |
+
submodules: true
|
| 21 |
+
- uses: actions/setup-go@v5
|
| 22 |
+
with:
|
| 23 |
+
go-version: '1.21.x'
|
| 24 |
+
cache: false
|
| 25 |
+
- name: Dependencies
|
| 26 |
+
run: |
|
| 27 |
+
sudo apt-get update
|
| 28 |
+
sudo apt-get install -y wget curl build-essential ffmpeg protobuf-compiler ccache upx-ucl gawk cmake libgmock-dev
|
| 29 |
+
go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@1958fcbe2ca8bd93af633f11e97d44e567e945af
|
| 30 |
+
go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.2
|
| 31 |
+
make protogen-go
|
| 32 |
+
- name: Build api
|
| 33 |
+
run: |
|
| 34 |
+
CGO_ENABLED=0 make build
|
| 35 |
+
- name: rm
|
| 36 |
+
uses: appleboy/ssh-action@v1.2.4
|
| 37 |
+
with:
|
| 38 |
+
host: ${{ secrets.EXPLORER_SSH_HOST }}
|
| 39 |
+
username: ${{ secrets.EXPLORER_SSH_USERNAME }}
|
| 40 |
+
key: ${{ secrets.EXPLORER_SSH_KEY }}
|
| 41 |
+
port: ${{ secrets.EXPLORER_SSH_PORT }}
|
| 42 |
+
script: |
|
| 43 |
+
sudo rm -rf local-ai/ || true
|
| 44 |
+
- name: copy file via ssh
|
| 45 |
+
uses: appleboy/scp-action@v1.0.0
|
| 46 |
+
with:
|
| 47 |
+
host: ${{ secrets.EXPLORER_SSH_HOST }}
|
| 48 |
+
username: ${{ secrets.EXPLORER_SSH_USERNAME }}
|
| 49 |
+
key: ${{ secrets.EXPLORER_SSH_KEY }}
|
| 50 |
+
port: ${{ secrets.EXPLORER_SSH_PORT }}
|
| 51 |
+
source: "local-ai"
|
| 52 |
+
overwrite: true
|
| 53 |
+
rm: true
|
| 54 |
+
target: ./local-ai
|
| 55 |
+
- name: restarting
|
| 56 |
+
uses: appleboy/ssh-action@v1.2.4
|
| 57 |
+
with:
|
| 58 |
+
host: ${{ secrets.EXPLORER_SSH_HOST }}
|
| 59 |
+
username: ${{ secrets.EXPLORER_SSH_USERNAME }}
|
| 60 |
+
key: ${{ secrets.EXPLORER_SSH_KEY }}
|
| 61 |
+
port: ${{ secrets.EXPLORER_SSH_PORT }}
|
| 62 |
+
script: |
|
| 63 |
+
sudo cp -rfv local-ai/local-ai /usr/bin/local-ai
|
| 64 |
+
sudo systemctl restart local-ai
|
.github/workflows/disabled/comment-pr.yaml
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Comment PRs
|
| 2 |
+
on:
|
| 3 |
+
pull_request_target:
|
| 4 |
+
|
| 5 |
+
jobs:
|
| 6 |
+
comment-pr:
|
| 7 |
+
env:
|
| 8 |
+
MODEL_NAME: hermes-2-theta-llama-3-8b
|
| 9 |
+
runs-on: ubuntu-latest
|
| 10 |
+
steps:
|
| 11 |
+
- name: Checkout code
|
| 12 |
+
uses: actions/checkout@v3
|
| 13 |
+
with:
|
| 14 |
+
ref: "${{ github.event.pull_request.merge_commit_sha }}"
|
| 15 |
+
fetch-depth: 0 # needed to checkout all branches for this Action to work
|
| 16 |
+
- uses: mudler/localai-github-action@v1
|
| 17 |
+
with:
|
| 18 |
+
model: 'hermes-2-theta-llama-3-8b' # Any from models.localai.io, or from huggingface.com with: "huggingface://<repository>/file"
|
| 19 |
+
# Check the PR diff using the current branch and the base branch of the PR
|
| 20 |
+
- uses: GrantBirki/git-diff-action@v2.7.0
|
| 21 |
+
id: git-diff-action
|
| 22 |
+
with:
|
| 23 |
+
json_diff_file_output: diff.json
|
| 24 |
+
raw_diff_file_output: diff.txt
|
| 25 |
+
file_output_only: "true"
|
| 26 |
+
base_branch: ${{ github.event.pull_request.base.sha }}
|
| 27 |
+
- name: Show diff
|
| 28 |
+
env:
|
| 29 |
+
DIFF: ${{ steps.git-diff-action.outputs.raw-diff-path }}
|
| 30 |
+
run: |
|
| 31 |
+
cat $DIFF
|
| 32 |
+
- name: Summarize
|
| 33 |
+
env:
|
| 34 |
+
DIFF: ${{ steps.git-diff-action.outputs.raw-diff-path }}
|
| 35 |
+
id: summarize
|
| 36 |
+
run: |
|
| 37 |
+
input="$(cat $DIFF)"
|
| 38 |
+
|
| 39 |
+
# Define the LocalAI API endpoint
|
| 40 |
+
API_URL="http://localhost:8080/chat/completions"
|
| 41 |
+
|
| 42 |
+
# Create a JSON payload using jq to handle special characters
|
| 43 |
+
json_payload=$(jq -n --arg input "$input" '{
|
| 44 |
+
model: "'$MODEL_NAME'",
|
| 45 |
+
messages: [
|
| 46 |
+
{
|
| 47 |
+
role: "system",
|
| 48 |
+
content: "You are LocalAI-bot in Github that helps understanding PRs and assess complexity. Explain what has changed in this PR diff and why"
|
| 49 |
+
},
|
| 50 |
+
{
|
| 51 |
+
role: "user",
|
| 52 |
+
content: $input
|
| 53 |
+
}
|
| 54 |
+
]
|
| 55 |
+
}')
|
| 56 |
+
|
| 57 |
+
# Send the request to LocalAI
|
| 58 |
+
response=$(curl -s -X POST $API_URL \
|
| 59 |
+
-H "Content-Type: application/json" \
|
| 60 |
+
-d "$json_payload")
|
| 61 |
+
|
| 62 |
+
# Extract the summary from the response
|
| 63 |
+
summary="$(echo $response | jq -r '.choices[0].message.content')"
|
| 64 |
+
|
| 65 |
+
# Print the summary
|
| 66 |
+
# -H "Authorization: Bearer $API_KEY" \
|
| 67 |
+
echo "Summary:"
|
| 68 |
+
echo "$summary"
|
| 69 |
+
echo "payload sent"
|
| 70 |
+
echo "$json_payload"
|
| 71 |
+
{
|
| 72 |
+
echo 'message<<EOF'
|
| 73 |
+
echo "$summary"
|
| 74 |
+
echo EOF
|
| 75 |
+
} >> "$GITHUB_OUTPUT"
|
| 76 |
+
docker logs --tail 10 local-ai
|
| 77 |
+
- uses: mshick/add-pr-comment@v2
|
| 78 |
+
if: always()
|
| 79 |
+
with:
|
| 80 |
+
repo-token: ${{ secrets.UPDATE_BOT_TOKEN }}
|
| 81 |
+
message: ${{ steps.summarize.outputs.message }}
|
| 82 |
+
message-failure: |
|
| 83 |
+
Uh oh! Could not analyze this PR, maybe it's too big?
|
.github/workflows/disabled/test-gpu.yml
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: 'GPU tests'
|
| 3 |
+
|
| 4 |
+
on:
|
| 5 |
+
pull_request:
|
| 6 |
+
push:
|
| 7 |
+
branches:
|
| 8 |
+
- master
|
| 9 |
+
tags:
|
| 10 |
+
- '*'
|
| 11 |
+
|
| 12 |
+
concurrency:
|
| 13 |
+
group: ci-gpu-tests-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
| 14 |
+
cancel-in-progress: true
|
| 15 |
+
|
| 16 |
+
jobs:
|
| 17 |
+
ubuntu-latest:
|
| 18 |
+
runs-on: gpu
|
| 19 |
+
strategy:
|
| 20 |
+
matrix:
|
| 21 |
+
go-version: ['1.21.x']
|
| 22 |
+
steps:
|
| 23 |
+
- name: Clone
|
| 24 |
+
uses: actions/checkout@v4
|
| 25 |
+
with:
|
| 26 |
+
submodules: true
|
| 27 |
+
- name: Setup Go ${{ matrix.go-version }}
|
| 28 |
+
uses: actions/setup-go@v4
|
| 29 |
+
with:
|
| 30 |
+
go-version: ${{ matrix.go-version }}
|
| 31 |
+
# You can test your matrix by printing the current Go version
|
| 32 |
+
- name: Display Go version
|
| 33 |
+
run: go version
|
| 34 |
+
- name: Dependencies
|
| 35 |
+
run: |
|
| 36 |
+
sudo apt-get update
|
| 37 |
+
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y make wget
|
| 38 |
+
- name: Build
|
| 39 |
+
run: |
|
| 40 |
+
if [ ! -e /run/systemd/system ]; then
|
| 41 |
+
sudo mkdir /run/systemd/system
|
| 42 |
+
fi
|
| 43 |
+
sudo mkdir -p /host/tests/${{ github.head_ref || github.ref }}
|
| 44 |
+
sudo chmod -R 777 /host/tests/${{ github.head_ref || github.ref }}
|
| 45 |
+
make \
|
| 46 |
+
TEST_DIR="/host/tests/${{ github.head_ref || github.ref }}" \
|
| 47 |
+
BUILD_TYPE=cublas \
|
| 48 |
+
prepare-e2e run-e2e-image test-e2e
|
| 49 |
+
- name: Release space from worker ♻
|
| 50 |
+
if: always()
|
| 51 |
+
run: |
|
| 52 |
+
sudo rm -rf build || true
|
| 53 |
+
sudo rm -rf bin || true
|
| 54 |
+
sudo rm -rf dist || true
|
| 55 |
+
sudo docker logs $(sudo docker ps -q --filter ancestor=localai-tests) > logs.txt
|
| 56 |
+
sudo cat logs.txt || true
|
| 57 |
+
sudo rm -rf logs.txt
|
| 58 |
+
make clean || true
|
| 59 |
+
make \
|
| 60 |
+
TEST_DIR="/host/tests/${{ github.head_ref || github.ref }}" \
|
| 61 |
+
teardown-e2e || true
|
| 62 |
+
sudo rm -rf /host/tests/${{ github.head_ref || github.ref }} || true
|
| 63 |
+
docker system prune -f -a --volumes || true
|
.github/workflows/gallery-agent.yaml
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Gallery Agent
|
| 2 |
+
on:
|
| 3 |
+
|
| 4 |
+
schedule:
|
| 5 |
+
- cron: '0 */3 * * *' # Run every 4 hours
|
| 6 |
+
workflow_dispatch:
|
| 7 |
+
inputs:
|
| 8 |
+
search_term:
|
| 9 |
+
description: 'Search term for models'
|
| 10 |
+
required: false
|
| 11 |
+
default: 'GGUF'
|
| 12 |
+
type: string
|
| 13 |
+
limit:
|
| 14 |
+
description: 'Maximum number of models to process'
|
| 15 |
+
required: false
|
| 16 |
+
default: '15'
|
| 17 |
+
type: string
|
| 18 |
+
quantization:
|
| 19 |
+
description: 'Preferred quantization format'
|
| 20 |
+
required: false
|
| 21 |
+
default: 'Q4_K_M'
|
| 22 |
+
type: string
|
| 23 |
+
max_models:
|
| 24 |
+
description: 'Maximum number of models to add to the gallery'
|
| 25 |
+
required: false
|
| 26 |
+
default: '1'
|
| 27 |
+
type: string
|
| 28 |
+
jobs:
|
| 29 |
+
gallery-agent:
|
| 30 |
+
runs-on: ubuntu-latest
|
| 31 |
+
steps:
|
| 32 |
+
- name: Checkout repository
|
| 33 |
+
uses: actions/checkout@v6
|
| 34 |
+
with:
|
| 35 |
+
token: ${{ secrets.GITHUB_TOKEN }}
|
| 36 |
+
|
| 37 |
+
- name: Set up Go
|
| 38 |
+
uses: actions/setup-go@v5
|
| 39 |
+
with:
|
| 40 |
+
go-version: '1.21'
|
| 41 |
+
- name: Proto Dependencies
|
| 42 |
+
run: |
|
| 43 |
+
# Install protoc
|
| 44 |
+
curl -L -s https://github.com/protocolbuffers/protobuf/releases/download/v26.1/protoc-26.1-linux-x86_64.zip -o protoc.zip && \
|
| 45 |
+
unzip -j -d /usr/local/bin protoc.zip bin/protoc && \
|
| 46 |
+
rm protoc.zip
|
| 47 |
+
go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.2
|
| 48 |
+
go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@1958fcbe2ca8bd93af633f11e97d44e567e945af
|
| 49 |
+
PATH="$PATH:$HOME/go/bin" make protogen-go
|
| 50 |
+
- uses: mudler/localai-github-action@v1.1
|
| 51 |
+
with:
|
| 52 |
+
model: 'https://huggingface.co/bartowski/Qwen_Qwen3-1.7B-GGUF'
|
| 53 |
+
|
| 54 |
+
- name: Run gallery agent
|
| 55 |
+
env:
|
| 56 |
+
#OPENAI_MODEL: ${{ secrets.OPENAI_MODEL }}
|
| 57 |
+
OPENAI_MODE: Qwen_Qwen3-1.7B-GGUF
|
| 58 |
+
OPENAI_BASE_URL: "http://localhost:8080"
|
| 59 |
+
OPENAI_KEY: ${{ secrets.OPENAI_KEY }}
|
| 60 |
+
#OPENAI_BASE_URL: ${{ secrets.OPENAI_BASE_URL }}
|
| 61 |
+
SEARCH_TERM: ${{ github.event.inputs.search_term || 'GGUF' }}
|
| 62 |
+
LIMIT: ${{ github.event.inputs.limit || '15' }}
|
| 63 |
+
QUANTIZATION: ${{ github.event.inputs.quantization || 'Q4_K_M' }}
|
| 64 |
+
MAX_MODELS: ${{ github.event.inputs.max_models || '1' }}
|
| 65 |
+
run: |
|
| 66 |
+
export GALLERY_INDEX_PATH=$PWD/gallery/index.yaml
|
| 67 |
+
go run ./.github/gallery-agent
|
| 68 |
+
|
| 69 |
+
- name: Check for changes
|
| 70 |
+
id: check_changes
|
| 71 |
+
run: |
|
| 72 |
+
if git diff --quiet gallery/index.yaml; then
|
| 73 |
+
echo "changes=false" >> $GITHUB_OUTPUT
|
| 74 |
+
echo "No changes detected in gallery/index.yaml"
|
| 75 |
+
else
|
| 76 |
+
echo "changes=true" >> $GITHUB_OUTPUT
|
| 77 |
+
echo "Changes detected in gallery/index.yaml"
|
| 78 |
+
git diff gallery/index.yaml
|
| 79 |
+
fi
|
| 80 |
+
|
| 81 |
+
- name: Read gallery agent summary
|
| 82 |
+
id: read_summary
|
| 83 |
+
if: steps.check_changes.outputs.changes == 'true'
|
| 84 |
+
run: |
|
| 85 |
+
if [ -f "./gallery-agent-summary.json" ]; then
|
| 86 |
+
echo "summary_exists=true" >> $GITHUB_OUTPUT
|
| 87 |
+
# Extract summary data using jq
|
| 88 |
+
echo "search_term=$(jq -r '.search_term' ./gallery-agent-summary.json)" >> $GITHUB_OUTPUT
|
| 89 |
+
echo "total_found=$(jq -r '.total_found' ./gallery-agent-summary.json)" >> $GITHUB_OUTPUT
|
| 90 |
+
echo "models_added=$(jq -r '.models_added' ./gallery-agent-summary.json)" >> $GITHUB_OUTPUT
|
| 91 |
+
echo "quantization=$(jq -r '.quantization' ./gallery-agent-summary.json)" >> $GITHUB_OUTPUT
|
| 92 |
+
echo "processing_time=$(jq -r '.processing_time' ./gallery-agent-summary.json)" >> $GITHUB_OUTPUT
|
| 93 |
+
|
| 94 |
+
# Create a formatted list of added models with URLs
|
| 95 |
+
added_models=$(jq -r 'range(0; .added_model_ids | length) as $i | "- [\(.added_model_ids[$i])](\(.added_model_urls[$i]))"' ./gallery-agent-summary.json | tr '\n' '\n')
|
| 96 |
+
echo "added_models<<EOF" >> $GITHUB_OUTPUT
|
| 97 |
+
echo "$added_models" >> $GITHUB_OUTPUT
|
| 98 |
+
echo "EOF" >> $GITHUB_OUTPUT
|
| 99 |
+
rm -f ./gallery-agent-summary.json
|
| 100 |
+
else
|
| 101 |
+
echo "summary_exists=false" >> $GITHUB_OUTPUT
|
| 102 |
+
fi
|
| 103 |
+
|
| 104 |
+
- name: Create Pull Request
|
| 105 |
+
if: steps.check_changes.outputs.changes == 'true'
|
| 106 |
+
uses: peter-evans/create-pull-request@v8
|
| 107 |
+
with:
|
| 108 |
+
token: ${{ secrets.UPDATE_BOT_TOKEN }}
|
| 109 |
+
push-to-fork: ci-forks/LocalAI
|
| 110 |
+
commit-message: 'chore(model gallery): :robot: add new models via gallery agent'
|
| 111 |
+
title: 'chore(model gallery): :robot: add ${{ steps.read_summary.outputs.models_added || 0 }} new models via gallery agent'
|
| 112 |
+
# Branch has to be unique so PRs are not overriding each other
|
| 113 |
+
branch-suffix: timestamp
|
| 114 |
+
body: |
|
| 115 |
+
This PR was automatically created by the gallery agent workflow.
|
| 116 |
+
|
| 117 |
+
**Summary:**
|
| 118 |
+
- **Search Term:** ${{ steps.read_summary.outputs.search_term || github.event.inputs.search_term || 'GGUF' }}
|
| 119 |
+
- **Models Found:** ${{ steps.read_summary.outputs.total_found || 'N/A' }}
|
| 120 |
+
- **Models Added:** ${{ steps.read_summary.outputs.models_added || '0' }}
|
| 121 |
+
- **Quantization:** ${{ steps.read_summary.outputs.quantization || github.event.inputs.quantization || 'Q4_K_M' }}
|
| 122 |
+
- **Processing Time:** ${{ steps.read_summary.outputs.processing_time || 'N/A' }}
|
| 123 |
+
|
| 124 |
+
**Added Models:**
|
| 125 |
+
${{ steps.read_summary.outputs.added_models || '- No models added' }}
|
| 126 |
+
|
| 127 |
+
**Workflow Details:**
|
| 128 |
+
- Triggered by: `${{ github.event_name }}`
|
| 129 |
+
- Run ID: `${{ github.run_id }}`
|
| 130 |
+
- Commit: `${{ github.sha }}`
|
| 131 |
+
signoff: true
|
| 132 |
+
delete-branch: true
|
.github/workflows/generate_grpc_cache.yaml
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: 'generate and publish GRPC docker caches'
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
workflow_dispatch:
|
| 5 |
+
|
| 6 |
+
schedule:
|
| 7 |
+
# daily at midnight
|
| 8 |
+
- cron: '0 0 * * *'
|
| 9 |
+
|
| 10 |
+
concurrency:
|
| 11 |
+
group: grpc-cache-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
| 12 |
+
cancel-in-progress: true
|
| 13 |
+
|
| 14 |
+
jobs:
|
| 15 |
+
generate_caches:
|
| 16 |
+
strategy:
|
| 17 |
+
matrix:
|
| 18 |
+
include:
|
| 19 |
+
- grpc-base-image: ubuntu:24.04
|
| 20 |
+
runs-on: 'ubuntu-latest'
|
| 21 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 22 |
+
runs-on: ${{matrix.runs-on}}
|
| 23 |
+
steps:
|
| 24 |
+
- name: Release space from worker
|
| 25 |
+
if: matrix.runs-on == 'ubuntu-latest'
|
| 26 |
+
run: |
|
| 27 |
+
echo "Listing top largest packages"
|
| 28 |
+
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
| 29 |
+
head -n 30 <<< "${pkgs}"
|
| 30 |
+
echo
|
| 31 |
+
df -h
|
| 32 |
+
echo
|
| 33 |
+
sudo apt-get remove -y '^llvm-.*|^libllvm.*' || true
|
| 34 |
+
sudo apt-get remove --auto-remove android-sdk-platform-tools || true
|
| 35 |
+
sudo apt-get purge --auto-remove android-sdk-platform-tools || true
|
| 36 |
+
sudo rm -rf /usr/local/lib/android
|
| 37 |
+
sudo apt-get remove -y '^dotnet-.*|^aspnetcore-.*' || true
|
| 38 |
+
sudo rm -rf /usr/share/dotnet
|
| 39 |
+
sudo apt-get remove -y '^mono-.*' || true
|
| 40 |
+
sudo apt-get remove -y '^ghc-.*' || true
|
| 41 |
+
sudo apt-get remove -y '.*jdk.*|.*jre.*' || true
|
| 42 |
+
sudo apt-get remove -y 'php.*' || true
|
| 43 |
+
sudo apt-get remove -y hhvm powershell firefox monodoc-manual msbuild || true
|
| 44 |
+
sudo apt-get remove -y '^google-.*' || true
|
| 45 |
+
sudo apt-get remove -y azure-cli || true
|
| 46 |
+
sudo apt-get remove -y '^mongo.*-.*|^postgresql-.*|^mysql-.*|^mssql-.*' || true
|
| 47 |
+
sudo apt-get remove -y '^gfortran-.*' || true
|
| 48 |
+
sudo apt-get remove -y microsoft-edge-stable || true
|
| 49 |
+
sudo apt-get remove -y firefox || true
|
| 50 |
+
sudo apt-get remove -y powershell || true
|
| 51 |
+
sudo apt-get remove -y r-base-core || true
|
| 52 |
+
sudo apt-get autoremove -y
|
| 53 |
+
sudo apt-get clean
|
| 54 |
+
echo
|
| 55 |
+
echo "Listing top largest packages"
|
| 56 |
+
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
| 57 |
+
head -n 30 <<< "${pkgs}"
|
| 58 |
+
echo
|
| 59 |
+
sudo rm -rfv build || true
|
| 60 |
+
sudo rm -rf /usr/share/dotnet || true
|
| 61 |
+
sudo rm -rf /opt/ghc || true
|
| 62 |
+
sudo rm -rf "/usr/local/share/boost" || true
|
| 63 |
+
sudo rm -rf "$AGENT_TOOLSDIRECTORY" || true
|
| 64 |
+
df -h
|
| 65 |
+
|
| 66 |
+
- name: Set up QEMU
|
| 67 |
+
uses: docker/setup-qemu-action@master
|
| 68 |
+
with:
|
| 69 |
+
platforms: all
|
| 70 |
+
|
| 71 |
+
- name: Set up Docker Buildx
|
| 72 |
+
id: buildx
|
| 73 |
+
uses: docker/setup-buildx-action@master
|
| 74 |
+
|
| 75 |
+
- name: Checkout
|
| 76 |
+
uses: actions/checkout@v6
|
| 77 |
+
|
| 78 |
+
- name: Cache GRPC
|
| 79 |
+
uses: docker/build-push-action@v6
|
| 80 |
+
with:
|
| 81 |
+
builder: ${{ steps.buildx.outputs.name }}
|
| 82 |
+
# The build-args MUST be an EXACT match between the image cache and other workflow steps that want to use that cache.
|
| 83 |
+
# This means that even the MAKEFLAGS have to be an EXACT match.
|
| 84 |
+
# If the build-args are not an EXACT match, it will result in a cache miss, which will require GRPC to be built from scratch.
|
| 85 |
+
build-args: |
|
| 86 |
+
GRPC_BASE_IMAGE=${{ matrix.grpc-base-image }}
|
| 87 |
+
GRPC_MAKEFLAGS=--jobs=4 --output-sync=target
|
| 88 |
+
GRPC_VERSION=v1.65.0
|
| 89 |
+
context: .
|
| 90 |
+
file: ./Dockerfile
|
| 91 |
+
cache-to: type=gha,ignore-error=true
|
| 92 |
+
cache-from: type=gha
|
| 93 |
+
target: grpc
|
| 94 |
+
platforms: ${{ matrix.platforms }}
|
| 95 |
+
push: false
|
.github/workflows/generate_intel_image.yaml
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: 'generate and publish intel docker caches'
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
workflow_dispatch:
|
| 5 |
+
push:
|
| 6 |
+
branches:
|
| 7 |
+
- master
|
| 8 |
+
|
| 9 |
+
concurrency:
|
| 10 |
+
group: intel-cache-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
| 11 |
+
cancel-in-progress: true
|
| 12 |
+
|
| 13 |
+
jobs:
|
| 14 |
+
generate_caches:
|
| 15 |
+
strategy:
|
| 16 |
+
matrix:
|
| 17 |
+
include:
|
| 18 |
+
- base-image: intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04
|
| 19 |
+
runs-on: 'arc-runner-set'
|
| 20 |
+
platforms: 'linux/amd64'
|
| 21 |
+
runs-on: ${{matrix.runs-on}}
|
| 22 |
+
steps:
|
| 23 |
+
- name: Set up QEMU
|
| 24 |
+
uses: docker/setup-qemu-action@master
|
| 25 |
+
with:
|
| 26 |
+
platforms: all
|
| 27 |
+
- name: Login to DockerHub
|
| 28 |
+
if: github.event_name != 'pull_request'
|
| 29 |
+
uses: docker/login-action@v3
|
| 30 |
+
with:
|
| 31 |
+
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
| 32 |
+
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
| 33 |
+
|
| 34 |
+
- name: Login to quay
|
| 35 |
+
if: github.event_name != 'pull_request'
|
| 36 |
+
uses: docker/login-action@v3
|
| 37 |
+
with:
|
| 38 |
+
registry: quay.io
|
| 39 |
+
username: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
| 40 |
+
password: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
| 41 |
+
- name: Set up Docker Buildx
|
| 42 |
+
id: buildx
|
| 43 |
+
uses: docker/setup-buildx-action@master
|
| 44 |
+
|
| 45 |
+
- name: Checkout
|
| 46 |
+
uses: actions/checkout@v6
|
| 47 |
+
|
| 48 |
+
- name: Cache Intel images
|
| 49 |
+
uses: docker/build-push-action@v6
|
| 50 |
+
with:
|
| 51 |
+
builder: ${{ steps.buildx.outputs.name }}
|
| 52 |
+
build-args: |
|
| 53 |
+
BASE_IMAGE=${{ matrix.base-image }}
|
| 54 |
+
context: .
|
| 55 |
+
file: ./Dockerfile
|
| 56 |
+
tags: quay.io/go-skynet/intel-oneapi-base:24.04
|
| 57 |
+
push: true
|
| 58 |
+
target: intel
|
| 59 |
+
platforms: ${{ matrix.platforms }}
|
.github/workflows/image-pr.yml
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: 'build container images tests'
|
| 3 |
+
|
| 4 |
+
on:
|
| 5 |
+
pull_request:
|
| 6 |
+
|
| 7 |
+
concurrency:
|
| 8 |
+
group: ci-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
| 9 |
+
cancel-in-progress: true
|
| 10 |
+
|
| 11 |
+
jobs:
|
| 12 |
+
image-build:
|
| 13 |
+
uses: ./.github/workflows/image_build.yml
|
| 14 |
+
with:
|
| 15 |
+
tag-latest: ${{ matrix.tag-latest }}
|
| 16 |
+
tag-suffix: ${{ matrix.tag-suffix }}
|
| 17 |
+
build-type: ${{ matrix.build-type }}
|
| 18 |
+
cuda-major-version: ${{ matrix.cuda-major-version }}
|
| 19 |
+
cuda-minor-version: ${{ matrix.cuda-minor-version }}
|
| 20 |
+
platforms: ${{ matrix.platforms }}
|
| 21 |
+
runs-on: ${{ matrix.runs-on }}
|
| 22 |
+
base-image: ${{ matrix.base-image }}
|
| 23 |
+
grpc-base-image: ${{ matrix.grpc-base-image }}
|
| 24 |
+
makeflags: ${{ matrix.makeflags }}
|
| 25 |
+
ubuntu-version: ${{ matrix.ubuntu-version }}
|
| 26 |
+
secrets:
|
| 27 |
+
dockerUsername: ${{ secrets.DOCKERHUB_USERNAME }}
|
| 28 |
+
dockerPassword: ${{ secrets.DOCKERHUB_PASSWORD }}
|
| 29 |
+
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
| 30 |
+
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
| 31 |
+
strategy:
|
| 32 |
+
# Pushing with all jobs in parallel
|
| 33 |
+
# eats the bandwidth of all the nodes
|
| 34 |
+
max-parallel: ${{ github.event_name != 'pull_request' && 4 || 8 }}
|
| 35 |
+
fail-fast: false
|
| 36 |
+
matrix:
|
| 37 |
+
include:
|
| 38 |
+
- build-type: 'cublas'
|
| 39 |
+
cuda-major-version: "12"
|
| 40 |
+
cuda-minor-version: "9"
|
| 41 |
+
platforms: 'linux/amd64'
|
| 42 |
+
tag-latest: 'false'
|
| 43 |
+
tag-suffix: '-gpu-nvidia-cuda-12'
|
| 44 |
+
runs-on: 'ubuntu-latest'
|
| 45 |
+
base-image: "ubuntu:24.04"
|
| 46 |
+
makeflags: "--jobs=3 --output-sync=target"
|
| 47 |
+
ubuntu-version: '2404'
|
| 48 |
+
- build-type: 'cublas'
|
| 49 |
+
cuda-major-version: "13"
|
| 50 |
+
cuda-minor-version: "0"
|
| 51 |
+
platforms: 'linux/amd64'
|
| 52 |
+
tag-latest: 'false'
|
| 53 |
+
tag-suffix: '-gpu-nvidia-cuda-13'
|
| 54 |
+
runs-on: 'ubuntu-latest'
|
| 55 |
+
base-image: "ubuntu:22.04"
|
| 56 |
+
makeflags: "--jobs=3 --output-sync=target"
|
| 57 |
+
ubuntu-version: '2404'
|
| 58 |
+
- build-type: 'hipblas'
|
| 59 |
+
platforms: 'linux/amd64'
|
| 60 |
+
tag-latest: 'false'
|
| 61 |
+
tag-suffix: '-hipblas'
|
| 62 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 63 |
+
grpc-base-image: "ubuntu:24.04"
|
| 64 |
+
runs-on: 'ubuntu-latest'
|
| 65 |
+
makeflags: "--jobs=3 --output-sync=target"
|
| 66 |
+
ubuntu-version: '2404'
|
| 67 |
+
- build-type: 'sycl'
|
| 68 |
+
platforms: 'linux/amd64'
|
| 69 |
+
tag-latest: 'false'
|
| 70 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 71 |
+
grpc-base-image: "ubuntu:24.04"
|
| 72 |
+
tag-suffix: 'sycl'
|
| 73 |
+
runs-on: 'ubuntu-latest'
|
| 74 |
+
makeflags: "--jobs=3 --output-sync=target"
|
| 75 |
+
ubuntu-version: '2404'
|
| 76 |
+
- build-type: 'vulkan'
|
| 77 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 78 |
+
tag-latest: 'false'
|
| 79 |
+
tag-suffix: '-vulkan-core'
|
| 80 |
+
runs-on: 'ubuntu-latest'
|
| 81 |
+
base-image: "ubuntu:24.04"
|
| 82 |
+
makeflags: "--jobs=4 --output-sync=target"
|
| 83 |
+
ubuntu-version: '2404'
|
| 84 |
+
- build-type: 'cublas'
|
| 85 |
+
cuda-major-version: "13"
|
| 86 |
+
cuda-minor-version: "0"
|
| 87 |
+
platforms: 'linux/arm64'
|
| 88 |
+
tag-latest: 'false'
|
| 89 |
+
tag-suffix: '-nvidia-l4t-arm64-cuda-13'
|
| 90 |
+
base-image: "ubuntu:24.04"
|
| 91 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 92 |
+
makeflags: "--jobs=4 --output-sync=target"
|
| 93 |
+
skip-drivers: 'false'
|
| 94 |
+
ubuntu-version: '2404'
|
| 95 |
+
|
.github/workflows/image.yml
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: 'build container images'
|
| 3 |
+
|
| 4 |
+
on:
|
| 5 |
+
push:
|
| 6 |
+
branches:
|
| 7 |
+
- master
|
| 8 |
+
tags:
|
| 9 |
+
- '*'
|
| 10 |
+
|
| 11 |
+
concurrency:
|
| 12 |
+
group: ci-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
| 13 |
+
cancel-in-progress: true
|
| 14 |
+
|
| 15 |
+
jobs:
|
| 16 |
+
hipblas-jobs:
|
| 17 |
+
uses: ./.github/workflows/image_build.yml
|
| 18 |
+
with:
|
| 19 |
+
tag-latest: ${{ matrix.tag-latest }}
|
| 20 |
+
tag-suffix: ${{ matrix.tag-suffix }}
|
| 21 |
+
build-type: ${{ matrix.build-type }}
|
| 22 |
+
cuda-major-version: ${{ matrix.cuda-major-version }}
|
| 23 |
+
cuda-minor-version: ${{ matrix.cuda-minor-version }}
|
| 24 |
+
platforms: ${{ matrix.platforms }}
|
| 25 |
+
runs-on: ${{ matrix.runs-on }}
|
| 26 |
+
base-image: ${{ matrix.base-image }}
|
| 27 |
+
grpc-base-image: ${{ matrix.grpc-base-image }}
|
| 28 |
+
aio: ${{ matrix.aio }}
|
| 29 |
+
makeflags: ${{ matrix.makeflags }}
|
| 30 |
+
ubuntu-version: ${{ matrix.ubuntu-version }}
|
| 31 |
+
ubuntu-codename: ${{ matrix.ubuntu-codename }}
|
| 32 |
+
secrets:
|
| 33 |
+
dockerUsername: ${{ secrets.DOCKERHUB_USERNAME }}
|
| 34 |
+
dockerPassword: ${{ secrets.DOCKERHUB_PASSWORD }}
|
| 35 |
+
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
| 36 |
+
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
| 37 |
+
strategy:
|
| 38 |
+
matrix:
|
| 39 |
+
include:
|
| 40 |
+
- build-type: 'hipblas'
|
| 41 |
+
platforms: 'linux/amd64'
|
| 42 |
+
tag-latest: 'auto'
|
| 43 |
+
tag-suffix: '-gpu-hipblas'
|
| 44 |
+
base-image: "rocm/dev-ubuntu-24.04:6.4.4"
|
| 45 |
+
grpc-base-image: "ubuntu:24.04"
|
| 46 |
+
runs-on: 'ubuntu-latest'
|
| 47 |
+
makeflags: "--jobs=3 --output-sync=target"
|
| 48 |
+
aio: "-aio-gpu-hipblas"
|
| 49 |
+
ubuntu-version: '2404'
|
| 50 |
+
ubuntu-codename: 'noble'
|
| 51 |
+
|
| 52 |
+
core-image-build:
|
| 53 |
+
uses: ./.github/workflows/image_build.yml
|
| 54 |
+
with:
|
| 55 |
+
tag-latest: ${{ matrix.tag-latest }}
|
| 56 |
+
tag-suffix: ${{ matrix.tag-suffix }}
|
| 57 |
+
build-type: ${{ matrix.build-type }}
|
| 58 |
+
cuda-major-version: ${{ matrix.cuda-major-version }}
|
| 59 |
+
cuda-minor-version: ${{ matrix.cuda-minor-version }}
|
| 60 |
+
platforms: ${{ matrix.platforms }}
|
| 61 |
+
runs-on: ${{ matrix.runs-on }}
|
| 62 |
+
aio: ${{ matrix.aio }}
|
| 63 |
+
base-image: ${{ matrix.base-image }}
|
| 64 |
+
grpc-base-image: ${{ matrix.grpc-base-image }}
|
| 65 |
+
makeflags: ${{ matrix.makeflags }}
|
| 66 |
+
skip-drivers: ${{ matrix.skip-drivers }}
|
| 67 |
+
ubuntu-version: ${{ matrix.ubuntu-version }}
|
| 68 |
+
ubuntu-codename: ${{ matrix.ubuntu-codename }}
|
| 69 |
+
secrets:
|
| 70 |
+
dockerUsername: ${{ secrets.DOCKERHUB_USERNAME }}
|
| 71 |
+
dockerPassword: ${{ secrets.DOCKERHUB_PASSWORD }}
|
| 72 |
+
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
| 73 |
+
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
| 74 |
+
strategy:
|
| 75 |
+
#max-parallel: ${{ github.event_name != 'pull_request' && 2 || 4 }}
|
| 76 |
+
matrix:
|
| 77 |
+
include:
|
| 78 |
+
- build-type: ''
|
| 79 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 80 |
+
tag-latest: 'auto'
|
| 81 |
+
tag-suffix: ''
|
| 82 |
+
base-image: "ubuntu:24.04"
|
| 83 |
+
runs-on: 'ubuntu-latest'
|
| 84 |
+
aio: "-aio-cpu"
|
| 85 |
+
makeflags: "--jobs=4 --output-sync=target"
|
| 86 |
+
skip-drivers: 'false'
|
| 87 |
+
ubuntu-version: '2404'
|
| 88 |
+
ubuntu-codename: 'noble'
|
| 89 |
+
- build-type: 'cublas'
|
| 90 |
+
cuda-major-version: "12"
|
| 91 |
+
cuda-minor-version: "9"
|
| 92 |
+
platforms: 'linux/amd64'
|
| 93 |
+
tag-latest: 'auto'
|
| 94 |
+
tag-suffix: '-gpu-nvidia-cuda-12'
|
| 95 |
+
runs-on: 'ubuntu-latest'
|
| 96 |
+
base-image: "ubuntu:24.04"
|
| 97 |
+
skip-drivers: 'false'
|
| 98 |
+
makeflags: "--jobs=4 --output-sync=target"
|
| 99 |
+
aio: "-aio-gpu-nvidia-cuda-12"
|
| 100 |
+
ubuntu-version: '2404'
|
| 101 |
+
ubuntu-codename: 'noble'
|
| 102 |
+
- build-type: 'cublas'
|
| 103 |
+
cuda-major-version: "13"
|
| 104 |
+
cuda-minor-version: "0"
|
| 105 |
+
platforms: 'linux/amd64'
|
| 106 |
+
tag-latest: 'auto'
|
| 107 |
+
tag-suffix: '-gpu-nvidia-cuda-13'
|
| 108 |
+
runs-on: 'ubuntu-latest'
|
| 109 |
+
base-image: "ubuntu:22.04"
|
| 110 |
+
skip-drivers: 'false'
|
| 111 |
+
makeflags: "--jobs=4 --output-sync=target"
|
| 112 |
+
aio: "-aio-gpu-nvidia-cuda-13"
|
| 113 |
+
ubuntu-version: '2404'
|
| 114 |
+
ubuntu-codename: 'noble'
|
| 115 |
+
- build-type: 'vulkan'
|
| 116 |
+
platforms: 'linux/amd64,linux/arm64'
|
| 117 |
+
tag-latest: 'auto'
|
| 118 |
+
tag-suffix: '-gpu-vulkan'
|
| 119 |
+
runs-on: 'ubuntu-latest'
|
| 120 |
+
base-image: "ubuntu:24.04"
|
| 121 |
+
skip-drivers: 'false'
|
| 122 |
+
makeflags: "--jobs=4 --output-sync=target"
|
| 123 |
+
aio: "-aio-gpu-vulkan"
|
| 124 |
+
ubuntu-version: '2404'
|
| 125 |
+
ubuntu-codename: 'noble'
|
| 126 |
+
- build-type: 'intel'
|
| 127 |
+
platforms: 'linux/amd64'
|
| 128 |
+
tag-latest: 'auto'
|
| 129 |
+
base-image: "intel/oneapi-basekit:2025.3.0-0-devel-ubuntu24.04"
|
| 130 |
+
grpc-base-image: "ubuntu:24.04"
|
| 131 |
+
tag-suffix: '-gpu-intel'
|
| 132 |
+
runs-on: 'ubuntu-latest'
|
| 133 |
+
makeflags: "--jobs=3 --output-sync=target"
|
| 134 |
+
aio: "-aio-gpu-intel"
|
| 135 |
+
ubuntu-version: '2404'
|
| 136 |
+
ubuntu-codename: 'noble'
|
| 137 |
+
|
| 138 |
+
gh-runner:
|
| 139 |
+
uses: ./.github/workflows/image_build.yml
|
| 140 |
+
with:
|
| 141 |
+
tag-latest: ${{ matrix.tag-latest }}
|
| 142 |
+
tag-suffix: ${{ matrix.tag-suffix }}
|
| 143 |
+
build-type: ${{ matrix.build-type }}
|
| 144 |
+
cuda-major-version: ${{ matrix.cuda-major-version }}
|
| 145 |
+
cuda-minor-version: ${{ matrix.cuda-minor-version }}
|
| 146 |
+
platforms: ${{ matrix.platforms }}
|
| 147 |
+
runs-on: ${{ matrix.runs-on }}
|
| 148 |
+
aio: ${{ matrix.aio }}
|
| 149 |
+
base-image: ${{ matrix.base-image }}
|
| 150 |
+
grpc-base-image: ${{ matrix.grpc-base-image }}
|
| 151 |
+
makeflags: ${{ matrix.makeflags }}
|
| 152 |
+
skip-drivers: ${{ matrix.skip-drivers }}
|
| 153 |
+
ubuntu-version: ${{ matrix.ubuntu-version }}
|
| 154 |
+
ubuntu-codename: ${{ matrix.ubuntu-codename }}
|
| 155 |
+
secrets:
|
| 156 |
+
dockerUsername: ${{ secrets.DOCKERHUB_USERNAME }}
|
| 157 |
+
dockerPassword: ${{ secrets.DOCKERHUB_PASSWORD }}
|
| 158 |
+
quayUsername: ${{ secrets.LOCALAI_REGISTRY_USERNAME }}
|
| 159 |
+
quayPassword: ${{ secrets.LOCALAI_REGISTRY_PASSWORD }}
|
| 160 |
+
strategy:
|
| 161 |
+
matrix:
|
| 162 |
+
include:
|
| 163 |
+
- build-type: 'cublas'
|
| 164 |
+
cuda-major-version: "12"
|
| 165 |
+
cuda-minor-version: "0"
|
| 166 |
+
platforms: 'linux/arm64'
|
| 167 |
+
tag-latest: 'auto'
|
| 168 |
+
tag-suffix: '-nvidia-l4t-arm64'
|
| 169 |
+
base-image: "nvcr.io/nvidia/l4t-jetpack:r36.4.0"
|
| 170 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 171 |
+
makeflags: "--jobs=4 --output-sync=target"
|
| 172 |
+
skip-drivers: 'true'
|
| 173 |
+
ubuntu-version: "2204"
|
| 174 |
+
ubuntu-codename: 'jammy'
|
| 175 |
+
- build-type: 'cublas'
|
| 176 |
+
cuda-major-version: "13"
|
| 177 |
+
cuda-minor-version: "0"
|
| 178 |
+
platforms: 'linux/arm64'
|
| 179 |
+
tag-latest: 'auto'
|
| 180 |
+
tag-suffix: '-nvidia-l4t-arm64-cuda-13'
|
| 181 |
+
base-image: "ubuntu:24.04"
|
| 182 |
+
runs-on: 'ubuntu-24.04-arm'
|
| 183 |
+
makeflags: "--jobs=4 --output-sync=target"
|
| 184 |
+
skip-drivers: 'false'
|
| 185 |
+
ubuntu-version: '2404'
|
| 186 |
+
ubuntu-codename: 'noble'
|
| 187 |
+
|
.github/workflows/image_build.yml
ADDED
|
@@ -0,0 +1,327 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: 'build container images (reusable)'
|
| 3 |
+
|
| 4 |
+
on:
|
| 5 |
+
workflow_call:
|
| 6 |
+
inputs:
|
| 7 |
+
base-image:
|
| 8 |
+
description: 'Base image'
|
| 9 |
+
required: true
|
| 10 |
+
type: string
|
| 11 |
+
grpc-base-image:
|
| 12 |
+
description: 'GRPC Base image, must be a compatible image with base-image'
|
| 13 |
+
required: false
|
| 14 |
+
default: ''
|
| 15 |
+
type: string
|
| 16 |
+
build-type:
|
| 17 |
+
description: 'Build type'
|
| 18 |
+
default: ''
|
| 19 |
+
type: string
|
| 20 |
+
cuda-major-version:
|
| 21 |
+
description: 'CUDA major version'
|
| 22 |
+
default: "12"
|
| 23 |
+
type: string
|
| 24 |
+
cuda-minor-version:
|
| 25 |
+
description: 'CUDA minor version'
|
| 26 |
+
default: "9"
|
| 27 |
+
type: string
|
| 28 |
+
platforms:
|
| 29 |
+
description: 'Platforms'
|
| 30 |
+
default: ''
|
| 31 |
+
type: string
|
| 32 |
+
tag-latest:
|
| 33 |
+
description: 'Tag latest'
|
| 34 |
+
default: ''
|
| 35 |
+
type: string
|
| 36 |
+
tag-suffix:
|
| 37 |
+
description: 'Tag suffix'
|
| 38 |
+
default: ''
|
| 39 |
+
type: string
|
| 40 |
+
skip-drivers:
|
| 41 |
+
description: 'Skip drivers by default'
|
| 42 |
+
default: 'false'
|
| 43 |
+
type: string
|
| 44 |
+
runs-on:
|
| 45 |
+
description: 'Runs on'
|
| 46 |
+
required: true
|
| 47 |
+
default: ''
|
| 48 |
+
type: string
|
| 49 |
+
makeflags:
|
| 50 |
+
description: 'Make Flags'
|
| 51 |
+
required: false
|
| 52 |
+
default: '--jobs=4 --output-sync=target'
|
| 53 |
+
type: string
|
| 54 |
+
aio:
|
| 55 |
+
description: 'AIO Image Name'
|
| 56 |
+
required: false
|
| 57 |
+
default: ''
|
| 58 |
+
type: string
|
| 59 |
+
ubuntu-version:
|
| 60 |
+
description: 'Ubuntu version'
|
| 61 |
+
required: false
|
| 62 |
+
default: '2204'
|
| 63 |
+
type: string
|
| 64 |
+
ubuntu-codename:
|
| 65 |
+
description: 'Ubuntu codename'
|
| 66 |
+
required: false
|
| 67 |
+
default: 'noble'
|
| 68 |
+
type: string
|
| 69 |
+
secrets:
|
| 70 |
+
dockerUsername:
|
| 71 |
+
required: true
|
| 72 |
+
dockerPassword:
|
| 73 |
+
required: true
|
| 74 |
+
quayUsername:
|
| 75 |
+
required: true
|
| 76 |
+
quayPassword:
|
| 77 |
+
required: true
|
| 78 |
+
jobs:
|
| 79 |
+
reusable_image-build:
|
| 80 |
+
runs-on: ${{ inputs.runs-on }}
|
| 81 |
+
steps:
|
| 82 |
+
|
| 83 |
+
- name: Free Disk Space (Ubuntu)
|
| 84 |
+
if: inputs.runs-on == 'ubuntu-latest'
|
| 85 |
+
uses: jlumbroso/free-disk-space@main
|
| 86 |
+
with:
|
| 87 |
+
# this might remove tools that are actually needed,
|
| 88 |
+
# if set to "true" but frees about 6 GB
|
| 89 |
+
tool-cache: true
|
| 90 |
+
# all of these default to true, but feel free to set to
|
| 91 |
+
# "false" if necessary for your workflow
|
| 92 |
+
android: true
|
| 93 |
+
dotnet: true
|
| 94 |
+
haskell: true
|
| 95 |
+
large-packages: true
|
| 96 |
+
docker-images: true
|
| 97 |
+
swap-storage: true
|
| 98 |
+
- name: Force Install GIT latest
|
| 99 |
+
run: |
|
| 100 |
+
sudo apt-get update \
|
| 101 |
+
&& sudo apt-get install -y software-properties-common \
|
| 102 |
+
&& sudo apt-get update \
|
| 103 |
+
&& sudo add-apt-repository -y ppa:git-core/ppa \
|
| 104 |
+
&& sudo apt-get update \
|
| 105 |
+
&& sudo apt-get install -y git
|
| 106 |
+
- name: Checkout
|
| 107 |
+
uses: actions/checkout@v6
|
| 108 |
+
|
| 109 |
+
- name: Release space from worker
|
| 110 |
+
if: inputs.runs-on == 'ubuntu-latest'
|
| 111 |
+
run: |
|
| 112 |
+
echo "Listing top largest packages"
|
| 113 |
+
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
| 114 |
+
head -n 30 <<< "${pkgs}"
|
| 115 |
+
echo
|
| 116 |
+
df -h
|
| 117 |
+
echo
|
| 118 |
+
sudo apt-get remove -y '^llvm-.*|^libllvm.*' || true
|
| 119 |
+
sudo apt-get remove --auto-remove android-sdk-platform-tools snapd || true
|
| 120 |
+
sudo apt-get purge --auto-remove android-sdk-platform-tools snapd || true
|
| 121 |
+
sudo rm -rf /usr/local/lib/android
|
| 122 |
+
sudo apt-get remove -y '^dotnet-.*|^aspnetcore-.*' || true
|
| 123 |
+
sudo rm -rf /usr/share/dotnet
|
| 124 |
+
sudo apt-get remove -y '^mono-.*' || true
|
| 125 |
+
sudo apt-get remove -y '^ghc-.*' || true
|
| 126 |
+
sudo apt-get remove -y '.*jdk.*|.*jre.*' || true
|
| 127 |
+
sudo apt-get remove -y 'php.*' || true
|
| 128 |
+
sudo apt-get remove -y hhvm powershell firefox monodoc-manual msbuild || true
|
| 129 |
+
sudo apt-get remove -y '^google-.*' || true
|
| 130 |
+
sudo apt-get remove -y azure-cli || true
|
| 131 |
+
sudo apt-get remove -y '^mongo.*-.*|^postgresql-.*|^mysql-.*|^mssql-.*' || true
|
| 132 |
+
sudo apt-get remove -y '^gfortran-.*' || true
|
| 133 |
+
sudo apt-get remove -y microsoft-edge-stable || true
|
| 134 |
+
sudo apt-get remove -y firefox || true
|
| 135 |
+
sudo apt-get remove -y powershell || true
|
| 136 |
+
sudo apt-get remove -y r-base-core || true
|
| 137 |
+
sudo apt-get autoremove -y
|
| 138 |
+
sudo apt-get clean
|
| 139 |
+
echo
|
| 140 |
+
echo "Listing top largest packages"
|
| 141 |
+
pkgs=$(dpkg-query -Wf '${Installed-Size}\t${Package}\t${Status}\n' | awk '$NF == "installed"{print $1 "\t" $2}' | sort -nr)
|
| 142 |
+
head -n 30 <<< "${pkgs}"
|
| 143 |
+
echo
|
| 144 |
+
sudo rm -rfv build || true
|
| 145 |
+
sudo rm -rf /usr/share/dotnet || true
|
| 146 |
+
sudo rm -rf /opt/ghc || true
|
| 147 |
+
sudo rm -rf "/usr/local/share/boost" || true
|
| 148 |
+
sudo rm -rf "$AGENT_TOOLSDIRECTORY" || true
|
| 149 |
+
df -h
|
| 150 |
+
|
| 151 |
+
- name: Docker meta
|
| 152 |
+
id: meta
|
| 153 |
+
if: github.event_name != 'pull_request'
|
| 154 |
+
uses: docker/metadata-action@v5
|
| 155 |
+
with:
|
| 156 |
+
images: |
|
| 157 |
+
quay.io/go-skynet/local-ai
|
| 158 |
+
localai/localai
|
| 159 |
+
tags: |
|
| 160 |
+
type=ref,event=branch
|
| 161 |
+
type=semver,pattern={{raw}}
|
| 162 |
+
type=sha
|
| 163 |
+
flavor: |
|
| 164 |
+
latest=${{ inputs.tag-latest }}
|
| 165 |
+
suffix=${{ inputs.tag-suffix }},onlatest=true
|
| 166 |
+
- name: Docker meta for PR
|
| 167 |
+
id: meta_pull_request
|
| 168 |
+
if: github.event_name == 'pull_request'
|
| 169 |
+
uses: docker/metadata-action@v5
|
| 170 |
+
with:
|
| 171 |
+
images: |
|
| 172 |
+
quay.io/go-skynet/ci-tests
|
| 173 |
+
tags: |
|
| 174 |
+
type=ref,event=branch,suffix=localai${{ github.event.number }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
| 175 |
+
type=semver,pattern={{raw}},suffix=localai${{ github.event.number }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
| 176 |
+
type=sha,suffix=localai${{ github.event.number }}-${{ inputs.build-type }}-${{ inputs.cuda-major-version }}-${{ inputs.cuda-minor-version }}
|
| 177 |
+
flavor: |
|
| 178 |
+
latest=${{ inputs.tag-latest }}
|
| 179 |
+
suffix=${{ inputs.tag-suffix }}
|
| 180 |
+
- name: Docker meta AIO (quay.io)
|
| 181 |
+
if: inputs.aio != ''
|
| 182 |
+
id: meta_aio
|
| 183 |
+
uses: docker/metadata-action@v5
|
| 184 |
+
with:
|
| 185 |
+
images: |
|
| 186 |
+
quay.io/go-skynet/local-ai
|
| 187 |
+
tags: |
|
| 188 |
+
type=ref,event=branch
|
| 189 |
+
type=semver,pattern={{raw}}
|
| 190 |
+
flavor: |
|
| 191 |
+
latest=${{ inputs.tag-latest }}
|
| 192 |
+
suffix=${{ inputs.aio }},onlatest=true
|
| 193 |
+
|
| 194 |
+
- name: Docker meta AIO (dockerhub)
|
| 195 |
+
if: inputs.aio != ''
|
| 196 |
+
id: meta_aio_dockerhub
|
| 197 |
+
uses: docker/metadata-action@v5
|
| 198 |
+
with:
|
| 199 |
+
images: |
|
| 200 |
+
localai/localai
|
| 201 |
+
tags: |
|
| 202 |
+
type=ref,event=branch
|
| 203 |
+
type=semver,pattern={{raw}}
|
| 204 |
+
flavor: |
|
| 205 |
+
latest=${{ inputs.tag-latest }}
|
| 206 |
+
suffix=${{ inputs.aio }},onlatest=true
|
| 207 |
+
|
| 208 |
+
- name: Set up QEMU
|
| 209 |
+
uses: docker/setup-qemu-action@master
|
| 210 |
+
with:
|
| 211 |
+
platforms: all
|
| 212 |
+
|
| 213 |
+
- name: Set up Docker Buildx
|
| 214 |
+
id: buildx
|
| 215 |
+
uses: docker/setup-buildx-action@master
|
| 216 |
+
|
| 217 |
+
- name: Login to DockerHub
|
| 218 |
+
if: github.event_name != 'pull_request'
|
| 219 |
+
uses: docker/login-action@v3
|
| 220 |
+
with:
|
| 221 |
+
username: ${{ secrets.dockerUsername }}
|
| 222 |
+
password: ${{ secrets.dockerPassword }}
|
| 223 |
+
|
| 224 |
+
- name: Login to DockerHub
|
| 225 |
+
if: github.event_name != 'pull_request'
|
| 226 |
+
uses: docker/login-action@v3
|
| 227 |
+
with:
|
| 228 |
+
registry: quay.io
|
| 229 |
+
username: ${{ secrets.quayUsername }}
|
| 230 |
+
password: ${{ secrets.quayPassword }}
|
| 231 |
+
|
| 232 |
+
- name: Build and push
|
| 233 |
+
uses: docker/build-push-action@v6
|
| 234 |
+
if: github.event_name != 'pull_request'
|
| 235 |
+
with:
|
| 236 |
+
builder: ${{ steps.buildx.outputs.name }}
|
| 237 |
+
# The build-args MUST be an EXACT match between the image cache and other workflow steps that want to use that cache.
|
| 238 |
+
# This means that even the MAKEFLAGS have to be an EXACT match.
|
| 239 |
+
# If the build-args are not an EXACT match, it will result in a cache miss, which will require GRPC to be built from scratch.
|
| 240 |
+
# This is why some build args like GRPC_VERSION and MAKEFLAGS are hardcoded
|
| 241 |
+
build-args: |
|
| 242 |
+
BUILD_TYPE=${{ inputs.build-type }}
|
| 243 |
+
CUDA_MAJOR_VERSION=${{ inputs.cuda-major-version }}
|
| 244 |
+
CUDA_MINOR_VERSION=${{ inputs.cuda-minor-version }}
|
| 245 |
+
BASE_IMAGE=${{ inputs.base-image }}
|
| 246 |
+
GRPC_BASE_IMAGE=${{ inputs.grpc-base-image || inputs.base-image }}
|
| 247 |
+
GRPC_MAKEFLAGS=--jobs=4 --output-sync=target
|
| 248 |
+
GRPC_VERSION=v1.65.0
|
| 249 |
+
MAKEFLAGS=${{ inputs.makeflags }}
|
| 250 |
+
SKIP_DRIVERS=${{ inputs.skip-drivers }}
|
| 251 |
+
UBUNTU_VERSION=${{ inputs.ubuntu-version }}
|
| 252 |
+
UBUNTU_CODENAME=${{ inputs.ubuntu-codename }}
|
| 253 |
+
context: .
|
| 254 |
+
file: ./Dockerfile
|
| 255 |
+
cache-from: type=gha
|
| 256 |
+
platforms: ${{ inputs.platforms }}
|
| 257 |
+
push: ${{ github.event_name != 'pull_request' }}
|
| 258 |
+
tags: ${{ steps.meta.outputs.tags }}
|
| 259 |
+
labels: ${{ steps.meta.outputs.labels }}
|
| 260 |
+
### Start testing image
|
| 261 |
+
- name: Build and push
|
| 262 |
+
uses: docker/build-push-action@v6
|
| 263 |
+
if: github.event_name == 'pull_request'
|
| 264 |
+
with:
|
| 265 |
+
builder: ${{ steps.buildx.outputs.name }}
|
| 266 |
+
# The build-args MUST be an EXACT match between the image cache and other workflow steps that want to use that cache.
|
| 267 |
+
# This means that even the MAKEFLAGS have to be an EXACT match.
|
| 268 |
+
# If the build-args are not an EXACT match, it will result in a cache miss, which will require GRPC to be built from scratch.
|
| 269 |
+
# This is why some build args like GRPC_VERSION and MAKEFLAGS are hardcoded
|
| 270 |
+
build-args: |
|
| 271 |
+
BUILD_TYPE=${{ inputs.build-type }}
|
| 272 |
+
CUDA_MAJOR_VERSION=${{ inputs.cuda-major-version }}
|
| 273 |
+
CUDA_MINOR_VERSION=${{ inputs.cuda-minor-version }}
|
| 274 |
+
BASE_IMAGE=${{ inputs.base-image }}
|
| 275 |
+
GRPC_BASE_IMAGE=${{ inputs.grpc-base-image || inputs.base-image }}
|
| 276 |
+
GRPC_MAKEFLAGS=--jobs=4 --output-sync=target
|
| 277 |
+
GRPC_VERSION=v1.65.0
|
| 278 |
+
MAKEFLAGS=${{ inputs.makeflags }}
|
| 279 |
+
SKIP_DRIVERS=${{ inputs.skip-drivers }}
|
| 280 |
+
UBUNTU_VERSION=${{ inputs.ubuntu-version }}
|
| 281 |
+
UBUNTU_CODENAME=${{ inputs.ubuntu-codename }}
|
| 282 |
+
context: .
|
| 283 |
+
file: ./Dockerfile
|
| 284 |
+
cache-from: type=gha
|
| 285 |
+
platforms: ${{ inputs.platforms }}
|
| 286 |
+
#push: true
|
| 287 |
+
tags: ${{ steps.meta_pull_request.outputs.tags }}
|
| 288 |
+
labels: ${{ steps.meta_pull_request.outputs.labels }}
|
| 289 |
+
## End testing image
|
| 290 |
+
- name: Build and push AIO image
|
| 291 |
+
if: inputs.aio != ''
|
| 292 |
+
uses: docker/build-push-action@v6
|
| 293 |
+
with:
|
| 294 |
+
builder: ${{ steps.buildx.outputs.name }}
|
| 295 |
+
build-args: |
|
| 296 |
+
BASE_IMAGE=quay.io/go-skynet/local-ai:${{ steps.meta.outputs.version }}
|
| 297 |
+
MAKEFLAGS=${{ inputs.makeflags }}
|
| 298 |
+
context: .
|
| 299 |
+
file: ./Dockerfile.aio
|
| 300 |
+
platforms: ${{ inputs.platforms }}
|
| 301 |
+
push: ${{ github.event_name != 'pull_request' }}
|
| 302 |
+
tags: ${{ steps.meta_aio.outputs.tags }}
|
| 303 |
+
labels: ${{ steps.meta_aio.outputs.labels }}
|
| 304 |
+
|
| 305 |
+
- name: Build and push AIO image (dockerhub)
|
| 306 |
+
if: inputs.aio != ''
|
| 307 |
+
uses: docker/build-push-action@v6
|
| 308 |
+
with:
|
| 309 |
+
builder: ${{ steps.buildx.outputs.name }}
|
| 310 |
+
build-args: |
|
| 311 |
+
BASE_IMAGE=localai/localai:${{ steps.meta.outputs.version }}
|
| 312 |
+
MAKEFLAGS=${{ inputs.makeflags }}
|
| 313 |
+
context: .
|
| 314 |
+
file: ./Dockerfile.aio
|
| 315 |
+
platforms: ${{ inputs.platforms }}
|
| 316 |
+
push: ${{ github.event_name != 'pull_request' }}
|
| 317 |
+
tags: ${{ steps.meta_aio_dockerhub.outputs.tags }}
|
| 318 |
+
labels: ${{ steps.meta_aio_dockerhub.outputs.labels }}
|
| 319 |
+
|
| 320 |
+
- name: job summary
|
| 321 |
+
run: |
|
| 322 |
+
echo "Built image: ${{ steps.meta.outputs.labels }}" >> $GITHUB_STEP_SUMMARY
|
| 323 |
+
|
| 324 |
+
- name: job summary(AIO)
|
| 325 |
+
if: inputs.aio != ''
|
| 326 |
+
run: |
|
| 327 |
+
echo "Built image: ${{ steps.meta_aio.outputs.labels }}" >> $GITHUB_STEP_SUMMARY
|
.github/workflows/labeler.yml
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: "Pull Request Labeler"
|
| 2 |
+
on:
|
| 3 |
+
- pull_request_target
|
| 4 |
+
|
| 5 |
+
jobs:
|
| 6 |
+
labeler:
|
| 7 |
+
permissions:
|
| 8 |
+
contents: read
|
| 9 |
+
pull-requests: write
|
| 10 |
+
runs-on: ubuntu-latest
|
| 11 |
+
steps:
|
| 12 |
+
- uses: actions/labeler@v6
|