Mirror of https://github.com/archlinux/aur.git, synced 2026-03-14 23:16:48 +01:00.
Commit 7b90922fb7 — "Initial build (2.28.0)".
8 changed files, 254 insertions(+), 0 deletions(-).
### .SRCINFO — new file, 29 lines (@@ -0,0 +1,29 @@)
pkgbase = local-ai-bin
	pkgdesc = Free, Open Source OpenAI alternative. Self-hosted, community-driven and local-first
	pkgver = 2.28.0
	pkgrel = 1
	url = https://localai.io
	arch = aarch64
	arch = x86_64
	license = MIT
	provides = local-ai
	conflicts = local-ai
	source = local-ai.conf
	source = local-ai.service
	source = local-ai.sysusers
	source = local-ai.tmpfiles
	source = local-ai-2.28.0-README.md::https://github.com/mudler/LocalAI/raw/refs/tags/v2.28.0/README.md
	source = local-ai-2.28.0-LICENSE::https://github.com/mudler/LocalAI/raw/refs/tags/v2.28.0/LICENSE
	sha256sums = 8e530e32d96d02c01192c987e8bffaf0bb67db34bf999fccdfa8d0777789233b
	sha256sums = bd420ec530cbfdb7f29b309e3c0c8cd72de6346b7c8e9882e917a071c65b344c
	sha256sums = 97ba21355c50ec658e220bc0558f506227b3dc77cc51f343b6f5657b0d77a19b
	sha256sums = 90e042d0f5885b63a6aa4db7f87d6b931956f6c9b022407593466f61f6973312
	sha256sums = 0d6fd6c9a4b32b8baafcab18e6e66c35a71183cdc47626bb5a7fc2d97c0859da
	sha256sums = 56bef7ba54ae4e4477b9effe34b6e6cadc1b0bcfaacc5be503096a1ce0a9d391
	source_aarch64 = local-ai-2.28.0-aarch64::https://github.com/mudler/LocalAI/releases/download/v2.28.0/local-ai-Linux-arm64
	sha256sums_aarch64 = 4c0bef97654d5cef0056aa3e02c6c4643df31136a653f33f9af4360b890bc26a
	source_x86_64 = local-ai-2.28.0-x86_64::https://github.com/mudler/LocalAI/releases/download/v2.28.0/local-ai-Linux-x86_64
	depends_x86_64 = glibc
	sha256sums_x86_64 = 623e7c201ee1246f8bd157d7dec8e61646bfdd572b0898956e85c28b9f615878

pkgname = local-ai-bin
### .gitignore — new vendored file, 9 lines (@@ -0,0 +1,9 @@)
# Ignore everything, then re-allow only the files the package build needs.
*
!PKGBUILD
!.SRCINFO
!.gitignore
!.nvchecker.toml
!*.conf
!*.service
!*.sysusers
!*.tmpfiles
### .nvchecker.toml — new file, 5 lines (@@ -0,0 +1,5 @@)
# nvchecker configuration: track new upstream versions of LocalAI
# via GitHub releases, stripping the leading "v" from tag names.
[local-ai-bin]
source = "github"
github = "mudler/LocalAI"
use_latest_release = true
prefix = "v"
### PKGBUILD — new file, 43 lines (@@ -0,0 +1,43 @@)
# Maintainer: Vitalii Kuzhdin <vitaliikuzhdin@gmail.com>
# PKGBUILD for local-ai-bin: repackages the prebuilt LocalAI release
# binaries from GitHub together with systemd integration files.

_pkgname="local-ai"
pkgname="${_pkgname}-bin"
pkgver=2.28.0
pkgrel=1
pkgdesc="Free, Open Source OpenAI alternative. Self-hosted, community-driven and local-first"
arch=('aarch64' 'x86_64')
url="https://localai.io"
# Upstream repository; used to build the source/download URLs below.
_url="https://github.com/mudler/LocalAI"
license=('MIT')
# NOTE(review): only x86_64 declares glibc; the aarch64 release binary
# presumably needs it too — confirm (e.g. readelf/ldd on the arm64 asset).
depends_x86_64=('glibc')
# makedepends=('upx')
provides=("${_pkgname}")
conflicts=("${_pkgname}")
# Common prefix for versioned, renamed source files.
_pkgsrc="${_pkgname}-${pkgver}"
source=("${_pkgname}."{conf,service,sysusers,tmpfiles}
        "${_pkgsrc}-README.md::${_url}/raw/refs/tags/v${pkgver}/README.md"
        "${_pkgsrc}-LICENSE::${_url}/raw/refs/tags/v${pkgver}/LICENSE")
source_aarch64=("${_pkgsrc}-aarch64::${_url}/releases/download/v${pkgver}/${_pkgname}-Linux-arm64")
source_x86_64=("${_pkgsrc}-x86_64::${_url}/releases/download/v${pkgver}/${_pkgname}-Linux-x86_64")
sha256sums=('8e530e32d96d02c01192c987e8bffaf0bb67db34bf999fccdfa8d0777789233b'
            'bd420ec530cbfdb7f29b309e3c0c8cd72de6346b7c8e9882e917a071c65b344c'
            '97ba21355c50ec658e220bc0558f506227b3dc77cc51f343b6f5657b0d77a19b'
            '90e042d0f5885b63a6aa4db7f87d6b931956f6c9b022407593466f61f6973312'
            '0d6fd6c9a4b32b8baafcab18e6e66c35a71183cdc47626bb5a7fc2d97c0859da'
            '56bef7ba54ae4e4477b9effe34b6e6cadc1b0bcfaacc5be503096a1ce0a9d391')
sha256sums_aarch64=('4c0bef97654d5cef0056aa3e02c6c4643df31136a653f33f9af4360b890bc26a')
sha256sums_x86_64=('623e7c201ee1246f8bd157d7dec8e61646bfdd572b0898956e85c28b9f615878')

# Install the architecture-specific prebuilt binary plus docs, license,
# and the systemd/service configuration files. No build step is needed.
package() {
  cd "${srcdir}"
  install -vDm755 "${_pkgsrc}-${CARCH}" "${pkgdir}/usr/bin/${_pkgname}"
  install -vDm644 "${_pkgsrc}-README.md" "${pkgdir}/usr/share/doc/${_pkgname}/README.md"
  install -vDm644 "${_pkgsrc}-LICENSE" "${pkgdir}/usr/share/licenses/${_pkgname}/LICENSE"

  install -vDm644 "${_pkgname}.conf" "${pkgdir}/etc/${_pkgname}/${_pkgname}.conf"
  install -vDm644 "${_pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${_pkgname}.service"
  install -vDm644 "${_pkgname}.sysusers" "${pkgdir}/usr/lib/sysusers.d/${_pkgname}.conf"
  install -vDm644 "${_pkgname}.tmpfiles" "${pkgdir}/usr/lib/tmpfiles.d/${_pkgname}.conf"

  # upx "${pkgdir}/usr/bin/${_pkgname}"
}
### local-ai.conf — new file, 142 lines (@@ -0,0 +1,142 @@)
# Configuration file for https://github.com/mudler/LocalAI
# Read by systemd via EnvironmentFile= in local-ai.service; shell-style
# KEY="value" lines only. Commented entries show defaults/examples.

# Path containing models used for inferencing
LOCALAI_MODELS_PATH="/var/lib/local-ai/models"

# Path used to extract libraries required by some backends at runtime
LOCALAI_BACKEND_ASSETS_PATH="/var/lib/local-ai/backend_data"

# Location for images generated by backends (e.g., stablediffusion)
LOCALAI_IMAGE_PATH="/var/lib/local-ai/generated/images"

# Location for audio generated by backends (e.g., piper)
LOCALAI_AUDIO_PATH="/var/lib/local-ai/generated/audio"

# Path to store uploads from files API
# LOCALAI_UPLOAD_PATH="/tmp/localai/upload"

# Path for configuration files
# LOCALAI_CONFIG_PATH="/tmp/localai/config"

# Directory for dynamic loading of certain configuration files
LOCALAI_CONFIG_DIR="/etc/local-ai"

# Interval to poll the LocalAI Config Directory for changes
# LOCALAI_CONFIG_DIR_POLL_INTERVAL="1m"

# YAML file containing a list of model backend configs
# LOCALAI_MODELS_CONFIG_FILE="config.yaml"

# JSON list of model galleries to use
# LOCALAI_GALLERIES='[{"name":"model-gallery", "url":"github:go-skynet/model-gallery/index.yaml"}, {"url": "github:go-skynet/model-gallery/huggingface.yaml","name":"huggingface"}]'

# Enable automatic loading of galleries
# LOCALAI_AUTOLOAD_GALLERIES=true

# A LocalAI remote library URL
# LOCALAI_REMOTE_LIBRARY="${remoteLibraryURL}"

# List of models to apply at startup in JSON format
# LOCALAI_PRELOAD_MODELS="[]"

# List of model configuration URLs to load
# LOCALAI_MODELS="[]"

# Path to a YAML config file for preloading models at startup
# LOCALAI_PRELOAD_MODELS_CONFIG="preload_config.yaml"

# Enable GPU acceleration
# LOCALAI_F16=true

# Number of threads used for parallel computation
# LOCALAI_THREADS=4

# Default context size for models
# LOCALAI_CONTEXT_SIZE=512

# Bind address for the API server
# LOCALAI_ADDRESS=":8080"

# Enable CORS for the API
LOCALAI_CORS=true

# Path to the library directory for external libraries used by backends
# LOCALAI_LIBRARY_PATH="/usr/share/local-ai/libs"

# Enable fiber CSRF middleware
# LOCALAI_CSRF=true

# Default upload limit in MB
# LOCALAI_UPLOAD_LIMIT=15

# List of API Keys for authentication
# LOCALAI_API_KEY="[]"

# Disable web UI
# LOCALAI_DISABLE_WEBUI=false

# Disable the best-effort security scanner before downloading files
# LOCALAI_DISABLE_PREDOWNLOAD_SCAN=false

# Replace all error responses with blank 500 errors
# LOCALAI_OPAQUE_ERRORS=false

# Use constant-time comparisons for API Key validation
# LOCALAI_SUBTLE_KEY_COMPARISON=false

# Disable API key requirement for HTTP GET requests
# LOCALAI_DISABLE_API_KEY_REQUIREMENT_FOR_HTTP_GET=false

# Disable the /metrics endpoint
# LOCALAI_DISABLE_METRICS_ENDPOINT=false

# List of endpoints exempt from API key requirement for GET requests
# LOCALAI_HTTP_GET_EXEMPTED_ENDPOINTS="^/$,^/browse/?$,^/talk/?$,^/p2p/?$,^/chat/?$,^/text2image/?$,^/tts/?$,^/static/.*$,^/swagger.*$"

# Enable P2P mode
# LOCALAI_P2P=false

# Interval for DHT refresh during token generation
# LOCALAI_P2P_DHT_INTERVAL=360

# Interval for OTP refresh during token generation
# LOCALAI_P2P_OTP_INTERVAL=9000

# Token for P2P mode (optional)
# LOCALAI_P2P_TOKEN=""

# Network ID for P2P mode
# LOCALAI_P2P_NETWORK_ID=""

# Enable backends to handle multiple requests in parallel
# LOCALAI_PARALLEL_REQUESTS=true

# Allow only one backend to be run at a time
# LOCALAI_SINGLE_ACTIVE_BACKEND=false

# Do not launch API services, only preloaded models/backends are started
# LOCALAI_PRELOAD_BACKEND_ONLY=false

# List of external gRPC backends
# LOCALAI_EXTERNAL_GRPC_BACKENDS="[]"

# Enable watchdog for stopping idle backends
# LOCALAI_WATCHDOG_IDLE=false

# Threshold for stopping idle backends
# LOCALAI_WATCHDOG_IDLE_TIMEOUT="15m"

# Enable watchdog for stopping busy backends
# LOCALAI_WATCHDOG_BUSY=false

# Threshold for stopping busy backends
# LOCALAI_WATCHDOG_BUSY_TIMEOUT="5m"

# Enable federated instance
# LOCALAI_FEDERATED=false

# Disable the gallery endpoints
# LOCALAI_DISABLE_GALLERY_ENDPOINT=false

# List of models to load into memory at startup
# LOCALAI_LOAD_TO_MEMORY="[]"
### local-ai.service — new file, 15 lines (@@ -0,0 +1,15 @@)
# systemd unit for the LocalAI server; environment is loaded from
# /etc/local-ai/local-ai.conf and the daemon runs as the local-ai user.
[Unit]
Description=LocalAI API For Running LLaMA/GPT Models
After=local-fs.target

[Service]
User=local-ai
Group=local-ai
EnvironmentFile=/etc/local-ai/local-ai.conf
SyslogIdentifier=local-ai
Type=simple
ExecStart=/usr/bin/local-ai
# NOTE(review): exit status 1 is treated as a clean stop — presumably the
# daemon exits 1 on shutdown; confirm against upstream behavior.
SuccessExitStatus=1

[Install]
WantedBy=multi-user.target
### local-ai.sysusers — new file, 3 lines (@@ -0,0 +1,3 @@)
# sysusers.d(5): create the local-ai system user and group with
# /var/lib/local-ai as home directory.
# NOTE(review): login shell is /bin/sh — confirm an interactive shell is
# intended for this service account (such accounts usually get nologin).
u local-ai - "local-ai" /var/lib/local-ai /bin/sh
g local-ai - -
### local-ai.tmpfiles — new file, 8 lines (@@ -0,0 +1,8 @@)
# tmpfiles.d(5): create the state/log directories referenced by
# local-ai.conf, owned by the local-ai user; the final Z line recursively
# fixes ownership/mode of the state tree on boot.
d /var/lib/local-ai 0755 local-ai local-ai
d /var/log/local-ai 0755 local-ai local-ai
d /var/lib/local-ai/models 0755 local-ai local-ai
d /var/lib/local-ai/backend_data 0755 local-ai local-ai
d /var/lib/local-ai/generated 0755 local-ai local-ai
d /var/lib/local-ai/generated/images 0755 local-ai local-ai
d /var/lib/local-ai/generated/audio 0755 local-ai local-ai
Z /var/lib/local-ai 0755 local-ai local-ai
(end of commit diff)