From 4435accbecdedf71ce58f8c620cd7db8ebc5fb76 Mon Sep 17 00:00:00 2001
From: Balian of Ibelin
Date: Wed, 16 Oct 2024 16:57:24 +0300
Subject: [PATCH] add: `ollama-bin`

---
 packagelist                              |  1 +
 packages/ollama-bin/.SRCINFO             | 20 ++++++
 packages/ollama-bin/ollama-bin.pacscript | 80 ++++++++++++++++++++++++
 packages/ollama-bin/ollama.service       | 14 +++++
 srclist                                  | 21 +++++++
 5 files changed, 136 insertions(+)
 create mode 100644 packages/ollama-bin/.SRCINFO
 create mode 100644 packages/ollama-bin/ollama-bin.pacscript
 create mode 100644 packages/ollama-bin/ollama.service

diff --git a/packagelist b/packagelist
index 387cccccb8..910caaa404 100644
--- a/packagelist
+++ b/packagelist
@@ -376,6 +376,7 @@ obsidian-deb
 oh-my-posh-git
 oh-my-zsh-git
 oil
+ollama-bin
 onefetch-bin
 onlyoffice-desktopeditors-deb
 onnxruntime-bin
diff --git a/packages/ollama-bin/.SRCINFO b/packages/ollama-bin/.SRCINFO
new file mode 100644
index 0000000000..79a45ae64b
--- /dev/null
+++ b/packages/ollama-bin/.SRCINFO
@@ -0,0 +1,20 @@
+pkgbase = ollama-bin
+	gives = ollama
+	pkgver = 0.3.13
+	pkgdesc = Get up and running with large language models
+	url = https://ollama.com/
+	arch = amd64
+	arch = arm64
+	license = MIT
+	maintainer = bibelin
+	repology = project:ollama
+	optdepends_ubuntu = libnvidia-compute-390 | libnvidia-compute-418 | libnvidia-compute-430 | libnvidia-compute-440 | libnvidia-compute-450 | libnvidia-compute-455 | libnvidia-compute-460 | libnvidia-compute-465 | libnvidia-compute-470 | libnvidia-compute-495 | libnvidia-compute-510 | libnvidia-compute-515 | libnvidia-compute-520 | libnvidia-compute-525 | libnvidia-compute-530 | libnvidia-compute-535 | libnvidia-compute-545 | libnvidia-compute-550: required to use CUDA on NVIDIA GPU
+	optdepends_debian = libcuda1: required to use CUDA on NVIDIA GPU
+	source_amd64 = https://github.com/ollama/ollama/releases/download/v0.3.13/ollama-linux-amd64.tgz
+	sha256sums_amd64 = 19d8b87864d7d56d3bcf61b98e6a20bd68744037aba7515803f317dfdad62984
+	sha256sums_amd64 = e6893011419cf5d3073f713a130f112b61bbd1ce6f64ee9b4513c6f020f9624a
+	source_arm64 = https://github.com/ollama/ollama/releases/download/v0.3.13/ollama-linux-arm64.tgz
+	sha256sums_arm64 = 28cd33e4dd0cdb956272119839a349f7fc45ddd5b97978e2b19fc5804150d721
+	sha256sums_arm64 = e6893011419cf5d3073f713a130f112b61bbd1ce6f64ee9b4513c6f020f9624a
+
+pkgname = ollama-bin
diff --git a/packages/ollama-bin/ollama-bin.pacscript b/packages/ollama-bin/ollama-bin.pacscript
new file mode 100644
index 0000000000..a9db379e10
--- /dev/null
+++ b/packages/ollama-bin/ollama-bin.pacscript
@@ -0,0 +1,80 @@
+pkgname="ollama-bin"
+gives="ollama"
+pkgdesc="Get up and running with large language models"
+repology=("project:ollama")
+license=("MIT")
+url='https://ollama.com/'
+arch=("amd64" "arm64")
+pkgver="0.3.13"
+source=(
+    "https://github.com/${gives}/${gives}/releases/download/v${pkgver}/${gives}-linux-${CARCH}.tgz"
+    "ollama.service"
+)
+sha256sums_amd64=(
+    "19d8b87864d7d56d3bcf61b98e6a20bd68744037aba7515803f317dfdad62984"
+    "e6893011419cf5d3073f713a130f112b61bbd1ce6f64ee9b4513c6f020f9624a"
+)
+sha256sums_arm64=(
+    "28cd33e4dd0cdb956272119839a349f7fc45ddd5b97978e2b19fc5804150d721"
+    "e6893011419cf5d3073f713a130f112b61bbd1ce6f64ee9b4513c6f020f9624a"
+)
+optdepends_ubuntu=(
+    "libnvidia-compute-390 | libnvidia-compute-418 | libnvidia-compute-430 | libnvidia-compute-440 | libnvidia-compute-450 | libnvidia-compute-455 | libnvidia-compute-460 | libnvidia-compute-465 | libnvidia-compute-470 | libnvidia-compute-495 | libnvidia-compute-510 | libnvidia-compute-515 | libnvidia-compute-520 | libnvidia-compute-525 | libnvidia-compute-530 | libnvidia-compute-535 | libnvidia-compute-545 | libnvidia-compute-550: required to use CUDA on NVIDIA GPU"
+)
+optdepends_debian=(
+    "libcuda1: required to use CUDA on NVIDIA GPU"
+)
+maintainer=("bibelin ")
+
+package() {
+    install -Dm755 "${srcdir}/bin/ollama" "${pkgdir}/usr/bin/${gives}"
+    find "${srcdir}/lib/${gives}" -type "f" -exec install -Dm644 "{}" -t "${pkgdir}/usr/lib/${gives}" \;
+    find "${srcdir}/lib/${gives}" -type "l" -exec install -Dm644 "{}" -t "${pkgdir}/usr/lib/${gives}" \;
+    install -Dm644 "${gives}.service" "${pkgdir}/usr/lib/systemd/system/${gives}.service"
+}
+
+post_install() {
+    # Adopted from https://github.com/ollama/ollama/blob/24636dfa87c3759b1d89efc47a1fd01623058fd1/scripts/install.sh#L105
+    if ! id ollama > /dev/null 2>&1; then
+        useradd -r -s /bin/false -U -m -d /usr/share/ollama ollama
+    fi
+    if getent group render > /dev/null 2>&1; then
+        usermod -a -G render ollama
+    fi
+    if getent group video > /dev/null 2>&1; then
+        usermod -a -G video ollama
+    fi
+
+    usermod -a -G ollama "${PACSTALL_USER}"
+
+    SYSTEMCTL_RUNNING="$(systemctl is-system-running || true)"
+    case ${SYSTEMCTL_RUNNING} in
+        running | degraded)
+            systemctl daemon-reload
+            systemctl enable ollama
+            systemctl start ollama
+            ;;
+        *) ;;
+    esac
+}
+
+post_upgrade() {
+    SERVICE_ENABLED="$(systemctl is-enabled ollama || true)"
+    case ${SERVICE_ENABLED} in
+        enabled | enabled-runtime)
+            systemctl restart ollama
+            ;;
+        *) ;;
+    esac
+}
+
+pre_remove() {
+    SYSTEMCTL_RUNNING="$(systemctl is-system-running || true)"
+    case ${SYSTEMCTL_RUNNING} in
+        running | degraded)
+            systemctl stop ollama
+            systemctl disable ollama
+            ;;
+        *) ;;
+    esac
+}
diff --git a/packages/ollama-bin/ollama.service b/packages/ollama-bin/ollama.service
new file mode 100644
index 0000000000..9deee1da0b
--- /dev/null
+++ b/packages/ollama-bin/ollama.service
@@ -0,0 +1,14 @@
+[Unit]
+Description=Ollama Service
+After=network-online.target
+
+[Service]
+ExecStart=/usr/bin/ollama serve
+User=ollama
+Group=ollama
+Restart=always
+RestartSec=3
+Environment="PATH=$PATH"
+
+[Install]
+WantedBy=default.target
diff --git a/srclist b/srclist
index e9f3a2017e..9c54645838 100644
--- a/srclist
+++ b/srclist
@@ -7267,6 +7267,27 @@ pkgbase = oil
 
 pkgname = oil
 ---
+pkgbase = ollama-bin
+	gives = ollama
+	pkgver = 0.3.13
+	pkgdesc = Get up and running with large language models
+	url = https://ollama.com/
+	arch = amd64
+	arch = arm64
+	license = MIT
+	maintainer = bibelin
+	repology = project:ollama
+	optdepends_ubuntu = libnvidia-compute-390 | libnvidia-compute-418 | libnvidia-compute-430 | libnvidia-compute-440 | libnvidia-compute-450 | libnvidia-compute-455 | libnvidia-compute-460 | libnvidia-compute-465 | libnvidia-compute-470 | libnvidia-compute-495 | libnvidia-compute-510 | libnvidia-compute-515 | libnvidia-compute-520 | libnvidia-compute-525 | libnvidia-compute-530 | libnvidia-compute-535 | libnvidia-compute-545 | libnvidia-compute-550: required to use CUDA on NVIDIA GPU
+	optdepends_debian = libcuda1: required to use CUDA on NVIDIA GPU
+	source_amd64 = https://github.com/ollama/ollama/releases/download/v0.3.13/ollama-linux-amd64.tgz
+	sha256sums_amd64 = 19d8b87864d7d56d3bcf61b98e6a20bd68744037aba7515803f317dfdad62984
+	sha256sums_amd64 = e6893011419cf5d3073f713a130f112b61bbd1ce6f64ee9b4513c6f020f9624a
+	source_arm64 = https://github.com/ollama/ollama/releases/download/v0.3.13/ollama-linux-arm64.tgz
+	sha256sums_arm64 = 28cd33e4dd0cdb956272119839a349f7fc45ddd5b97978e2b19fc5804150d721
+	sha256sums_arm64 = e6893011419cf5d3073f713a130f112b61bbd1ce6f64ee9b4513c6f020f9624a
+
+pkgname = ollama-bin
+---
 pkgbase = onefetch-bin
 	gives = onefetch
 	pkgver = 2.18.0
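A rough smoke test for reviewers once the package is published (a sketch only: the `pacstall -I` invocation and the 127.0.0.1:11434 listen address are the usual Pacstall and Ollama defaults assumed here, not anything this patch configures):

# Install the package; post_install creates the ollama user and enables/starts ollama.service.
pacstall -I ollama-bin
# Confirm the unit shipped in ollama.service is active.
systemctl status ollama
# Ollama serves its API on 127.0.0.1:11434 by default; this should answer "Ollama is running".
curl http://127.0.0.1:11434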