linted
atagen 2025-02-02 16:57:31 +11:00
parent 4e99a0e323
commit 7485de646a
95 changed files with 2743 additions and 2282 deletions


@@ -3,71 +3,76 @@
  lib,
  config,
  ...
}:
{
  xdg.desktopEntries.ollama = {
    name = "Ollama";
    type = "Application";
    icon = ../icons/ollama.png;
    exec =
      let
        inherit (lib) getExe getExe';
        notify-send = "${getExe' pkgs.libnotify "notify-send"} -a \"Ollama\"";
        systemctl = "${getExe' pkgs.systemd "systemctl"}";
        podman = "${getExe pkgs.podman}";
      in
      "${pkgs.writeShellScript "ollama" ''
        set -euo pipefail

        exit_error() {
          ${notify-send} -w "Failure" $1
          exit 1
        }

        container_checks() {
          if [ "$(${podman} inspect -f {{.State.Health.Status}} ollama)" == "healthy" ]; then
            return 0
          else
            return 1
          fi
        }

        ${notify-send} "Launching Ollama.." "Please be patient."

        ${systemctl} --user start podman-ollama
        checks=0
        until container_checks; do
          sleep 2
          checks=$((checks+1))
          if [ $((checks%10)) -eq 0 ]; then
            ${notify-send} "Launching.."
          fi
          if [ $checks -ge 60 ]; then
            ${systemctl} --no-block --user stop podman-ollama.target
            exit_error "Failed to launch!"
          fi
        done

        ${notify-send} "Ollama serving on port 11434."
      ''}";
  };

  services.podman = {
    containers.ollama =
      let
        username = config.mainUser;
      in
      {
        image = "docker.io/ollama/ollama:latest";
        devices = [ "nvidia.com/gpu=all" ];
        autoStart = false;
        autoUpdate = "registry";
        network = [ "ollama" ];
        ports = [ "11434:11434" ];
        volumes = [
          "/home/${username}/.local/share/ollama:/models"
        ];
        environment.OLLAMA_MODELS = "/models";
        extraPodmanArgs = [
          "--health-cmd"
          (lib.escapeShellArg "bash -c 'cat < /dev/null > /dev/tcp/localhost/11434'")
        ];
      };
    networks.ollama = {
      subnet = "192.168.10.0/24";