local ollama for vscode

This commit is contained in:
atagen 2024-09-17 10:19:33 +10:00
parent 5786863608
commit f9371908d6
6 changed files with 107 additions and 46 deletions

BIN
home/icons/anythingllm.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 KiB

View file

@@ -1,13 +1,13 @@
{pkgs, ...}: {
# home.packages = with pkgs; [
# gtk-engine-murrine
# ];
home.packages = with pkgs; [
gtk-engine-murrine
];
fonts.fontconfig.enable = true;
qt = {
enable = true;
# style.name = "adwaita-dark";
# platformTheme.name = "adwaita";
style.name = "adwaita-dark";
platformTheme.name = "adwaita";
};
gtk = with pkgs;

View file

@@ -11,7 +11,7 @@ in {
anything-llm = {
name = "Anything LLM";
genericName = "Chatbot";
icon = ../icons/openwebui.png;
icon = ../icons/anythingllm.png;
inherit port;
requires.containers = ["anything-llm" "ollama"];
};
@@ -22,16 +22,29 @@ in {
username = config.mainUser;
in {
image = "mintplexlabs/anythingllm";
devices = ["nvidia.com/gpu=all"];
ports = ["${str port}:3001"];
autostart = false;
networks = ["ollama"];
volumes = let
share = "/home/${username}/.local/share/anything-llm";
home = "/home/${username}";
share = "${home}/.local/share/anything-llm";
in [
"${share}:/storage"
"${share}:/app/server/storage"
"${share}/.env:/app/server/.env"
"${share}/config:/home/anythingllm/.config/anythingllm-desktop"
"${share}/config:/home/${username}/.config/anythingllm-desktop"
];
environment.STORAGE_LOCATION = "/storage";
environment = {
STORAGE_DIR = "/app/server/storage";
LLM_PROVIDER = "ollama";
OLLAMA_BASE_PATH = "http://ollama:11434";
EMBEDDING_ENGINE = "ollama";
EMBEDDING_BASE_PATH = "http://ollama:11434";
VECTOR_DB = "lancedb";
WHISPER_PROVIDER = "local";
TTS_PROVIDER = "native";
};
unitConfig = {Requires = ["podman-ollama.service"];};
extraOptions = [
"--health-cmd"

View file

@@ -1,8 +1,54 @@
{
pkgs,
lib,
config,
...
}: {
# Desktop entry that lazily starts the `ollama` podman container and keeps
# the user informed via desktop notifications; gives up after ~120 s.
xdg.desktopEntries.ollama = {
  name = "Ollama";
  type = "Application";
  icon = ../icons/ollama.png;
  exec = let
    inherit (lib) getExe getExe';
    # notify-send tagged with an app name so notifications group under "Ollama".
    notify-send = "${getExe' pkgs.libnotify "notify-send"} -a \"Ollama\"";
    systemctl = "${getExe' pkgs.systemd "systemctl"}";
    podman = "${getExe pkgs.podman}";
  in "${pkgs.writeShellScript "ollama" ''
    set -euo pipefail
    exit_error() {
      # "$1" must be quoted: the message contains spaces, and unquoted it
      # word-splits into extra arguments that notify-send rejects.
      ${notify-send} -w "Failure" "$1"
      exit 1
    }
    container_checks() {
      # Succeeds once podman's healthcheck reports the container healthy.
      # The Go template is quoted to keep it a single shell word.
      if [ "$(${podman} inspect -f "{{.State.Health.Status}}" ollama)" == "healthy" ]; then
        return 0
      else
        return 1
      fi
    }
    ${notify-send} "Launching Ollama.." "Please be patient."
    ${systemctl} --user start podman-ollama
    # Poll every 2 s; nudge the user every 10 checks; abort after 60 (~120 s).
    checks=0
    until container_checks; do
      sleep 2
      checks=$((checks+1))
      if [ $((checks%10)) -eq 0 ]; then
        ${notify-send} "Launching.."
      fi
      if [ $checks -ge 60 ]; then
        # NOTE(review): starts "podman-ollama" (service) but stops
        # "podman-ollama.target" — confirm the target unit exists and pulls
        # in the service, otherwise the cleanup path is a no-op.
        ${systemctl} --no-block --user stop podman-ollama.target
        exit_error "Failed to launch!"
      fi
    done
    ${notify-send} "Ollama serving on port 11434."
  ''}";
};
services.podman = {
containers.ollama = let
username = config.mainUser;
@@ -11,6 +57,7 @@
devices = ["nvidia.com/gpu=all"];
autostart = false;
networks = ["ollama"];
ports = ["11434:11434"];
volumes = [
"/home/${username}/.local/share/ollama:/models"
];

View file

@@ -20,6 +20,7 @@
replaceStrings
concatMapStringsSep
;
partOf = cfg: "${replaceStrings [" "] ["-"] (toLower cfg.name)}.target";
# make a firefox webapp + hidden .desktop entry for the client app
make-firefox = cfg:
mapAttrs' (
@@ -60,7 +61,7 @@
name = "${req}";
value = {
Unit = {
PartOf = "${replaceStrings [" "] ["-"] (toLower cfg.name)}.target";
PartOf = partOf cfg;
};
};
}
@@ -75,7 +76,7 @@
name = "${req}";
value = {
unitConfig = {
PartOf = "${toLower cfg.name}.target";
PartOf = partOf cfg;
};
};
}