nix/home/programs/anything-llm.nix

{
  lib,
  config,
  ...
}:
let
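  # Host port the AnythingLLM web UI is published on.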
  port = 3021;
in
{
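  # The companion ollama service is configured in its own module.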
  imports = [ ./ollama.nix ];
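  # Local web app entry: display name, icon, port, and the containers it needs running.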
  localWebApps = {
    anything-llm = {
      name = "Anything LLM";
      genericName = "Chatbot";
      icon = ../icons/anythingllm.png;
      inherit port;
      requires.containers = [
        "anything-llm"
        "ollama"
      ];
    };
  };
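  # AnythingLLM itself runs as a Podman container, attached to the shared "ollama" network.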
  services.podman.containers.anything-llm =
    let
      str = builtins.toString;
      username = config.mainUser;
    in
    {
      image = "docker.io/mintplexlabs/anythingllm";
      # Pass every NVIDIA GPU through to the container (CDI device name).
      devices = [ "nvidia.com/gpu=all" ];
      # Publish the container's port 3001 on the host port defined above.
      ports = [ "${str port}:3001" ];
      autoStart = false;
      autoUpdate = "registry";
      network = [ "ollama" ];
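      # Persist server storage, the .env file, and desktop config under
      # ~/.local/share/anything-llm on the host.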
      volumes =
        let
          home = "/home/${username}";
          share = "${home}/.local/share/anything-llm";
        in
        [
          "${share}:/app/server/storage"
          "${share}/.env:/app/server/.env"
          "${share}/config:/home/anythingllm/.config/anythingllm-desktop"
          "${share}/config:/home/${username}/.config/anythingllm-desktop"
        ];
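      # Point both the LLM and the embedding engine at the ollama container;
      # the vector DB, Whisper, and TTS use the built-in local/native providers.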
      environment = {
        STORAGE_DIR = "/app/server/storage";
        LLM_PROVIDER = "ollama";
        OLLAMA_BASE_PATH = "http://ollama:11434";
        EMBEDDING_ENGINE = "ollama";
        EMBEDDING_BASE_PATH = "http://ollama:11434";
        VECTOR_DB = "lancedb";
        WHISPER_PROVIDER = "local";
        TTS_PROVIDER = "native";
      };
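      # Tie this unit to the ollama container's unit so they start and stop together.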
      extraConfig.Unit.Requires = [ "podman-ollama.service" ];
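      # Health check: succeeds once something is listening on TCP 3001 inside
      # the container (bash's /dev/tcp redirection).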
      extraPodmanArgs = [
        "--health-cmd"
        (lib.escapeShellArg "bash -c 'cat < /dev/null > /dev/tcp/localhost/3001'")
      ];
    };
}