linted

This commit is contained in: current
parent 4e99a0e323
commit 7485de646a
95 changed files with 2743 additions and 2282 deletions
@@ -2,10 +2,12 @@
   lib,
   config,
   ...
-}: let
+}:
+let
   port = 3021;
-in {
-  imports = [./ollama.nix];
+in
+{
+  imports = [ ./ollama.nix ];
 
   localWebApps = {
     anything-llm = {
@@ -13,43 +15,50 @@ in {
       genericName = "Chatbot";
       icon = ../icons/anythingllm.png;
       inherit port;
-      requires.containers = ["anything-llm" "ollama"];
+      requires.containers = [
+        "anything-llm"
+        "ollama"
+      ];
     };
   };
 
-  services.podman.containers.anything-llm = let
-    str = builtins.toString;
-    username = config.mainUser;
-  in {
-    image = "docker.io/mintplexlabs/anythingllm";
-    devices = ["nvidia.com/gpu=all"];
-    ports = ["${str port}:3001"];
-    autoStart = false;
-    autoUpdate = "registry";
-    network = ["ollama"];
-    volumes = let
-      home = "/home/${username}";
-      share = "${home}/.local/share/anything-llm";
-    in [
-      "${share}:/app/server/storage"
-      "${share}/.env:/app/server/.env"
-      "${share}/config:/home/anythingllm/.config/anythingllm-desktop"
-      "${share}/config:/home/${username}/.config/anythingllm-desktop"
-    ];
-    environment = {
-      STORAGE_DIR = "/app/server/storage";
-      LLM_PROVIDER = "ollama";
-      OLLAMA_BASE_PATH = "http://ollama:11434";
-      EMBEDDING_ENGINE = "ollama";
-      EMBEDDING_BASE_PATH = "http://ollama:11434";
-      VECTOR_DB = "lancedb";
-      WHISPER_PROVIDER = "local";
-      TTS_PROVIDER = "native";
-    };
-    extraConfig.Unit.Requires = ["podman-ollama.service"];
-    extraPodmanArgs = [
-      "--health-cmd"
-      (lib.escapeShellArg "bash -c 'cat < /dev/null > /dev/tcp/localhost/3001'")
-    ];
-  };
+  services.podman.containers.anything-llm =
+    let
+      str = builtins.toString;
+      username = config.mainUser;
+    in
+    {
+      image = "docker.io/mintplexlabs/anythingllm";
+      devices = [ "nvidia.com/gpu=all" ];
+      ports = [ "${str port}:3001" ];
+      autoStart = false;
+      autoUpdate = "registry";
+      network = [ "ollama" ];
+      volumes =
+        let
+          home = "/home/${username}";
+          share = "${home}/.local/share/anything-llm";
+        in
+        [
+          "${share}:/app/server/storage"
+          "${share}/.env:/app/server/.env"
+          "${share}/config:/home/anythingllm/.config/anythingllm-desktop"
+          "${share}/config:/home/${username}/.config/anythingllm-desktop"
+        ];
+      environment = {
+        STORAGE_DIR = "/app/server/storage";
+        LLM_PROVIDER = "ollama";
+        OLLAMA_BASE_PATH = "http://ollama:11434";
+        EMBEDDING_ENGINE = "ollama";
+        EMBEDDING_BASE_PATH = "http://ollama:11434";
+        VECTOR_DB = "lancedb";
+        WHISPER_PROVIDER = "local";
+        TTS_PROVIDER = "native";
+      };
+      extraConfig.Unit.Requires = [ "podman-ollama.service" ];
+      extraPodmanArgs = [
+        "--health-cmd"
+        (lib.escapeShellArg "bash -c 'cat < /dev/null > /dev/tcp/localhost/3001'")
+      ];
+    };
 }
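The new layout in this hunk matches the RFC 166 Nix formatting style (nixfmt-rfc-style): `let`/`in` on their own lines, spaces inside list brackets, and one element per line in multi-element lists. As a sketch only, since the commit does not show which formatter the repo actually uses, a flake can expose that formatter so `nix fmt` reformats the tree this way:

{
  # Hypothetical flake fragment (not from this repo): exposes nixfmt-rfc-style
  # as the flake formatter so that `nix fmt` applies the layout seen in this diff.
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";

  outputs =
    { nixpkgs, ... }:
    {
      formatter.x86_64-linux = nixpkgs.legacyPackages.x86_64-linux.nixfmt-rfc-style;
    };
}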