local ollama for vscode

atagen 2024-09-17 10:19:33 +10:00
parent 5786863608
commit f9371908d6
6 changed files with 107 additions and 46 deletions

@@ -11,7 +11,7 @@ in {
   anything-llm = {
     name = "Anything LLM";
     genericName = "Chatbot";
-    icon = ../icons/openwebui.png;
+    icon = ../icons/anythingllm.png;
     inherit port;
     requires.containers = ["anything-llm" "ollama"];
   };
@@ -22,16 +22,29 @@ in {
     username = config.mainUser;
   in {
     image = "mintplexlabs/anythingllm";
+    devices = ["nvidia.com/gpu=all"];
     ports = ["${str port}:3001"];
     autostart = false;
+    networks = ["ollama"];
     volumes = let
-      share = "/home/${username}/.local/share/anything-llm";
+      home = "/home/${username}";
+      share = "${home}/.local/share/anything-llm";
     in [
-      "${share}:/storage"
+      "${share}:/app/server/storage"
       "${share}/.env:/app/server/.env"
-      "${share}/config:/home/anythingllm/.config/anythingllm-desktop"
+      "${share}/config:/home/${username}/.config/anythingllm-desktop"
     ];
-    environment.STORAGE_LOCATION = "/storage";
+    environment = {
+      STORAGE_DIR = "/app/server/storage";
+      LLM_PROVIDER = "ollama";
+      OLLAMA_BASE_PATH = "http://ollama:11434";
+      EMBEDDING_ENGINE = "ollama";
+      EMBEDDING_BASE_PATH = "http://ollama:11434";
+      VECTOR_DB = "lancedb";
+      WHISPER_PROVIDER = "local";
+      TTS_PROVIDER = "native";
+    };
+    unitConfig = {Requires = ["podman-ollama.service"];};
     extraOptions = [
       "--health-cmd"