infrastructure/hosts/lime/services/ai.nix

{ pkgs, ... }: {
  # Make the ollama CLI available system-wide.
  environment.systemPackages = with pkgs; [
    ollama
  ];

  services.ollama = {
    enable = true;
    # Listen on all interfaces so other hosts can reach the API on port 4827.
    listenAddress = "0.0.0.0:4827";
    environmentVariables = {
      #"CUDA_VISIBLE_DEVICES" = "GPU-cf2321f0-d34b-ec9b-31e9-8c0c69e1444a";
      OLLAMA_LLM_LIBRARY = "gpu";
    };
    #acceleration = "cuda";
  };

  # Optional Ollama WebUI container, currently disabled.
  # virtualisation.oci-containers.containers = {
  #   webui = {
  #     image = "ghcr.io/ollama-webui/ollama-webui:main";
  #     ports = [
  #       "5121:8080"
  #     ];
  #     volumes = [
  #       "/home/carbon/ollamawebui:/app/backend/data"
  #     ];
  #   };
  # };
}
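
Because listenAddress binds to 0.0.0.0, remote clients can only reach the API if TCP port 4827 is open on the host. The file above does not show any firewall configuration, so the following is only a minimal companion sketch, assuming the standard NixOS networking.firewall module is in use and that exposing the port to the local network is actually intended:

{ ... }: {
  # Sketch (assumption, not part of the original file): open the TCP port
  # that services.ollama.listenAddress binds to, so other machines on the
  # network can reach the ollama API.
  networking.firewall.allowedTCPPorts = [ 4827 ];
}

On a default NixOS configuration the firewall is enabled, so the 0.0.0.0 bind alone is not enough for LAN access unless the port is allowed somewhere like this or the firewall is disabled elsewhere in the host config.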