# nix/module/Ollama.nix
# https://github.com/ollama/ollama
#
# NixOS module that runs a local Ollama LLM server and pre-downloads a
# configurable set of models. Enable with `module.ollama.enable = true;`.
{ pkgsStable, lib, config, ... }: with lib; let
  # This module pins its packages to the stable package set.
  pkgs = pkgsStable;
  cfg = config.module.ollama;
in {
  options = {
    module.ollama = {
      enable = mkEnableOption "Local LLM server";

      primaryModel = mkOption {
        default = "llama3";
        type = types.str;
        description = "Default model, advertised to clients via the OLLAMA_MODEL environment variable.";
      };

      models = mkOption {
        # By default only the primary model is downloaded.
        default = [ cfg.primaryModel ];
        type = types.listOf types.str;
        description = "Models to pull after the server starts.";
      };
    };
  };

  config = mkIf cfg.enable {
    environment = {
      # Expose the default model name to user sessions / tooling.
      variables.OLLAMA_MODEL = cfg.primaryModel;
    };

    # Long-running Ollama API server.
    systemd.services.ollama = {
      description = "Ollama LLM server.";
      wantedBy = [ "multi-user.target" ];
      # Start after the network is actually online, not just configured.
      wants = [ "NetworkManager-wait-online.service" ];
      after = [ "NetworkManager-wait-online.service" ];
      serviceConfig.Type = "simple";
      script = ''
        HOME=/root ${getExe pkgs.ollama} serve
      '';
    };

    # One-shot unit that downloads the configured models once the server is up.
    systemd.services.ollamamodel = {
      description = "Ollama LLM model.";
      wantedBy = [ "multi-user.target" ];
      wants = [ "ollama.service" ];
      after = [ "ollama.service" ];
      serviceConfig = {
        # This unit pulls the models and exits; `oneshot` + RemainAfterExit
        # keeps systemd from treating the finished pull as a failure.
        Type = "oneshot";
        RemainAfterExit = true;
      };
      script = ''
        # Crude wait for the server socket to come up — NOTE(review): a
        # readiness poll against the API would be more robust than a sleep.
        sleep 5
        # `ollama pull` accepts a single model per invocation, so emit one
        # command per configured model instead of joining them into one call.
        ${concatMapStringsSep "\n" (model: "${getExe pkgs.ollama} pull ${escapeShellArg model}") cfg.models}
      '';
    };
  };
}