nix/module/Ollama.nix
2024-11-06 01:07:30 +03:00

67 lines
1.3 KiB
Nix

# SRC: https://github.com/ollama/ollama
{
  pkgsUnstable,
  lib,
  config,
  util,
  ...
}: let
  cfg = config.module.ollama;
in {
  options.module.ollama = {
    enable = lib.mkEnableOption "the local LLM server.";

    # Models to pull on boot. Defaults to just the primary model.
    models = lib.mkOption {
      default = [cfg.primaryModel];
      type = with lib.types; listOf str;
    };

    # Model name exported via OLLAMA_MODEL for client tooling.
    primaryModel = lib.mkOption {
      default = "llama3.2";
      type = lib.types.str;
    };
  };

  config = lib.mkIf cfg.enable {
    # Specify default model.
    environment.variables.OLLAMA_MODEL = cfg.primaryModel;

    systemd.services = {
      # Enable Ollama server.
      ollama = {
        description = "Ollama LLM server";
        serviceConfig = {
          Type = "simple";
        };
        wantedBy = [
          "multi-user.target"
        ];
        script = ''
          HOME=/root ${lib.getExe pkgsUnstable.ollama} serve
        '';
      };

      # Download Ollama models.
      ollama-pull = {
        description = "Ollama LLM model";
        after = [
          "NetworkManager-wait-online.service"
          "ollama.service"
        ];
        wantedBy = [
          "multi-user.target"
        ];
        wants = [
          "NetworkManager-wait-online.service"
          "ollama.service"
        ];
        serviceConfig = {
          Type = "simple";
        };
        # NOTE(review): the sleep looks like a grace period for the server to
        # start accepting connections — confirm whether ordering on
        # ollama.service alone is insufficient.
        #
        # `ollama pull` accepts exactly one model per invocation, so the
        # configured models are pulled one at a time; escapeShellArgs keeps
        # each name safely quoted.
        script = util.trimTabs ''
          sleep 5
          for model in ${lib.escapeShellArgs cfg.models}; do
            ${lib.getExe pkgsUnstable.ollama} pull "$model"
          done
        '';
      };
    };
  };
}