nix/module/Ollama.nix

# SRC: https://github.com/ollama/ollama
{
  pkgsUnstable,
  lib,
  config,
  ...
}:
let
  cfg = config.module.ollama;
in
{
  options = {
    module.ollama = {
      enable = lib.mkEnableOption "Local LLM server";
      primaryModel = lib.mkOption {
        default = "llama3.2";
        type = lib.types.str;
      };
      models = lib.mkOption {
        default = [ cfg.primaryModel ];
        type = with lib.types; listOf str;
      };
    };
  };

  config = lib.mkIf cfg.enable {
    environment = {
      # Specify default model.
      variables.OLLAMA_MODEL = cfg.primaryModel;
    };

    systemd.services = {
      # Enable Ollama server.
      ollama = {
        description = "Ollama LLM server.";
        wantedBy = [ "multi-user.target" ];
        serviceConfig.Type = "simple";
        script = ''
          # Pin HOME so the root service keeps its models under /root/.ollama.
          HOME=/root ${lib.getExe pkgsUnstable.ollama} serve
        '';
      };
      # Download Ollama models.
      ollama-pull = {
        description = "Ollama LLM model downloader.";
        wantedBy = [ "multi-user.target" ];
        # Wait for the network and the Ollama server before pulling models.
        wants = [
          "NetworkManager-wait-online.service"
          "ollama.service"
        ];
        after = [
          "NetworkManager-wait-online.service"
          "ollama.service"
        ];
        serviceConfig.Type = "simple";
        script = ''
          # Crude grace period for the server to start accepting requests.
          sleep 5
          # "ollama pull" takes one model per invocation, so pull each in turn.
          for model in ${lib.concatStringsSep " " cfg.models}; do
            ${lib.getExe pkgsUnstable.ollama} pull "$model"
          done
        '';
      };
    };
  };
}
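
# Usage sketch (an assumption for illustration, not part of this module): a host
# configuration that imports this module might enable it like so, where
# "qwen2.5" stands in for any extra model name:
#
#   module.ollama = {
#     enable = true;
#     primaryModel = "llama3.2";
#     models = [ "llama3.2" "qwen2.5" ];
#   };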