# https://github.com/ollama/ollama
{ pkgsStable, lib, config, ... }: with lib; let
  pkgs = pkgsStable;
  cfg = config.module.ollama;
in {
  options = {
    module.ollama = {
      enable = mkEnableOption "Local LLM server";

      primaryModel = mkOption {
        default = "llama3";
        type = types.str;
      };

      models = mkOption {
        default = [ cfg.primaryModel ];
        type = types.listOf types.str;
      };
    };
  };
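
  # Illustrative usage from elsewhere in the configuration (assumes this file
  # is imported as a module; the model names below are examples only):
  #
  #   module.ollama = {
  #     enable = true;
  #     primaryModel = "llama3";
  #     models = [ "llama3" "mistral" ];
  #   };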

  config = mkIf cfg.enable {
    environment = {
      # Add Ollama CLI app.
      systemPackages = with pkgs; [ ollama ];

      # Specify default model.
      variables.OLLAMA_MODEL = cfg.primaryModel;
    };
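
    # With the CLI installed, a model can be used interactively once the
    # server is running, e.g. `ollama run llama3` (model name is an example).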

    # Enable Ollama server.
    systemd.services.ollama = {
      description = "Ollama LLM server.";
      wantedBy = [ "multi-user.target" ];
      wants = [ "NetworkManager-wait-online.service" ];
      after = [ "NetworkManager-wait-online.service" ];
      serviceConfig.Type = "simple";
      # Run with an explicit HOME so models are stored under /root/.ollama;
      # the server listens on Ollama's default address, 127.0.0.1:11434.
      script = ''
        HOME=/root ${getExe pkgs.ollama} serve
      '';
    };

    # Download Ollama models.
    systemd.services.ollamamodel = {
      description = "Ollama LLM model.";
      wantedBy = [ "multi-user.target" ];
      wants = [ "ollama.service" ];
      after = [ "ollama.service" ];
      serviceConfig.Type = "simple";
      script = ''
        # Give the server a moment to start accepting requests.
        sleep 5

        # Pull the configured models one at a time; `ollama pull` takes a
        # model name as its argument.
        for model in ${concatStringsSep " " cfg.models}; do
          ${getExe pkgs.ollama} pull $model
        done
      '';
    };
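
    # Post-rebuild checks with standard systemd tooling, e.g.:
    #   systemctl status ollama.service
    #   journalctl -u ollamamodel.service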
  };
}