# https://github.com/ollama/ollama
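# NOTE: `pkgsStable` and `config.setting.ollama.primaryModel` are assumed to be
# defined elsewhere in this configuration; neither is declared in this module.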
{ pkgsStable, lib, config, ... }: let
  pkgs = pkgsStable;
in {
  environment = {
    # Add Ollama CLI app.
    systemPackages = with pkgs; [ ollama ];

    # Specify default model.
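    # NOTE: OLLAMA_MODEL does not appear to be read by Ollama itself; it is
    # presumably consumed elsewhere in this configuration (e.g. shell aliases
    # or helper scripts that wrap `ollama run`).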
    variables.OLLAMA_MODEL = config.setting.ollama.primaryModel;
  };

  # Enable Ollama server.
  systemd.services.ollama = {
    description = "Ollama LLM server.";
    wantedBy = [ "multi-user.target" ];
    wants = [ "NetworkManager-wait-online.service" ];
    after = [ "NetworkManager-wait-online.service" ];
    serviceConfig.Type = "simple";
    script = ''
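      # HOME determines where Ollama keeps its state; with HOME=/root, pulled
      # models are expected to end up under /root/.ollama.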
      HOME=/root ${lib.getExe pkgs.ollama} serve
    '';
  };

  # Download Ollama models.
  systemd.services.ollamamodel = {
    description = "Ollama LLM model.";
    wantedBy = [ "multi-user.target" ];
    wants = [ "ollama.service" ];
    after = [ "ollama.service" ];
    serviceConfig.Type = "simple";
    script = ''
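      # Crude wait for the Ollama server to start accepting requests; with
      # Type=simple there is no readiness signal from `ollama serve`.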
      sleep 5
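      # Fetch the configured primary model; `ollama pull` should only download
      # missing data, so re-running this on every boot is cheap once cached.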
      ${lib.getExe pkgs.ollama} pull ${config.setting.ollama.primaryModel}
    '';
  };
}