Ollama: Fix and re-enable for desktop && work.

This commit is contained in:
Dmitry Voronin 2025-01-04 07:38:04 +03:00
parent 9e5ed18644
commit f260f5b6a2
Signed by: voronind
SSH key fingerprint: SHA256:3kBb4iV2ahufEBNq+vFbUe4QYfHt98DHQjN7QaptY9k
3 changed files with 38 additions and 44 deletions

View file

@@ -1,4 +1,3 @@
# SRC: https://github.com/ollama/ollama
{
pkgsUnstable,
lib,
@@ -11,45 +10,39 @@ in
{
config = lib.mkIf cfg.enable {
# Specify default model.
# environment.variables.OLLAMA_MODEL = cfg.primaryModel;
#
# systemd.services = {
# # Enable Ollama server.
# ollama = {
# description = "Ollama LLM server";
# serviceConfig = {
# Type = "simple";
# };
# wantedBy = [
# "multi-user.target"
# ];
# script = ''
# HOME=/root ${lib.getExe pkgsUnstable.ollama} serve
# '';
# };
#
# # Download Ollama models.
# ollama-pull = {
# description = "Ollama LLM model";
# after = [
# "NetworkManager-wait-online.service"
# "ollama.service"
# ];
# wantedBy = [
# "multi-user.target"
# ];
# wants = [
# "NetworkManager-wait-online.service"
# "ollama.service"
# ];
# serviceConfig = {
# Type = "simple";
# };
# script = ''
# sleep 5
# ${lib.getExe pkgsUnstable.ollama} pull ${lib.concatStringsSep " " cfg.models}
# '';
# };
# };
environment.variables.OLLAMA_MODEL = cfg.primaryModel;
systemd.services = {
# Enable Ollama server.
ollama = {
description = "Ollama LLM server";
wantedBy = [ "multi-user.target" ];
serviceConfig = {
Type = "simple";
};
script = ''
HOME=/root ${lib.getExe pkgsUnstable.ollama} serve
'';
};
# Download Ollama models.
ollama-pull = {
description = "Ollama LLM model";
wantedBy = [ "multi-user.target" ];
serviceConfig.Type = "simple";
after = [
"NetworkManager-wait-online.service"
"ollama.service"
];
wants = [
"NetworkManager-wait-online.service"
"ollama.service"
];
script = ''
sleep 5
HOME=/root ${lib.getExe pkgsUnstable.ollama} pull ${lib.concatStringsSep " " cfg.models}
'';
};
};
};
}

View file

@@ -131,7 +131,6 @@ in
(lib.mkIf cfg.work {
module = {
distrobox.enable = true;
ollama.enable = true;
package.dev = true;
virtmanager.enable = true;
docker = {

View file

@@ -1,16 +1,18 @@
# SRC: https://github.com/ollama/ollama
{ config, lib, ... }:
let
cfg = config.module.ollama;
purpose = config.module.purpose;
in
{
options.module.ollama = {
enable = lib.mkEnableOption "the local LLM server.";
enable = lib.mkEnableOption "the local LLM server." // { default = purpose.work && purpose.desktop; };
models = lib.mkOption {
default = [ cfg.primaryModel ];
type = with lib.types; listOf str;
};
primaryModel = lib.mkOption {
default = "llama3.2";
default = "llama3.3";
type = lib.types.str;
};
};