Ollama: Add systemd service.

Dmitry Voronin 2024-02-29 07:50:27 +03:00
parent 48052195ed
commit 299df8305e
3 changed files with 33 additions and 0 deletions


@@ -162,6 +162,7 @@
 ./module/AmdGpu.nix
 ./module/Desktop.nix
 ./module/Gnome.nix
+./module/Ollama.nix
 ./module/PowersaveAmd.nix
 ./module/Print.nix
 ./module/VirtManager.nix
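
For context, this list is presumably part of the host configuration's imports attribute; a minimal sketch of the surrounding structure (hypothetical, since the enclosing file is not shown in this diff):

{ ... }: {
  imports = [
    ./module/Gnome.nix
    ./module/Ollama.nix   # the line this commit adds
    # ...the other ./module/*.nix entries
  ];
}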

module/Ollama.nix (new file)

@@ -0,0 +1,23 @@
{ pkgs, lib, ... }: {
  # Make the ollama CLI available system-wide.
  environment.systemPackages = with pkgs; [ ollama ];

  # Ollama server: run `ollama serve` once the network is online.
  systemd.services.ollama = {
    description = "Ollama LLM server.";
    wantedBy = [ "multi-user.target" "default.target" ];
    wants = [ "NetworkManager-wait-online.service" ];
    after = [ "NetworkManager-wait-online.service" ];
    serviceConfig.Type = "simple";
    script = ''
      ${lib.getExe pkgs.ollama} serve
    '';
  };

  # Companion service: pull the model once the server unit has started.
  systemd.services.ollamamodel = {
    description = "Ollama LLM model.";
    wantedBy = [ "multi-user.target" "default.target" ];
    wants = [ "NetworkManager-wait-online.service" "ollama.service" ];
    after = [ "NetworkManager-wait-online.service" "ollama.service" ];
    serviceConfig.Type = "simple";
    script = ''
      ${lib.getExe pkgs.ollama} pull llama2-uncensored
    '';
  };
}
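
After a rebuild, the units can be checked with standard systemd tooling; a quick sanity check, assuming the module above is imported:

systemctl status ollama.service ollamamodel.service
journalctl -u ollamamodel.service --no-pager

Note that after = [ "ollama.service" ] only orders unit startup; it does not wait for the server to accept connections, so the pull may fire before `ollama serve` is actually listening.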


@@ -0,0 +1,9 @@
# Ask AI via the local Ollama server. Requires curl and jq.
# Usage: ask <QUERY>
# Note: the query is interpolated into the JSON body unescaped, so quotes
# in the query will break the request.
function ask() {
    # /api/generate streams one JSON object per chunk; jq -rj prints each
    # chunk's .response field raw, with no added newlines, joining the stream.
    curl -s http://localhost:11434/api/generate -d "{
        \"model\": \"llama2-uncensored\",
        \"prompt\": \"${*}\"
    }" | jq -rj '.response'
    echo
}
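
Example invocation once both services are up (output will vary by model):

$ ask "Why is the sky blue?"

The endpoint streams its reply as one JSON object per line; the -j flag tells jq to print each chunk's response field without adding newlines, so the chunks concatenate into a single answer and the final echo terminates the line.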