From 299df8305ed269948f70c49cfea64e660ba958da Mon Sep 17 00:00:00 2001
From: Dmitry Voronin
Date: Thu, 29 Feb 2024 07:50:27 +0300
Subject: [PATCH] Ollama: Add systemd service.

---
 .config/linux/system/flake.nix              |  1 +
 .config/linux/system/module/Ollama.nix      | 23 +++++++++++++++++++
 .../system/module/common/bash/module/Ask.sh |  9 ++++++++
 3 files changed, 33 insertions(+)
 create mode 100644 .config/linux/system/module/Ollama.nix
 create mode 100644 .config/linux/system/module/common/bash/module/Ask.sh

diff --git a/.config/linux/system/flake.nix b/.config/linux/system/flake.nix
index 80a2431..80f10d0 100644
--- a/.config/linux/system/flake.nix
+++ b/.config/linux/system/flake.nix
@@ -162,6 +162,7 @@
     ./module/AmdGpu.nix
     ./module/Desktop.nix
     ./module/Gnome.nix
+    ./module/Ollama.nix
     ./module/PowersaveAmd.nix
     ./module/Print.nix
     ./module/VirtManager.nix
diff --git a/.config/linux/system/module/Ollama.nix b/.config/linux/system/module/Ollama.nix
new file mode 100644
index 0000000..504b65b
--- /dev/null
+++ b/.config/linux/system/module/Ollama.nix
@@ -0,0 +1,23 @@
+{ pkgs, lib, ... }: {
+  environment.systemPackages = with pkgs; [ ollama ];
+  systemd.services.ollama = {
+    description = "Ollama LLM server.";
+    wantedBy = [ "multi-user.target" "default.target" ];
+    wants = [ "NetworkManager-wait-online.service" ];
+    after = [ "NetworkManager-wait-online.service" ];
+    serviceConfig.Type = "simple";
+    script = ''
+      ${lib.getExe pkgs.ollama} serve
+    '';
+  };
+  systemd.services.ollamamodel = {
+    description = "Ollama LLM model.";
+    wantedBy = [ "multi-user.target" "default.target" ];
+    wants = [ "NetworkManager-wait-online.service" "ollama.service" ];
+    after = [ "NetworkManager-wait-online.service" "ollama.service" ];
+    serviceConfig.Type = "simple";
+    script = ''
+      ${lib.getExe pkgs.ollama} pull llama2-uncensored
+    '';
+  };
+}
diff --git a/.config/linux/system/module/common/bash/module/Ask.sh b/.config/linux/system/module/common/bash/module/Ask.sh
new file mode 100644
index 0000000..058357d
--- /dev/null
+++ b/.config/linux/system/module/common/bash/module/Ask.sh
@@ -0,0 +1,9 @@
+# Ask AI.
+# Usage: ask <prompt>
+function ask() {
+    curl http://localhost:11434/api/generate -d "{
+        \"model\": \"llama2-uncensored\",
+        \"prompt\": \"${*}\"
+    }" 2> /dev/null | parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'"
+    echo
+}
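
Notes:

Both units use Type = "simple", so systemd considers ollama.service
started as soon as its process is spawned, not when the server is
actually listening on its socket. The ollamamodel pull can therefore
race the server during boot; if the first pull fails, restarting
ollamamodel.service once the server is up is enough. A quick check
after a rebuild (a sketch, assuming Ollama's default port 11434):

    systemctl status ollama.service ollamamodel.service
    journalctl -u ollamamodel.service -f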
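
The generate endpoint streams its answer as one JSON object per line,
each carrying a "response" fragment, which is why ask() joins the
fragments by stripping the per-chunk newlines. An equivalent call
without GNU parallel (a sketch, assuming jq is installed and the
llama2-uncensored pull has finished):

    curl -s http://localhost:11434/api/generate -d '{
        "model": "llama2-uncensored",
        "prompt": "Why is the sky blue?"
    }' | jq -r .response | tr -d '\n'; echo

    # Or through the new helper once Ask.sh is sourced:
    ask Why is the sky blue?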