From c83c0b51ca80d4fe0fef3827eb1ea7aebbfd7189 Mon Sep 17 00:00:00 2001
From: Dmitry Voronin
Date: Fri, 21 Jun 2024 10:50:04 +0300
Subject: [PATCH] Desktop: Enable DockerRootless & Ollama.

---
 flake.lock                                  |  8 ++++----
 flake.nix                                   |  5 +++--
 module/Ollama.nix                           | 16 ++++++++++++----
 module/common/bash/module/Ask.nix           |  6 ++----
 module/common/nvim/module/plugin/Ollama.nix |  4 ++--
 part/Setting.nix                            |  2 ++
 6 files changed, 25 insertions(+), 16 deletions(-)

diff --git a/flake.lock b/flake.lock
index 21623d9..1780b38 100644
--- a/flake.lock
+++ b/flake.lock
@@ -359,17 +359,17 @@
     },
     "nixpkgsStable": {
       "locked": {
-        "lastModified": 1717786204,
-        "narHash": "sha256-4q0s6m0GUcN7q+Y2DqD27iLvbcd1G50T2lv08kKxkSI=",
+        "lastModified": 1718835956,
+        "narHash": "sha256-wM9v2yIxClRYsGHut5vHICZTK7xdrUGfrLkXvSuv6s4=",
         "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "051f920625ab5aabe37c920346e3e69d7d34400e",
+        "rev": "dd457de7e08c6d06789b1f5b88fc9327f4d96309",
         "type": "github"
       },
       "original": {
         "owner": "nixos",
+        "ref": "nixos-24.05",
         "repo": "nixpkgs",
-        "rev": "051f920625ab5aabe37c920346e3e69d7d34400e",
         "type": "github"
       }
     },
diff --git a/flake.nix b/flake.nix
index 3117bd5..cf082e4 100644
--- a/flake.nix
+++ b/flake.nix
@@ -8,7 +8,7 @@
     # Manual: https://nixos.org/manual/nixos/stable
     # Search: https://search.nixos.org/packages and https://search.nixos.org/options
     nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
-    nixpkgsStable.url = "github:nixos/nixpkgs/051f920625ab5aabe37c920346e3e69d7d34400e";
+    nixpkgsStable.url = "github:nixos/nixpkgs/nixos-24.05";
     nixpkgsMaster.url = "github:nixos/nixpkgs/master";
 
     # This thing manages user's /home directroies. Because NixOS only manages system itself.
@@ -248,7 +248,8 @@
         ./module/AmdCompute.nix
         ./module/AmdCpu.nix
         ./module/AmdGpu.nix
-        # ./module/Ollama.nix # ISSUE: Currently broken.
+        ./module/DockerRootless.nix
+        ./module/Ollama.nix
         ./module/PowersaveAmd.nix
         ./module/Print.nix
         ./module/RemoteBuild.nix
diff --git a/module/Ollama.nix b/module/Ollama.nix
index 5ddbd4d..c430192 100644
--- a/module/Ollama.nix
+++ b/module/Ollama.nix
@@ -1,6 +1,14 @@
-{ pkgs, lib, ... }: {
-  # Add Ollama CLI app.
-  environment.systemPackages = with pkgs; [ ollama ];
+# https://github.com/ollama/ollama
+{ pkgsStable, lib, setting, ... }: let
+  pkgs = pkgsStable;
+in {
+  environment = {
+    # Add Ollama CLI app.
+    systemPackages = with pkgs; [ ollama ];
+
+    # Specify default model.
+    variables.OLLAMA_MODEL = setting.ollama.primaryModel;
+  };
 
   # Enable Ollama server.
   systemd.services.ollama = {
@@ -23,7 +31,7 @@
     serviceConfig.Type = "simple";
     script = ''
       sleep 5
      ${lib.getExe pkgs.ollama} pull mistral
-      ${lib.getExe pkgs.ollama} pull mistral
+      ${lib.getExe pkgs.ollama} pull ${setting.ollama.primaryModel}
     '';
   };
 }
diff --git a/module/common/bash/module/Ask.nix b/module/common/bash/module/Ask.nix
index 857b9ce..5e8136c 100644
--- a/module/common/bash/module/Ask.nix
+++ b/module/common/bash/module/Ask.nix
@@ -1,12 +1,10 @@
 { ... }: {
   text = ''
-    export _ask_model="mistral"
-
     # Ask general AI.
     # Usage: ask
     function ask() {
       curl http://localhost:11434/api/generate -d "{
-        \"model\": \"''${_ask_model}\",
+        \"model\": \"''${OLLAMA_MODEL}\",
         \"prompt\":\"''${*}\"
       }" 2> /dev/null | parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'"
       echo
@@ -14,7 +12,7 @@
 
     # Specify ask model.
     function ask_model() {
-      export _ask_model="''${1}"
+      export OLLAMA_MODEL="''${1}"
     }
 
     function _complete_ask_model() {
diff --git a/module/common/nvim/module/plugin/Ollama.nix b/module/common/nvim/module/plugin/Ollama.nix
index e2be317..b8695bf 100644
--- a/module/common/nvim/module/plugin/Ollama.nix
+++ b/module/common/nvim/module/plugin/Ollama.nix
@@ -1,7 +1,7 @@
-{ ... }: {
+{ setting, ... }: {
   text = ''
     require("ollama").setup {
-      model = "mistral",
+      model = "${setting.ollama.primaryModel}",
       url = "http://127.0.0.1:11434",
       -- View the actual default prompts in ./lua/ollama/prompts.lua
       prompts = {
diff --git a/part/Setting.nix b/part/Setting.nix
index 7d4fc73..c25e59f 100644
--- a/part/Setting.nix
+++ b/part/Setting.nix
@@ -61,4 +61,6 @@
     layouts = "us,ru";
     options = "grp:toggle";
   };
+
+  ollama.primaryModel = "llama3";
 }
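
Usage sketch (not part of the patch): how the reworked ask helper behaves once
this is applied, assuming the ollama service is up on its default port 11434
and the pull service in module/Ollama.nix has already fetched the configured
primaryModel ("llama3" per part/Setting.nix). The mistral override below is
only an illustration and would need that model pulled as well.

    $ ask "why is the sky blue?"   # queries $OLLAMA_MODEL ("llama3" by default)
    $ ask_model mistral            # repoint OLLAMA_MODEL for the current shell
    $ ask "what changed here?"     # same endpoint, now with model=mistral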