Desktop : Enable DockerRootless & Ollama.

Commit c83c0b51ca
Author: Dmitry Voronin, 2024-06-21 10:50:04 +03:00
Parent: 0b37fca821
Signed by: voronind (SSH key fingerprint: SHA256:3kBb4iV2ahufEBNq+vFbUe4QYfHt98DHQjN7QaptY9k)
6 changed files with 25 additions and 16 deletions

File: flake.lock

@@ -359,17 +359,17 @@
     },
     "nixpkgsStable": {
       "locked": {
-        "lastModified": 1717786204,
-        "narHash": "sha256-4q0s6m0GUcN7q+Y2DqD27iLvbcd1G50T2lv08kKxkSI=",
+        "lastModified": 1718835956,
+        "narHash": "sha256-wM9v2yIxClRYsGHut5vHICZTK7xdrUGfrLkXvSuv6s4=",
         "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "051f920625ab5aabe37c920346e3e69d7d34400e",
+        "rev": "dd457de7e08c6d06789b1f5b88fc9327f4d96309",
         "type": "github"
       },
       "original": {
         "owner": "nixos",
+        "ref": "nixos-24.05",
         "repo": "nixpkgs",
-        "rev": "051f920625ab5aabe37c920346e3e69d7d34400e",
         "type": "github"
       }
     },
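The lock change mirrors the flake.nix edit below: the nixpkgsStable input's "original" entry now records the nixos-24.05 branch ref instead of a fixed rev, so the pinned commit advances whenever the input is refreshed. A hedged example of refreshing just this input (command form varies by Nix version; newer releases use `nix flake update nixpkgsStable` instead):

    nix flake lock --update-input nixpkgsStable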

File: flake.nix

@ -8,7 +8,7 @@
# Manual: https://nixos.org/manual/nixos/stable # Manual: https://nixos.org/manual/nixos/stable
# Search: https://search.nixos.org/packages and https://search.nixos.org/options # Search: https://search.nixos.org/packages and https://search.nixos.org/options
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
nixpkgsStable.url = "github:nixos/nixpkgs/051f920625ab5aabe37c920346e3e69d7d34400e"; nixpkgsStable.url = "github:nixos/nixpkgs/nixos-24.05";
nixpkgsMaster.url = "github:nixos/nixpkgs/master"; nixpkgsMaster.url = "github:nixos/nixpkgs/master";
# This thing manages user's /home directroies. Because NixOS only manages system itself. # This thing manages user's /home directroies. Because NixOS only manages system itself.
@ -248,7 +248,8 @@
./module/AmdCompute.nix ./module/AmdCompute.nix
./module/AmdCpu.nix ./module/AmdCpu.nix
./module/AmdGpu.nix ./module/AmdGpu.nix
# ./module/Ollama.nix # ISSUE: Currently broken. ./module/DockerRootless.nix
./module/Ollama.nix
./module/PowersaveAmd.nix ./module/PowersaveAmd.nix
./module/Print.nix ./module/Print.nix
./module/RemoteBuild.nix ./module/RemoteBuild.nix
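The newly imported ./module/DockerRootless.nix is not part of this diff. As a rough sketch only, assuming the module simply wraps the stock NixOS rootless-Docker options (the actual file may differ):

    { ... }: {
      # Run the Docker daemon under the user instead of root.
      virtualisation.docker.rootless = {
        enable = true;
        # Export DOCKER_HOST so CLI clients find the per-user socket.
        setSocketVariable = true;
      };
    }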

File: module/Ollama.nix

@ -1,6 +1,14 @@
{ pkgs, lib, ... }: { # https://github.com/ollama/ollama
{ pkgsStable, lib, setting, ... }: let
pkgs = pkgsStable;
in {
environment = {
# Add Ollama CLI app. # Add Ollama CLI app.
environment.systemPackages = with pkgs; [ ollama ]; systemPackages = with pkgs; [ ollama ];
# Specify default model.
variables.OLLAMA_MODEL = setting.ollama.primaryModel;
};
# Enable Ollama server. # Enable Ollama server.
systemd.services.ollama = { systemd.services.ollama = {
@ -23,7 +31,7 @@
serviceConfig.Type = "simple"; serviceConfig.Type = "simple";
script = '' script = ''
sleep 5 sleep 5
${lib.getExe pkgs.ollama} pull mistral ${lib.getExe pkgs.ollama} pull ${setting.ollama.primaryModel}
''; '';
}; };
} }
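Once the module is active, the server and the pre-pulled model can be checked with standard tooling (port 11434 is Ollama's default and matches the URLs used elsewhere in this commit):

    systemctl status ollama    # the custom service defined above
    ollama list                # should show the primary model after the pull
    curl http://127.0.0.1:11434/api/tags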

File: bash ask() helper module (path not shown in this view)

@@ -1,12 +1,10 @@
 { ... }: {
   text = ''
-    export _ask_model="mistral"
-
     # Ask general AI.
     # Usage: ask <QUERY>
     function ask() {
       curl http://localhost:11434/api/generate -d "{
-        \"model\": \"''${_ask_model}\",
+        \"model\": \"''${OLLAMA_MODEL}\",
         \"prompt\":\"''${*}\"
       }" 2> /dev/null | parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'"
       echo
@@ -14,7 +12,7 @@
     # Specify ask model.
     function ask_model() {
-      export _ask_model="''${1}"
+      export OLLAMA_MODEL="''${1}"
     }
 
     function _complete_ask_model() {
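Example session with the reworked helpers, assuming the Ollama service above is running and OLLAMA_MODEL was exported by the module (output omitted):

    ask "Why is the sky blue?"
    ask_model mistral   # switch later queries back to the previously hard-coded model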

File: Neovim ollama.nvim configuration module (path not shown in this view)

@@ -1,7 +1,7 @@
-{ ... }: {
+{ setting, ... }: {
   text = ''
     require("ollama").setup {
-      model = "mistral",
+      model = "${setting.ollama.primaryModel}",
       url = "http://127.0.0.1:11434",
       -- View the actual default prompts in ./lua/ollama/prompts.lua
       prompts = {
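The setup keys here (model, url, prompts) match nomnivore/ollama.nvim; if that is the plugin in use, prompts are then invoked from Neovim with its picker command over the configured prompts table:

    :Ollama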

File: settings module (path not shown in this view)

@@ -61,4 +61,6 @@
     layouts = "us,ru";
     options = "grp:toggle";
   };
+
+  ollama.primaryModel = "llama3";
 }