Desktop : Enable DockerRootless & Ollama.

Dmitry Voronin 2024-06-21 10:50:04 +03:00
parent 0b37fca821
commit c83c0b51ca
Signed by: voronind
SSH key fingerprint: SHA256:3kBb4iV2ahufEBNq+vFbUe4QYfHt98DHQjN7QaptY9k
6 changed files with 25 additions and 16 deletions

flake.lock

@@ -359,17 +359,17 @@
     },
     "nixpkgsStable": {
       "locked": {
-        "lastModified": 1717786204,
-        "narHash": "sha256-4q0s6m0GUcN7q+Y2DqD27iLvbcd1G50T2lv08kKxkSI=",
+        "lastModified": 1718835956,
+        "narHash": "sha256-wM9v2yIxClRYsGHut5vHICZTK7xdrUGfrLkXvSuv6s4=",
         "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "051f920625ab5aabe37c920346e3e69d7d34400e",
+        "rev": "dd457de7e08c6d06789b1f5b88fc9327f4d96309",
         "type": "github"
       },
       "original": {
         "owner": "nixos",
+        "ref": "nixos-24.05",
         "repo": "nixpkgs",
-        "rev": "051f920625ab5aabe37c920346e3e69d7d34400e",
         "type": "github"
       }
     },
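
The stable input is now locked against the nixos-24.05 release branch instead of a hard-pinned revision, so the pin can follow the branch. Refreshing it later is done with one of the following, depending on Nix version (standard Nix CLI commands, not something from this repo):

# Nix 2.19 and newer:
nix flake update nixpkgsStable
# Older Nix:
nix flake lock --update-input nixpkgsStable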

flake.nix

@@ -8,7 +8,7 @@
     # Manual: https://nixos.org/manual/nixos/stable
     # Search: https://search.nixos.org/packages and https://search.nixos.org/options
     nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
-    nixpkgsStable.url = "github:nixos/nixpkgs/051f920625ab5aabe37c920346e3e69d7d34400e";
+    nixpkgsStable.url = "github:nixos/nixpkgs/nixos-24.05";
     nixpkgsMaster.url = "github:nixos/nixpkgs/master";
 
     # This thing manages users' /home directories, because NixOS only manages the system itself.
@@ -248,7 +248,8 @@
       ./module/AmdCompute.nix
       ./module/AmdCpu.nix
       ./module/AmdGpu.nix
-      # ./module/Ollama.nix # ISSUE: Currently broken.
+      ./module/DockerRootless.nix
+      ./module/Ollama.nix
       ./module/PowersaveAmd.nix
       ./module/Print.nix
       ./module/RemoteBuild.nix
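
The newly imported module/DockerRootless.nix is not part of this diff view. For reference, a minimal sketch built on the stock NixOS rootless-Docker options could look like this; the actual module contents are an assumption:

# Hypothetical sketch of module/DockerRootless.nix (real file not shown in this commit).
{ ... }: {
  virtualisation.docker.rootless = {
    enable = true;
    # Point DOCKER_HOST at the per-user rootless daemon socket.
    setSocketVariable = true;
  };
}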

module/Ollama.nix

@@ -1,6 +1,14 @@
-{ pkgs, lib, ... }: {
-  # Add Ollama CLI app.
-  environment.systemPackages = with pkgs; [ ollama ];
+# https://github.com/ollama/ollama
+{ pkgsStable, lib, setting, ... }: let
+  pkgs = pkgsStable;
+in {
+  environment = {
+    # Add Ollama CLI app.
+    systemPackages = with pkgs; [ ollama ];
+
+    # Specify default model.
+    variables.OLLAMA_MODEL = setting.ollama.primaryModel;
+  };
 
   # Enable Ollama server.
   systemd.services.ollama = {
@@ -23,7 +31,7 @@
     serviceConfig.Type = "simple";
     script = ''
       sleep 5
-      ${lib.getExe pkgs.ollama} pull mistral
+      ${lib.getExe pkgs.ollama} pull ${setting.ollama.primaryModel}
     '';
   };
 }
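
Since OLLAMA_MODEL is now exported system-wide, interactive sessions can target the same default model the service pulls on startup, for example:

# Chat with the configured default model:
ollama run "$OLLAMA_MODEL"
# Re-pull it manually, mirroring the unit's script above:
ollama pull "$OLLAMA_MODEL"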

(bash module defining the ask helper; filename not shown)

@@ -1,12 +1,10 @@
 { ... }: {
   text = ''
-    export _ask_model="mistral"
-
     # Ask general AI.
     # Usage: ask <QUERY>
     function ask() {
       curl http://localhost:11434/api/generate -d "{
-        \"model\": \"''${_ask_model}\",
+        \"model\": \"''${OLLAMA_MODEL}\",
         \"prompt\":\"''${*}\"
       }" 2> /dev/null | parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'"
       echo
@@ -14,7 +12,7 @@
 
     # Specify ask model.
     function ask_model() {
-      export _ask_model="''${1}"
+      export OLLAMA_MODEL="''${1}"
     }
 
     function _complete_ask_model() {
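
Usage of the helpers is unchanged by this commit; only the backing variable moved to the shared OLLAMA_MODEL:

ask "why is the sky blue"      # queries the model named in $OLLAMA_MODEL
ask_model mistral              # switch model for the current shell only
ask "answer again as mistral"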

(Neovim Ollama plugin configuration module; filename not shown)

@@ -1,7 +1,7 @@
-{ ... }: {
+{ setting, ... }: {
   text = ''
     require("ollama").setup {
-      model = "mistral",
+      model = "${setting.ollama.primaryModel}",
       url = "http://127.0.0.1:11434",
       -- View the actual default prompts in ./lua/ollama/prompts.lua
       prompts = {
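
Judging by require("ollama").setup, this configures the ollama.nvim plugin, whose prompts would then be run from Neovim via its :Ollama command against the same local server; the plugin identity and command are inferred, since the file path is not shown here.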

(shared settings module defining ollama.primaryModel; filename not shown)

@@ -61,4 +61,6 @@
     layouts = "us,ru";
     options = "grp:toggle";
   };
+
+  ollama.primaryModel = "llama3";
 }
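
For setting to appear as a module argument (as module/Ollama.nix and the Neovim module above expect), the flake presumably forwards it via specialArgs, and pkgsStable would be passed the same way. A minimal sketch of that wiring, with names assumed rather than taken from this commit:

# Hypothetical wiring in flake.nix; actual attribute names may differ.
nixosConfigurations.desktop = nixpkgs.lib.nixosSystem {
  system = "x86_64-linux";
  # Extra arguments made visible to every imported module:
  specialArgs = { inherit setting; pkgsStable = nixpkgsStable.legacyPackages.x86_64-linux; };
  modules = [ ./module/DockerRootless.nix ./module/Ollama.nix ];
};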