Compare commits

..

No commits in common. "96b648d44d52985592591b841012e1cb480153f2" and "0b37fca82194d57286275f7b49381a650b013006" have entirely different histories.

7 changed files with 16 additions and 33 deletions

View file

@@ -359,17 +359,17 @@
},
"nixpkgsStable": {
"locked": {
"lastModified": 1718835956,
"narHash": "sha256-wM9v2yIxClRYsGHut5vHICZTK7xdrUGfrLkXvSuv6s4=",
"lastModified": 1717786204,
"narHash": "sha256-4q0s6m0GUcN7q+Y2DqD27iLvbcd1G50T2lv08kKxkSI=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "dd457de7e08c6d06789b1f5b88fc9327f4d96309",
"rev": "051f920625ab5aabe37c920346e3e69d7d34400e",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-24.05",
"repo": "nixpkgs",
"rev": "051f920625ab5aabe37c920346e3e69d7d34400e",
"type": "github"
}
},

View file

@@ -8,7 +8,7 @@
# Manual: https://nixos.org/manual/nixos/stable
# Search: https://search.nixos.org/packages and https://search.nixos.org/options
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
nixpkgsStable.url = "github:nixos/nixpkgs/nixos-24.05";
nixpkgsStable.url = "github:nixos/nixpkgs/051f920625ab5aabe37c920346e3e69d7d34400e";
nixpkgsMaster.url = "github:nixos/nixpkgs/master";
# This thing manages user's /home directories. Because NixOS only manages system itself.
@@ -248,8 +248,7 @@
./module/AmdCompute.nix
./module/AmdCpu.nix
./module/AmdGpu.nix
./module/DockerRootless.nix
./module/Ollama.nix
# ./module/Ollama.nix # ISSUE: Currently broken.
./module/PowersaveAmd.nix
./module/Print.nix
./module/RemoteBuild.nix

View file

@@ -1,14 +1,6 @@
# https://github.com/ollama/ollama
{ pkgsStable, lib, setting, ... }: let
pkgs = pkgsStable;
in {
environment = {
# Add Ollama CLI app.
systemPackages = with pkgs; [ ollama ];
# Specify default model.
variables.OLLAMA_MODEL = setting.ollama.primaryModel;
};
{ pkgs, lib, ... }: {
# Add Ollama CLI app.
environment.systemPackages = with pkgs; [ ollama ];
# Enable Ollama server.
systemd.services.ollama = {
@@ -31,7 +23,7 @@ in {
serviceConfig.Type = "simple";
script = ''
sleep 5
${lib.getExe pkgs.ollama} pull ${setting.ollama.primaryModel}
${lib.getExe pkgs.ollama} pull mistral
'';
};
}

View file

@@ -1,10 +1,12 @@
{ ... }: {
text = ''
export _ask_model="mistral"
# Ask general AI.
# Usage: ask <QUERY>
function ask() {
curl http://localhost:11434/api/generate -d "{
\"model\": \"''${OLLAMA_MODEL}\",
\"model\": \"''${_ask_model}\",
\"prompt\":\"''${*}\"
}" 2> /dev/null | parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'"
echo
@@ -12,7 +14,7 @@
# Specify ask model.
function ask_model() {
export OLLAMA_MODEL="''${1}"
export _ask_model="''${1}"
}
function _complete_ask_model() {

View file

@@ -1,7 +1,7 @@
{ setting, ... }: {
{ ... }: {
text = ''
require("ollama").setup {
model = "${setting.ollama.primaryModel}",
model = "mistral",
url = "http://127.0.0.1:11434",
-- View the actual default prompts in ./lua/ollama/prompts.lua
prompts = {

View file

@@ -1,8 +0,0 @@
# Use stable packages for Nix and Nixos-Rebuild.
{ pkgsStable, ... }: {
nixpkgs.overlays = [ (final: prev: {
nix = pkgsStable.nix;
nixos-rebuild = pkgsStable.nixos-rebuild;
})];
}

View file

@@ -61,6 +61,4 @@
layouts = "us,ru";
options = "grp:toggle";
};
ollama.primaryModel = "llama3";
}