Ollama: Bring back.

Dmitry Voronin 2024-10-23 19:21:14 +03:00
parent acc699a567
commit d3e936f844
Signed by: voronind
SSH key fingerprint: SHA256:3kBb4iV2ahufEBNq+vFbUe4QYfHt98DHQjN7QaptY9k
9 changed files with 149 additions and 0 deletions


@@ -609,6 +609,22 @@
        "type": "github"
      }
    },
    "nvimOllama": {
      "flake": false,
      "locked": {
        "lastModified": 1717906114,
        "narHash": "sha256-8tW5tp2GiYw+PnR7rqiKfykLW/yqvGOtqauZCgEeQCg=",
        "owner": "nomnivore",
        "repo": "ollama.nvim",
        "rev": "45e58779fecde7ac5b8f62800bbe7180d4b48507",
        "type": "github"
      },
      "original": {
        "owner": "nomnivore",
        "repo": "ollama.nvim",
        "type": "github"
      }
    },
    "nvimPlenary": {
      "flake": false,
      "locked": {
@@ -746,6 +762,7 @@
        "nvimGruvboxMaterial": "nvimGruvboxMaterial",
        "nvimIndentoMatic": "nvimIndentoMatic",
        "nvimLspconfig": "nvimLspconfig",
        "nvimOllama": "nvimOllama",
        "nvimPlenary": "nvimPlenary",
        "nvimTelescope": "nvimTelescope",
        "nvimTodo": "nvimTodo",


@@ -82,6 +82,10 @@
      url = "github:neovim/nvim-lspconfig";
      flake = false;
    };
    nvimOllama = {
      url = "github:nomnivore/ollama.nvim";
      flake = false;
    };
    nvimPlenary = {
      url = "github:nvim-lua/plenary.nvim";
      flake = false;


@@ -35,6 +35,7 @@ in
      "${inputs.nvimGruvboxMaterial}"
      "${inputs.nvimIndentoMatic}"
      "${inputs.nvimLspconfig}"
      "${inputs.nvimOllama}"
      "${inputs.nvimPlenary}"
      "${inputs.nvimTelescope}"
      "${inputs.nvimTodo}"
@@ -65,6 +66,7 @@ in
      ./module/plugin/Align.nix
      ./module/plugin/Treesitter.nix
      ./module/plugin/Fold.nix
      ./module/plugin/Ollama.nix
      ./module/plugin/Colorizer.nix
      ./module/plugin/lsp/Go.nix
      ./module/plugin/lsp/Haskell.nix
@@ -80,6 +82,7 @@ in
      ./module/key/Filetree.nix
      ./module/key/Gitsigns.nix
      ./module/key/Navigation.nix
      ./module/key/Prompt.nix
      ./module/key/Save.nix
      ./module/key/Sort.nix
      ./module/key/TabWidth.nix


@@ -0,0 +1,7 @@
{ ... }:
{
  text = ''
    rekey_normal("<Leader>p", ":<c-u>lua require('ollama').prompt()<cr>")
    rekey_visual("<Leader>p", ":<c-u>lua require('ollama').prompt()<cr>")
  '';
}


@@ -0,0 +1,18 @@
{ config, ... }:
{
  text = ''
    require("ollama").setup {
      model = "${config.setting.ollama.primaryModel}",
      url = "http://127.0.0.1:11434",
      -- View the actual default prompts in ./lua/ollama/prompts.lua
      prompts = {
        -- Sample_Prompt = {
        --   prompt = "This is a sample prompt that receives $input and $sel(ection), among others.",
        --   input_label = "> ",
        --   model = "mistral",
        --   action = "display",
        -- }
      }
    }
  '';
}
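
The plugin only works when an Ollama server is answering on the `url` configured above. A minimal sanity check from a shell (assuming the default port 11434; `/api/version` is part of Ollama's HTTP API):

# Confirm the server the plugin points at is reachable.
curl -s http://127.0.0.1:11434/api/version
# Prints a small JSON object like {"version":"..."} when the server is up.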


@@ -0,0 +1,27 @@
{ ... }:
{
  text = ''
    # Ask general AI.
    # Usage: ask <QUERY>
    function ask() {
      curl http://localhost:11434/api/generate -d "{
        \"model\": \"''${OLLAMA_MODEL}\",
        \"prompt\": \"''${*}\"
      }" 2> /dev/null | parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'"
      echo
    }

    # Specify ask model.
    function ask_model() {
      export OLLAMA_MODEL="''${1}"
    }

    function _complete_ask_model() {
      local IFS=$'\n'
      local models=($(ollama list | sed -e "1d" | cut -f1))
      _autocomplete_first ''${models[@]}
    }

    complete -F _complete_ask_model ask_model
  '';
}
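
For reference, a typical session with these helpers could look like this (hypothetical usage; `llama3.2` matches the default primaryModel of the module further down):

ask_model llama3.2            # select a model; tab completion lists `ollama list` output
ask "Why is the sky blue?"    # streamed JSON chunks are flattened to plain text via jq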


@@ -0,0 +1,11 @@
{ ... }:
{
  text = ''
    # FRKN.
    # SOURCE: https://github.com/bol-van/zapret
    function zapret() {
      iptables -t mangle -I POSTROUTING -p tcp -m multiport --dports 80,443 -m connbytes --connbytes-dir=original --connbytes-mode=packets --connbytes 1:6 -m mark ! --mark 0x40000000/0x40000000 -j NFQUEUE --queue-num 201 --queue-bypass
      nfqws --pidfile=/run/nfqws.pid --qnum=201 "''${@}"
    }
  '';
}
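
To verify the function took effect, the mangle-table rule it inserts should be visible (a hedged check; exact output formatting depends on the iptables version):

iptables -t mangle -S POSTROUTING | grep NFQUEUE
# Expect the rule above, ending in: -j NFQUEUE --queue-num 201 --queue-bypass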


@@ -11,6 +11,7 @@
    builder.client.enable = true;
    distrobox.enable = true;
    keyd.enable = true;
    ollama.enable = true;
    print.enable = true;
    virtmanager.enable = true;
    amd = {

module/Ollama.nix (new file, 61 lines)

@@ -0,0 +1,61 @@
# SRC: https://github.com/ollama/ollama
{
  pkgsStable,
  lib,
  config,
  ...
}:
with lib;
let
  pkgs = pkgsStable;
  cfg = config.module.ollama;
in
{
  options = {
    module.ollama = {
      enable = mkEnableOption "Local LLM server";
      primaryModel = mkOption {
        default = "llama3.2";
        type = types.str;
      };
      models = mkOption {
        default = [ cfg.primaryModel ];
        type = types.listOf types.str;
      };
    };
  };
  config = mkIf cfg.enable {
    environment = {
      # Specify default model.
      variables.OLLAMA_MODEL = cfg.primaryModel;
    };
    systemd.services = {
      # Enable Ollama server.
      ollama = {
        description = "Ollama LLM server.";
        wantedBy = [ "multi-user.target" ];
        wants = [ "NetworkManager-wait-online.service" ];
        after = [ "NetworkManager-wait-online.service" ];
        serviceConfig.Type = "simple";
        script = ''
          HOME=/root ${getExe pkgs.ollama} serve
        '';
      };
      # Download Ollama models.
      ollama-pull = {
        description = "Ollama LLM model.";
        wantedBy = [ "multi-user.target" ];
        wants = [ "ollama.service" ];
        after = [ "ollama.service" ];
        serviceConfig.Type = "simple";
        script = ''
          # Give the server a moment to come up, then pull each
          # configured model (ollama pull takes one model per call).
          sleep 5
          for model in ${concatStringsSep " " cfg.models}; do
            ${getExe pkgs.ollama} pull "''${model}"
          done
        '';
      };
    };
  };
}
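
After a rebuild with `ollama.enable = true;` (as in the host config above), the result can be checked like this (assumed commands; unit names follow the `systemd.services` keys, and OLLAMA_MODEL is exported to new login sessions):

systemctl status ollama.service ollama-pull.service   # server + model download units
ollama list                                           # models fetched by ollama-pull
echo "$OLLAMA_MODEL"                                  # default model set by the module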