diff --git a/flake.lock b/flake.lock
index f751daac..84d8b21a 100644
--- a/flake.lock
+++ b/flake.lock
@@ -609,6 +609,22 @@
         "type": "github"
       }
     },
+    "nvimOllama": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1717906114,
+        "narHash": "sha256-8tW5tp2GiYw+PnR7rqiKfykLW/yqvGOtqauZCgEeQCg=",
+        "owner": "nomnivore",
+        "repo": "ollama.nvim",
+        "rev": "45e58779fecde7ac5b8f62800bbe7180d4b48507",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nomnivore",
+        "repo": "ollama.nvim",
+        "type": "github"
+      }
+    },
     "nvimPlenary": {
       "flake": false,
       "locked": {
@@ -746,6 +762,7 @@
         "nvimGruvboxMaterial": "nvimGruvboxMaterial",
         "nvimIndentoMatic": "nvimIndentoMatic",
         "nvimLspconfig": "nvimLspconfig",
+        "nvimOllama": "nvimOllama",
         "nvimPlenary": "nvimPlenary",
         "nvimTelescope": "nvimTelescope",
         "nvimTodo": "nvimTodo",
diff --git a/flake.nix b/flake.nix
index ebe8e1f3..d120e296 100644
--- a/flake.nix
+++ b/flake.nix
@@ -82,6 +82,10 @@
       url = "github:neovim/nvim-lspconfig";
       flake = false;
     };
+    nvimOllama = {
+      url = "github:nomnivore/ollama.nvim";
+      flake = false;
+    };
     nvimPlenary = {
       url = "github:nvim-lua/plenary.nvim";
       flake = false;
diff --git a/home/config/nvim/default.nix b/home/config/nvim/default.nix
index f350f2db..70fd5edf 100644
--- a/home/config/nvim/default.nix
+++ b/home/config/nvim/default.nix
@@ -35,6 +35,7 @@ in
     "${inputs.nvimGruvboxMaterial}"
     "${inputs.nvimIndentoMatic}"
     "${inputs.nvimLspconfig}"
+    "${inputs.nvimOllama}"
     "${inputs.nvimPlenary}"
     "${inputs.nvimTelescope}"
     "${inputs.nvimTodo}"
@@ -65,6 +66,7 @@ in
     ./module/plugin/Align.nix
     ./module/plugin/Treesitter.nix
     ./module/plugin/Fold.nix
+    ./module/plugin/Ollama.nix
     ./module/plugin/Colorizer.nix
     ./module/plugin/lsp/Go.nix
     ./module/plugin/lsp/Haskell.nix
@@ -80,6 +82,7 @@ in
     ./module/key/Filetree.nix
     ./module/key/Gitsigns.nix
     ./module/key/Navigation.nix
+    ./module/key/Prompt.nix
     ./module/key/Save.nix
     ./module/key/Sort.nix
     ./module/key/TabWidth.nix
diff --git a/home/config/nvim/module/key/Prompt.nix b/home/config/nvim/module/key/Prompt.nix
new file mode 100644
index 00000000..13c9b346
--- /dev/null
+++ b/home/config/nvim/module/key/Prompt.nix
@@ -0,0 +1,7 @@
+{ ... }:
+{
+  text = ''
+    rekey_normal("p", ":lua require('ollama').prompt()")
+    rekey_visual("p", ":lua require('ollama').prompt()")
+  '';
+}
diff --git a/home/config/nvim/module/plugin/Ollama.nix b/home/config/nvim/module/plugin/Ollama.nix
new file mode 100644
index 00000000..01503feb
--- /dev/null
+++ b/home/config/nvim/module/plugin/Ollama.nix
@@ -0,0 +1,18 @@
+{ config, ... }:
+{
+  text = ''
+    require("ollama").setup {
+      model = "${config.setting.ollama.primaryModel}",
+      url = "http://127.0.0.1:11434",
+      -- View the actual default prompts in ./lua/ollama/prompts.lua
+      prompts = {
+        -- Sample_Prompt = {
+        --   prompt = "This is a sample prompt that receives $input and $sel(ection), among others.",
+        --   input_label = "> ",
+        --   model = "mistral",
+        --   action = "display",
+        -- }
+      }
+    }
+  '';
+}
diff --git a/home/program/bash/module/Ask.nix b/home/program/bash/module/Ask.nix
new file mode 100644
index 00000000..d1a9fe7f
--- /dev/null
+++ b/home/program/bash/module/Ask.nix
@@ -0,0 +1,27 @@
+{ ... }:
+{
+  text = ''
+    # Ask general AI.
+    # Usage: ask <prompt...>
+    function ask() {
+      curl http://localhost:11434/api/generate 2> /dev/null \
+        -d "$(jq -cn --arg model "''${OLLAMA_MODEL}" --arg prompt "''${*}" \
+          '{model: $model, prompt: $prompt}')" \
+        | parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'"
+      echo
+    }
+
+    # Specify ask model.
+    function ask_model() {
+      export OLLAMA_MODEL="''${1}"
+    }
+
+    function _complete_ask_model() {
+      local IFS=$'\n'
+      local models=($(ollama list | sed -e "1d" | cut -f1))
+      _autocomplete_first ''${models[@]}
+    }
+
+    complete -F _complete_ask_model ask_model
+  '';
+}
diff --git a/home/program/bash/module/Zapret.nix b/home/program/bash/module/Zapret.nix
new file mode 100644
index 00000000..5b3fd733
--- /dev/null
+++ b/home/program/bash/module/Zapret.nix
@@ -0,0 +1,11 @@
+{ ... }:
+{
+  text = ''
+    # FRKN.
+    # SOURCE: https://github.com/bol-van/zapret
+    function zapret() {
+      iptables -t mangle -I POSTROUTING -p tcp -m multiport --dports 80,443 -m connbytes --connbytes-dir=original --connbytes-mode=packets --connbytes 1:6 -m mark ! --mark 0x40000000/0x40000000 -j NFQUEUE --queue-num 201 --queue-bypass
+      nfqws --pidfile=/run/nfqws.pid --qnum=201 "''${@}"
+    }
+  '';
+}
diff --git a/host/x86_64-linux/desktop/default.nix b/host/x86_64-linux/desktop/default.nix
index de324650..6f0bfb82 100644
--- a/host/x86_64-linux/desktop/default.nix
+++ b/host/x86_64-linux/desktop/default.nix
@@ -11,6 +11,7 @@
     builder.client.enable = true;
     distrobox.enable = true;
     keyd.enable = true;
+    ollama.enable = true;
     print.enable = true;
     virtmanager.enable = true;
     amd = {
diff --git a/module/Ollama.nix b/module/Ollama.nix
new file mode 100644
index 00000000..0a765295
--- /dev/null
+++ b/module/Ollama.nix
@@ -0,0 +1,61 @@
+# SRC: https://github.com/ollama/ollama
+{
+  pkgsStable,
+  lib,
+  config,
+  ...
+}:
+with lib;
+let
+  pkgs = pkgsStable;
+  cfg = config.module.ollama;
+in
+{
+  options = {
+    module.ollama = {
+      enable = mkEnableOption "Local LLM server";
+      primaryModel = mkOption {
+        default = "llama3.2";
+        type = types.str;
+      };
+      models = mkOption {
+        default = [ cfg.primaryModel ];
+        type = types.listOf types.str;
+      };
+    };
+  };
+
+  config = mkIf cfg.enable {
+    environment = {
+      # Specify default model.
+      variables.OLLAMA_MODEL = cfg.primaryModel;
+    };
+
+    systemd.services = {
+      # Enable Ollama server.
+      ollama = {
+        description = "Ollama LLM server.";
+        wantedBy = [ "multi-user.target" ];
+        wants = [ "NetworkManager-wait-online.service" ];
+        after = [ "NetworkManager-wait-online.service" ];
+        serviceConfig.Type = "simple";
+        script = ''
+          HOME=/root ${getExe pkgs.ollama} serve
+        '';
+      };
+
+      # Download Ollama models.
+      ollama-pull = {
+        description = "Ollama LLM model downloader.";
+        wantedBy = [ "multi-user.target" ];
+        wants = [ "ollama.service" ];
+        after = [ "ollama.service" ];
+        serviceConfig.Type = "simple";
+        script = ''
+          sleep 5
+          for model in ${concatStringsSep " " cfg.models}; do ${getExe pkgs.ollama} pull "$model"; done
+        '';
+      };
+    };
+  };
+}