Ollama: Add codellama.

This commit is contained in:
Dmitry Voronin 2024-02-29 08:20:11 +03:00
parent 299df8305e
commit ddb61d4091
3 changed files with 15 additions and 2 deletions

View file

@ -182,11 +182,12 @@
hostname = "home"; hostname = "home";
system = "x86_64-linux"; system = "x86_64-linux";
modules = [ modules = [
./module/AmdGpu.nix
./module/AmdCpu.nix ./module/AmdCpu.nix
./module/AmdGpu.nix
./module/Docker.nix ./module/Docker.nix
./module/Ftpd.nix ./module/Ftpd.nix
./module/Gnome.nix ./module/Gnome.nix
./module/Ollama.nix
./module/PowersaveAmd.nix ./module/PowersaveAmd.nix
./user/Voronind.nix ./user/Voronind.nix
]; ];
@ -226,6 +227,7 @@
./module/Desktop.nix ./module/Desktop.nix
./module/Gnome.nix ./module/Gnome.nix
./module/IntelCpu.nix ./module/IntelCpu.nix
./module/Ollama.nix
./module/Powerlimit.nix ./module/Powerlimit.nix
./module/PowersaveIntel.nix ./module/PowersaveIntel.nix
./module/Print.nix ./module/Print.nix

View file

@ -18,6 +18,7 @@
serviceConfig.Type = "simple"; serviceConfig.Type = "simple";
script = '' script = ''
${lib.getExe pkgs.ollama} pull llama2-uncensored ${lib.getExe pkgs.ollama} pull llama2-uncensored
${lib.getExe pkgs.ollama} pull codellama
''; '';
}; };
} }

View file

@ -1,4 +1,4 @@
# Ask AI. # Ask general AI.
# Usage: ask <QUERY> # Usage: ask <QUERY>
function ask() { function ask() {
curl http://localhost:11434/api/generate -d "{ curl http://localhost:11434/api/generate -d "{
@ -7,3 +7,13 @@ function ask() {
}" 2> /dev/null | parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'" }" 2> /dev/null | parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'"
echo echo
} }
# Ask code AI (codellama model served by a local Ollama instance).
# Usage: ask_code <QUERY>
# Output: the model's response, streamed tokens joined into one line, to stdout.
function ask_code() {
	local payload
	# Build the JSON request with jq so quotes, backslashes, and newlines in
	# the query cannot break the body (raw "${*}" interpolation is injectable).
	payload=$(jq -n --arg prompt "${*}" '{model: "codellama", prompt: $prompt}') || return
	curl http://localhost:11434/api/generate -d "$payload" 2> /dev/null \
		| parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'"
	echo
}