{ ... }: {
  text = ''
    # Ask general AI.
    # Usage: ask <QUERY>
    function ask() {
      # The answer arrives as a stream; parallel -j1 processes the streamed
      # lines in order, and the sed/tr steps join the text fragments,
      # turning blank fragments back into newlines.
      curl http://localhost:11434/api/generate -d "{
        \"model\":\"''${OLLAMA_MODEL}\",
        \"raw\":true,
        \"prompt\":\"''${*}\"
      }" 2> /dev/null | parallel -j1 -- "printf '%s\n' {} | jq -r .response | sed -e 's/^$/\+\+\+/' | tr -d '\n' | sed -e 's/\+\+\+/\n/'"
      echo
    }
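
    # Each line of the response stream is a JSON object roughly of the form
    #   {"response":"<text fragment>","done":false}
    # (sketched from the Ollama generate API), which is why the pipeline in
    # ask extracts only the .response field of every line.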

    # Specify ask model.
    # Usage: ask_model <MODEL>
    function ask_model() {
      export OLLAMA_MODEL="''${1}"
    }
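
    # A minimal usage sketch; "llama3" is only a placeholder for a model
    # that has already been pulled with "ollama pull":
    #
    #   ask_model llama3
    #   ask "Summarize the Unix philosophy in one sentence."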

    # Complete ask_model with the names of locally available Ollama models.
    function _complete_ask_model() {
      local IFS=$'\n'
      # The first line of "ollama list" is a header row; drop it and keep
      # the first column (the model name).
      local models=($(ollama list | sed -e "1d" | cut -f1))
      _autocomplete ''${models[@]}
    }

    complete -F _complete_ask_model ask_model
  '';
}