# Shell helpers for querying a local Ollama server.
|
|
|
{ ... }: {
|
|
|
|
text = ''
|
|
|
|
# Ask general AI.
|
|
|
|
# Usage: ask <QUERY>
|
|
|
|
function ask() {
  # Query the local Ollama server with the whole argument list as the
  # prompt and print the streamed answer as a single line.
  # Requires: OLLAMA_MODEL (set via ask_model), curl, jq.
  #
  # Build the request body with jq --arg so quotes, backslashes, and
  # newlines in the query cannot break the JSON (the previous string
  # interpolation produced invalid JSON for such input).
  jq -cn --arg model "''${OLLAMA_MODEL}" --arg prompt "''${*}" \
    '{model: $model, prompt: $prompt}' \
    | curl http://localhost:11434/api/generate -d @- 2> /dev/null \
    | jq -r .response \
    | tr -d '\n'

  # The response fragments carry no trailing newline; terminate the line.
  echo
}
|
|
|
|
|
|
|
|
# Specify ask model.
|
|
|
|
function ask_model() {
|
2024-06-21 10:50:04 +03:00
|
|
|
export OLLAMA_MODEL="''${1}"
|
2024-04-06 03:03:58 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
function _complete_ask_model() {
  # Bash completion helper: offer the names of locally installed Ollama
  # models (first column of `ollama list`, header row dropped).
  local models
  # mapfile reads one model name per line without word-splitting and,
  # unlike `local models=( $(...) )`, does not mask the pipeline's
  # exit status behind the always-successful `local`.
  mapfile -t models < <(ollama list | sed -e "1d" | cut -f1)
  # Quote the expansion so names containing spaces survive intact.
  _autocomplete_first "''${models[@]}"
}
|
|
|
|
|
|
|
|
# Register tab-completion of installed model names for ask_model.
complete -F _complete_ask_model ask_model
|
|
|
|
'';
|
|
|
|
}
|