_ARCHIVE_PATTERN="_[0-9]{12}-[[:alnum:]]{40}\.t[xg]z"
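# names produced by the functions below look like NAME_YYYYMMDDHHMM-SHA1.txz
# (or .tgz): a 12-digit date stamp from _ARCHIVE_DATE plus a sha1sum hash.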

_ARCHIVE_DATE()
{
    date +%Y%m%d%H%M
}
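
# e.g. 2023-11-24 01:19 becomes 202311240119, the 12-digit stamp matched above.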

# archive file with maximum compression and checksum.
# usage: archive [FILES]
archive()
{
    local IFS=$'\n'

    local targets=("${@}")      # target file(s).
    local count=0               # processed count.
    local total=${#}            # total to process.
    local date=$(_ARCHIVE_DATE) # date stamp.
    local failed=0              # failed count.

    # set default value to target all directories.
    if [[ "${targets}" = "" ]]; then
        targets=($(ls --classify | grep /$))
        total=${#targets[@]}
    fi

    # iterate each target.
    for target in "${targets[@]}"; do
        # increment counter.
        ((count++))

        # status info.
        local status="[${count}/${total}] ${target}"
        echo -e "${status}"

        # create archive: tar streams the target, pv shows progress against the
        # du size, xz -9e compresses hardest.
        tar -c "${target}" | pv -s $(du -sb "${target}" | awk '{print $1}') | xz -9e > "${target%/*}".txz

        # append date stamp and hash to target name.
        mv "${target%/*}".txz "${target%/*}"_${date}-$(pv "${target%/*}".txz | sha1sum | cut -d ' ' -f1).txz

        # show error.
        if [[ ${?} != 0 ]]; then
            ((failed++))
            echo -e "${color_bred}${status}: Failed.${color_default}"
        fi
    done

    # show error summary.
    if [[ ${failed} != 0 ]]; then
        echo -e "${color_bred}Failed: ${failed}.${color_default}"
        false
    fi
}
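
# typical use (illustrative, file names hypothetical):
#   archive              # archive every directory in the current one.
#   archive notes/       # -> notes_202311240119-<sha1>.txz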

# archive file with minimal compression and checksum.
# usage: archive_fast [FILES]
archive_fast()
{
    local IFS=$'\n'

    local targets=("${@}")      # target file(s).
    local count=0               # processed count.
    local total=${#}            # total to process.
    local date=$(_ARCHIVE_DATE) # date stamp.
    local failed=0              # failed count.

    # set default value to target all directories.
    if [[ "${targets}" = "" ]]; then
        targets=($(ls --classify | grep /$))
        total=${#targets[@]}
    fi

    # iterate each target.
    for target in "${targets[@]}"; do
        # increment counter.
        ((count++))

        # status info.
        local status="[${count}/${total}] ${target}"
        echo -e "${status}"

        # create archive.
        tar -c "${target}" | pv -s $(du -sb "${target}" | awk '{print $1}') | gzip -1 > "${target%/*}".tgz

        # append date stamp and hash to target name.
        mv "${target%/*}".tgz "${target%/*}"_${date}-$(pv "${target%/*}".tgz | sha1sum | cut -d ' ' -f1).tgz

        # show error.
        if [[ ${?} != 0 ]]; then
            ((failed++))
            echo -e "${color_bred}${status}: Failed.${color_default}"
        fi
    done

    # show error summary.
    if [[ ${failed} != 0 ]]; then
        echo -e "${color_bred}Failed: ${failed}.${color_default}"
        false
    fi
}
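
# identical flow to archive() above, but gzip -1 trades compression ratio for
# speed and writes .tgz instead of .txz.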

# check archive hashes.
# usage: archive_check [FILES]
archive_check()
{
    local IFS=$'\n'

    local targets=("${@}") # target file(s).
    local total=${#}       # total to process.
    local count=0          # processed count.
    local failed=0         # failed count.

    # set default value to target all supported archives.
    if [[ "${targets}" = "" ]]; then
        targets=($(ls | grep -E ${_ARCHIVE_PATTERN}))
        total=${#targets[@]}
    fi

    # iterate each target.
    for target in "${targets[@]}"; do
        # increment counter.
        ((count++))

        # status info.
        local status="[${count}/${total}] ${target}"
        echo -e "${status}"

        # extract hash from name.
        local data=($(_archive_parse ${target}))
        local saved=${data[2]}

        # calculate actual hash.
        local actual=$(pv "${target}" | sha1sum | cut -d ' ' -f1)

        # compare hashes, show error on mismatch.
        if [[ "${actual}" != "${saved}" ]]; then
            ((failed++))
            echo -e "${color_bred}${status}: Failed.${color_default}"
        fi
    done

    # show error summary.
    if [[ ${failed} != 0 ]]; then
        echo -e "${color_bred}Failed: ${failed}.${color_default}"
        false
    fi
}
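
# typical use (illustrative):
#   archive_check                 # verify every archive in the current directory.
#   archive_check notes_*.txz     # verify only the matching archives.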

# Delete old versions of an archive.
# Usage: archive_prune [NAME]
archive_prune()
{
    local IFS=$'\n'

    local targets=("${@}")
    local count=0
    local total=${#}
    local failed=0

    # All archives by default.
    if [[ "${targets}" = "" ]]; then
        targets=($(ls | grep -E ${_ARCHIVE_PATTERN}))
        total=${#targets[@]}
    fi

    # Iterate each target.
    for target in "${targets[@]}"; do
        # Only work with existing files.
        [[ -f "${target}" ]] || continue

        # Increment counter.
        ((count++))

        # Parse the target, then list every copy sharing its name part.
        local data=($(_archive_parse ${target}))
        local name="${data[0]}"
        local time="${data[1]}"
        local copies=($(ls ${name}_*))

        # Iterate each copy.
        for copy in "${copies[@]}"; do
            local copy_data=($(_archive_parse ${copy}))
            local copy_time="${copy_data[1]}"

            # Remove copies stamped older than the target.
            if [[ "${copy_time}" -lt "${time}" ]]; then
                echo -e "${name}: prune ${copy_time}."
                rm -- "${copy}"

                if [[ ${?} != 0 ]]; then
                    echo -e "${color_bred}${target}: Failed.${color_default}"
                    ((failed++))
                fi
            fi
        done
    done

    # Show error summary.
    if [[ ${failed} != 0 ]]; then
        echo -e "${color_bred}Failed: ${failed}.${color_default}"
        false
    fi
}
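
# illustrative run (hypothetical names): with notes_202310010000-<sha1>.txz and
# notes_202311240119-<sha1>.txz present, pruning the latter removes the copy
# with the older 202310010000 stamp.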

# extract previously created archive with checksum validation.
# usage: unarchive [FILES]
unarchive()
{
    local IFS=$'\n'

    local targets=("${@}") # target file(s).
    local count=0          # processed count.
    local total=${#}       # total to process.
    local failed=0         # failed count.

    # set default value to target all supported archives.
    if [[ "${targets}" = "" ]]; then
        targets=($(ls | grep -E ${_ARCHIVE_PATTERN}))
        total=${#targets[@]}
    fi

    # iterate each target.
    for target in "${targets[@]}"; do
        # increment counter.
        ((count++))

        # status info.
        local status="[${count}/${total}] ${target}"
        echo -e "${status}"

        # extract hash from name.
        local data=($(_archive_parse "${target}"))
        local saved="${data[2]}"

        # calculate actual hash.
        local actual=$(pv "${target}" | sha1sum | cut -d ' ' -f1)

        # extract if hash matched or show error if not.
        if [[ "${saved}" = "${actual}" ]]; then
            # figure out the compression tool.
            local compressor
            case "${target##*.}" in
                "txz")
                    compressor="xz -d"
                    ;;
                "tgz")
                    compressor="gzip -d"
                    ;;
            esac

            # extract; unset IFS so ${compressor} splits into command and flag.
            unset IFS
            pv "${target}" | ${compressor} | tar -xf -

            if [[ ${?} != 0 ]]; then
                echo -e "${color_bred}${status}: Failed.${color_default}"
                ((failed++))
            fi
        else
            # report validation error & continue.
            echo -e "${color_bred}${status}: Validation failed.${color_default}"
            ((failed++))
            continue
        fi
    done

    # show error summary.
    if [[ ${failed} != 0 ]]; then
        echo -e "${color_bred}Failed: ${failed}.${color_default}"
        false
    fi
}
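
# typical use (illustrative): `unarchive notes_202311240119-<sha1>.txz` checks
# the sha1 embedded in the name, then unpacks into the current directory.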

# rename archive. if no name specified, it simplifies archive's name.
# usage: archive_name [ARCHIVE] [NAME]
archive_name()
{
    local IFS=$'\n'

    local targets="${1}" # target archive(s).
    local name="${2}"    # new name.
    local total=1        # total targets to process.
    local count=0        # processed targets counter.
    local failed=0       # failed count.

    # set default value to target all supported archives.
    if [[ "${targets}" = "" ]]; then
        targets=($(ls | grep -E ${_ARCHIVE_PATTERN}))
        total=${#targets[@]}
    fi

    # iterate each target.
    for target in "${targets[@]}"; do
        # increment counter.
        ((count++))

        # simplify name by default.
        if [[ "${name}" = "" || ${count} -gt 1 ]]; then
            name="${target%_*}"
            name="$(parse_alnum ${name})"
        fi

        # keep the date-hash suffix, swap the name part.
        local data="${target##*_}"
        local new_name="${name}_${data}"

        # prepare status.
        local status="[${count}/${total}] ${target} -> ${new_name}"

        # check for the same name.
        if [[ "${target}" = "${new_name}" ]]; then
            echo -e "${status}"
            continue
        fi

        # check for existing target.
        if [[ -f "${new_name}" ]]; then
            echo -e "${color_bred}${status}: Already exists.${color_default}"
            ((failed++))
            continue
        fi

        echo -e "${status}"

        # rename.
        mv -- "${target}" "${new_name}"

        if [[ ${?} != 0 ]]; then
            echo -e "${color_bred}${status}: Failed.${color_default}"
            ((failed++))
        fi
    done

    # show error summary.
    if [[ ${failed} != 0 ]]; then
        echo -e "${color_bred}Failed: ${failed}.${color_default}"
        false
    fi
}
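
# illustrative rename (hypothetical names):
#   archive_name notes_202311240119-<sha1>.txz journal
#   -> journal_202311240119-<sha1>.txz
# with no NAME given, the name part is normalized via parse_alnum instead.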

# convert an old archive to a new format. TODO: remove me after some time when there won't be any old archives.
archive_convert()
{
    local IFS=$'\n'

    local old_format="_[[:alnum:]]{40}\.tar\.[xg]z"
    local targets=($(ls | grep -E ${old_format}))

    # add timestamp.
    for target in "${targets[@]}"; do
        local stamp=$(stat --format '%w' -- "${target}" | sed -e 's/\..*//' -e 's/:..$//' -e 's/-//g' -e 's/://' -e 's/\ //')
        local name="${target%_*}"
        local old_data="${target##*_}"
        local new_name="${name}_${stamp}-${old_data}"

        echo "${target} -> ${new_name}"

        mv -- "${target}" "${new_name}"
    done

    # convert .tar.xz and .tar.gz to .txz and .tgz.
    old_format="_[0-9]{12}-[[:alnum:]]{40}\.tar\.[xg]z"
    targets=($(ls | grep -E ${old_format}))

    for target in "${targets[@]}"; do
        local compression="${target##*.}"
        local new_compression

        case "${compression}" in
            "gz")
                new_compression="tgz"
                ;;
            "xz")
                new_compression="txz"
                ;;
        esac

        local new_name="${target%.tar.*}".${new_compression}

        echo "${target} -> ${new_name}"

        mv -- "${target}" "${new_name}"
    done
}
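
# illustrative conversion (hypothetical name): an old notes_<sha1>.tar.xz first
# gains a creation-time stamp (stat %w), then the suffix is folded:
#   notes_<sha1>.tar.xz -> notes_<stamp>-<sha1>.tar.xz -> notes_<stamp>-<sha1>.txz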

# split an archive name NAME_DATE-HASH.FORMAT into its parts, one per line.
_archive_parse()
{
    local input="${1}"
    local name="${input%_*}"
    local format="${input##*.}"
    local data="${input##*_}"; data="${data%.*}"
    local date="${data%%-*}"
    local hash="${data##*-}"

    echo "${name}"
    echo "${date}"
    echo "${hash}"
    echo "${format}"
}
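
# worked example (hypothetical name):
#   _archive_parse notes_202311240119-0123456789abcdef0123456789abcdef01234567.txz
# prints, one per line:
#   notes
#   202311240119
#   0123456789abcdef0123456789abcdef01234567
#   txz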

# export everything, primarily for use with parallel.
export -f archive archive_fast archive_check unarchive archive_name _ARCHIVE_DATE _archive_parse
export _ARCHIVE_PATTERN

# autocomplete.
_archive_name()
{
    local IFS=$'\n'

    COMPREPLY=()

    local cur="${COMP_WORDS[COMP_CWORD]}"
    local prev="${COMP_WORDS[COMP_CWORD-1]}"
    local command="${COMP_WORDS[0]}"

    # first argument completes archive names; the second suggests the name part
    # of the chosen archive as the default new name.
    if [[ "${prev}" = "${command}" ]]; then
        COMPREPLY=( $(compgen -W "$(ls | grep -E ${_ARCHIVE_PATTERN})" -- ${cur}) )
        return 0
    else
        local name="${prev%_*}"
        COMPREPLY=( $(compgen -W "${name}" -- ${cur}) )
        return 0
    fi
}

_archive_grep()
{
    _autocomplete_grep ${_ARCHIVE_PATTERN}
}

complete -o filenames -F _archive_grep archive_check unarchive
complete -o filenames -F _archive_name archive_name
|