2023-12-07 04:02:47 +03:00
|
|
|
# ERE matching managed archive files: <name>_<YYYYmmddHHMM>-<sha1>.{txz,tgz}.
# The dot before the extension is escaped so it matches only a literal '.'.
export _archive_pattern="_[0-9]{12}-[[:alnum:]]{40}\.t[xg]z"
|
2023-12-07 01:44:42 +03:00
|
|
|
|
2023-12-07 04:02:47 +03:00
|
|
|
# Archive directories.
# All directories by default.
# Usage: archive [DIRS]
function archive() {
  local IFS=$'\n'
  local targets=("${@}")
  local count=0
  local total=${#}
  local date=$(_archive_date)
  local failed=0

  # Set default value to target all directories.
  if [[ ${#targets[@]} -eq 0 ]]; then
    targets=($(ls --classify | grep '/$'))
    total=${#targets[@]}
  fi

  # Iterate each target.
  for target in "${targets[@]}"; do
    # Increment counter.
    ((count++))

    # Status info.
    local status="[${count}/${total}] ${target}"
    echo -e "${status}"

    # Normalized base name for the archive file.
    local name=$(parse_camel "${target}")

    # Create archive. pv reports progress against the uncompressed size.
    tar -c "${target}" | pv -s $(/usr/bin/du -sb "${target}" | awk '{print $1}') | xz -9e > "${name}".txz

    # Fail on any stage of the pipeline (tar/pv/xz), not just the last one,
    # and skip hashing/renaming a broken archive.
    local stages=("${PIPESTATUS[@]}")
    if [[ "${stages[0]}" != 0 || "${stages[1]}" != 0 || "${stages[2]}" != 0 ]]; then
      ((failed++))
      echo -e "${color_bred}${status}: Failed.${color_default}"
      continue
    fi

    # Append timestamp and content hash to the archive name.
    mv "${name}".txz "${name}"_${date}-$(pv "${name}".txz | sha1sum | cut -d ' ' -f 1).txz

    # Show error.
    if [[ ${?} != 0 ]]; then
      ((failed++))
      echo -e "${color_bred}${status}: Failed.${color_default}"
    fi
  done

  # Show failure summary and return non-zero status.
  if [[ ${failed} != 0 ]]; then
    echo -e "${color_bred}Failed: ${failed}.${color_default}"
    false
  fi
}
|
|
|
|
|
2023-12-07 04:02:47 +03:00
|
|
|
# Archive directories with fast compression.
# All directories by default.
# Usage: archive_fast [DIRS]
function archive_fast() {
  local IFS=$'\n'
  local targets=("${@}")
  local count=0
  local total=${#}
  local date=$(_archive_date)
  local failed=0

  # Set default value to target all directories.
  if [[ ${#targets[@]} -eq 0 ]]; then
    targets=($(ls --classify | grep '/$'))
    total=${#targets[@]}
  fi

  # Iterate each target.
  for target in "${targets[@]}"; do
    # Increment counter.
    ((count++))

    # Status info.
    local status="[${count}/${total}] ${target}"
    echo -e "${status}"

    # Normalized base name for the archive file.
    local name=$(parse_camel "${target}")

    # Create archive with fast (level 1) gzip compression.
    tar -c "${target}" | pv -s $(/usr/bin/du -sb "${target}" | awk '{print $1}') | gzip -1 > "${name}".tgz

    # Fail on any stage of the pipeline (tar/pv/gzip), not just the last one,
    # and skip hashing/renaming a broken archive.
    local stages=("${PIPESTATUS[@]}")
    if [[ "${stages[0]}" != 0 || "${stages[1]}" != 0 || "${stages[2]}" != 0 ]]; then
      ((failed++))
      echo -e "${color_bred}${status}: Failed.${color_default}"
      continue
    fi

    # Append timestamp and content hash to the archive name.
    mv "${name}".tgz "${name}"_${date}-$(pv "${name}".tgz | sha1sum | cut -d ' ' -f 1).tgz

    # Show error.
    if [[ ${?} != 0 ]]; then
      ((failed++))
      echo -e "${color_bred}${status}: Failed.${color_default}"
    fi
  done

  # Show failure summary and return non-zero status.
  if [[ ${failed} != 0 ]]; then
    echo -e "${color_bred}Failed: ${failed}.${color_default}"
    false
  fi
}
|
|
|
|
|
2023-12-07 04:02:47 +03:00
|
|
|
# Check archives integrity.
# Checks all archives by default.
# Usage: archive_check [FILES]
function archive_check() {
  local IFS=$'\n'
  local targets=("${@}")
  local total=${#}
  local count=0
  local failed=0

  # Set default value to target all supported archives.
  if [[ ${#targets[@]} -eq 0 ]]; then
    targets=($(ls | grep -E "${_archive_pattern}"))
    total=${#targets[@]}
  fi

  # Iterate each target.
  for target in "${targets[@]}"; do
    # Increment counter.
    ((count++))

    # Status info.
    local status="[${count}/${total}] ${target}"
    echo -e "${status}"

    # Extract the saved hash from the file name.
    local data=($(_archive_parse "${target}"))
    local saved="${data[2]}"

    # Calculate actual hash; pv shows read progress.
    local actual=$(pv "${target}" | sha1sum | cut -d ' ' -f 1)

    # Compare hashes, show error on mismatch.
    if [[ "${actual}" != "${saved}" ]]; then
      ((failed++))
      echo -e "${color_bred}${status}: Failed.${color_default}"
    fi
  done

  # Show failure summary and return non-zero status.
  if [[ ${failed} != 0 ]]; then
    echo -e "${color_bred}Failed: ${failed}.${color_default}"
    false
  fi
}
|
|
|
|
|
2023-12-07 04:36:44 +03:00
|
|
|
# Delete old versions of archives.
# All archives by default.
# Usage: archive_prune [NAME]
function archive_prune() {
  local IFS=$'\n'
  local targets=("${@}")
  local count=0
  local total=${#}
  local failed=0

  # All archives by default.
  if [[ ${#targets[@]} -eq 0 ]]; then
    targets=($(ls | grep -E "${_archive_pattern}"))
    total=${#targets[@]}
  fi

  # Iterate each target.
  for target in "${targets[@]}"; do
    # Only work with existing files (a target may already have been pruned
    # as an old copy of an earlier target).
    [[ -f "${target}" ]] || continue

    # Iterate counter.
    ((count++))

    # Parse out the base name and timestamp of the current target.
    local data=($(_archive_parse "${target}"))
    local name="${data[0]}"
    local time="${data[1]}"

    # All versions sharing the same base name.
    local copies=($(ls "${name}"_*))

    # Iterate each copy; delete every copy older than the current target.
    for copy in "${copies[@]}"; do
      local copy_data=($(_archive_parse "${copy}"))
      local copy_time="${copy_data[1]}"

      if [[ "${copy_time}" -lt "${time}" ]]; then
        echo -e "${name}: prune ${copy_time}."

        if ! rm -- "${copy}"; then
          echo -e "${color_bred}${target}: Failed.${color_default}"
          ((failed++))
        fi
      fi
    done
  done

  # Show failure summary and return non-zero status.
  if [[ ${failed} != 0 ]]; then
    echo -e "${color_bred}Failed: ${failed}.${color_default}"
    false
  fi
}
|
|
|
|
|
2023-12-07 04:02:47 +03:00
|
|
|
# Extract previously created archive with checksum validation.
# Usage: unarchive [FILES]
function unarchive() {
  local IFS=$'\n'
  local targets=("${@}")
  local count=0
  local total=${#}
  local failed=0

  # Set default value to target all supported archives.
  if [[ ${#targets[@]} -eq 0 ]]; then
    targets=($(ls | grep -E "${_archive_pattern}"))
    total=${#targets[@]}
  fi

  # Iterate each target.
  for target in "${targets[@]}"; do
    # Increment counter.
    ((count++))

    # Status info.
    local status="[${count}/${total}] ${target}"
    echo -e "${status}"

    # Extract the saved hash from the file name.
    local data=($(_archive_parse "${target}"))
    local saved="${data[2]}"

    # Calculate actual hash; pv shows read progress.
    local actual=$(pv "${target}" | sha1sum | cut -d ' ' -f 1)

    # Report validation error & skip extraction on mismatch.
    if [[ "${saved}" != "${actual}" ]]; then
      echo -e "${color_bred}${status}: Validation failed.${color_default}"
      ((failed++))
      continue
    fi

    # Figure out the decompression tool from the extension.
    local compressor
    case "${target##*.}" in
      "txz")
        compressor="xz -d"
        ;;
      "tgz")
        compressor="gzip -d"
        ;;
    esac

    # Extract. ${compressor} is deliberately unquoted: it must word-split
    # into the command and its flag.
    unset IFS
    pv "${target}" | ${compressor} | tar -xf -

    # Fail on any stage of the pipeline (pv/decompressor/tar),
    # not just the last one.
    local stages=("${PIPESTATUS[@]}")
    if [[ "${stages[0]}" != 0 || "${stages[1]}" != 0 || "${stages[2]}" != 0 ]]; then
      echo -e "${color_bred}${status}: Failed.${color_default}"
      ((failed++))
    fi
  done

  # Show failure summary and return non-zero status.
  if [[ ${failed} != 0 ]]; then
    echo -e "${color_bred}Failed: ${failed}.${color_default}"
    false
  fi
}
|
|
|
|
|
2023-12-10 04:31:03 +03:00
|
|
|
# Rename archives.
# If no name specified, it simplifies archive's name.
# If no archives specified, apply to all archives.
# Usage: archive_name [ARCHIVE] [NAME]
function archive_name() {
  local IFS=$'\n'
  local targets="${1}"
  local name="${2}"
  local total=1
  local count=0
  local failed=0

  # Set default value to target all supported archives.
  if [[ "${targets}" = "" ]]; then
    targets=($(ls | grep -E "${_archive_pattern}"))
    total=${#targets[@]}
  fi

  # Iterate each target.
  for target in "${targets[@]}"; do
    # Iterate counter.
    ((count++))

    # Simplify name by default. A user-provided name only applies to the
    # first target; subsequent targets are always simplified.
    if [[ "${name}" = "" || ${count} -gt 1 ]]; then
      name="${target%_*}"
      name="$(parse_camel "${name}")"
    fi

    # Keep the "<date>-<hash>.<ext>" suffix; replace only the base name.
    local data="${target##*_}"
    local new_name="${name}_${data}"

    # Prepare status.
    local status="[${count}/${total}] ${target} -> ${new_name}"

    # Nothing to do when the name is already correct.
    if [[ "${target}" = "${new_name}" ]]; then
      echo -e "${status}"
      continue
    fi

    # Refuse to overwrite an existing file.
    if [[ -f "${new_name}" ]]; then
      echo -e "${color_bred}${status}: Already exists.${color_default}"
      ((failed++))
      continue
    fi

    echo -e "${status}"

    # Rename.
    mv -- "${target}" "${new_name}"

    if [[ ${?} != 0 ]]; then
      echo -e "${color_bred}${status}: Failed.${color_default}"
      ((failed++))
    fi
  done

  # Show failure summary and return non-zero status.
  if [[ ${failed} != 0 ]]; then
    echo -e "${color_bred}Failed: ${failed}.${color_default}"
    false
  fi
}
|
|
|
|
|
2023-12-07 05:11:26 +03:00
|
|
|
# Convert old archives to a new format. *TODO: remove me after some time when there won't be any old archives.*
function archive_convert() {
  local IFS=$'\n'

  # Oldest naming scheme: <name>_<40-char sha1>.tar.{xz,gz} — no timestamp.
  local old_format="_[[:alnum:]]{40}.tar.[xg]z"
  local targets=($(ls | grep -E ${old_format}))

  # add timestamp.
  for target in "${targets[@]}"; do
    # Build a YYYYmmddHHMM stamp from the file's birth time:
    # strip from the fractional-seconds dot onward (also drops the
    # timezone), drop the trailing ":SS", then remove the '-', ':' and
    # the date/time separator space.
    # NOTE(review): stat '%w' prints '-' on filesystems that do not record
    # creation time, which would leave the stamp empty — confirm before
    # running there.
    local stamp=$(stat --format '%w' -- "${target}" | sed -e 's/\..*//' -e 's/:..$//' -e 's/-//g' -e 's/://' -e 's/\ //')
    # Split "<name>_<hash>.tar.xz" at the last underscore.
    local name="${target%_*}"
    local old_data="${target##*_}"
    # New scheme inserts the stamp: <name>_<stamp>-<hash>.tar.xz.
    local new_name="${name}_${stamp}-${old_data}"

    echo "${target} -> ${new_name}"

    mv "${target}" "${new_name}"
  done

  # convert tar.xz and tar.gz to .tgz and .txz.
  old_format="_[0-9]{12}-[[:alnum:]]{40}.tar.[xg]z"
  targets=($(ls | grep -E ${old_format}))

  for target in "${targets[@]}"; do
    local compression="${target##*.}"
    local new_compression

    # Map the long two-part extension to the short single-part one.
    case "${compression}" in
      "gz")
        new_compression="tgz"
        ;;
      "xz")
        new_compression="txz"
        ;;
    esac

    local new_name="${target%.tar.*}".${new_compression}

    echo "${target} -> ${new_name}"

    mv -- "${target}" "${new_name}"
  done
}
|
|
|
|
|
2023-12-10 04:31:03 +03:00
|
|
|
# Delete specified or all archive files.
# Usage: archive_rm [FILES]
function archive_rm() {
  local IFS=$'\n'
  local targets=("${@}")
  local total=${#}
  local count=0
  local failed=0

  # Without arguments, operate on every supported archive in the
  # current directory.
  if [[ "${targets}" = "" ]]; then
    targets=($(ls | grep -E ${_archive_pattern}))
    total=${#targets[@]}
  fi

  # Delete targets one by one, reporting progress as we go.
  for target in "${targets[@]}"; do
    ((count++))

    local progress="[${count}/${total}] ${target}"
    echo -e "${progress}"

    # Remove the archive; count and report any failure.
    if ! rm -- "${target}"; then
      ((failed++))
      echo -e "${color_bred}${progress}: Failed.${color_default}"
    fi
  done

  # Summarize failures and return non-zero status if any occurred.
  if [[ ${failed} != 0 ]]; then
    echo -e "${color_bred}Failed: ${failed}.${color_default}"
    false
  fi
}
|
|
|
|
|
2023-12-07 04:02:47 +03:00
|
|
|
# Parse archive file name to get: name, date, hash and format.
# Usage: _archive_parse <FILENAME>
function _archive_parse() {
  local file="${1}"

  # Everything before the final underscore is the base name;
  # the extension after the final dot is the format.
  local base="${file%_*}"
  local ext="${file##*.}"

  # The "<date>-<hash>" chunk sits between the final underscore
  # and the extension.
  local meta="${file##*_}"
  meta="${meta%.*}"

  # Emit one field per line: name, date, hash, format.
  printf '%s\n' "${base}" "${meta%%-*}" "${meta##*-}" "${ext}"
}
|
|
|
|
|
2023-12-07 04:02:47 +03:00
|
|
|
# Autocomplete for archive_name function.
# First arg is the archives list, second one is selected archive's current name.
function _archive_name() {
  local IFS=$'\n'
  COMPREPLY=()

  local current="${COMP_WORDS[COMP_CWORD]}"
  local previous="${COMP_WORDS[COMP_CWORD-1]}"
  local command="${COMP_WORDS[0]}"

  # Completing the second argument (the new name): suggest the
  # selected archive's current base name.
  if [[ "${previous}" != "${command}" ]]; then
    COMPREPLY=( $(compgen -W "${previous%_*}" -- ${current}) )
    return 0
  fi

  # Completing the first argument: suggest matching archives in cwd.
  COMPREPLY=( $(compgen -W "$(ls | grep -E ${_archive_pattern})" -- ${current}) )
  return 0
}
|
|
|
|
|
2023-12-07 04:02:47 +03:00
|
|
|
# Autocomplete with archives in current dir.
function _archive_grep() {
  # Quote the pattern: unquoted, its bracket expressions are subject to
  # pathname expansion against files in the current directory.
  _autocomplete_grep "${_archive_pattern}"
}
|
|
|
|
|
|
|
|
# Get date for a new archive.
# Prints the current time as YYYYmmddHHMM (minute precision).
function _archive_date() {
  printf '%(%Y%m%d%H%M)T\n' -1
}
|
|
|
|
|
2023-12-10 04:31:03 +03:00
|
|
|
# Register completions: archive-file suggestions for commands that take
# archives, and the two-stage archive/name completion for archive_name.
complete -o filenames -F _archive_grep archive_check unarchive archive_rm
complete -o filenames -F _archive_name archive_name
|