linux/.config/bash/module/archive.sh

# Archive file name patterns: <name>_<YYYYmmddHHMM>-<sha1>.<txz|tgz>.
export _archive_pattern="_[0-9]{12}-[[:alnum:]]{40}\.t[xg]z"
export _archive_pattern_fast="_[0-9]{12}-[[:alnum:]]{40}\.tgz"
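# A matching file name looks like (hypothetical example): Documents_202403041200-<sha1>.txz,
# where <sha1> stands for the 40-character checksum embedded by archive() below.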
# Archive directories.
# All directories by default.
# Usage: archive [DIRS]
function archive() {
    local IFS=$'\n'
    local targets=("${@}")
    [[ "${targets}" = "" ]] && targets=($(_ls_dir))
    process() {
        # Start timestamp.
        local date=$(_archive_date)
        # Parse name.
        local name=$(parse_camel "${target}")
        # Create archive: tar, show progress, compress with xz and hash the compressed stream.
        local hash=$(tar -c "${target}" | pv -s $(/usr/bin/du -sb "${target}" | awk '{print $1}') | xz -9e | tee "${name}".txz | sha1sum | cut -d ' ' -f1)
        # Append date and hash to the target name.
        local new_name="${name}_${date}-${hash}.txz"
        mv -- "${name}".txz "${new_name}" && echo "${new_name}"
    }
    _iterate_targets process "${targets[@]}"
}
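# Usage sketch (hypothetical directory name):
#   archive Documents    # -> <name>_<YYYYmmddHHMM>-<sha1>.txz
# where <name> is the output of parse_camel and <sha1> the checksum of the
# compressed stream; _iterate_targets (defined elsewhere in these modules) is
# assumed to call process() with ${target} set to each argument.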
# Archive directories with fast compression.
# All directories by default.
# Usage: archive_fast [DIRS]
function archive_fast() {
    local IFS=$'\n'
    local targets=("${@}")
    [[ "${targets}" = "" ]] && targets=($(_ls_dir))
    process() {
        # Start timestamp.
        local date=$(_archive_date)
        # Parse name.
        local name=$(parse_camel "${target}")
        # Create archive: tar, show progress, compress with gzip and hash the compressed stream.
        local hash=$(tar -c "${target}" | pv -s $(/usr/bin/du -sb "${target}" | awk '{print $1}') | gzip -1 | tee "${name}".tgz | sha1sum | cut -d ' ' -f1)
        # Append date and hash to the target name.
        local new_name="${name}_${date}-${hash}.tgz"
        mv -- "${name}".tgz "${new_name}" && echo "${new_name}"
    }
    _iterate_targets process "${targets[@]}"
}
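# Note: gzip -1 trades compression ratio for speed; archives produced here can
# later be recompressed to the stronger .txz format with archive_xz below.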
# Check archive integrity.
# Checks all archives by default.
# Usage: archive_check [FILES]
function archive_check() {
    local IFS=$'\n'
    local targets=("${@}")
    [[ "${targets}" = "" ]] && targets=($(ls | grep -E ${_archive_pattern}))
    process() {
        # Extract hash from the name.
        local data=($(_archive_parse "${target}"))
        local saved=${data[2]}
        # Calculate the actual hash.
        local actual=$(pv "${target}" | sha1sum | cut -d ' ' -f1)
        # Compare hashes.
        [[ "${actual}" = "${saved}" ]]
    }
    _iterate_targets process "${targets[@]}"
}
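# Usage sketch: `archive_check` with no arguments verifies every archive in the
# current directory by comparing the sha1 stored in its name against the file's
# actual checksum; reporting per target is assumed to be done by _iterate_targets.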
# Delete old versions of archives.
# All archives by default.
# Usage: archive_prune [NAME]
function archive_prune() {
    local IFS=$'\n'
    local targets=("${@}")
    [[ "${targets}" = "" ]] && targets=($(ls | grep -E ${_archive_pattern}))
    process() {
        local data=($(_archive_parse "${target}"))
        local name=${data[0]}
        local time=${data[1]}
        local copies=($(ls "${name}"_*))
        # Iterate over each copy.
        for copy in "${copies[@]}"; do
            local copy_data=($(_archive_parse "${copy}"))
            local copy_time=${copy_data[1]}
            if [[ "${copy_time}" -lt "${time}" ]]; then
                rm -- "${copy}" && echo "${name}: Prune ${copy_time}."
            fi
        done
    }
    _iterate_targets process "${targets[@]}"
}
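# Example (hypothetical names): pruning Notes_202403040000-<sha1>.txz removes
# any Notes_* copy whose date stamp is older than 202403040000, so only the
# newest version of each archive survives a full prune.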
# Delete specified or all archive files.
# Usage: archive_rm [FILES]
function archive_rm() {
    local IFS=$'\n'
    local targets=("${@}")
    [[ "${targets}" = "" ]] && targets=($(ls | grep -E ${_archive_pattern}))
    process() {
        rm -- "${target}"
    }
    _iterate_targets process "${targets[@]}"
}
# Recompress archives previously created with archive_fast using better compression.
# Usage: archive_xz [FILES]
function archive_xz() {
    local IFS=$'\n'
    local targets=("${@}")
    [[ "${targets}" = "" ]] && targets=($(ls | grep -E ${_archive_pattern_fast}))
    process() {
        local data=($(_archive_parse "${target}"))
        local tmp="${data[0]}.txz"
        # Only the fast .tgz format can be recompressed.
        if [[ "${data[3]}" != "tgz" ]]; then
            _error "Not in .tgz format!"
            return 1
        fi
        # Recompress.
        local hash=$(pv "${target}" | gzip -d | xz -9e | tee "${tmp}" | sha1sum | cut -d ' ' -f1)
        # Rename.
        local new_name="${data[0]}_${data[1]}-${hash}.txz"
        mv -- "${tmp}" "${new_name}" && rm -- "${target}" && echo "${new_name}"
    }
    _iterate_targets process "${targets[@]}"
}
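# Usage sketch (hypothetical file):
#   archive_xz Notes_202403040000-<sha1>.tgz   # -> Notes_202403040000-<new sha1>.txz
# The original date stamp is kept; only the compression format and hash change.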
# Rename archives.
# If no name is specified, the archive's name is simplified.
# If no archives are specified, apply to all archives.
# Usage: archive_name [ARCHIVE] [NAME]
function archive_name() {
    local IFS=$'\n'
    local targets=("${1}")
    local name="${2}"
    [[ "${targets}" = "" ]] && targets=($(ls | grep -E ${_archive_pattern}))
    process() {
        # Simplify the name by default.
        if [[ "${name}" = "" || ${count} -gt 1 ]]; then
            name="${target%_*}"
            name="$(parse_camel "${name}")"
        fi
        # Keep the date-hash suffix of the old name.
        local data="${target##*_}"
        local new_name="${name}_${data}"
        # Skip if the name is unchanged.
        [[ "${target}" = "${new_name}" ]] && return 0
        # Check for an existing target.
        if [[ -f "${new_name}" ]]; then
            _error "${new_name}: Already exists!"
            return 1
        fi
        # Rename.
        mv -- "${target}" "${new_name}" && echo "${new_name}"
    }
    _iterate_targets process "${targets[@]}"
}
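# Usage sketch (hypothetical names):
#   archive_name oldName_202403040000-<sha1>.txz NewName
# renames the file to NewName_202403040000-<sha1>.txz; with no NAME argument the
# name part is re-derived via parse_camel (defined elsewhere) while the
# date-hash suffix is kept.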
# Extract previously created archives with checksum validation.
# Usage: unarchive [FILES]
function unarchive() {
    local IFS=$'\n'
    local targets=("${@}")
    [[ "${targets}" = "" ]] && targets=($(ls | grep -E ${_archive_pattern}))
    process() {
        # Extract hash from the name.
        local data=($(_archive_parse "${target}"))
        local saved=${data[2]}
        # Calculate the actual hash.
        local actual=$(pv "${target}" | sha1sum | cut -d ' ' -f1)
        # Extract if the hashes match, report an error otherwise.
        if [[ "${saved}" = "${actual}" ]]; then
            case "${target##*.}" in
                "txz")
                    pv "${target}" | xz -d | tar -xf -
                    ;;
                "tgz")
                    pv "${target}" | gzip -d | tar -xf -
                    ;;
            esac
        else
            _error "Checksum mismatch."
            return 1
        fi
    }
    _iterate_targets process "${targets[@]}"
}
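# Usage sketch (hypothetical file): `unarchive Documents_202403041200-<sha1>.txz`
# re-hashes the archive before unpacking, so a corrupted file is reported
# instead of being extracted.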
# Parse archive file name to get: name, date, hash and format.
# Usage: _archive_parse <FILENAME>
function _archive_parse() {
    local input="${1}"
    local name="${input%_*}"
    local format="${input##*.}"
    local data="${input##*_}"; data="${data%.*}"
    local date="${data%%-*}"
    local hash="${data##*-}"
    echo "${name}"
    echo "${date}"
    echo "${hash}"
    echo "${format}"
}
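# Example (hypothetical name, <sha1> standing in for the 40-character hash):
#   _archive_parse Notes_202403041200-<sha1>.txz
# prints four lines: "Notes", "202403041200", "<sha1>" and "txz"; callers
# capture them with an array assignment like data=($(_archive_parse ...)).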
# Autocomplete for the archive_name function.
# The first argument completes to the list of archives, the second to the selected archive's current name.
function _archive_name() {
    local IFS=$'\n'
    COMPREPLY=()
    local cur="${COMP_WORDS[COMP_CWORD]}"
    local prev="${COMP_WORDS[COMP_CWORD-1]}"
    local command="${COMP_WORDS[0]}"
    if [[ "${prev}" = "${command}" ]]; then
        COMPREPLY=( $(compgen -W "$(ls | grep -E ${_archive_pattern})" -- "${cur}") )
        return 0
    else
        local name="${prev%_*}"
        COMPREPLY=( $(compgen -W "${name}" -- "${cur}") )
        return 0
    fi
}
# Autocomplete with archives in the current dir.
function _archive_grep() {
    _autocomplete_grep "${_archive_pattern}"
}
# Autocomplete with fast archives in the current dir.
function _archive_grep_fast() {
    _autocomplete_grep "${_archive_pattern_fast}"
}
# Get the date stamp for a new archive.
function _archive_date() {
    date +%Y%m%d%H%M
}
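# Example: on 2024-03-04 at 12:00 this prints "202403041200", the 12-digit
# stamp matched by the [0-9]{12} part of _archive_pattern.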
complete -o filenames -F _archive_grep archive_check unarchive archive_rm
complete -o filenames -F _archive_grep_fast archive_xz
complete -o filenames -F _archive_name archive_name