archive: Add archive_rm.

This commit is contained in:
parent e1d78bbe08
commit db41c5eb82
@@ -30,7 +30,7 @@ function archive() {
    # create archive.
    tar -c "${target}" | pv -s $(/usr/bin/du -sb "${target}" | awk '{print $1}') | xz -9e > "${name}".txz

    # append hash to target name.
    mv "${name}".txz "${name}"_${date}-$(pv "${name}".txz | sha1sum | cut -d\  -f1).txz
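For orientation, the pipeline in this hunk feeds tar through pv (sized with `du -sb` so the progress bar is accurate) into xz, then stamps the result with a date and SHA-1 hash. A minimal standalone sketch of the same idea; `my_dir` is a hypothetical target, and the date format matches `_archive_date` further down:

```bash
#!/usr/bin/env bash
# Standalone sketch of the archive pipeline above. "my_dir" is a
# placeholder; the date format matches _archive_date further down.
target="my_dir"
name="my_dir"
date="$(date +%Y%m%d%H%M)"

# tar streams the tree, pv shows progress (sized in bytes via du -sb),
# xz -9e compresses at the highest level.
tar -c "${target}" \
    | pv -s "$(du -sb "${target}" | awk '{print $1}')" \
    | xz -9e > "${name}.txz"

# Rename to name_date-hash.txz; pv re-reads the archive with a progress
# bar while sha1sum computes the checksum embedded in the file name.
mv "${name}.txz" "${name}_${date}-$(pv "${name}.txz" | sha1sum | cut -d' ' -f1).txz"
```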
@@ -78,7 +78,7 @@ function archive_fast() {
    # create archive.
    tar -c "${target}" | pv -s $(/usr/bin/du -sb "${target}" | awk '{print $1}') | gzip -1 > "${name}".tgz

    # append hash to target name.
    mv "${name}".tgz "${name}"_${date}-$(pv "${name}".tgz | sha1sum | cut -d\  -f1).tgz
@@ -201,7 +201,7 @@ function unarchive() {
    local count=0
    local total=${#}
    local failed=0

    # set default value to target all supported archives.
    if [[ "${targets}" = "" ]]; then
        targets=($(ls | grep -E ${_archive_pattern}))
@@ -220,7 +220,7 @@ function unarchive() {
        # extract hash from name.
        local data=($(_archive_parse "${target}"))
        local saved="${data[2]}"

        # calculate actual hash.
        local actual=$(pv "${target}" | sha1sum | cut -d\  -f1)
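This hunk shows the integrity check inside `unarchive`: the SHA-1 embedded in the file name is recovered with `_archive_parse` (defined later in the file) and compared against a freshly computed hash. A hedged sketch of that comparison, with a hypothetical file name:

```bash
# Hypothetical archive named in the name_date-hash.format scheme.
target="notes_202401011200-89e6c98d92887913cadf06b2adb97f26cde4849b.txz"

# _archive_parse yields: name, date, hash, format (see its definition below).
data=($(_archive_parse "${target}"))
saved="${data[2]}"

# Recompute the hash while showing read progress, then compare.
actual=$(pv "${target}" | sha1sum | cut -d' ' -f1)
if [[ "${saved}" != "${actual}" ]]; then
    echo "Checksum mismatch: ${target}"
fi
```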
@@ -259,7 +259,7 @@ function unarchive() {
    fi
}

# Rename archives.
# If no name specified, it simplifies archive's name.
# If no archives specified, apply to all archives.
# Usage: archive_name [ARCHIVE] [NAME]
@@ -281,7 +281,7 @@ function archive_name() {
    for target in "${targets[@]}"; do
        # iterate counter.
        ((count++))

        # simplify name by default.
        if [[ "${name}" = "" || ${count} -gt 1 ]]; then
            name="${target%_*}"
@@ -350,7 +350,7 @@ function archive_convert() {
    for target in "${targets[@]}"; do
        local compression="${target##*.}"
        local new_compression

        case "${compression}" in
            "gz")
                new_compression="tgz"
@@ -368,6 +368,47 @@ function archive_convert() {
    done
}

+# Delete specified or all archive files.
+# Usage: archive_rm [FILES]
+function archive_rm() {
+    local IFS=$'\n'
+    local targets=("${@}")
+    local total=${#}
+    local count=0
+    local failed=0
+
+    # set default value to target all supported archives.
+    if [[ "${targets}" = "" ]]; then
+        targets=($(ls | grep -E ${_archive_pattern}))
+        total=${#targets[@]}
+    fi
+
+    # iterate each target.
+    for target in "${targets[@]}"; do
+        # increment counter.
+        ((count++))
+
+        # status info.
+        local status="[${count}/${total}] ${target}"
+        echo -e "${status}"
+
+        # Delete archive.
+        rm -- "${target}"
+
+        # Show error.
+        if [[ ${?} != 0 ]]; then
+            ((failed++))
+            echo -e "${color_bred}${status}: Failed.${color_default}"
+        fi
+    done
+
+    # Show error.
+    if [[ ${failed} != 0 ]]; then
+        echo -e "${color_bred}Failed: ${failed}.${color_default}"
+        false
+    fi
+}
+
# Parse archive file name to get: name, date, hash and format.
# Usage: _archive_parse <FILENAME>
function _archive_parse() {
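Usage of the new function, with hypothetical archive names:

```bash
# Delete one specific archive; prints "[1/1] <name>" and flags failures
# in red. (The file name here is a made-up example.)
archive_rm notes_202401011200-89e6c98d92887913cadf06b2adb97f26cde4849b.tgz

# With no arguments, delete every file in the current directory matching
# ${_archive_pattern}; the overall exit status is false if any rm failed.
archive_rm
```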
@@ -414,5 +455,5 @@ function _archive_date() {
    date +%Y%m%d%H%M
}

-complete -o filenames -F _archive_grep archive_check unarchive
+complete -o filenames -F _archive_grep archive_check unarchive archive_rm
complete -o filenames -F _archive_name archive_name
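The completion hookup registers the new command with the same filename completer already used by archive_check and unarchive. The body of `_archive_grep` is outside this diff; purely as an assumption, it likely looks something like this sketch:

```bash
# Hypothetical sketch only: the real _archive_grep is not shown in this
# diff. Completes with files in the current directory that match the
# supported-archive pattern.
function _archive_grep() {
    local IFS=$'\n'
    COMPREPLY=($(compgen -W "$(ls | grep -E ${_archive_pattern})" \
        -- "${COMP_WORDS[COMP_CWORD]}"))
}
```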

.doc/bash.md (19 lines changed)

@@ -25,8 +25,9 @@ Command|Description
`archive_check [FILES]`|Check archives integrity. Checks all archives by default.
`archive_prune [NAME]`|Delete old versions of archives. All archives by default.
`unarchive [FILES]`|Extract previously created archive with checksum validation.
`archive_name [ARCHIVE] [NAME]`|Rename archives. If no name specified, it simplifies archive's name. If no archives specified, apply to all archives.
`archive_convert`|Convert old archives to a new format. *TODO: remove me after some time when there won't be any old archives.*
+`archive_rm [FILES]`|Delete specified or all archive files.

## Battery.
@@ -107,12 +108,6 @@ Command|Description
`df`|Show only physical drives info.
`du [DIRS]`|Show combined size in SI. Current dir by default.

-## Doc.
-
-Command|Description
----|---
-`doc_bash`|Generate a markdown file with all the help info. Outputs to `~/.doc/bash.md`.
-
## Docker.

Command|Description
@@ -133,6 +128,12 @@ Command|Description
`dcpu [SERVICES]`|Docker compose pull & up specified services.
`dcul [SERVICES]`|Docker compose up & attach to logs for specified services.

+## Doc.
+
+Command|Description
+---|---
+`doc_bash`|Generate a markdown file with all the help info. Outputs to `~/.doc/bash.md`.
+
## Ffmpeg.

Command|Description
@@ -268,7 +269,7 @@ Command|Description

Command|Description
---|---
-`ps <PROCESS>`|Find process and filter.
+`ps [PROCESS]`|Find process and filter.

## Recursive.
@@ -344,7 +345,7 @@ Command|Description

Command|Description
---|---
-`vdl [LINK]`|Download video from URL. When no [LINK] specified, it tries to update previously downloaded link.
+`vdl [LINK]`|Download video from URL. When no `[LINK]` specified, it tries to update previously downloaded link.
`vdl_vk <LINK>`|Temporary fix for vk downloads.
`vdl_file <FILE>`|Download all videos from file with links.